from __future__ import with_statement, print_function

import json    # assumed: needed for the JSON dump further down
import pickle  # assumed: needed for the pickle dump further down

from CoolRunQuery.AtlRunQueryQueryConfig import QC
from CoolRunQuery.selector.AtlRunQuerySelectorBase import DataKey
from CoolRunQuery.AtlRunQueryRun import DataEntry, DataEntryList, Run
print("Creating results in path '%s'" % QC.datapath)
header += [DataKey(x) for x in ['Run', 'Links', '#LB']]
header += [DataKey('Start and endtime')]
header += [DataKey('Duration')]
header += [k for k in Run.ShowOrder if k not in excludelist]
Run.totevents[0] += int(run.result["#Events"])
f = open('%s/QueryResult.txt' % QC.datapath, 'w')
print("data keys:", ', '.join([h.ResultKey for h in header]), file=f)
print('number of runs: %i' % len(runlist), file=f)

line += ["%i" % r.runNr, "", "%i" % r.lastlb]
line += ["%s" % r.timestr('seconds')]
line += ["%s" % r.durationstr()]
for k in Run.ShowOrder:
    line += [r.data[k.ResultKey]]
for head, item in zip(header, line):
    if isinstance(item, tuple):
        item = '|'.join([str(x) for x in item])
    print('%40s: %s' % (head.ResultKey, item), file=f)
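# pickle the result dictionary to atlrunquery.pickle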
try:
    pf = open('%s/atlrunquery.pickle' % QC.datapath, 'wb')
    pickle.dump(result, pf)  # assumed from context: the elided lines dump the result dictionary
except Exception as e:
    print('ERROR: could not pickle results dictionary: "%r"' % e)
115 "olc:beam1intensity",
116 "olc:beam2intensity",
runs = [r["runNr"] for r in result[DataKey("Run")]]
store = {runNr: {} for runNr in runs}

for datakey in result:
    key = datakey.pickled()
    if key in ignoreForNow:
        print("Not storing in json file: ", key)
        continue
    for (runNr, x) in zip(runs, result[datakey]):
        if isinstance(x, (DataEntry, DataEntryList)):
            store[runNr][key] = x.json()
        else:
            store[runNr][key] = x
try:
    with open('%s/atlrunquery.json' % QC.datapath, 'w') as pf:
        json.dump(store, pf)  # assumed from context: the elided lines serialise 'store' to the file
except Exception as e:
    print('ERROR: could not create json file with results: "%r"' % e)
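# for each run r and each column k, pick the content to record and append it to dic[k]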
scontent = {"runNr": r.runNr, "lastLB": r.lastlb, "dataPeriod": "tbd", "lhcRun": r.lhcRun}

scontent = (r.lastlb,
            [(lbtime[0] - r.lbtimes[0][0]) * 1e-9 for lbtime in r.lbtimes]
            + [(r.lbtimes[-1][1] - r.lbtimes[0][0]) * 1e-9])

elif k == 'Start and endtime':
    scontent = r.timestr('seconds')
elif k == 'Duration':
    scontent = r.durationstr()
else:
    scontent = r.data[k.ResultKey]
dic.setdefault(k, []).append(scontent)
if hasattr(v, 'pickled'):
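# reduce dic to plain Python types: dic_basic maps each run number to {pickled key: basic(value)}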
dic_basic = {'Run': [r["runNr"] for r in dic[DataKey('Run')]]}

for i, r in enumerate(dic_basic['Run']):
    dic_basic[r] = dict([(k.pickled(), basic(v[i])) for k, v in dic.items()])
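# accumulate per-key sums over all runs in dicsum; trigger keys, magnet currents and
# similar non-summable columns are left out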
for key, results in dic.items():
    if key.ResultKey in ['SMK', 'HLT PSK', 'L1 PSK', 'TorCurrent', 'SolCurrent', 'BGS Key', '#LB']:
        continue
    if key.Type == DataKey.DETECTOR:

        dicsum.setdefault(key, 0)

    elif key.Type == DataKey.STREAM:
        for entry in results:  # assumed: iterate over this key's per-run entries
            if entry is None or entry.value == 'n.a.':
                continue
            dicsum.setdefault(key, [0, 0])
            dicsum[key][0] += entry.value[0]
            dicsum[key][1] += entry.value[1]

    if key not in dicsum:

    except (ValueError, TypeError):