ATLAS Offline Software
AtlRunQuerySave.py
#!/usr/bin/env python

# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
#
# ----------------------------------------------------------------
# Script : AtlRunQuerySave.py
# Project: AtlRunQuery
# Purpose: Utility to save pickled dictionary
# Authors: Andreas Hoecker (CERN), Joerg Stelzer (DESY)
# Created: Dec 2, 2008
# ----------------------------------------------------------------
#
# ---------------------------------------------------------------------------------------------------
# Creation of pickled dictionary for output
# ---------------------------------------------------------------------------------------------------
from __future__ import with_statement, print_function
from CoolRunQuery.AtlRunQueryQueryConfig import QC
from CoolRunQuery.selector.AtlRunQuerySelectorBase import DataKey

import pickle, sys
from CoolRunQuery.AtlRunQueryRun import DataEntry, DataEntryList, Run

def CreateResultDict( runlist ):

    if len(runlist)==0:
        return {}, {}

    print("Creating results in path '%s'" % QC.datapath )

    # define the header
    header = []
    excludelist = []
    if Run.showrunnr:
        header += [DataKey(x) for x in ['Run', 'Links', '#LB']]
    if Run.showtime:
        header += [DataKey('Start and endtime')]
    if Run.showduration:
        header += [DataKey('Duration')]
    header += [k for k in Run.ShowOrder if k not in excludelist]

    AddUpEvents(runlist)

    SaveResultTxt(runlist, header)

    dic = CreateDic(runlist, header)

    SaveResultAsJson(dic)

    summary = CreateSummary(dic)

    dic_basic = CreateDictForPickling(dic)

    SaveTypelessPickleResult(dic_basic)

    return dic, summary

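# Example (illustrative sketch): CreateResultDict is the entry point once the
# query has produced a list of Run objects. The call below assumes 'runlist'
# is that list; it writes QueryResult.txt, atlrunquery.json and
# atlrunquery.pickle into QC.datapath and returns the full result dictionary
# together with a summary:
#
#   dic, summary = CreateResultDict(runlist)
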
def AddUpEvents(runlist):
    # Sum up total event numbers:
    # totevents[0] accumulates the events of runs with a numeric '#Events'
    # entry, totevents[1] counts the runs where that entry could not be parsed
    for run in runlist:
        try:
            Run.totevents[0] += int(run.result["#Events"])
        except ValueError:
            Run.totevents[1] += 1

def SaveResultTxt(runlist, header):
    # write header to text file
    f = open( '%s/QueryResult.txt' % QC.datapath, 'w' )
    print ("data keys:", ', '.join([h.ResultKey for h in header]), file=f)
    print ('number of runs: %i' % len(runlist), file=f)

    # now get the values for each run and write them to the file
    for r in runlist:
        line = []
        if Run.showrunnr:
            line += ["%i" % r.runNr, "", "%i" % r.lastlb]
        if Run.showtime:
            line += ["%s" % r.timestr('seconds')]
        if Run.showduration:
            line += ["%s" % r.durationstr()]
        for k in Run.ShowOrder:
            line += [r.data[k.ResultKey]]
        for head, item in zip(header, line):
            if isinstance(item, tuple):
                item = '|'.join([str(x) for x in item])
            print ('%40s: %s' % (head.ResultKey, item), file=f)
        print ('\n', file=f)
    f.close()

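# Sketch of the resulting QueryResult.txt layout (values are placeholders):
#
#   data keys: Run, Links, #LB, Start and endtime, ...
#   number of runs: 2
#                                        Run: <run number>
#                                      Links:
#                                        #LB: <last LB>
#                                        ...
#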
def SaveTypelessPickleResult(pdic, filename = 'atlrunquery.pickle'):
    # write pickle output into QC.datapath, honouring the filename argument
    pf = open( '%s/%s' % (QC.datapath, filename), 'wb' )
    try:
        pickle.dump(pdic, pf)
    except Exception as e:
        print ('ERROR: could not pickle results dictionary: "%r"' % e)
        sys.exit(1)
    pf.close()

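# Example (sketch): the pickle can be read back with the standard library,
# using the same QC.datapath directory:
#
#   import pickle
#   with open('%s/atlrunquery.pickle' % QC.datapath, 'rb') as pf:
#       pdic = pickle.load(pf)
#   pdic['Run']          # list of run numbers
#   pdic[pdic['Run'][0]] # dictionary of plain values for the first run
#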
def SaveResultAsJson( result, filename = 'atlrunquery.json'):
    # write json output

    # skip a few large items for now, plus one that is broken;
    # the large ones should eventually be stored in a better way
    ignoreForNow = [
        # "lhc:fillnumber",
        # "lhc:stablebeams",
        "lhc:beamenergy",      # large
        "olc:lumi:0",          # large
        "olc:beam1intensity",  # large
        "olc:beam2intensity",  # large
        # "olc:beam1bunches",
        # "olc:beam2bunches",
        # "olc:collbunches",
        "olc:bcidmask"         # broken
    ]

    runs = [r["runNr"] for r in result[DataKey("Run")]]
    store = { runNr: {} for runNr in runs }

    for datakey in result:
        key = datakey.pickled()
        if key in ignoreForNow:
            print("Not storing in json file: ", key)
            continue

        for (runNr, x) in zip(runs, result[datakey]):
            if isinstance(x, (DataEntry, DataEntryList)):
                store[runNr][key] = x.json()
            else:
                store[runNr][key] = x

    with open( '%s/%s' % (QC.datapath, filename), 'w' ) as pf:
        try:
            import json
            json.dump(store, pf)
        except Exception as e:
            print ('ERROR: could not create json file with results: "%r"' % e)

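# Example (sketch): reading the json file back gives one object per run,
# keyed by run number, with the pickled key names as field names. Note that
# json turns the integer run-number keys into strings:
#
#   import json
#   with open('%s/atlrunquery.json' % QC.datapath) as jf:
#       store = json.load(jf)
#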
def CreateDic(runlist, header):
    # create keys for pickle dictionary
    dic = {}
    for r in runlist:
        for k in header:
            if k == 'Run':
                scontent = {"runNr": r.runNr, "lastLB": r.lastlb, "dataPeriod": "tbd", "lhcRun": r.lhcRun}
            elif k == 'Links':
                scontent = ""
            elif k == '#LB':
                # last LB number plus the LB start times relative to the start of the
                # first lumiblock (converted to seconds), followed by the end time of the last LB
                scontent = (r.lastlb, [(lbtime[0]-r.lbtimes[0][0])*1e-9 for lbtime in r.lbtimes] + [ (r.lbtimes[-1][1]-r.lbtimes[0][0])*1e-9 ] )
            elif k == 'Start and endtime':
                scontent = r.timestr('seconds')
            elif k == 'Duration':
                scontent = r.durationstr()
            else:
                scontent = r.data[k.ResultKey]
            dic.setdefault(k, []).append(scontent)
    return dic

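# The dictionary returned above maps each DataKey of the header to a list with
# one entry per run, in the order of runlist, e.g. (sketch with placeholders):
#
#   dic[DataKey('Run')] -> [{'runNr': ..., 'lastLB': ..., 'dataPeriod': 'tbd', 'lhcRun': ...}, ...]
#   dic[DataKey('#LB')] -> [(<last LB>, [<relative LB start times in s>, ...]), ...]
#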
def basic(v):
    if hasattr(v, 'pickled'):
        return v.pickled()
    return v


def CreateDictForPickling(dic):
    dic_basic = {'Run': [ r["runNr"] for r in dic[DataKey('Run')]]}

    for i, r in enumerate(dic_basic['Run']):
        dic_basic[r] = dict([ ( k.pickled(), basic(v[i]) ) for k, v in dic.items()])

    return dic_basic

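# Sketch of the typeless structure that gets pickled (placeholders only):
#
#   { 'Run': [<runnr 1>, <runnr 2>, ...],
#     <runnr 1>: {<pickled key>: <plain value>, ...},
#     <runnr 2>: {...},
#     ... }
#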
def CreateSummary(dic):
    # create summary
    dicsum = {}
    for key, results in dic.items():
        if key.ResultKey in ['SMK', 'HLT PSK', 'L1 PSK', 'TorCurrent', 'SolCurrent', 'BGS Key', '#LB']:
            continue
        if key.Type == DataKey.DETECTOR:
            continue
        for r in results:
            if key == 'Run':
                dicsum.setdefault(key, 0)
                dicsum[key] += 1
            elif key.Type == DataKey.STREAM:
                entry = r[0]
                if entry is None or entry.value == 'n.a.':
                    continue
                dicsum.setdefault(key, [0, 0])
                dicsum[key][0] += entry.value[0]
                dicsum[key][1] += entry.value[1]
            else:
                try:
                    ir = int(r)
                    if key not in dicsum:
                        dicsum[key] = 0
                    dicsum[key] += ir
                except (ValueError, TypeError):
                    pass
    return dicsum
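
# Sketch of the summary content: DataKey('Run') ends up holding the number of
# runs, purely numeric results are summed across runs, and each stream key
# accumulates the two components of its entries' values, e.g.
#
#   nruns = dicsum[DataKey('Run')]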