ATLAS Offline Software
UpdateHVReference.py
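
UpdateHVReference.py merges a list of forced receiver-channel HV settings into an existing L1Calo HV reference and writes the updated HVCorrections and RxLayers folders to a new COOL sqlite file. A typical invocation (file names are illustrative; the output defaults to new_hv_ref.sqlite when -o is omitted):

    python UpdateHVReference.py -r hv_reference.sqlite -f forced_channels.txt -o new_hv_ref.sqlite
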
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration

from __future__ import print_function

import sys
import time
import struct

from PyCool import cool
from optparse import OptionParser


class HVRefUpdator:

    def __init__(self):

        self.reference_HV = {}
        self.reference_AffectedCells = {}
        self.forced_HV = {}
        self.NLayers = {}
        self.LayerName = {}
        self.NCells = {}
        self.UNIX2COOL = 1000000000  # Unix seconds -> COOL validity key (nanoseconds)

    def read_forced_list(self, forced_file_name):
        print (" Loading forced file", forced_file_name)

        with open(forced_file_name, 'r') as myfile:
            for line in myfile:
                # str.strip() returns a new string; the original bare rstrip()/lstrip() calls had no effect
                line = line.strip()
                if line == '':
                    continue
                line_cont = line.split(' ')
                line_cont = [iii for iii in line_cont if not iii == '']
                rec_chan = line_cont[0]

                # columns 2-5 carry the four per-layer HV correction factors
                new_hv = [float(line_cont[iii]) for iii in range(2, 6)]

                self.forced_HV[rec_chan] = new_hv
                print ("updating channel ", rec_chan, " to HV ", new_hv)

        return

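
    # Based on the parsing in read_forced_list(), each line of the forced-channel file is
    # whitespace separated: column 0 is the receiver CoolId (presumably the same hex-string
    # form as the hex(row.channelId()) keys used below), column 1 is read but ignored, and
    # columns 2-5 are the four per-layer HV correction factors.  An illustrative line
    # (channel id and values are made up):
    #
    #     0x6120103  Rx  1.00  1.25  1.00  1.00
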
    def read_HV_reference(self, ref_file_name):
        print (" Loading reference HV file", ref_file_name)
        # get database service and open database
        dbSvc = cool.DatabaseSvcFactory.databaseService()

        dbString = 'sqlite://;schema=' + ref_file_name + ';dbname=L1CALO'
        try:
            db = dbSvc.openDatabase(dbString, False)
        except Exception as e:
            print ('Error: Problem opening database', e)
            sys.exit(1)

        folder_name = '/TRIGGER/L1Calo/V1/Results/HVCorrections'
        folder = db.getFolder(folder_name)

        startUtime = int(time.time())
        endUtime = int(time.time())
        startValKey = startUtime * self.UNIX2COOL
        endValKey = endUtime * self.UNIX2COOL
        # select all channels (the original upper bound sys.maxint does not exist in Python 3)
        chsel = cool.ChannelSelection.all()

        try:
            itr = folder.browseObjects(startValKey, endValKey, chsel)
        except Exception as e:
            print (e)
            sys.exit(1)

        for row in itr:
            HV_corrections = 4*[0]
            Affected_cells = 4*[0]

            CoolId = hex(int(row.channelId()))
            payload = row.payload()

            HV_corrections[0] = payload['LayerMean1']
            HV_corrections[1] = payload['LayerMean2']
            HV_corrections[2] = payload['LayerMean3']
            HV_corrections[3] = payload['LayerMean4']

            Affected_cells[0] = struct.unpack('B', payload['AffectedCells1'])[0]
            Affected_cells[1] = struct.unpack('B', payload['AffectedCells2'])[0]
            Affected_cells[2] = struct.unpack('B', payload['AffectedCells3'])[0]
            Affected_cells[3] = struct.unpack('B', payload['AffectedCells4'])[0]

            self.reference_AffectedCells[CoolId] = Affected_cells
            self.reference_HV[CoolId] = HV_corrections

#        for iii in self.reference_AffectedCells.keys():
#            print (iii, " ", self.reference_AffectedCells[iii])

#        for iii in self.reference_HV.keys():
#            print (iii, " ", self.reference_HV[iii])

        # close database
        db.closeDatabase()

        return 0

    def read_HV_geometry(self, ref_file_name):
        print (" Reading HV geometry from file", ref_file_name)
        # get database service and open database
        dbSvc = cool.DatabaseSvcFactory.databaseService()

        dbString = 'sqlite://;schema=' + ref_file_name + ';dbname=L1CALO'
        try:
            db = dbSvc.openDatabase(dbString, False)
        except Exception as e:
            print ('Error: Problem opening database', e)
            sys.exit(1)

        folder_name = '/TRIGGER/L1Calo/V1/Results/RxLayers'
        folder = db.getFolder(folder_name)

        startUtime = int(time.time())
        endUtime = int(time.time())
        startValKey = startUtime * self.UNIX2COOL
        endValKey = endUtime * self.UNIX2COOL
        # select all channels (the original upper bound sys.maxint does not exist in Python 3)
        chsel = cool.ChannelSelection.all()

        try:
            itr = folder.browseObjects(startValKey, endValKey, chsel)
        except Exception as e:
            print (e)
            sys.exit(1)

        for row in itr:
            HV_layer_names = 4*[0]
            HV_Ncells = 4*[0]

            CoolId = hex(int(row.channelId()))
            payload = row.payload()

            self.NLayers[CoolId] = struct.unpack('B', payload['NLayers'])[0]

            HV_layer_names[0] = struct.unpack('B', payload['Name1'])[0]
            HV_layer_names[1] = struct.unpack('B', payload['Name2'])[0]
            HV_layer_names[2] = struct.unpack('B', payload['Name3'])[0]
            HV_layer_names[3] = struct.unpack('B', payload['Name4'])[0]

            HV_Ncells[0] = struct.unpack('B', payload['NCells1'])[0]
            HV_Ncells[1] = struct.unpack('B', payload['NCells2'])[0]
            HV_Ncells[2] = struct.unpack('B', payload['NCells3'])[0]
            HV_Ncells[3] = struct.unpack('B', payload['NCells4'])[0]

            self.LayerName[CoolId] = HV_layer_names
            self.NCells[CoolId] = HV_Ncells

#        for iii in self.NLayers.keys():
#            print (iii, " NLayers: ", self.NLayers[iii], " Names: ", self.LayerName[iii], " NCells: ", self.NCells[iii])

        # close database
        db.closeDatabase()

        return 0

    def update_HV(self):

        print (" Updating internal HV list")

        for iii in self.forced_HV.keys():

            isNominal = False
            if (self.forced_HV[iii][0] == 0 or self.forced_HV[iii][0] == 1.) and \
               (self.forced_HV[iii][1] == 0 or self.forced_HV[iii][1] == 1.) and \
               (self.forced_HV[iii][2] == 0 or self.forced_HV[iii][2] == 1.) and \
               (self.forced_HV[iii][3] == 0 or self.forced_HV[iii][3] == 1.):

                isNominal = True

            if isNominal:  # a channel at nominal HV is not written to the output file
                # pop() with a default tolerates channels that are already absent from the reference
                self.reference_HV.pop(iii, None)
                self.reference_AffectedCells.pop(iii, None)
            else:
                self.reference_HV[iii] = self.forced_HV[iii]
                self.reference_AffectedCells[iii] = [99, 99, 99, 99]  # placeholder: the true affected-cell counts are unknown here

#        print ("***************************************************************")
#        print (" Now changed HV dictionary")
#        print ("***************************************************************")
#        for iii in self.reference_HV.keys():
#            print (iii, " ", self.reference_HV[iii])

        return

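
    # Illustrative effect of update_HV() (channel ids and values are made up):
    #   forced_HV['0x6120103'] = [1.0, 1.0, 1.0, 1.0]  -> all layers nominal: the channel is dropped
    #                                                     from reference_HV and reference_AffectedCells
    #   forced_HV['0x6120104'] = [1.0, 1.3, 1.0, 1.0]  -> non-nominal: reference_HV is overwritten and
    #                                                     reference_AffectedCells becomes [99, 99, 99, 99]
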
    def write_geometry(self, new_file_name):
        print (" Writing geometry to file ", new_file_name)

        dbSvc = cool.DatabaseSvcFactory.databaseService()
        connectString = 'sqlite://;schema=' + new_file_name + ';dbname=L1CALO'

        print ('Writing into database file', new_file_name)
        # the database is opened read-write; write_new_file() is expected to have created it already
        db = dbSvc.openDatabase(connectString, False)

        spec = cool.RecordSpecification()
        spec.extend('NLayers', cool.StorageType.UChar)

        spec.extend('Name1', cool.StorageType.UChar)
        spec.extend('Name2', cool.StorageType.UChar)
        spec.extend('Name3', cool.StorageType.UChar)
        spec.extend('Name4', cool.StorageType.UChar)

        spec.extend('NCells1', cool.StorageType.UChar)
        spec.extend('NCells2', cool.StorageType.UChar)
        spec.extend('NCells3', cool.StorageType.UChar)
        spec.extend('NCells4', cool.StorageType.UChar)

        now = int(time.time())

        since = now * self.UNIX2COOL
        until = cool.ValidityKeyMax

        folder_description = "<timeStamp>time</timeStamp><addrHeader><address_header service_type=\"71\" clid=\"1238547719\"/></addrHeader><typeName>CondAttrListCollection</typeName>"
        f = db.createFolder("/TRIGGER/L1Calo/V1/Results/RxLayers", spec, folder_description)

        for i in self.NLayers.keys():

            data = cool.Record(spec)

            data['NLayers'] = struct.pack('B', self.NLayers[i])
            data['Name1'] = struct.pack('B', self.LayerName[i][0])
            data['Name2'] = struct.pack('B', self.LayerName[i][1])
            data['Name3'] = struct.pack('B', self.LayerName[i][2])
            data['Name4'] = struct.pack('B', self.LayerName[i][3])

            data['NCells1'] = struct.pack('B', self.NCells[i][0])
            data['NCells2'] = struct.pack('B', self.NCells[i][1])
            data['NCells3'] = struct.pack('B', self.NCells[i][2])
            data['NCells4'] = struct.pack('B', self.NCells[i][3])

            f.storeObject(since, until, data, int(i, 16))

        db.closeDatabase()

        return

    def write_new_file(self, new_file_name):

        print (" Writing output to file ", new_file_name)

        dbSvc = cool.DatabaseSvcFactory.databaseService()
        connectString = 'sqlite://;schema=' + new_file_name + ';dbname=L1CALO'

        print ('recreating database file', new_file_name)
        dbSvc.dropDatabase(connectString)
        db = dbSvc.createDatabase(connectString)

        spec = cool.RecordSpecification()
        spec.extend('RxMean', cool.StorageType.Float)

        spec.extend('AffectedCells1', cool.StorageType.UChar)
        spec.extend('AffectedCells2', cool.StorageType.UChar)
        spec.extend('AffectedCells3', cool.StorageType.UChar)
        spec.extend('AffectedCells4', cool.StorageType.UChar)

        spec.extend('LayerMean1', cool.StorageType.Float)
        spec.extend('LayerMean2', cool.StorageType.Float)
        spec.extend('LayerMean3', cool.StorageType.Float)
        spec.extend('LayerMean4', cool.StorageType.Float)

        now = int(time.time())

        since = now * self.UNIX2COOL
        until = cool.ValidityKeyMax
        db.createFolderSet('/TRIGGER')
        db.createFolderSet('/TRIGGER/L1Calo')
        db.createFolderSet('/TRIGGER/L1Calo/V1')
        db.createFolderSet('/TRIGGER/L1Calo/V1/Results')
#        db.createFolderSet('/TRIGGER/L1Calo/V1/Results/HVCorrections')

        folder_description = "<timeStamp>time</timeStamp><addrHeader><address_header service_type=\"71\" clid=\"1238547719\"/></addrHeader><typeName>CondAttrListCollection</typeName>"
        f = db.createFolder("/TRIGGER/L1Calo/V1/Results/HVCorrections", spec, folder_description)
#        f = db.createFolder( folder_name , spec, True )

        for i in self.reference_HV.keys():

            data = cool.Record(spec)

            # cell-weighted mean of the per-layer HV correction factors
            MeanHV = 0.
            TotalNrCells = 0

            for ilayer in range(4):
                if self.NCells[i][ilayer] > 0:
                    MeanHV += self.reference_HV[i][ilayer] * self.NCells[i][ilayer]
                    TotalNrCells += self.NCells[i][ilayer]

            if TotalNrCells > 0:  # guard against channels with no listed cells
                MeanHV /= TotalNrCells

#            data['RxMean'] = (self.reference_HV[i][0]+self.reference_HV[i][1]+self.reference_HV[i][2]+self.reference_HV[i][3])/4.
            data['RxMean'] = MeanHV

            data['LayerMean1'] = self.reference_HV[i][0]
            data['LayerMean2'] = self.reference_HV[i][1]
            data['LayerMean3'] = self.reference_HV[i][2]
            data['LayerMean4'] = self.reference_HV[i][3]

            data['AffectedCells1'] = struct.pack('B', self.reference_AffectedCells[i][0])
            data['AffectedCells2'] = struct.pack('B', self.reference_AffectedCells[i][1])
            data['AffectedCells3'] = struct.pack('B', self.reference_AffectedCells[i][2])
            data['AffectedCells4'] = struct.pack('B', self.reference_AffectedCells[i][3])

            f.storeObject(since, until, data, int(i, 16))

        db.closeDatabase()

        return

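
# Worked example of the cell-weighted RxMean computed in write_new_file() above
# (all numbers are illustrative):
#
#     per-layer HV factors : [1.0, 1.3, 1.0, 1.0]
#     cells per layer      : [4,   8,   0,   0 ]   (layers with zero cells are skipped)
#     RxMean               = (1.0*4 + 1.3*8) / (4 + 8) = 14.4 / 12 = 1.2
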
if __name__ == "__main__":

    print ("Starting UpdateHVReference")

    parser = OptionParser()

    parser.add_option("-r", "--ReferenceFile", action="store", type="string", dest="reference_file", help="Name of input HV reference sqlite file")
    parser.add_option("-f", "--ForcedChannels", action="store", type="string", dest="forced_file", help="Name of input forced-channel list file")
    parser.add_option("-o", "--OutputFile", action="store", type="string", dest="output_file", help="Name of output sqlite file")

    (options, args) = parser.parse_args()

    HVUpdatingTool = HVRefUpdator()

    if options.forced_file:
        HVUpdatingTool.read_forced_list(options.forced_file)
    else:
        print ("No forced channel list given, run python UpdateHVReference.py -h for more information")
        sys.exit(1)

    if options.reference_file:
        HVUpdatingTool.read_HV_reference(options.reference_file)
        HVUpdatingTool.read_HV_geometry(options.reference_file)
    else:
        print ("No HV reference file given, run python UpdateHVReference.py -h for more information")
        sys.exit(1)

    HVUpdatingTool.update_HV()

    if options.output_file:
        print ("Writing output into file ", options.output_file)
        HVUpdatingTool.write_new_file(options.output_file)
        HVUpdatingTool.write_geometry(options.output_file)
    else:
        print ("Writing output into file new_hv_ref.sqlite")
        HVUpdatingTool.write_new_file("new_hv_ref.sqlite")
        HVUpdatingTool.write_geometry("new_hv_ref.sqlite")

    print ("Done!")
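
To sanity-check the result, the output can be reopened with the same PyCool calls the script itself uses. A minimal sketch, assuming the default output name new_hv_ref.sqlite and the standard COOL constants ValidityKeyMin/ValidityKeyMax and ChannelSelection.all():

    from PyCool import cool

    dbSvc = cool.DatabaseSvcFactory.databaseService()
    db = dbSvc.openDatabase('sqlite://;schema=new_hv_ref.sqlite;dbname=L1CALO', True)  # True = read-only
    folder = db.getFolder('/TRIGGER/L1Calo/V1/Results/HVCorrections')
    itr = folder.browseObjects(cool.ValidityKeyMin, cool.ValidityKeyMax, cool.ChannelSelection.all())
    print(sum(1 for _ in itr), 'channels carry non-nominal HV corrections')
    db.closeDatabase()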