ATLAS Offline Software
UpdateHVReference.py
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration


import sys
import time
import struct

from PyCool import cool
from optparse import OptionParser
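
# UpdateHVReference.py reads an existing L1Calo HV-corrections reference from a COOL sqlite
# file, overrides the channels listed in a "forced channels" text file, and writes the merged
# corrections together with the receiver-layer geometry into a new sqlite file.
#
# Typical invocation (file names here are illustrative only):
#   python UpdateHVReference.py -r hv_ref.sqlite -f forced_channels.txt -o new_hv_ref.sqlite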

class HVRefUpdator:

    def __init__(self):

        self.reference_HV = {}
        self.reference_AffectedCells = {}
        self.forced_HV = {}
        self.NLayers = {}
        self.LayerName = {}
        self.NCells = {}
        self.UNIX2COOL = 1000000000   # conversion factor from Unix seconds to COOL nanosecond timestamps

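    # Expected format of the forced-channels text file, as inferred from the parsing in
    # read_forced_list() below: whitespace-separated columns with the receiver CoolId in
    # column 0 and four per-layer HV values in columns 2-5, e.g. (illustrative values only):
    #   0x6130f02  label  1.00  1.25  1.10  1.00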
    def read_forced_list(self, forced_file_name):
        print (" Loading forced file", forced_file_name)

        new_hv = []
        with open(forced_file_name, 'r') as myfile:
            for line in myfile.readlines():
                # str.strip() returns a new string, so the result has to be kept
                line = line.strip()
                line_cont = line.split(' ')
                line_cont = [iii for iii in line_cont if not iii == '']
                rec_chan = line_cont[0]

                new_hv = [float(line_cont[iii]) for iii in range(2, 6)]

                self.forced_HV[rec_chan] = new_hv
                print ("updating channel ", rec_chan, " to HV ", new_hv)

        return

    def read_HV_reference(self, ref_file_name):
        print (" Loading reference HV file", ref_file_name)
        # get database service and open database
        dbSvc = cool.DatabaseSvcFactory.databaseService()

        dbString = 'sqlite://;schema=' + ref_file_name + ';dbname=L1CALO'
        try:
            db = dbSvc.openDatabase(dbString, False)
        except Exception as e:
            print ('Error: Problem opening database', e)
            sys.exit(1)

        folder_name = '/TRIGGER/L1Calo/V1/Results/HVCorrections'
        folder = db.getFolder(folder_name)

        startUtime = int(time.time())
        endUtime = int(time.time())
        startValKey = startUtime * self.UNIX2COOL
        endValKey = endUtime * self.UNIX2COOL
        chsel = cool.ChannelSelection(0, sys.maxsize)   # sys.maxint does not exist in Python 3

        try:
            itr = folder.browseObjects(startValKey, endValKey, chsel)
        except Exception as e:
            print (e)
            sys.exit(1)

        for row in itr:
            HV_corrections = 4*[0]
            Affected_cells = 4*[0]

            CoolId = hex(int(row.channelId()))
            payload = row.payload()

            HV_corrections[0] = payload['LayerMean1']
            HV_corrections[1] = payload['LayerMean2']
            HV_corrections[2] = payload['LayerMean3']
            HV_corrections[3] = payload['LayerMean4']

            Affected_cells[0] = struct.unpack('B', payload['AffectedCells1'])[0]
            Affected_cells[1] = struct.unpack('B', payload['AffectedCells2'])[0]
            Affected_cells[2] = struct.unpack('B', payload['AffectedCells3'])[0]
            Affected_cells[3] = struct.unpack('B', payload['AffectedCells4'])[0]

            self.reference_AffectedCells[CoolId] = Affected_cells
            self.reference_HV[CoolId] = HV_corrections

#        for iii in self.reference_AffectedCells.keys():
#            print (iii," ",self.reference_AffectedCells[iii] )

#        for iii in self.reference_HV.keys():
#            print (iii," ",self.reference_HV[iii] )

        # close database
        db.closeDatabase()

        return 0


    def read_HV_geometry(self, ref_file_name):
        print (" Reading HV geometry from file", ref_file_name)
        # get database service and open database
        dbSvc = cool.DatabaseSvcFactory.databaseService()

        dbString = 'sqlite://;schema=' + ref_file_name + ';dbname=L1CALO'
        try:
            db = dbSvc.openDatabase(dbString, False)
        except Exception as e:
            print ('Error: Problem opening database', e)
            sys.exit(1)

        folder_name = '/TRIGGER/L1Calo/V1/Results/RxLayers'
        folder = db.getFolder(folder_name)

        startUtime = int(time.time())
        endUtime = int(time.time())
        startValKey = startUtime * self.UNIX2COOL
        endValKey = endUtime * self.UNIX2COOL
        chsel = cool.ChannelSelection(0, sys.maxsize)   # sys.maxint does not exist in Python 3

        try:
            itr = folder.browseObjects(startValKey, endValKey, chsel)
        except Exception as e:
            print (e)
            sys.exit(1)

        for row in itr:
            HV_layer_names = 4*[0]
            HV_Ncells = 4*[0]

            CoolId = hex(int(row.channelId()))
            payload = row.payload()

            self.NLayers[CoolId] = struct.unpack('B', payload['NLayers'])[0]

            HV_layer_names[0] = struct.unpack('B', payload['Name1'])[0]
            HV_layer_names[1] = struct.unpack('B', payload['Name2'])[0]
            HV_layer_names[2] = struct.unpack('B', payload['Name3'])[0]
            HV_layer_names[3] = struct.unpack('B', payload['Name4'])[0]

            HV_Ncells[0] = struct.unpack('B', payload['NCells1'])[0]
            HV_Ncells[1] = struct.unpack('B', payload['NCells2'])[0]
            HV_Ncells[2] = struct.unpack('B', payload['NCells3'])[0]
            HV_Ncells[3] = struct.unpack('B', payload['NCells4'])[0]

            self.LayerName[CoolId] = HV_layer_names
            self.NCells[CoolId] = HV_Ncells


#        for iii in self.NLayers.keys():
#            print (iii," NLayers: ",self.NLayers[iii], " Names: ",self.LayerName[iii], " NCells: ",self.NCells[iii])


        # close database
        db.closeDatabase()

        return 0

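    # Both read_HV_reference() and read_HV_geometry() above key their dictionaries by the hex
    # string of the COOL channel id and read only the payload valid at the current time; the
    # UNIX2COOL factor converts Unix seconds into COOL's nanosecond-based validity keys.
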
    def update_HV(self):

        print (" Updating internal HV list")

        for iii in self.forced_HV.keys():

            isNominal = False
            if (self.forced_HV[iii][0] == 0 or self.forced_HV[iii][0] == 1.) and \
               (self.forced_HV[iii][1] == 0 or self.forced_HV[iii][1] == 1.) and \
               (self.forced_HV[iii][2] == 0 or self.forced_HV[iii][2] == 1.) and \
               (self.forced_HV[iii][3] == 0 or self.forced_HV[iii][3] == 1.):

                isNominal = True

            if isNominal:  # if the channel is on nominal HV, doesn't go to the file
                del self.reference_HV[iii]
                del self.reference_AffectedCells[iii]
            else:
                self.reference_HV[iii] = self.forced_HV[iii]
                self.reference_AffectedCells[iii] = [99, 99, 99, 99]  # don't know what to put there, hope this is OK


#        print ("***************************************************************")
#        print (" Now changed HV dictionary")
#        print ("***************************************************************")
#        for iii in self.reference_HV.keys():
#            print (iii," ",self.reference_HV[iii] )

        return

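    # Example of the effect of update_HV() above (channel ids and values are illustrative only):
    #   forced_HV['0x6130f02'] = [1.0, 1.0, 1.0, 1.0]  -> channel removed from reference_HV (nominal HV)
    #   forced_HV['0x7180f03'] = [1.0, 1.3, 1.2, 1.0]  -> reference_HV overwritten, AffectedCells set to [99, 99, 99, 99]
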
    def write_geometry(self, new_file_name):
        print (" Writing geometry to file ", new_file_name)

        dbSvc = cool.DatabaseSvcFactory.databaseService()
        connectString = 'sqlite://;schema=' + new_file_name + ';dbname=L1CALO'


        print ('Writing into database file', new_file_name)
        db = dbSvc.openDatabase(connectString, False)

        spec = cool.RecordSpecification()
        spec.extend('NLayers', cool.StorageType.UChar)

        spec.extend('Name1', cool.StorageType.UChar)
        spec.extend('Name2', cool.StorageType.UChar)
        spec.extend('Name3', cool.StorageType.UChar)
        spec.extend('Name4', cool.StorageType.UChar)

        spec.extend('NCells1', cool.StorageType.UChar)
        spec.extend('NCells2', cool.StorageType.UChar)
        spec.extend('NCells3', cool.StorageType.UChar)
        spec.extend('NCells4', cool.StorageType.UChar)

        now = int(time.time())

        since = now*self.UNIX2COOL
        until = cool.ValidityKeyMax

        folder_description = "<timeStamp>time</timeStamp><addrHeader><address_header service_type=\"71\" clid=\"1238547719\"/></addrHeader><typeName>CondAttrListCollection</typeName>"
        f = db.createFolder("/TRIGGER/L1Calo/V1/Results/RxLayers", spec, folder_description)

        for i in self.NLayers.keys():

            data = cool.Record(spec)

            data['NLayers'] = struct.pack('B', self.NLayers[i])
            data['Name1'] = struct.pack('B', self.LayerName[i][0])
            data['Name2'] = struct.pack('B', self.LayerName[i][1])
            data['Name3'] = struct.pack('B', self.LayerName[i][2])
            data['Name4'] = struct.pack('B', self.LayerName[i][3])

            data['NCells1'] = struct.pack('B', self.NCells[i][0])
            data['NCells2'] = struct.pack('B', self.NCells[i][1])
            data['NCells3'] = struct.pack('B', self.NCells[i][2])
            data['NCells4'] = struct.pack('B', self.NCells[i][3])

            f.storeObject(since, until, data, int(i, 16))

        db.closeDatabase()

        return

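    # Note: write_geometry() above only opens the output database; the sqlite file itself is
    # (re)created by write_new_file() below, which is why __main__ calls write_new_file() first.
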
    def write_new_file(self, new_file_name):

        print (" Writing output to file ", new_file_name)

        dbSvc = cool.DatabaseSvcFactory.databaseService()
        connectString = 'sqlite://;schema=' + new_file_name + ';dbname=L1CALO'

        print ('recreating database file', new_file_name)
        dbSvc.dropDatabase(connectString)
        db = dbSvc.createDatabase(connectString)

        spec = cool.RecordSpecification()
        spec.extend('RxMean', cool.StorageType.Float)

        spec.extend('AffectedCells1', cool.StorageType.UChar)
        spec.extend('AffectedCells2', cool.StorageType.UChar)
        spec.extend('AffectedCells3', cool.StorageType.UChar)
        spec.extend('AffectedCells4', cool.StorageType.UChar)

        spec.extend('LayerMean1', cool.StorageType.Float)
        spec.extend('LayerMean2', cool.StorageType.Float)
        spec.extend('LayerMean3', cool.StorageType.Float)
        spec.extend('LayerMean4', cool.StorageType.Float)

        now = int(time.time())

        since = now*self.UNIX2COOL
        until = cool.ValidityKeyMax
        db.createFolderSet('/TRIGGER')
        db.createFolderSet('/TRIGGER/L1Calo')
        db.createFolderSet('/TRIGGER/L1Calo/V1')
        db.createFolderSet('/TRIGGER/L1Calo/V1/Results')
#        db.createFolderSet('/TRIGGER/L1Calo/V1/Results/HVCorrections')

        folder_description = "<timeStamp>time</timeStamp><addrHeader><address_header service_type=\"71\" clid=\"1238547719\"/></addrHeader><typeName>CondAttrListCollection</typeName>"
        f = db.createFolder("/TRIGGER/L1Calo/V1/Results/HVCorrections", spec, folder_description)
#        f = db.createFolder( folder_name , spec, True )

        for i in self.reference_HV.keys():

            data = cool.Record(spec)

            MeanHV = 0.
            TotalNrCells = 0

            for ilayer in range(4):
                if self.NCells[i][ilayer] > 0:
                    MeanHV += self.reference_HV[i][ilayer]*self.NCells[i][ilayer]
                    TotalNrCells += self.NCells[i][ilayer]

            MeanHV /= TotalNrCells

#            data['RxMean'] =(self.reference_HV[i][0]+self.reference_HV[i][1]+self.reference_HV[i][2]+self.reference_HV[i][3])/4.
            data['RxMean'] = MeanHV

            data['LayerMean1'] = self.reference_HV[i][0]
            data['LayerMean2'] = self.reference_HV[i][1]
            data['LayerMean3'] = self.reference_HV[i][2]
            data['LayerMean4'] = self.reference_HV[i][3]

            data['AffectedCells1'] = struct.pack('B', self.reference_AffectedCells[i][0])
            data['AffectedCells2'] = struct.pack('B', self.reference_AffectedCells[i][1])
            data['AffectedCells3'] = struct.pack('B', self.reference_AffectedCells[i][2])
            data['AffectedCells4'] = struct.pack('B', self.reference_AffectedCells[i][3])

            f.storeObject(since, until, data, int(i, 16))

        db.closeDatabase()

        return

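# Note: the 'RxMean' written by write_new_file() above is the cell-count-weighted mean of the
# four per-layer HV corrections, using the NCells values read from the RxLayers geometry folder.
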
if __name__ == "__main__":

    print ("Starting UpdateHVReference")

    parser = OptionParser()

    parser.add_option("-r", "--ReferenceFile", action="store", type="string", dest="reference_file", help="Name of the input HV reference sqlite file")
    parser.add_option("-f", "--ForcedChannels", action="store", type="string", dest="forced_file", help="Name of the text file listing forced channels")
    parser.add_option("-o", "--OutputFile", action="store", type="string", dest="output_file", help="Name of the output sqlite file")

    (options, args) = parser.parse_args()


    HVUpdatingTool = HVRefUpdator()

    if options.forced_file:
        HVUpdatingTool.read_forced_list(options.forced_file)
    else:
        print ("No forced channel list given, run python UpdateHVReference.py -h for more information")
        sys.exit(1)


    if options.reference_file:
        HVUpdatingTool.read_HV_reference(options.reference_file)
        HVUpdatingTool.read_HV_geometry(options.reference_file)
    else:
        print ("No HV reference file given, run python UpdateHVReference.py -h for more information")
        sys.exit(1)

    HVUpdatingTool.update_HV()

    if options.output_file:
        print ("Writing output into file ", options.output_file)
        HVUpdatingTool.write_new_file(options.output_file)
        HVUpdatingTool.write_geometry(options.output_file)
    else:
        print ("Writing output into file new_hv_ref.sqlite")
        HVUpdatingTool.write_new_file("new_hv_ref.sqlite")
        HVUpdatingTool.write_geometry("new_hv_ref.sqlite")

    print ("Done!")