ATLAS Offline Software
sct_calib_tf.py
#!/usr/bin/env python

# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
"""
The SCT 24h Calibration Loop.
This transformation will run the SCT 24 hours calibration loop.
Mandatory values have to be given for the input ntuples
as well as the output HitMaps and output stream file.
"""

import os,sys,time

from PyJobTransforms.transform import transform
from PyJobTransforms.trfArgs import addAthenaArguments
from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
from PyJobTransforms.trfExe import athenaExecutor
from PyJobTransforms.trfExitCodes import trfExit
from PyJobTransforms.trfLogger import stdLogLevels, msg
from PyJobTransforms.trfSignal import resetTrfSignalHandlers
from PyJobTransformsCore.trfutil import fileutil
import PyJobTransforms.trfArgClasses as trfArgClasses
import PyJobTransforms.trfExceptions as trfExceptions
import PyJobTransforms.trfValidation as trfValidation

from ROOT import TFile

dsDict={'input': [] , 'output' : []}
RunNumber=-1
EventNumber=-1
SvcClass=''
Stream=''
NumberOfEvents=0


def getDsFileName(file,input=False):
    """Returns the logical file name for inputs of type ds##file or ds#file (or Tier-0 file
    dictionaries), recording the dataset, run number, stream and service class as a side effect."""
    global RunNumber
    global SvcClass
    global Stream
    global NumberOfEvents
    if isinstance(file,dict):
        # name=file['pfn']
        name=file['lfn']
        ds=file['dsn']
        # SvcClass=file['svcclass']
        SvcClass=file.get('svcclass','')
        if 'events' in file:
            NumberOfEvents+=file['events']
    else:
        if file.find('##')!=-1:
            ds=file.split('##')[0]
            name=file.replace('##','')
        elif file.find('#')!=-1:
            ds=file.split('#')[0]
            name=file.split('#')[1]
        else:
            if file.find('/')!=-1:
                fileparts=file.split('/')
                ds=fileparts[len(fileparts)-1]
            else:
                ds=file
            name=file

    if '/eos/atlas/' in name:
        name='root://eosatlas.cern.ch/'+name

    dsDict['input'].append({'file' : name, 'dataset' : ds})
    if RunNumber == -1:
        nb = ds.count('.')
        if nb >= 4:
            RunNumber=int(ds.split('.')[1])
            longStream=ds.split('.')[0]
            # Stream is either cos or xxxeV energy
            Stream=longStream.split('_')[1]

    return name

def checkFileList(filelist):
    """Converts a list of files of type ds#filename into a list of plain filenames, setting the
    dataset value along the way, and checks that each file exists (files on castor are skipped)."""
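    # For example (hypothetical input), checkFileList('myDS#ntuple.root') returns ['ntuple.root']
    # when the file exists locally and records {'file': 'ntuple.root', 'dataset': 'myDS'} in
    # dsDict; a missing local file raises a TransformValidationException with code
    # TRF_INPUT_FILE_VALIDATION_FAIL.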
    # First check if the input is a list
    if not isinstance(filelist,list):
        filelist=[filelist]

    for i,ifile in enumerate(filelist):
        # extract ds, run number and svcclass
        filename=getDsFileName(ifile)
        # skip the file check if the file is on castor
        if filename.find('/castor',0,8) != -1:
            pass
        elif not fileutil.exists(filename):
            found = fileutil.exists_suffix_number(filename + '.')
            if not found:
                errMsg = filename+' not found'
                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_INPUT_FILE_VALIDATION_FAIL'), errMsg)
            if found != filename:
                filename = found
        # correct the filename in the list
        filelist[i]=filename
    return filelist

def updateLastRun(RunNumber):

    if os.path.exists('/afs/cern.ch/work/s/sctcalib/lastRun'):
        f = open('/afs/cern.ch/work/s/sctcalib/lastRun','w')
        f.write(str(RunNumber)+' ')
        f.close()

@stdTrfExceptionHandler
@sigUsrStackTrace
def main():

    msg.info("This is %s", sys.argv[0])
    trf = getTransform()
    trf.parseCmdLineArgs(sys.argv[1:])

    addOutputArgs(trf.parser, trf._argdict)
    trf.parseCmdLineArgs(sys.argv[1:])

    trf.execute()
    trf.generateReport()

    msg.info("%s stopped at %s, trf exit code %d", sys.argv[0], time.asctime(), trf.exitCode)
    sys.exit(trf.exitCode)

def getTransform():

    exeSet = set()
    exeSet.add(SCTCalibExecutor('SCT_CalibAlgs.SCTCalib_Skeleton'))

    trf = transform(executor=exeSet)

    addAthenaArguments(trf.parser)
    addSCTCalibArgs(trf.parser)

    return trf

def addSCTCalibArgs(parser):

    parser.defineArgGroup('Calibration', 'Specific options related to the calibration configuration')

    parser.add_argument('--input',
                        help = 'List of CSV input files', group='Calibration')
    parser.add_argument('--prefix', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
                        help = 'Prefix for output files', group='Calibration')
    parser.add_argument('--part', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                        help = 'List of calibration algorithms to be run', group='Calibration')
    parser.add_argument('--doRunSelector', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        help = 'Specifies if runSelector.py is executed', group='Calibration')
    parser.add_argument('--doRunInfo', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        help = 'Specifies if runInfo.py is executed', group='Calibration')
    parser.add_argument('--splitHitMap', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
                        help = 'Split task or not', group='Calibration')
    parser.add_argument('--forceRefRunNumber', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        help = 'Force reference run to current run number when checking information uploaded to COOL for previous runs', group='Calibration')

def addOutputArgs(parser,dict):

    if 'part' not in dict:
        checkPart = ['doNoisyStrip']
    else:
        checkPart = dict['part']._value

    if 'splitHitMap' not in dict:
        checkSplit = 0
    else:
        checkSplit = dict['splitHitMap']._value

    if 'prefix' not in dict:
        checkPrefix = ''
    else:
        checkPrefix = dict['prefix']._value

    # add output files
    if checkPrefix != '':
        checkPrefix += '.'

    if 'doHV' not in checkPart and checkSplit != 1:
        parser.add_argument('--outputCOOL', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'COOL DB', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'mycool.db'], runarg=True))

    #NOISY STRIPS OUTPUT FILES
    if 'doNoisyStrip' in checkPart and checkSplit == 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputLBFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'LB output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTLB.root'], runarg=True))

    if 'doNoisyStrip' in checkPart and checkSplit != 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputLBFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'LB output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTLB.root'], runarg=True))
        parser.add_argument('--outputBSSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Bad Strips summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'BadStripsSummaryFile.xml'], runarg=True))
        parser.add_argument('--outputBSAllFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Bad Strips All file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'BadStripsAllFile.xml'], runarg=True))
        parser.add_argument('--outputBSNewFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Bad Strips New file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'BadStripsNewFile.xml'], runarg=True))

    if 'doHV' in checkPart:
        parser.add_argument('--outputBadModulesFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Bad Modules file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'BadModulesFile.xml'], runarg=True))

    #DEAD CHIP OUTPUT FILES
    if 'doDeadChip' in checkPart and checkSplit == 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))

    if 'doDeadChip' in checkPart and checkSplit != 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))
        parser.add_argument('--outputDeadChipFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Chip file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'DeadChipsFile.xml'], runarg=True))
        parser.add_argument('--outputDeadSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Chip Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'DeadSummaryFile.xml'], runarg=True))

    #DEAD STRIP OUTPUT FILES
    if 'doDeadStrip' in checkPart and checkSplit == 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))

    if 'doDeadStrip' in checkPart and checkSplit != 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))
        parser.add_argument('--outputDeadStripFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Strip file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'DeadStripsFile.xml'], runarg=True))
        parser.add_argument('--outputDeadSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Strip Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'DeadSummaryFile.xml'], runarg=True))

    #QUIET CHIP OUTPUT FILES
    if 'doQuietChip' in checkPart and checkSplit == 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))

    if 'doQuietChip' in checkPart and checkSplit != 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))
        parser.add_argument('--outputDeadChipFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Chip file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'QuietChipsFile.xml'], runarg=True))
        parser.add_argument('--outputDeadSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Chip Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'QuietSummaryFile.xml'], runarg=True))

    #QUIET STRIP OUTPUT FILES
    if 'doQuietStrip' in checkPart and checkSplit == 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))

    if 'doQuietStrip' in checkPart and checkSplit != 1:
        parser.add_argument('--outputHITMapFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'HitMap output file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTHitMaps.root'], runarg=True))
        parser.add_argument('--outputBSErrorsFile', type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile, runarg=True, io='output'),
                            help = 'BS Errors file', group='Calibration', default=trfArgClasses.argNTUPFile([checkPrefix+'SCTBSErrors.root'], runarg=True))
        parser.add_argument('--outputDeadStripFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Strip file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'QuietStripsFile.xml'], runarg=True))
        parser.add_argument('--outputDeadSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Dead Strip Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'QuietSummaryFile.xml'], runarg=True))

    #NOISE OCCUPANCY OUTPUT FILES
    if 'doNoiseOccupancy' in checkPart:
        parser.add_argument('--outputNOFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Noise Occupancy file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'NoiseOccupancyFile.xml'], runarg=True))
        parser.add_argument('--outputNOSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Noise Occupancy Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'NoiseOccupancySummaryFile.xml'], runarg=True))

    #LORENTZ ANGLE OUTPUT FILES
    if 'doLorentzAngle' in checkPart:
        parser.add_argument('--outputLAFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Lorentz Angle file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'LorentzAngleFile.xml'], runarg=True))
        parser.add_argument('--outputLASummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Lorentz Angle Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'LorentzAngleSummaryFile.xml'], runarg=True))

    #RAW OCCUPANCY OUTPUT FILES
    if 'doRawOccupancy' in checkPart:
        parser.add_argument('--outputROSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Raw Occupancy Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'RawOccupancySummaryFile.xml'], runarg=True))

    #EFFICIENCY OUTPUT FILES
    if 'doEfficiency' in checkPart:
        parser.add_argument('--outputEffModuleFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Efficiency file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'EfficiencyModuleSummary.xml'], runarg=True))
        parser.add_argument('--outputEffSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'Efficiency Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'EfficiencySummaryFile.xml'], runarg=True))

    #BS ERRORS OUTPUT FILES
    if 'doBSErrorDB' in checkPart:
        parser.add_argument('--outputBSModuleFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'BS Errors Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'BSErrorModuleSummary.xml'], runarg=True))
        parser.add_argument('--outputBSSummaryFile', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True, io='output'),
                            help = 'BS Errors Summary file', group='Calibration', default=trfArgClasses.argFile([checkPrefix+'BSErrorSummaryFile.xml'], runarg=True))

class SCTCalibExecutor( athenaExecutor ):
    def __init__(self, skeleton):
        athenaExecutor.__init__(self,
                                name = 'sctcalib',
                                skeletonCA='SCT_CalibAlgs.SCTCalib_Skeleton')

    def preExecute(self, input=set(), output=set()):
        """Execute runInfo, set the environment and check the input type."""

        # Execute runInfo.py
        runArgs=self.conf._argdict

        checkFileList(runArgs['input'])
        namelist=[]
        for i in range(0,len(dsDict['input'])):
            namelist.append(dsDict['input'][i]['file'])

        self.conf.addToArgdict('inputNames', trfArgClasses.argList(namelist))

        nName=namelist[0].count('/')
        fileName=namelist[0].split('/')[nName]
        projectName=str(fileName.split('.')[0])

        if 'doRunInfo' not in runArgs:
            self.conf.addToArgdict('doRunInfo', trfArgClasses.argBool(False))
        else:
            if runArgs['doRunInfo']._value:
                import SCT_CalibAlgs.runInfo as runInfo

                print ("RunNumber for the runInfo = ", str(RunNumber), " ", Stream)
                runInfo.main(RunNumber, projectName)

        if 'splitHitMap' not in runArgs:
            self.conf.addToArgdict('splitHitMap', trfArgClasses.argInt(0))
        if 'doRunSelector' not in runArgs:
            self.conf.addToArgdict('doRunSelector', trfArgClasses.argBool(False))

        if 'EventNumber' not in runArgs:
            self.conf.addToArgdict('EventNumber', trfArgClasses.argInt(0))

        # Set STAGE_SVCCLASS
        if SvcClass != '' and SvcClass is not None:
            os.environ['STAGE_SVCCLASS']=SvcClass

        # Check the input type
        inputtype=dsDict['input'][0]['dataset'].split('.')[4]
        print ("Input type = ", inputtype)
        self.conf.addToArgdict('InputType', trfArgClasses.argString(inputtype))

        # Check which parts are to be run
        if 'part' not in runArgs:
            self.conf.addToArgdict('part', trfArgClasses.argString('doNoisyStrip'))

        part=runArgs['part']._value

        for ipart in part:
            if ipart not in ['doNoisyStrip','doNoiseOccupancy','doDeadChip','doDeadStrip','doQuietChip','doQuietStrip','doHV','doBSErrorDB','doRawOccupancy','doEfficiency','doLorentzAngle','doNoisyLB']:
                self._errMsg = 'Argument part=%s does not match any of the possible candidates' % ipart
                raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_ARG_ERROR'), self._errMsg)

        # Get the prefix
        if 'prefix' not in runArgs:
            self.conf.addToArgdict('prefix', trfArgClasses.argString(''))

        prefix=runArgs['prefix']._value

        # Set the job number
        jobnb=''
        # find the separator for the job number
        if prefix != '' :
            sep=prefix.find('._')
            if ( sep != -1 ) :
                jobnb=prefix[sep+1:]
            elif ( prefix.rfind('#_') != -1 ):
                sep=prefix.rfind('#_')
                jobnb=prefix[sep+1:]

            # find the separator for the prefix
            sep=prefix.find('#')
            if (sep != -1) :
                prefix=prefix[:sep]
            elif (prefix.find('._') != -1):
                sep=prefix.rfind('._')
                prefix=prefix[:sep]

            # set the prefix and job number
            prefix+='.'+jobnb
            runArgs['prefix']._value = prefix
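            # Illustrative example (hypothetical prefix, not from the original source): a Tier-0
            # style prefix 'data23_cos.00450000.calibration_SCTNoise#_0001' is split at '#_' into
            # the prefix 'data23_cos.00450000.calibration_SCTNoise' and the job number '_0001',
            # then recombined as 'data23_cos.00450000.calibration_SCTNoise._0001', which
            # postExecute() later uses when renaming the merged output files.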

        # When ATLAS is NOT in standby but the SCT is, the hitmap ROOT files have 0 events,
        # even though the calibration_SCTNoise stream has 10k+ events.
        # If the noisy strips task is generated, the jobs will fail. A.N. has implemented
        # a condition at Tier-0 level so that they won't be defined. However,
        # when runSelector uses AtlRunQuery to look for the runs that have 10k+ events
        # in the calibration_SCTNoise stream, those runs that failed or were skipped
        # will appear as waiting to be uploaded, keeping the rest on hold.

        # We include a protection against those cases: if the summed number of events
        # of the hitmap files is <10k, we don't execute the noisy strips. Rather, we exit
        # with 'success' status, so the job won't fail at Tier-0, and we update the value
        # of the last run uploaded as if this run had been uploaded, to avoid putting the
        # next run on hold indefinitely.
        # print 'Number of events: ', NumberOfEvents

        if ('doNoisyStrip' in part or 'doDeadStrip' in part or 'doDeadChip' in part or 'doQuietStrip' in part or 'doQuietChip' in part) and runArgs['splitHitMap']._value==2 and NumberOfEvents<1:
            self._isValidated = True
            self._trf._exitCode = 0
            self._trf._exitMsg = 'Noisy/dead/quiet strips/chips trying to read root files with 0 events. Gracefully exit and update lastRun counter to %s' %(RunNumber)

            updateLastRun(RunNumber)
            emptyDic = {}
            self._trf._dataDictionary = emptyDic

            resetTrfSignalHandlers()
            self._trf.generateReport(fast=True)
            sys.exit(0)

        if jobnb != '':
            self.conf.addToArgdict('JobNumber', trfArgClasses.argString(jobnb))

        # get RunNumber and Stream from the dataset name
        if not RunNumber == -1:
            self.conf.addToArgdict('RunNumber', trfArgClasses.argInt(RunNumber))
        if not Stream == '':
            self.conf.addToArgdict('Stream', trfArgClasses.argString(Stream))

        # Do other pre-run actions
        super(SCTCalibExecutor, self).preExecute(input,output)

    def execute(self):

        runArgs=self.conf._argdict
        # Check the run against the criteria in runSelector
        if runArgs['doRunSelector']._value:
            import SCT_CalibAlgs.runSelector as runSelector
            part=runArgs['part']._value
            if runArgs['splitHitMap']._value == 1 :
                skipQueue = 1
            else:
                skipQueue = 0
            checkRun=runSelector.main(RunNumber,part,skipQueue,Stream)
            if not checkRun:
                print ("Run ", RunNumber, " didn't pass run selection criteria. It will not be processed and no output will be generated. Finish execution and exit gracefully")
                emptyDic = {}
                self._trf._dataDictionary = emptyDic

                self._isValidated = True
                self._trf._exitMsg = 'Did not pass run selection criteria. Finish execution and exit gracefully.'
                self._trf._exitCode = 0
                resetTrfSignalHandlers()
                self._trf.generateReport(fast=True)
                sys.exit(0)

        rootHitmapFiles = []
        rootLbFiles = []
        rootBSerrFiles = []
        for inputFileName in runArgs['input'] :
            if inputFileName.find("SCTHitMaps") != -1:
                rootHitmapFiles.append(inputFileName)
            if inputFileName.find("SCTLB") != -1:
                rootLbFiles.append(inputFileName)
            if inputFileName.find("SCTBSErrors") != -1:
                rootBSerrFiles.append(inputFileName)

        if runArgs['splitHitMap']._value == 2 :

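            # The three blocks below shell out to ROOT's hadd utility to merge the per-job
            # SCTHitMaps/SCTLB/SCTBSErrors ntuples into single files; 'hadd -n 10' limits the
            # number of input files kept open at once during the merge.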
            if len(rootHitmapFiles) > 0 :

                fileutil.remove('SCTHitMaps.root')

                cmd = "cp -v $ROOTSYS/bin/hadd . \n"
                cmd += "hadd -n 10 SCTHitMaps.root "
                for inputFileName in rootHitmapFiles :
                    cmd += "%s " %(inputFileName)
                cmd += "\n"

                print (cmd)
                self._echologger.info('Merging Hitmap files!')
                retcode=1
                try:
                    retcode = os.system(cmd)
                except OSError:
                    retcode = 1
                if retcode == 0:
                    self._echologger.info('Root merge successful')
                else:
                    self._echologger.error("FAILED to merge root files")

            if ( len(rootLbFiles) > 0 and (len(rootLbFiles) == len(rootHitmapFiles)) ):

                fileutil.remove('SCTLB.root')

                cmd = "cp -v $ROOTSYS/bin/hadd . \n"
                cmd += "hadd -n 10 SCTLB.root "
                for inputFileName in rootLbFiles :
                    cmd += "%s " %(inputFileName)
                cmd += "\n"

                print (cmd)
                self._echologger.info('Merging LBHitmap files!')
                retcode=1
                try:
                    retcode = os.system(cmd)
                except OSError:
                    retcode = 1
                if retcode == 0:
                    self._echologger.info('Root merge successful')
                else:
                    self._echologger.error("FAILED to merge root files")

            if ( len(rootBSerrFiles) > 0 and (len(rootBSerrFiles) == len(rootHitmapFiles)) ):

                fileutil.remove('SCTBSErrors.root')

                cmd = "cp -v $ROOTSYS/bin/hadd . \n"
                cmd += "hadd -n 10 SCTBSErrors.root "
                for inputFileName in rootBSerrFiles :
                    cmd += "%s " %(inputFileName)
                cmd += "\n"

                print (cmd)
                self._echologger.info('Merging BSerr files!')
                retcode=1
                try:
                    retcode = os.system(cmd)
                except OSError:
                    retcode = 1
                if retcode == 0:
                    self._echologger.info('Root merge successful')
                else:
                    self._echologger.error("FAILED to merge root files")

        super(SCTCalibExecutor, self).execute()

        if self._rc != 0:
            try:
                if 'less than the required minimum number of events' in open('log.sctcalib').read():
                    self._errMsg = 'Successful but warrants further investigation'
                    raise trfExceptions.TransformValidationException(trfExit.nameToCode('TRF_UNKNOWN'), self._errMsg)
            except trfExceptions.TransformValidationException:
                pass

    def postExecute(self):

        runArgs=self.conf._argdict
        prefix=runArgs['prefix']._value

        # After processing the hitmaps, change the metadata of the SCTHitMaps and SCTLB (and
        # SCTBSErrors) files so that they contain the number of events. This value can be used
        # when processing noisy strips to avoid running over empty files.

        listOfKeys = self._trf.dataDictionary

        if 'doNoisyStrip' in runArgs['part']._value and runArgs['splitHitMap']._value == 1:
            outInstance0 = self.conf.dataDictionary[list(self._output)[0]]
            outTFile0 = TFile(outInstance0._value[0])
            print (outTFile0.GetName())
            outNentries0 = int(outTFile0.Get('GENERAL/events').GetEntries())
            outInstance0._setMetadata(outInstance0._value,{'nentries': outNentries0})

            outInstance1 = self.conf.dataDictionary[list(self._output)[1]]
            outTFile1 = TFile(outInstance1._value[0])
            print (outTFile1.GetName())
            outNentries1 = int(outTFile1.Get('GENERAL/events').GetEntries())
            outInstance1._setMetadata(outInstance1._value,{'nentries': outNentries1})

        if ('doDeadStrip' in runArgs['part']._value or 'doDeadChip' in runArgs['part']._value or 'doQuietStrip' in runArgs['part']._value or 'doQuietChip' in runArgs['part']._value ) and runArgs['splitHitMap']._value == 1:
            outInstance0 = self.conf.dataDictionary[list(self._output)[0]]
            outTFile0 = TFile(outInstance0._value[0])
            print (outTFile0.GetName())
            outNentries0 = int(outTFile0.Get('GENERAL/events').GetEntries())
            outInstance0._setMetadata(outInstance0._value,{'nentries': outNentries0})

            outInstance1 = self.conf.dataDictionary[list(self._output)[1]]
            outTFile1 = TFile(outInstance1._value[0])
            print (outTFile1.GetName())
            outNentries1 = int(outTFile1.Get('GENERAL/events').GetEntries())
            outInstance1._setMetadata(outInstance1._value,{'nentries': outNentries1})

        if 'doDeadStrip' in runArgs['part']._value and runArgs['splitHitMap']._value != 1:
            pwd=os.getcwd()
            deadFile=pwd+'/'+prefix+'.DeadStripsFile.xml'
            deadSummary=pwd+'/'+prefix+'.DeadSummaryFile.xml'

            numLinesFile = 0
            numLinesSummary = 0
            if os.path.exists(deadFile):
                numLinesFile = sum(1 for line in open(deadFile))
            if os.path.exists(deadSummary):
                numLinesSummary = sum(1 for line in open(deadSummary))

            # If the files exist but no dead strips were found, no COOL file is produced and the
            # job would fail. Remove the COOL file from the list of output files.
            # Clunky, but a temporary fix.

            if ( numLinesFile == 2 and numLinesSummary == 20 ):
                dataDic = self._trf.dataDictionary
                listOfKeys = []

                for key in dataDic:
                    if key != 'COOL':
                        listOfKeys.append(key)

                redDict = {key:dataDic[key] for key in listOfKeys}
                self._trf._dataDictionary = redDict

        if 'doDeadChip' in runArgs['part']._value and runArgs['splitHitMap']._value != 1:
            pwd=os.getcwd()
            deadFile=pwd+'/'+prefix+'.DeadChipsFile.xml'
            deadSummary=pwd+'/'+prefix+'.DeadSummaryFile.xml'

            numLinesFile = 0
            numLinesSummary = 0
            if os.path.exists(deadFile):
                numLinesFile = sum(1 for line in open(deadFile))
            if os.path.exists(deadSummary):
                numLinesSummary = sum(1 for line in open(deadSummary))

            # If the files exist but no dead chips were found, no COOL file is produced and the
            # job would fail. Remove the COOL file from the list of output files.
            # Clunky, but a temporary fix.

            if ( numLinesFile == 2 and numLinesSummary == 20 ):
                dataDic = self._trf.dataDictionary
                listOfKeys = []

                for key in dataDic:
                    if key != 'COOL':
                        listOfKeys.append(key)

                redDict = {key:dataDic[key] for key in listOfKeys}
                self._trf._dataDictionary = redDict

        if 'doQuietStrip' in runArgs['part']._value and runArgs['splitHitMap']._value != 1:
            pwd=os.getcwd()
            deadFile=pwd+'/'+prefix+'.QuietStripsFile.xml'
            deadSummary=pwd+'/'+prefix+'.QuietSummaryFile.xml'

            numLinesFile = 0
            numLinesSummary = 0
            if os.path.exists(deadFile):
                numLinesFile = sum(1 for line in open(deadFile))
            if os.path.exists(deadSummary):
                numLinesSummary = sum(1 for line in open(deadSummary))

            # If the files exist but no quiet strips were found, no COOL file is produced and the
            # job would fail. Remove the COOL file from the list of output files.
            # Clunky, but a temporary fix.

            if ( numLinesFile == 2 and numLinesSummary == 20 ):
                dataDic = self._trf.dataDictionary
                listOfKeys = []

                for key in dataDic:
                    if key != 'COOL':
                        listOfKeys.append(key)

                redDict = {key:dataDic[key] for key in listOfKeys}
                self._trf._dataDictionary = redDict

        if 'doQuietChip' in runArgs['part']._value and runArgs['splitHitMap']._value != 1:
            pwd=os.getcwd()
            deadFile=pwd+'/'+prefix+'.QuietChipsFile.xml'
            deadSummary=pwd+'/'+prefix+'.QuietSummaryFile.xml'

            numLinesFile = 0
            numLinesSummary = 0
            if os.path.exists(deadFile):
                numLinesFile = sum(1 for line in open(deadFile))
            if os.path.exists(deadSummary):
                numLinesSummary = sum(1 for line in open(deadSummary))

            # If the files exist but no quiet chips were found, no COOL file is produced and the
            # job would fail. Remove the COOL file from the list of output files.
            # Clunky, but a temporary fix.

            if ( numLinesFile == 2 and numLinesSummary == 20 ):
                dataDic = self._trf.dataDictionary
                listOfKeys = []

                for key in dataDic:
                    if key != 'COOL':
                        listOfKeys.append(key)

                redDict = {key:dataDic[key] for key in listOfKeys}
                self._trf._dataDictionary = redDict

        if prefix != '':
            try:
                if runArgs['splitHitMap']._value != 1 and 'COOL' in listOfKeys:
                    os.rename('mycool.db',prefix+'.mycool.db')
                if runArgs['splitHitMap']._value == 2:
                    os.rename('SCTHitMaps.root',prefix+'.SCTHitMaps.root')
                    if 'doNoisyStrip' in runArgs['part']._value:
                        os.rename('SCTLB.root',prefix+'.SCTLB.root')
                    if ('doDeadStrip' in runArgs['part']._value or 'doDeadChip' in runArgs['part']._value or 'doQuietStrip' in runArgs['part']._value or 'doQuietChip' in runArgs['part']._value ):
                        os.rename('SCTBSErrors.root',prefix+'.SCTBSErrors.root')
            except OSError:
                self._echologger.warning('failed to rename DB, ROOT or LOG file.')

        super(SCTCalibExecutor, self).postExecute()

    def validate(self):
        self._hasValidated = True
        deferredException = None

        if 'ignorePatterns' in self.conf._argdict:
            igPat = self.conf.argdict['ignorePatterns'].value
        else:
            igPat = []
        if 'ignoreFiles' in self.conf._argdict:
            ignorePatterns = trfValidation.ignorePatterns(files = self.conf._argdict['ignoreFiles'].value, extraSearch=igPat)
        elif self._errorMaskFiles is not None:
            ignorePatterns = trfValidation.ignorePatterns(files = self._errorMaskFiles, extraSearch=igPat)
        else:
            ignorePatterns = trfValidation.ignorePatterns(files = athenaExecutor._defaultIgnorePatternFile, extraSearch=igPat)

        # Now actually scan my logfile
        msg.info('Scanning logfile {0} for errors'.format(self._logFileName))
        self._logScan = trfValidation.athenaLogFileReport(logfile = self._logFileName, ignoreList = ignorePatterns)
        worstError = self._logScan.worstError()

        # In general we add the error message to the exit message, but if it's too long then don't do
        # that and just say look in the jobReport
        if worstError['firstError']:
            if len(worstError['firstError']['message']) > athenaExecutor._exitMessageLimit:
                if 'CoreDumpSvc' in worstError['firstError']['message']:
                    exitErrorMessage = "Core dump at line {0} (see jobReport for further details)".format(worstError['firstError']['firstLine'])
                elif 'G4Exception' in worstError['firstError']['message']:
                    exitErrorMessage = "G4 exception at line {0} (see jobReport for further details)".format(worstError['firstError']['firstLine'])
                else:
                    exitErrorMessage = "Long {0} message at line {1} (see jobReport for further details)".format(worstError['level'], worstError['firstError']['firstLine'])
            else:
                exitErrorMessage = "Logfile error in {0}: \"{1}\"".format(self._logFileName, worstError['firstError']['message'])
        else:
            exitErrorMessage = "Error level {0} found (see athena logfile for details)".format(worstError['level'])

        # If we failed on the rc, then abort now
        if deferredException is not None:
            # Add any logfile information we have
            if worstError['nLevel'] >= stdLogLevels['ERROR']:
                deferredException.errMsg = deferredException.errMsg + "; {0}".format(exitErrorMessage)
            raise deferredException

        # Ignore instances of "unknown offline id...":
        # fewer than ~10 per event are admissible; if there are more than 10 per event,
        # the event is skipped in SCT_CalibEventInfo.
        if worstError['firstError'] is not None:
            if 'ERROR Unknown offlineId for OnlineId' in worstError['firstError']['message']:
                worstError['nLevel'] = 30
                worstError['level'] = 'WARNING'

        # Very simple: if we get ERROR or worse, we're dead, except if ignoreErrors=True
        if worstError['nLevel'] == stdLogLevels['ERROR'] and ('ignoreErrors' in self.conf._argdict and self.conf._argdict['ignoreErrors'].value is True):
            msg.warning('Found ERRORs in the logfile, but ignoring this as ignoreErrors=True (see jobReport for details)')
        elif worstError['nLevel'] >= stdLogLevels['ERROR']:
            self._isValidated = False
            msg.error('Fatal error in athena logfile (level {0})'.format(worstError['level']))
            raise trfExceptions.TransformLogfileErrorException(trfExit.nameToCode('TRF_EXEC_LOGERROR'),
                                                               ' Fatal error in athena logfile: "{0}"'.format(exitErrorMessage))

        # Must be ok if we got here!
        msg.info('Executor {0} has validated successfully'.format(self.name))
        self._isValidated = True

if __name__ == "__main__":
    main()