ATLAS Offline Software
trfArgs.py
Go to the documentation of this file.
1 # Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
2 
3 
6 
7 import logging
8 msg = logging.getLogger(__name__)
9 
10 import PyJobTransforms.trfArgClasses as trfArgClasses
11 from PyJobTransforms.trfArgClasses import argActionFactory, argFactory
12 
13 from PyJobTransforms.trfLogger import stdLogLevels
14 
15 
def addStandardTrfArgs(parser):
    """Add standard transform arguments to an argparse ArgumentParser.

    Covers the transform-level (as opposed to athena-level) options:
    logging, argument (de)serialisation, job report production and
    execution-graph introspection/steering.  Always finishes by adding
    the metadata arguments via addMetadataArguments().

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # runarg=False options steer the transform itself and are not passed
    # through to the athena job as run arguments.
    parser.add_argument('--CA', action=argActionFactory(trfArgClasses.argSubstepBool, runarg=False), nargs='*',
                        help='Use ComponentAccumulator base configuration')
    parser.add_argument('--verbose', '--debug', action='store_true', help='Set transform loglevel to DEBUG')
    parser.add_argument('--loglevel', choices=list(stdLogLevels), help='Set transform logging level')
    parser.add_argument('--argJSON', '--argjson', metavar='FILE', help='File containing JSON serialised argument dictionary')
    parser.add_argument('--dumpargs', action='store_true', help='Dump transform arguments and exit')
    # Graph/path/step introspection options: show information, then exit
    parser.add_argument('--showGraph', action='store_true', help='Show multi-step transform graph, then exit')
    parser.add_argument('--showPath', action='store_true', help='Show execution path only, then exit')
    parser.add_argument('--showSteps', action='store_true', help='Show list of executor steps only, then exit')
    parser.add_argument('--dumpPickle', metavar='FILE', help='Interpret command line arguments and write them out as a pickle file')
    parser.add_argument('--dumpJSON', metavar='FILE', help='Interpret command line arguments and write them out as a JSON file')
    parser.add_argument('--reportName', type=argFactory(trfArgClasses.argString, runarg=False),
                        help='Base name for job reports (default name is "jobReport" for most reports, but "metadata" for classic prodsys XML)')
    parser.add_argument('--reportType', type=argFactory(trfArgClasses.argList, runarg=False), nargs='+', metavar='TYPE',
                        help='Job reports to produce: valid values are "text", "json", "classic", "pilotPickle" and "gpickle"')
    parser.add_argument('--execOnly', action='store_true',
                        help='Exec the first substep only, replacing the transform process (no job reports and the return code will be from the substep process)')
    parser.add_argument('--env', type=argFactory(trfArgClasses.argSubstepList, runarg=False), metavar='substep:KEY=VALUE', nargs='+',
                        help='Explicitly set environment variables for an executor (default is all substeps).'
                        ' N.B. this setting is passed to the shell, so reference to shell variables is allowed, e.g.'
                        ' KEY=VALUE:$KEY')
    parser.add_argument('--imf', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
                        help='Manually include/exclude the Intel IMF maths library '
                        '(otherwise this is disabled for base releases < 17.7, enabled otherwise)')
    parser.add_argument('--tcmalloc', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
                        help='Switch preload of the tcmalloc library (disabled by default)')
    parser.add_argument('--steering', type=argFactory(trfArgClasses.argSubstepSteering, runarg=False), nargs='+', metavar='substep:{in/out}{+-}DATA',
                        help='Steer the transform by manipulating the execution graph before the execution path is calculated. '
                        'Format is substep:{in,out}{+-}DATA,{in,out}{+-}DATA,... to modify the substep\'s input/output '
                        ' by adding/removing a data type. e.g. RAWtoALL:in-RDO,in+RDO_TRIG would remove RDO and add '
                        'RDO_TRIG to the list of valid input datatypes for the RAWtoALL substep. See current version of '
                        'trfArgClasses.argSubstepSteering.steeringAlises for supported aliases. '
                        'https://twiki.cern.ch/twiki/bin/view/AtlasComputing/TransformSteering')
    # Metadata options are part of the standard argument set
    addMetadataArguments(parser)
51 
52 
53 
def addAthenaArguments(parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True):
    """Add arguments controlling how athena is run by the transform.

    @param parser: argument parser (trfArgParser) to add arguments to
    @param maxEventsDefaultSubstep: default substep for the --maxEvents argument
    @param addValgrind: if True also add the Valgrind argument group
    @param addPerfMon: if True also add the PerfMon argument group
    @param addVTune: if True also add the VTune argument group
    """
    parser.defineArgGroup('Athena', 'General Athena Options')
    parser.add_argument('--athenaopts', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=' ', runarg=False), nargs="+", metavar='substep:ATHENAOPTS',
                        help='Extra options to pass to athena. Opts will split on spaces. '
                        # Fixed help text: "sutbstep1" typo and missing space before "Without"
                        'Multiple substep options can be given with --athenaopts=\'substep1:--opt1 --opt2[=foo] ...\' \'substep2:--opt3\' '
                        'Without substep specified, options will be used for all substeps.')
    parser.add_argument('--command', '-c', group = 'Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='COMMAND',
                        help='Run %(metavar)s before all else')
    parser.add_argument('--athena', group = 'Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='ATHENA',
                        help='Use %(metavar)s as the athena executable')
    parser.add_argument('--preExec', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+',
                        metavar='substep:PREEXEC',
                        help='Python code to execute before main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--preInclude', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:PREINCLUDE',
                        help='Python configuration fragment to include before main job options (can be optionally limited to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--postExec', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+',
                        metavar='substep:POSTEXEC',
                        help='Python code to execute after main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--postInclude', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:POSTINCLUDE',
                        help='Python configuration fragment to include after main job options (can be optionally limited '
                        'to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--splitConfig', group = 'Athena', type=argFactory(trfArgClasses.argSubstepString),
                        metavar='substep:SPLITCONFIG',
                        help='Configuration file to internally split job into multiple parts (can be optionally limited to a single substep)')
    parser.add_argument('--maxEvents', group='Athena', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep=maxEventsDefaultSubstep),
                        nargs='+', metavar='substep:maxEvents',
                        help='Set maximum events for each processing step (default substep is "{0}")'.format(maxEventsDefaultSubstep))
    parser.add_argument('--skipEvents', group='Athena', nargs='+', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep='first'),
                        help='Number of events to skip over in the first processing step (skipping substep can be overridden)')
    parser.add_argument('--asetup', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:ASETUP',
                        help='asetup command string to be run before this substep is executed')
    parser.add_argument('--runInContainer', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:CONTAINER_OS',
                        help='Set the substep to run in a container with the specified OS. Requires the --athena flag')
    parser.add_argument('--eventAcceptanceEfficiency', type=trfArgClasses.argFactory(trfArgClasses.argSubstepFloat, min=0.0, max=1.0, runarg=False),
                        help='Allowed "efficiency" for processing events - used to ensure output file has enough events (default 1.0)')
    parser.add_argument('--athenaMPMergeTargetSize', '--mts', type=trfArgClasses.argFactory(trfArgClasses.argKeyFloatValueList, runarg=False),
                        metavar='dataType:targetSizeInMegaBytes', nargs='+', group='Athena',
                        help='Set the target merge size for an AthenaMP output file type (give size in MB). '
                        'Note that the special value 0 means do not merge this output file; negative values mean '
                        'always merge to a single file. Globbing is supported, e.g. "DESD_*:500" is understood. '
                        'Special datatype "ALL" can be used as a default for all datatypes not explicitly '
                        'given their own value or glob matched.')
    parser.add_argument('--athenaMPStrategy', type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=False),
                        nargs='+', metavar='substep:Strategy', group='Athena',
                        help='Set the AthenaMP scheduling strategy for a particular substep. Default is unset.')
    parser.add_argument('--athenaMPUseEventOrders', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Change AthenaMP setup to read event numbers from event orders files')
    parser.add_argument('--athenaMPEventsBeforeFork', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=False),
                        metavar='N', group='Athena',
                        # Fixed help text: close the parenthesis in the message
                        help='Set AthenaMP to fork after processing N events (default is to fork immediately after '
                        'initialisation)')
    parser.add_argument('--sharedWriter', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='SharedWriter mode active')
    parser.add_argument('--parallelCompression',
                        # NOTE(review): this 'type' line was missing (stripped during doc
                        # extraction); restored by analogy with --sharedWriter — confirm
                        # against upstream trfArgs.py
                        type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Delegate event data compression to the workers while using SharedWriter')
    parser.add_argument('--eventService', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Switch AthenaMP to the Event Service configuration')
    parser.add_argument('--multithreaded', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multithreaded mode active')
    parser.add_argument('--multiprocess', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multiprocess mode active')
    parser.add_argument('--deleteIntermediateOutputfiles', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Remove intermediate input/output files of multi step TRF')

    # Optional auxiliary argument groups
    if addPerfMon:
        addPerfMonArguments(parser)

    if addValgrind:
        addValgrindArguments(parser)

    if addVTune:
        addVTuneArguments(parser)
141 
142 
def addPerfMonArguments(parser):
    """Add PerfMon monitoring options.

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the 'def' line above was stripped by the doc extraction;
    # signature restored from the module cross-reference (addPerfMonArguments(parser)).
    parser.defineArgGroup('PerfMon', 'General PerfMon Options')
    parser.add_argument('--perfmon',
                        default=trfArgClasses.argString('fastmonmt'),
                        type=argFactory(trfArgClasses.argString),
                        help='Enable PerfMon (fastmonmt [default], fullmonmt, or none)',
                        group='PerfMon')
151 
152 
def addValgrindArguments(parser):
    """Add Valgrind profiling options.

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the 'def' line and the argument-class names inside the
    # argFactory calls were stripped by the doc extraction; restored from the
    # cross-reference and from the metavar/splitter usage (substep:BOOL ->
    # argSubstepBool-style bool, comma-split option list -> argList).
    # Confirm against upstream trfArgs.py.
    parser.defineArgGroup('Valgrind', 'General Valgrind Options')
    parser.add_argument(
        '--valgrind',
        group = 'Valgrind',
        type = argFactory(
            trfArgClasses.argBool,
            runarg = False
        ),
        metavar = "substep:BOOL",
        help = 'Enable Valgrind'
    )
    parser.add_argument(
        '--valgrindDefaultOpts',
        group = 'Valgrind',
        type = argFactory(
            trfArgClasses.argBool,
            runarg = False
        ),
        metavar = "substep:BOOL",
        help = 'Enable default Valgrind options'
    )
    parser.add_argument(
        '--valgrindExtraOpts',
        group = 'Valgrind',
        type = argFactory(
            trfArgClasses.argList,
            splitter = ',',
            runarg = False
        ),
        metavar = 'OPT1,OPT2,OPT3',
        help = 'Extra options passed to Valgrind when running Athena. ' +
               'Options starting with "-" must be given as ' +
               '--valgrindExtraOpts=\'--opt1=foo,--opt2=bar,...\''
    )
188 
189 
def addVTuneArguments(parser):
    """Add Intel VTune profiling options.

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the argument-class names inside the argFactory calls were
    # stripped by the doc extraction; restored by analogy with the Valgrind
    # group (BOOL switches and a comma-split option list). Confirm against
    # upstream trfArgs.py.
    parser.defineArgGroup('VTune', 'General VTune Options')
    parser.add_argument(
        '--vtune',
        group = 'VTune',
        type = argFactory(
            trfArgClasses.argBool,
            runarg = False
        ),
        metavar = "substep:BOOL",
        help = 'Enable VTune'
    )
    parser.add_argument(
        '--vtuneDefaultOpts',
        group = 'VTune',
        type = argFactory(
            trfArgClasses.argBool,
            runarg = False
        ),
        metavar = "substep:BOOL",
        help = 'Enable default VTune options'
    )
    parser.add_argument(
        '--vtuneExtraOpts',
        group = 'VTune',
        type = argFactory(
            trfArgClasses.argList,
            splitter = ',',
            runarg = False
        ),
        metavar = 'OPT1,OPT2,OPT3',
        help = 'Extra options passed to VTune when running Athena. ' +
               'Options starting with "-" must be given as ' +
               '--vtuneExtraOpts=\'-opt1=foo,-opt2=bar,...\''
    )
225 
226 
def addDetectorArguments(parser):
    """Add general detector configuration options (simulation and reconstruction).

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the 'def' line above was stripped by the doc extraction;
    # signature restored from the module cross-reference (addDetectorArguments(parser)).
    parser.defineArgGroup('Detector', 'General detector configuration options, for simulation and reconstruction')
    parser.add_argument('--DBRelease', group = 'Detector', type=argFactory(trfArgClasses.argSubstep, runarg=False), metavar='substep:DBRelease', nargs='+',
                        # Fixed help text: close the parenthesis in the message
                        help='Use DBRelease instead of ORACLE. Give either a DBRelease tarball file (e.g., DBRelease-21.7.1.tar.gz) or cvmfs DBRelease directory (e.g., 21.7.1 or current)')
    parser.add_argument('--conditionsTag', group='Detector', type=argFactory(trfArgClasses.argSubstepConditions), metavar='substep:CondTag', nargs='+',
                        help='Conditions tag to set')
    parser.add_argument('--geometryVersion', group='Detector', type=argFactory(trfArgClasses.argSubstep), metavar='substep:GeoVersion', nargs='+',
                        help='ATLAS geometry version tag')
    parser.add_argument('--geometrySQLite', group='Detector', type=argFactory(trfArgClasses.argBool),
                        help='Switch to SQLite Geometry DB')
    parser.add_argument('--geometrySQLiteFullPath', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual setting of SQLite Geometry DB path. For testing purposes only')
    parser.add_argument('--beamType', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual beam type setting')
    parser.add_argument('--runNumber', '--RunNumber', group='Detector', type=argFactory(trfArgClasses.argInt),
                        help='Manual run number setting')
246 
def addMetadataArguments(parser):
    """Add options for passing metadata into the transform.

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the 'def' line above was stripped by the doc extraction;
    # signature restored from the module cross-reference (addMetadataArguments(parser)).
    parser.defineArgGroup('Metadata', 'Metadata arguments that will be passed into the transform')
    parser.add_argument('--AMIConfig', '--AMI', type=argFactory(trfArgClasses.argString), help='Configure transform with AMI tag parameters', group="Metadata")
    parser.add_argument('--AMITag', type=argFactory(trfArgClasses.argString), metavar='TAG', group="Metadata",
                        help='AMI tag from which this job was defined - this option simply writes the '
                        'relevant AMI tag value into the output metadata, it does not configure the job (use --AMIConfig for that)')
    # Production-system bookkeeping identifiers: not run arguments
    parser.add_argument('--taskid', type=argFactory(trfArgClasses.argString, runarg=False), help="Task identification number", group="Metadata")
    parser.add_argument('--jobid', type=argFactory(trfArgClasses.argString, runarg=False), help="Job identification number", group="Metadata")
    parser.add_argument('--attempt', type=argFactory(trfArgClasses.argString, runarg=False), help="Job attempt number", group="Metadata")
258 
259 
def addPrimaryDPDArguments(parser, pick = None, transform = None, multipleOK=False):
    """Add primary DPD output-file arguments.

    For each known DPD stream name an '--outputXXXFile' argument is added,
    choosing the argument file class from the name (RAW -> bytestream,
    AOD/ESD -> POOL).  Optionally the DPD is also registered as output data
    of the matching transform executors.

    @param parser: argument parser (trfArgParser) to add arguments to
    @param pick: optional list of DPD names to restrict to (None = all)
    @param transform: optional transform whose executors get the DPD added
           as output data
    @param multipleOK: passed through to the file argument class
    """
    parser.defineArgGroup('Primary DPDs', 'Primary DPD File Options')
    # list* really gives just a list of DPD names
    try:
        # TODO: do we actually need all of those?
        # Hard-coded stream lists; presumably these were once imported from
        # PrimaryDPDFlags (hence the except ImportError below) — TODO confirm
        listRAWtoDPD = ['StreamDAOD_PIXELVALID', 'StreamDRAW_RPVLL', 'StreamDRAW_ZMUMU', 'StreamDRAW_DIMU', 'StreamDRAW_EGZ', 'StreamDRAW_TAULH', 'StreamDRAW_JET', 'StreamDRAW_EGJPSI', 'StreamDRAW_TAUMUH',
                        'StreamDRAW_EMU', 'StreamDRAW_BCID1', 'StreamDRAW_BCID2', 'StreamDRAW_BCID3', 'StreamDRAW_BCID4', 'StreamDRAW_TOPSLMU', 'StreamDAOD_IDNCB', 'StreamDAOD_SCTVALID']
        listESDtoDPD = ['StreamDESDM_ALLCELLS', 'StreamDESDM_EOVERP', 'StreamDESDM_IDALIGN', 'StreamDESDM_EGAMMA', 'StreamDESDM_MCP', 'StreamDESDM_TILEMU', 'StreamDESDM_PHOJET', 'StreamDESDM_SGLEL', 'StreamDESDM_SLTTMU', 'StreamDESDM_CALJET',
                       'StreamDESDM_EXOTHIP', 'StreamDAOD_IDTRKVALID', 'StreamDAOD_IDTIDE', 'StreamDAOD_IDTRKLUMI', 'StreamDAOD_IDPIXLUMI', 'StreamDAOD_L1CALO1', 'StreamDAOD_L1CALO2', 'StreamDAOD_L1CALO3', 'StreamDAOD_L1CALO4', 'StreamDAOD_L1CALO5', 'StreamDESD_DEDX']
        listAODtoDPD = []
        # Pair each DPD list with the substep(s) producing it
        matchedOutputList = [(['r2a'], listRAWtoDPD + listESDtoDPD), (['a2d'], listAODtoDPD)]
        for substep, dpdList in matchedOutputList:
            # Strip the leading 'Stream' to get the plain DPD name
            for dpdName in [ dpd.replace('Stream', '') for dpd in dpdList ]:
                msg.debug('Handling {0}'.format(dpdName))
                if pick is None or dpdName in pick:
                    # Need to decide which file type we actually have here
                    dpdType = dpdName.split('_')[0]
                    if 'RAW' in dpdType:
                        parser.add_argument('--output' + dpdName + 'File',
                                            type=argFactory(trfArgClasses.argBSFile, multipleOK=multipleOK, type='RAW'),
                                            group = 'Primary DPDs', metavar=dpdName.upper(),
                                            help='DPD {0} output {1} file'.format(dpdType, dpdName))
                    elif 'AOD' in dpdType:
                        parser.add_argument('--output' + dpdName + 'File',
                                            type=argFactory(trfArgClasses.argPOOLFile, multipleOK=multipleOK, type='AOD'),
                                            group = 'Primary DPDs', metavar=dpdName.upper(),
                                            help='DPD {0} output {1} file'.format(dpdType, dpdName))
                    elif 'ESD' in dpdType:
                        parser.add_argument('--output' + dpdName + 'File',
                                            type=argFactory(trfArgClasses.argPOOLFile, multipleOK=multipleOK, type='ESD'),
                                            group = 'Primary DPDs', metavar=dpdName.upper(),
                                            help='DPD {0} output {1} file'.format(dpdType, dpdName))
                    else:
                        msg.warning('Unrecognised primary DPD type: {0}'.format(dpdName))
                # Automatically add DPD as output data arguments of their relevant executors
                if transform:
                    for executor in transform.executors:
                        # Match either by the executor's substep alias or by its name
                        if hasattr(executor, 'substep') and executor.substep in substep:
                            executor.outDataUpdate([dpdName])
                        if executor.name in substep:
                            executor.outDataUpdate([dpdName])

    except ImportError:
        # Kept from the era when the stream lists were imported; with the
        # hard-coded lists above this branch should not trigger
        msg.warning('PrimaryDPDFlags not available - cannot add primary DPD arguments')
313 
314 
def addD3PDArguments(parser, pick = None, transform = None, multipleOK=False, addD3PDMRGtypes = False):
    """Add D3PD NTUP file arguments from D3PDProdFlags.

    @param parser: argument parser (trfArgParser) to add arguments to
    @param pick: optional list of D3PD names to restrict to (None = all)
    @param transform: optional transform whose executors get the D3PD added
           as output data
    @param multipleOK: passed through to the file argument class
    @param addD3PDMRGtypes: if True add input/merged-output argument pairs
           instead of plain output arguments
    """
    parser.defineArgGroup('D3PD NTUPs', 'D3PD File Options')
    # listAllKnownD3PD is a list of D3PD JobProperty type objects
    try:
        from D3PDMakerConfig.D3PDProdFlags import listAllKnownD3PD
        for dpdWriter in listAllKnownD3PD:
            # Strip the leading 'Stream' to get the plain D3PD name
            dpdName = dpdWriter.StreamName.replace('Stream', '')

            if pick is None or dpdName in pick:
                if addD3PDMRGtypes:
                    # Merge mode: an input (multi-file) argument plus a merged output
                    parser.add_argument('--input' + dpdName + 'File',
                                        type=argFactory(trfArgClasses.argNTUPFile, treeNames=dpdWriter.TreeNames, io='input'),
                                        group='D3PD NTUPs',
                                        metavar=dpdName.upper(), help='D3PD input {0} file )'.format(dpdName), nargs='+')
                    parser.add_argument('--output' + dpdName + '_MRGFile',
                                        type=argFactory(trfArgClasses.argNTUPFile, treeNames=dpdWriter.TreeNames),
                                        group='D3PD NTUPs',
                                        metavar=dpdName.upper(), help='D3PD merged output {0} file )'.format(dpdName))
                else:
                    parser.add_argument('--output' + dpdName + 'File',
                                        type=argFactory(trfArgClasses.argNTUPFile, treeNames=dpdWriter.TreeNames, multipleOK=multipleOK),
                                        group='D3PD NTUPs', metavar=dpdName.upper(),
                                        help='D3PD output {0} file (can be made in substeps {1})'.format(dpdName, ','.join(dpdWriter.SubSteps)))
                # Automatically add D3PDs as data arguments of their relevant executors
                if transform:
                    for executor in transform.executors:
                        # Match either by the executor's substep alias or by its name
                        if hasattr(executor, 'substep') and executor.substep in dpdWriter.SubSteps:
                            executor.outDataUpdate([dpdName])
                        if executor.name in dpdWriter.SubSteps:
                            executor.outDataUpdate([dpdName])

    except ImportError:
        msg.warning('D3PDProdFlags not available - cannot add D3PD arguments')
356 
357 
358 
class dpdType(object):
    """Simple class to store information about extra DPD filetypes."""

    # NOTE(review): the 'class' header line and the four self._argclass
    # assignments were stripped by the doc extraction; both were restored
    # from the module cross-reference (dpdType, _argclass at trfArgs.py:407,
    # argBSFile/argPOOLFile/argNTUPFile links). Confirm against upstream.
    def __init__(self, name, type = None, substeps = [], argclass = None, treeNames = None, help = None):
        """Class constructor for dpdType.

        @param name: name of the DPD data type (e.g. NTUP_FASTMON)
        @param type: trf data type; if None it is guessed from @c name
        @param substeps: substeps where this DPD can be made; if empty a
               default is guessed from @c name
        @param argclass: argument file class; if None it is guessed from @c name
        @param treeNames: tree names used for NTUP event counting
        @param help: explicit help string for the generated argument

        Note: the mutable default for @c substeps is safe as the list is
        only read, never mutated.
        """
        self._name = name

        # Guess the data type from the name when not given explicitly.
        # NB: if the name matches none of the patterns and type is None,
        # _type is left unset and a later access raises AttributeError.
        if type is None:
            if 'RAW' in name:
                self._type = 'bs'
            elif 'ESD' in name:
                self._type = 'esd'
            elif 'AOD' in name:
                self._type = 'aod'
            elif 'NTUP' in name:
                self._type = 'ntup'
        else:
            self._type = type

        # Guess the substeps where this DPD can be produced
        if substeps == []:
            if 'RAW' in name:
                self._substeps = ['RAWtoALL']
            elif 'ESD' in name:
                self._substeps = ['RAWtoALL']
            elif 'AOD' in name:
                self._substeps = ['RAWtoALL']
            elif 'NTUP' in name:
                self._substeps = ['RAWtoALL', 'AODtoDPD']
        else:
            self._substeps = substeps

        # Guess the argument file class from the name
        if argclass is None:
            if 'RAW' in name:
                self._argclass = trfArgClasses.argBSFile
            elif 'ESD' in name:
                self._argclass = trfArgClasses.argPOOLFile
            elif 'AOD' in name:
                self._argclass = trfArgClasses.argPOOLFile
            elif 'NTUP' in name:
                self._argclass = trfArgClasses.argNTUPFile
        else:
            self._argclass = argclass

        self._help = help
        self._treeNames = treeNames

    @property
    def name(self):
        return self._name

    @property
    def type(self):
        return self._type

    @property
    def substeps(self):
        return self._substeps

    @property
    def argclass(self):
        return self._argclass

    @property
    def help(self):
        return self._help

    @property
    def treeNames(self):
        return self._treeNames
443 
def getExtraDPDList(NTUPOnly = False):
    """Return the list of extra DPD type descriptors.

    @param NTUPOnly: when True the trigger NTUP merge-only types are
           appended; otherwise a couple of extra DAOD/DESDM types are used
    @return: list of dpdType instances
    """
    commonDPDs = [
        dpdType('NTUP_SCT', substeps=['r2e']),
        dpdType('NTUP_MUONCALIB', substeps=['r2e','r2a'], treeNames=['PatternNtupleMaker/Segments']),
        dpdType('NTUP_TRKVALID', substeps=['r2e']),
        dpdType('NTUP_FASTMON', substeps=['a2t','a2d','e2a']),
        dpdType('NTUP_LARNOISE', substeps=['e2d'], treeNames=['CollectionTree']),
        dpdType('NTUP_WZ', substeps=['e2d'], treeNames=['physics']),
        dpdType('NTUP_TRT', substeps=['e2d'], treeNames=['MyCollectionTree']),
        dpdType('NTUP_HECNOISE', substeps=['e2d'], treeNames=['HECNoise']),
        dpdType('NTUP_ENHBIAS', substeps=['e2d','e2a'], treeNames=['vertices']),
        dpdType('NTUP_TRUTH', substeps=['a2d'], treeNames=['truth']),
        dpdType('NTUP_SUSYTRUTH', substeps=['a2d'], treeNames=['truth']),
        dpdType('NTUP_HIGHMULT', substeps=['e2a'], treeNames=['MinBiasTree']),
        dpdType('NTUP_PROMPTPHOT', substeps=['e2d', 'a2d'], treeNames=["PAUReco","HggUserData"]),
        dpdType('NTUP_MCPTP', substeps=['a2d'], help="Ntuple file for MCP Tag and Probe"),
        dpdType('NTUP_MCPScale', substeps=['a2d'], help="Ntuple file for MCP scale calibration"),
        dpdType('NTUP_FastCaloSim', substeps=['e2d']),
        dpdType('NTUP_PILEUP', substeps=['a2da']),
    ]

    # Trigger NTUPs (for merging only!)
    if NTUPOnly:
        tailDPDs = [
            dpdType('NTUP_TRIGCOST'),
            dpdType('NTUP_TRIGRATE', treeNames=['metadata']),
        ]
    else:
        tailDPDs = [
            dpdType('DAOD_HSG2'),
            dpdType('DESDM_ZMUMU'),
        ]

    return commonDPDs + tailDPDs
475 
476 
def addExtraDPDTypes(parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs = False):
    """Add additional DPD file arguments, driven by getExtraDPDList().

    @param parser: argument parser (trfArgParser) to add arguments to
    @param pick: optional list of DPD names to restrict to (None = all)
    @param transform: optional transform whose executors get the DPD added
           as output data (non-merger mode only)
    @param multipleOK: passed through to the file argument class
    @param NTUPMergerArgs: if True add NTUP input/merged-output argument
           pairs instead of plain output arguments
    """
    parser.defineArgGroup('Additional DPDs', 'Extra DPD file types')

    extraDPDs = getExtraDPDList(NTUPOnly=NTUPMergerArgs)

    if NTUPMergerArgs:
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                # Only NTUP types can be merged here
                if dpd.name.startswith('NTUP'):
                    parser.add_argument('--input' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, multipleOK=True, io='input', type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(), nargs='+',
                                        help=dpd.help if dpd.help else 'DPD input {0} file'.format(dpd.name))
                    parser.add_argument('--output' + dpd.name + '_MRGFile',
                                        type=argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output merged {0} file'.format(dpd.name))
        # (removed a redundant trailing 'pass' statement here)
    else:
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                msg.debug('Adding DPD {0} ({1}, {2}, {3}, {4})'.format(dpd.name, dpd.type, dpd.substeps, dpd.treeNames, dpd.argclass))
                # NTUPs are a bit special as they can take a treeName to count events
                if issubclass(dpd.argclass, trfArgClasses.argNTUPFile):
                    parser.add_argument('--output' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, name=dpd.name.upper(), multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output {0} file'.format(dpd.name))
                else:
                    parser.add_argument('--output' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output {0} file'.format(dpd.name))
                # Register the DPD as output data of the matching executors
                if transform:
                    for executor in transform.executors:
                        if hasattr(executor, 'substep') and executor.substep in dpd.substeps:
                            executor.outDataUpdate([dpd.name])
                        if executor.name in dpd.substeps:
                            executor.outDataUpdate([dpd.name])
524 
525 
def addFileValidationArguments(parser):
    """Add standard file validation switches.

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the 'def' line above was stripped by the doc extraction;
    # signature restored from the module cross-reference (addFileValidationArguments(parser)).
    parser.defineArgGroup('File Validation', 'Standard file validation switches')
    parser.add_argument('--fileValidation', type = argFactory(trfArgClasses.argBool), metavar='BOOL',
                        group='File Validation', help='If FALSE skip both input and output file validation (default TRUE; warning - do not use this option in production jobs!)')
    parser.add_argument('--inputFileValidation', type = argFactory(trfArgClasses.argBool), metavar='BOOL',
                        group='File Validation', help='If FALSE skip input file validation (default TRUE; warning - do not use this option in production jobs!)')
    parser.add_argument('--outputFileValidation', type = argFactory(trfArgClasses.argBool), metavar='BOOL',
                        group='File Validation', help='If FALSE skip output file validation (default TRUE; warning - do not use this option in production jobs!)')

    parser.add_argument('--parallelFileValidation', type = argFactory(trfArgClasses.argBool), metavar='BOOL',
                        nargs='?', const=trfArgClasses.argBool('True'),
                        group='File Validation', help='Parallelise file validation if True')
    parser.add_argument('--multithreadedFileValidation', type = argFactory(trfArgClasses.argBool), metavar='BOOL',
                        nargs='?', const=trfArgClasses.argBool('True'),
                        group='File Validation', help='Use multithreaded ROOT file validation if True')
541 
def addParallelJobProcessorArguments(parser):
    """Add Parallel Job Processor arguments.

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the 'def' line above was stripped by the doc extraction;
    # signature restored from the module cross-reference
    # (addParallelJobProcessorArguments(parser)).
    parser.defineArgGroup('Parallel Job Processor', 'Parallel Job Processor arguments')
    # Fixed: the add_argument referenced group='pool', which does not match the
    # group defined above — use the defined group name.
    # NOTE(review): the option string really has four leading dashes; this looks
    # like a typo for '--parallelProcessPool' but renaming it would change the
    # CLI, so it is kept — confirm intent against upstream before changing.
    parser.add_argument('----parallelProcessPool', group='Parallel Job Processor', type=argFactory(trfArgClasses.argInt, runarg=False), help='Number of processes in pool requested (int)')
545 
def addValidationArguments(parser):
    """Add standard job (logfile/event-count) validation switches.

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    # NOTE(review): the 'def' line above was stripped by the doc extraction;
    # signature restored from the module cross-reference (addValidationArguments(parser)).
    parser.defineArgGroup('Validation', 'Standard job validation switches')
    parser.add_argument('--ignoreFiles', '--ignoreFilters', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Files containing error patterns to be ignored during logfile scans (will split on commas; use "None" to disable the standard "atlas_error_mask.db")', nargs='+')
    parser.add_argument('--ignorePatterns', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Regexp error patterns to be ignored during logfile scans (will be applied as a search against the whole logfile line)', nargs='+')
    parser.add_argument('--ignoreErrors', type=argFactory(trfArgClasses.argBool, runarg=False), metavar="BOOL", group='Validation',
                        help='Ignore ERROR lines in logfiles (use with care this can mask serious problems; --ignorePatterns is prefered)')
    parser.add_argument('--checkEventCount', type=trfArgClasses.argFactory(trfArgClasses.argSubstepBool, defaultSubstep = 'all', runarg=False),
                        help='Enable check of output events against input events (default: True)', group='Validation',
                        metavar="BOOL")
557 
558 
def addTriggerArguments(parser, addTrigFilter=True):
    """Add trigger related arguments.

    @param parser: argument parser (trfArgParser) to add arguments to
    @param addTrigFilter: if True also add the --trigFilterList argument
    """
    parser.defineArgGroup('Trigger', 'Trigger Related Options')
    # NB: --triggerConfig uses '=' (not the usual ':') to separate substep
    # from value, because trigger configuration strings may contain colons
    parser.add_argument('--triggerConfig',
                        type=argFactory(trfArgClasses.argSubstep, defaultSubstep="RDOtoRDOTrigger", separator='='),
                        metavar='substep=triggerConf',
                        help='Trigger configuration string (substep aware argument - default is to run trigger in RDOtoRDOTrigger step, '
                        'use syntax SUBSTEP=TRIGCONF if you want to run trigger somewhere else). '
                        'N.B. This argument uses EQUALS (=) to separate the substep name from the value.',
                        group='Trigger', nargs='+')
    if addTrigFilter:
        parser.add_argument('--trigFilterList',
                            type=argFactory(trfArgClasses.argList), nargs="+",
                            help='Trigger filter list (multiple values can be given separately or split on commas; only understood in RAWtoALL)',
                            group='Trigger')
573 
574 
def addTeaArguments(parser):
    """Tea for two and two for tea... (self-test arguments).

    @param parser: argument parser (trfArgParser) to add arguments to
    """
    parser.defineArgGroup('Tea', 'Tea Making Options (for testing)')
    # (flag, argument class, help text, extra add_argument kwargs)
    teaOptions = [
        ('--cupsOfTea', trfArgClasses.argInt, 'Number of cups of tea requested (int)', {}),
        ('--teaType', trfArgClasses.argString, 'Requested flavour of tea (string)', {}),
        ('--mugVolume', trfArgClasses.argFloat, 'How large a cup to use (float)', {}),
        ('--drinkers', trfArgClasses.argList, 'Who is drinking tea (list)', {'nargs': '+'}),
    ]
    for flag, argClass, helpText, extraKwargs in teaOptions:
        parser.add_argument(flag, group='Tea', type=argFactory(argClass), help=helpText, **extraKwargs)
581 
582 
def listKnownD3PDs():
    """Build the current valid lists of D3PD names.

    @return: tuple of (inputD3PDList, outputD3PDList), where the output
             names carry a '_MRG' suffix
    """
    # NOTE(review): the 'def' line above was stripped by the doc extraction;
    # signature restored from the module cross-reference (listKnownD3PDs()).
    inputD3PDList = []
    outputD3PDList = []
    from D3PDMakerConfig.D3PDProdFlags import listAllKnownD3PD
    for dpdWriter in listAllKnownD3PD:
        # Strip the leading 'Stream' to get the plain D3PD name
        dpdName = dpdWriter.StreamName.replace('Stream', '')
        inputD3PDList.append(dpdName)
        outputD3PDList.append(dpdName+'_MRG')

    return inputD3PDList, outputD3PDList
593 
python.trfArgs.addDetectorArguments
def addDetectorArguments(parser)
Options related to the setup of the ATLAS detector (used in simulation and digitisation as well as re...
Definition: trfArgs.py:229
python.trfArgs.addTriggerArguments
def addTriggerArguments(parser, addTrigFilter=True)
Add trigger related arguments.
Definition: trfArgs.py:559
python.trfArgs.dpdType._treeNames
_treeNames
Definition: trfArgs.py:418
python.trfArgs.addExtraDPDTypes
def addExtraDPDTypes(parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs=False)
Add additional DPD arguments.
Definition: trfArgs.py:484
python.trfArgs.addPerfMonArguments
def addPerfMonArguments(parser)
Options for PerfMon.
Definition: trfArgs.py:144
python.trfArgs.dpdType.help
def help(self)
Definition: trfArgs.py:437
python.trfArgClasses.argPOOLFile
POOL file class.
Definition: trfArgClasses.py:1425
vtune_athena.format
format
Definition: vtune_athena.py:14
python.trfArgClasses.argNTUPFile
NTUP (plain ROOT) file class.
Definition: trfArgClasses.py:1679
python.trfArgs.addValidationArguments
def addValidationArguments(parser)
Definition: trfArgs.py:546
python.trfArgs.listKnownD3PDs
def listKnownD3PDs()
This method adds the current valid list of D3PDs to two lists.
Definition: trfArgs.py:583
python.trfArgs.addMetadataArguments
def addMetadataArguments(parser)
Options for passing metadata into the transform.
Definition: trfArgs.py:248
python.trfArgs.getExtraDPDList
def getExtraDPDList(NTUPOnly=False)
Definition: trfArgs.py:444
python.trfArgs.dpdType._name
_name
Definition: trfArgs.py:373
python.trfArgs.dpdType.substeps
def substeps(self)
Definition: trfArgs.py:429
PyJobTransforms.trfArgClasses
Transform argument class definitions.
python.trfArgs.addStandardTrfArgs
def addStandardTrfArgs(parser)
Add standard transform arguments to an argparse ArgumentParser.
Definition: trfArgs.py:16
python.trfArgs.dpdType._argclass
_argclass
Definition: trfArgs.py:407
python.trfArgClasses.argSubstepBool
Boolean substep argument.
Definition: trfArgClasses.py:2130
python.trfArgClasses.argFloat
Float type argument.
Definition: trfArgClasses.py:261
python.trfArgs.addParallelJobProcessorArguments
def addParallelJobProcessorArguments(parser)
Definition: trfArgs.py:542
python.trfArgClasses.argList
List of string arguments.
Definition: trfArgClasses.py:348
python.trfArgClasses.argFactory
Factory class used to generate argument class instances for argparse.
Definition: trfArgClasses.py:31
python.trfArgs.dpdType.argclass
def argclass(self)
Definition: trfArgs.py:433
python.trfArgs.addAthenaArguments
def addAthenaArguments(parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True)
Options related to running athena in general TODO: Some way to mask certain options (perExec,...
Definition: trfArgs.py:59
python.trfArgs.addValgrindArguments
def addValgrindArguments(parser)
Add Valgrind options.
Definition: trfArgs.py:153
python.trfArgs.dpdType.treeNames
def treeNames(self)
Definition: trfArgs.py:441
python.trfArgs.addD3PDArguments
def addD3PDArguments(parser, pick=None, transform=None, multipleOK=False, addD3PDMRGtypes=False)
Add D3PD arguments.
Definition: trfArgs.py:323
python.trfArgClasses.argBool
Boolean type argument.
Definition: trfArgClasses.py:317
python.trfArgs.dpdType._type
_type
Definition: trfArgs.py:380
python.trfArgClasses.argBSFile
ByteStream file class.
Definition: trfArgClasses.py:1358
python.trfArgClasses.argString
String type argument.
Definition: trfArgClasses.py:174
python.trfArgClasses.argSubstepList
Argument class for substep lists, suitable for preExec/postExec.
Definition: trfArgClasses.py:2028
python.trfArgClasses.argSubstepString
String substep argument.
Definition: trfArgClasses.py:2089
histSizes.list
def list(name, path='/')
Definition: histSizes.py:38
python.trfArgs.dpdType._substeps
_substeps
Definition: trfArgs.py:394
TCS::join
std::string join(const std::vector< std::string > &v, const char c=',')
Definition: Trigger/TrigT1/L1Topo/L1TopoCommon/Root/StringUtils.cxx:10
python.trfArgs.dpdType
Simple class to store information about extra DPD filetypes.
Definition: trfArgs.py:362
python.trfArgs.addFileValidationArguments
def addFileValidationArguments(parser)
Definition: trfArgs.py:526
python.trfArgs.dpdType._help
_help
Definition: trfArgs.py:417
python.trfArgClasses.argSubstepSteering
Special argument class to hold steering information.
Definition: trfArgClasses.py:2289
python.trfArgClasses.argSubstepFloat
Float substep argument.
Definition: trfArgClasses.py:2222
python.trfArgClasses.argKeyFloatValueList
Definition: trfArgClasses.py:458
python.trfArgClasses.argSubstepConditions
Substep class for conditionsTag.
Definition: trfArgClasses.py:2393
python.trfArgs.dpdType.__init__
def __init__(self, name, type=None, substeps=[], argclass=None, treeNames=None, help=None)
Class constructor for dpdType.
Definition: trfArgs.py:372
PyJobTransforms.trfLogger
Logging configuration for ATLAS job transforms.
python.trfArgs.addTeaArguments
def addTeaArguments(parser)
Tea for two and two for tea...
Definition: trfArgs.py:575
python.trfArgClasses.argSubstep
Base class for substep arguments.
Definition: trfArgClasses.py:1918
python.trfArgs.addVTuneArguments
def addVTuneArguments(parser)
Add VTune options.
Definition: trfArgs.py:190
pickleTool.object
object
Definition: pickleTool.py:30
python.trfArgClasses.argSubstepInt
Int substep argument.
Definition: trfArgClasses.py:2174
python.trfArgs.dpdType.type
def type(self)
Definition: trfArgs.py:425
python.trfArgClasses.argInt
Int type argument.
Definition: trfArgClasses.py:225
python.trfArgs.addPrimaryDPDArguments
def addPrimaryDPDArguments(parser, pick=None, transform=None, multipleOK=False)
Add primary DPD arguments.
Definition: trfArgs.py:268
python.trfArgs.dpdType.name
def name(self)
Definition: trfArgs.py:421