ATLAS Offline Software
trfArgs.py
Go to the documentation of this file.
1 # Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
2 
3 
6 
7 import logging
8 msg = logging.getLogger(__name__)
9 
10 import PyJobTransforms.trfArgClasses as trfArgClasses
11 from PyJobTransforms.trfArgClasses import argActionFactory, argFactory
12 
13 from PyJobTransforms.trfLogger import stdLogLevels
14 
15 
def addStandardTrfArgs(parser):
    """Add standard transform arguments to an argparse ArgumentParser.

    Registers the common transform options (logging control, argument
    dumping, job report configuration, execution control and substep
    steering) and finally adds the metadata options via
    addMetadataArguments().

    @param parser The transform argument parser to populate
    """
    # Substep-aware switch to ComponentAccumulator based configuration.
    parser.add_argument('--CA', action=argActionFactory(trfArgClasses.argSubstepBool, runarg=False), nargs='*',
                        help='Use ComponentAccumulator base configuration')
    parser.add_argument('--verbose', '--debug', action='store_true', help='Set transform loglevel to DEBUG')
    parser.add_argument('--loglevel', choices=list(stdLogLevels), help='Set transform logging level')
    parser.add_argument('--argJSON', '--argjson', metavar='FILE', help='File containing JSON serialised argument dictionary')
    parser.add_argument('--dumpargs', action='store_true', help='Dump transform arguments and exit')
    parser.add_argument('--showGraph', action='store_true', help='Show multi-step transform graph, then exit')
    parser.add_argument('--showPath', action='store_true', help='Show execution path only, then exit')
    parser.add_argument('--showSteps', action='store_true', help='Show list of executor steps only, then exit')
    parser.add_argument('--dumpPickle', metavar='FILE', help='Interpret command line arguments and write them out as a pickle file')
    parser.add_argument('--dumpJSON', metavar='FILE', help='Interpret command line arguments and write them out as a JSON file')
    parser.add_argument('--reportName', type=argFactory(trfArgClasses.argString, runarg=False),
                        help='Base name for job reports (default name is "jobReport" for most reports, but "metadata" for classic prodsys XML)')
    parser.add_argument('--reportType', type=argFactory(trfArgClasses.argList, runarg=False), nargs='+', metavar='TYPE',
                        help='Job reports to produce: valid values are "text", "json", "classic", "pilotPickle" and "gpickle"')
    parser.add_argument('--execOnly', action='store_true',
                        help='Exec the first substep only, replacing the transform process (no job reports and the return code will be from the substep process)')
    # --env values are handed to the shell, hence $VAR references work.
    parser.add_argument('--env', type=argFactory(trfArgClasses.argSubstepList, runarg=False), metavar='substep:KEY=VALUE', nargs='+',
                        help='Explicitly set environment variables for an executor (default is all substeps).'
                        ' N.B. this setting is passed to the shell, so reference to shell variables is allowed, e.g.'
                        ' KEY=VALUE:$KEY')
    parser.add_argument('--imf', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
                        help='Manually include/exclude the Intel IMF maths library '
                        '(otherwise this is disabled for base releases < 17.7, enabled otherwise)')
    parser.add_argument('--tcmalloc', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
                        help='Switch preload of the tcmalloc library (disabled by default)')
    # Steering manipulates the execution graph before path calculation.
    parser.add_argument('--steering', type=argFactory(trfArgClasses.argSubstepSteering, runarg=False), nargs='+', metavar='substep:{in/out}{+-}DATA',
                        help='Steer the transform by manipulating the execution graph before the execution path is calculated. '
                        'Format is substep:{in,out}{+-}DATA,{in,out}{+-}DATA,... to modify the substep\'s input/output '
                        ' by adding/removing a data type. e.g. RAWtoALL:in-RDO,in+RDO_TRIG would remove RDO and add '
                        'RDO_TRIG to the list of valid input datatypes for the RAWtoALL substep. See current version of '
                        'trfArgClasses.argSubstepSteering.steeringAlises for supported aliases. '
                        'https://twiki.cern.ch/twiki/bin/view/AtlasComputing/TransformSteering')
    # Metadata options are always part of the standard argument set.
    addMetadataArguments(parser)
51 
52 
53 
def addAthenaArguments(parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True):
    """Add general athena run options to @c parser.

    @param parser The transform argument parser to populate
    @param maxEventsDefaultSubstep Default substep for the --maxEvents argument
    @param addValgrind If True also add the Valgrind option group
    @param addPerfMon If True also add the PerfMon option group
    @param addVTune If True also add the VTune option group
    """
    parser.defineArgGroup('Athena', 'General Athena Options')
    # Fixed help text: "sutbstep1" typo and missing space before "Without".
    parser.add_argument('--athenaopts', group='Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=' ', runarg=False), nargs="+", metavar='substep:ATHENAOPTS',
                        help='Extra options to pass to athena. Opts will split on spaces. '
                        'Multiple substep options can be given with --athenaopts=\'substep1:--opt1 --opt2[=foo] ...\' \'substep2:--opt3\' '
                        'Without substep specified, options will be used for all substeps.')
    parser.add_argument('--command', '-c', group='Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='COMMAND',
                        help='Run %(metavar)s before all else')
    parser.add_argument('--athena', group='Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='ATHENA',
                        help='Use %(metavar)s as the athena executable')
    parser.add_argument('--preExec', group='Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+', action='extend',
                        metavar='substep:PREEXEC',
                        help='Python code to execute before main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--preInclude', group='Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:PREINCLUDE',
                        help='Python configuration fragment to include before main job options (can be optionally limited to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--postExec', group='Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+', action='extend',
                        metavar='substep:POSTEXEC',
                        help='Python code to execute after main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--postInclude', group='Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:POSTINCLUDE',
                        help='Python configuration fragment to include after main job options (can be optionally limited '
                        'to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--splitConfig', group='Athena', type=argFactory(trfArgClasses.argSubstepString),
                        metavar='substep:SPLITCONFIG',
                        help='Configuration file to internally split job into multiple parts (can be optionally limited to a single substep)')
    parser.add_argument('--maxEvents', group='Athena', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep=maxEventsDefaultSubstep),
                        nargs='+', metavar='substep:maxEvents',
                        help='Set maximum events for each processing step (default substep is "{0}")'.format(maxEventsDefaultSubstep))
    parser.add_argument('--skipEvents', group='Athena', nargs='+', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep='first'),
                        help='Number of events to skip over in the first processing step (skipping substep can be overridden)')
    parser.add_argument('--asetup', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:ASETUP',
                        help='asetup command string to be run before this substep is executed')
    parser.add_argument('--runInContainer', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:CONTAINER_OS',
                        help='Set the substep to run in a container with the specified OS. Requires the --athena flag')
    parser.add_argument('--eventAcceptanceEfficiency', type=trfArgClasses.argFactory(trfArgClasses.argSubstepFloat, min=0.0, max=1.0, runarg=False),
                        help='Allowed "efficiency" for processing events - used to ensure output file has enough events (default 1.0)')
    parser.add_argument('--athenaMPMergeTargetSize', '--mts', type=trfArgClasses.argFactory(trfArgClasses.argKeyFloatValueList, runarg=False),
                        metavar='dataType:targetSizeInMegaBytes', nargs='+', group='Athena',
                        help='Set the target merge size for an AthenaMP output file type (give size in MB). '
                        'Note that the special value 0 means do not merge this output file; negative values mean '
                        'always merge to a single file. Globbing is supported, e.g. "DESD_*:500" is understood. '
                        'Special datatype "ALL" can be used as a default for all datatypes not explicitly '
                        'given their own value or glob matched.')
    parser.add_argument('--athenaMPStrategy', type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=False),
                        nargs='+', metavar='substep:Strategy', group='Athena',
                        help='Set the AthenaMP scheduling strategy for a particular substep. Default is unset.')
    parser.add_argument('--athenaMPUseEventOrders', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Change AthenaMP setup to read event numbers from event orders files')
    # Fixed: help string was missing its closing parenthesis.
    parser.add_argument('--athenaMPEventsBeforeFork', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=False),
                        metavar='N', group='Athena',
                        help='Set AthenaMP to fork after processing N events (default is to fork immediately after '
                        'initialisation)')
    parser.add_argument('--sharedWriter', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='SharedWriter mode active')
    # NOTE(review): the 'type' keyword for --parallelCompression had been lost
    # in extraction; restored by analogy with --sharedWriter - confirm in VCS.
    parser.add_argument('--parallelCompression', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Delegate event data compression to the workers while using SharedWriter')
    parser.add_argument('--eventService', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Switch AthenaMP to the Event Service configuration')
    parser.add_argument('--multithreaded', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multithreaded mode active')
    parser.add_argument("--mpi", type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True), nargs="?",
                        const=trfArgClasses.argBool("True"), help="MPI mode active")
    parser.add_argument('--multiprocess', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multiprocess mode active')
    parser.add_argument('--deleteIntermediateOutputfiles', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Remove intermediate input/output files of multi step TRF')

    # Optional auxiliary monitoring/profiling option groups.
    if addPerfMon:
        addPerfMonArguments(parser)

    if addValgrind:
        addValgrindArguments(parser)

    if addVTune:
        addVTuneArguments(parser)
143 
144 
def addPerfMonArguments(parser):
    """Add PerfMon monitoring options to @c parser.

    NOTE(review): the 'def' line had been dropped by the HTML extraction;
    restored from the module cross-reference (addPerfMonArguments(parser)).
    """
    parser.defineArgGroup('PerfMon', 'General PerfMon Options')
    parser.add_argument('--perfmon',
                        default=trfArgClasses.argString('fastmonmt'),
                        type=argFactory(trfArgClasses.argString),
                        help='Enable PerfMon (fastmonmt [default], fullmonmt, or none)',
                        group='PerfMon')
153 
154 
def addValgrindArguments(parser):
    """Add Valgrind profiling options to @c parser.

    NOTE(review): the 'def' line and the argFactory argument classes had been
    dropped by the HTML extraction; argSubstepBool/argSubstepList restored
    from the cross-reference index and the metavar hints - confirm in VCS.
    """
    parser.defineArgGroup('Valgrind', 'General Valgrind Options')
    parser.add_argument(
        '--valgrind',
        group='Valgrind',
        type=argFactory(
            trfArgClasses.argSubstepBool,
            runarg=False
        ),
        metavar="substep:BOOL",
        help='Enable Valgrind'
    )
    parser.add_argument(
        '--valgrindDefaultOpts',
        group='Valgrind',
        type=argFactory(
            trfArgClasses.argSubstepBool,
            runarg=False
        ),
        metavar="substep:BOOL",
        help='Enable default Valgrind options'
    )
    # Extra options split on commas so "-" prefixed values survive argparse.
    parser.add_argument(
        '--valgrindExtraOpts',
        group='Valgrind',
        type=argFactory(
            trfArgClasses.argSubstepList,
            splitter=',',
            runarg=False
        ),
        metavar='OPT1,OPT2,OPT3',
        help='Extra options passed to Valgrind when running Athena. ' +
             'Options starting with "-" must be given as ' +
             '--valgrindExtraOpts=\'--opt1=foo,--opt2=bar,...\''
    )
190 
191 
def addVTuneArguments(parser):
    """Add Intel VTune profiling options to @c parser.

    NOTE(review): the argFactory argument classes had been dropped by the
    HTML extraction; argSubstepBool/argSubstepList restored from the
    cross-reference index and the metavar hints - confirm in VCS.
    """
    parser.defineArgGroup('VTune', 'General VTune Options')
    parser.add_argument(
        '--vtune',
        group='VTune',
        type=argFactory(
            trfArgClasses.argSubstepBool,
            runarg=False
        ),
        metavar="substep:BOOL",
        help='Enable VTune'
    )
    parser.add_argument(
        '--vtuneDefaultOpts',
        group='VTune',
        type=argFactory(
            trfArgClasses.argSubstepBool,
            runarg=False
        ),
        metavar="substep:BOOL",
        help='Enable default VTune options'
    )
    # Extra options split on commas so "-" prefixed values survive argparse.
    parser.add_argument(
        '--vtuneExtraOpts',
        group='VTune',
        type=argFactory(
            trfArgClasses.argSubstepList,
            splitter=',',
            runarg=False
        ),
        metavar='OPT1,OPT2,OPT3',
        help='Extra options passed to VTune when running Athena. ' +
             'Options starting with "-" must be given as ' +
             '--vtuneExtraOpts=\'-opt1=foo,-opt2=bar,...\''
    )
227 
228 
def addDetectorArguments(parser):
    """Add general detector configuration options (simulation and reconstruction).

    NOTE(review): the 'def' line had been dropped by the HTML extraction;
    restored from the module cross-reference (addDetectorArguments(parser)).
    """
    parser.defineArgGroup('Detector', 'General detector configuration options, for simulation and reconstruction')
    # Fixed help text: closing parenthesis was missing after "current".
    parser.add_argument('--DBRelease', group='Detector', type=argFactory(trfArgClasses.argSubstep, runarg=False), metavar='substep:DBRelease', nargs='+',
                        help='Use DBRelease instead of ORACLE. Give either a DBRelease tarball file (e.g., DBRelease-21.7.1.tar.gz) or cvmfs DBRelease directory (e.g., 21.7.1 or current)')
    parser.add_argument('--conditionsTag', group='Detector', type=argFactory(trfArgClasses.argSubstepConditions), metavar='substep:CondTag', nargs='+',
                        help='Conditions tag to set')
    parser.add_argument('--geometryVersion', group='Detector', type=argFactory(trfArgClasses.argSubstep), metavar='substep:GeoVersion', nargs='+',
                        help='ATLAS geometry version tag')
    parser.add_argument('--geometrySQLite', group='Detector', type=argFactory(trfArgClasses.argBool),
                        help='Switch to SQLite Geometry DB')
    parser.add_argument('--geometrySQLiteFullPath', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual setting of SQLite Geometry DB path. For testing purposes only')
    parser.add_argument('--beamType', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual beam type setting')
    parser.add_argument('--runNumber', '--RunNumber', group='Detector', type=argFactory(trfArgClasses.argInt),
                        help='Manual run number setting')
248 
def addMetadataArguments(parser):
    """Add arguments for passing metadata into the transform.

    NOTE(review): the 'def' line had been dropped by the HTML extraction;
    restored from the module cross-reference (addMetadataArguments(parser)).
    """
    parser.defineArgGroup('Metadata', 'Metadata arguments that will be passed into the transform')
    parser.add_argument('--AMIConfig', '--AMI', type=argFactory(trfArgClasses.argString), help='Configure transform with AMI tag parameters', group="Metadata")
    # --AMITag only records the tag in the output metadata; it does not
    # configure the job (that is --AMIConfig).
    parser.add_argument('--AMITag', type=argFactory(trfArgClasses.argString), metavar='TAG', group="Metadata",
                        help='AMI tag from which this job was defined - this option simply writes the '
                        'relevant AMI tag value into the output metadata, it does not configure the job (use --AMIConfig for that)')
    parser.add_argument('--taskid', type=argFactory(trfArgClasses.argString, runarg=False), help="Task identification number", group="Metadata")
    parser.add_argument('--jobid', type=argFactory(trfArgClasses.argString, runarg=False), help="Job identification number", group="Metadata")
    parser.add_argument('--attempt', type=argFactory(trfArgClasses.argString, runarg=False), help="Job attempt number", group="Metadata")
260 
261 
def addPrimaryDPDArguments(parser, pick=None, transform=None, multipleOK=False):
    """Add primary DPD output file arguments to @c parser.

    @param parser The transform argument parser to populate
    @param pick Optional list of DPD names; only matching names are added
    @param transform If given, each DPD name is also registered as output
           data of the executors whose substep or name matches
    @param multipleOK Passed through to the file argument classes
    """
    parser.defineArgGroup('Primary DPDs', 'Primary DPD File Options')
    # list* really gives just a list of DPD names
    try:
        # TODO: do we actually need all of those?
        listRAWtoDPD = ['StreamDAOD_PIXELVALID', 'StreamDRAW_RPVLL', 'StreamDRAW_ZMUMU', 'StreamDRAW_DIMU', 'StreamDRAW_EGZ', 'StreamDRAW_TAULH', 'StreamDRAW_JET', 'StreamDRAW_EGJPSI', 'StreamDRAW_TAUMUH',
                        'StreamDRAW_EMU', 'StreamDRAW_BCID1', 'StreamDRAW_BCID2', 'StreamDRAW_BCID3', 'StreamDRAW_BCID4', 'StreamDRAW_TOPSLMU', 'StreamDAOD_IDNCB', 'StreamDAOD_SCTVALID']
        listESDtoDPD = ['StreamDESDM_ALLCELLS', 'StreamDESDM_EOVERP', 'StreamDESDM_IDALIGN', 'StreamDESDM_EGAMMA', 'StreamDESDM_MCP', 'StreamDESDM_TILEMU', 'StreamDESDM_PHOJET', 'StreamDESDM_SGLEL', 'StreamDESDM_SLTTMU', 'StreamDESDM_CALJET',
                        'StreamDESDM_EXOTHIP', 'StreamDAOD_IDTRKVALID', 'StreamDAOD_IDTIDE', 'StreamDAOD_IDTRKLUMI', 'StreamDAOD_IDPIXLUMI', 'StreamDAOD_L1CALO1', 'StreamDAOD_L1CALO2', 'StreamDAOD_L1CALO3', 'StreamDAOD_L1CALO4', 'StreamDAOD_L1CALO5', 'StreamDESD_DEDX']
        listAODtoDPD = []
        matchedOutputList = [(['r2a'], listRAWtoDPD + listESDtoDPD), (['a2d'], listAODtoDPD)]
        # Ordered mapping from a prefix substring to (file argument class,
        # data type); order matters, e.g. "DRAW" contains "RAW" and must be
        # tested before "ESD" catches "DESD*".
        fileArgMap = (('RAW', trfArgClasses.argBSFile, 'RAW'),
                      ('AOD', trfArgClasses.argPOOLFile, 'AOD'),
                      ('ESD', trfArgClasses.argPOOLFile, 'ESD'))
        for substep, dpdList in matchedOutputList:
            for dpdName in [dpd.replace('Stream', '') for dpd in dpdList]:
                msg.debug('Handling {0}'.format(dpdName))
                if pick is None or dpdName in pick:
                    # Need to decide which file type we actually have here.
                    # (Renamed from "dpdType": the old local shadowed the
                    # module-level dpdType class defined below.)
                    dpdPrefix = dpdName.split('_')[0]
                    for token, argclass, fileType in fileArgMap:
                        if token in dpdPrefix:
                            parser.add_argument('--output' + dpdName + 'File',
                                                type=argFactory(argclass, multipleOK=multipleOK, type=fileType),
                                                group='Primary DPDs', metavar=dpdName.upper(),
                                                help='DPD {0} output {1} file'.format(dpdPrefix, dpdName))
                            break
                    else:
                        msg.warning('Unrecognised primary DPD type: {0}'.format(dpdName))
                # Automatically add DPD as output data arguments of their relevant executors
                if transform:
                    for executor in transform.executors:
                        if hasattr(executor, 'substep') and executor.substep in substep:
                            executor.outDataUpdate([dpdName])
                        if executor.name in substep:
                            executor.outDataUpdate([dpdName])

    except ImportError:
        msg.warning('PrimaryDPDFlags not available - cannot add primary DPD arguments')
315 
316 
321 
322 
## Simple class to store information about extra DPD filetypes.
class dpdType(object):

    ## @brief Class constructor for dpdType.
    #  @param name Name of the DPD data type (e.g. @c NTUP_SCT)
    #  @param type Transform data type; derived from @c name when None
    #  @param substeps Substeps this DPD is made in; derived from @c name when empty
    #  @param argclass File argument class; derived from @c name when None
    #  @param treeNames Tree names used for NTUP event counting
    #  @param help Custom help string (a generic one is used when None)
    def __init__(self, name, type=None, substeps=None, argclass=None, treeNames=None, help=None):
        self._name = name

        # Derive the data type from the name unless given explicitly.
        if type is None:
            if 'RAW' in name:
                self._type = 'bs'
            elif 'ESD' in name:
                self._type = 'esd'
            elif 'AOD' in name:
                self._type = 'aod'
            elif 'NTUP' in name:
                self._type = 'ntup'
            else:
                # Robustness fix: previously the attribute was left unset
                # for names matching none of the patterns.
                self._type = None
        else:
            self._type = type

        # Derive the substep list unless given. (The mutable default
        # argument "substeps=[]" was replaced by None; an explicit empty
        # list behaves exactly as before.)
        if not substeps:
            if 'RAW' in name:
                self._substeps = ['RAWtoALL']
            elif 'ESD' in name:
                self._substeps = ['RAWtoALL']
            elif 'AOD' in name:
                self._substeps = ['RAWtoALL']
            elif 'NTUP' in name:
                self._substeps = ['RAWtoALL', 'AODtoDPD']
            else:
                # Robustness fix: previously left unset for unknown names.
                self._substeps = []
        else:
            self._substeps = substeps

        # Derive the file argument class unless given.
        # NOTE(review): these assignments were dropped by the HTML
        # extraction; classes restored from the cross-reference index and
        # the argNTUPFile issubclass check in addExtraDPDTypes - confirm
        # against VCS.
        if argclass is None:
            if 'RAW' in name:
                self._argclass = trfArgClasses.argBSFile
            elif 'ESD' in name:
                self._argclass = trfArgClasses.argPOOLFile
            elif 'AOD' in name:
                self._argclass = trfArgClasses.argPOOLFile
            elif 'NTUP' in name:
                self._argclass = trfArgClasses.argNTUPFile
            else:
                self._argclass = None
        else:
            self._argclass = argclass

        self._help = help
        self._treeNames = treeNames

    ## @brief DPD name
    @property
    def name(self):
        return self._name

    ## @brief Transform data type
    @property
    def type(self):
        return self._type

    ## @brief Substeps in which this DPD can be produced
    @property
    def substeps(self):
        return self._substeps

    ## @brief File argument class for this DPD
    @property
    def argclass(self):
        return self._argclass

    ## @brief Help string
    @property
    def help(self):
        return self._help

    ## @brief Tree names used for event counting
    @property
    def treeNames(self):
        return self._treeNames
401 
def getExtraDPDList(NTUPOnly=False):
    """Return the list of extra DPD dpdType objects.

    @param NTUPOnly If True only the trigger NTUP merge types are appended;
           otherwise the non-NTUP extras (DAOD_HSG2, DESDM_ZMUMU) are.
    """
    extraDPDs = [
        dpdType('NTUP_SCT', substeps=['r2e']),
        dpdType('NTUP_MUONCALIB', substeps=['r2e', 'r2a'], treeNames=['PatternNtupleMaker/Segments']),
        dpdType('NTUP_TRKVALID', substeps=['r2e']),
        dpdType('NTUP_FASTMON', substeps=['a2t', 'a2d', 'e2a']),
        dpdType('NTUP_LARNOISE', substeps=['e2d'], treeNames=['CollectionTree']),
        dpdType('NTUP_WZ', substeps=['e2d'], treeNames=['physics']),
        dpdType('NTUP_TRT', substeps=['e2d'], treeNames=['MyCollectionTree']),
        dpdType('NTUP_HECNOISE', substeps=['e2d'], treeNames=['HECNoise']),
        dpdType('NTUP_ENHBIAS', substeps=['e2d', 'e2a'], treeNames=['vertices']),
        dpdType('NTUP_TRUTH', substeps=['a2d'], treeNames=['truth']),
        dpdType('NTUP_SUSYTRUTH', substeps=['a2d'], treeNames=['truth']),
        dpdType('NTUP_HIGHMULT', substeps=['e2a'], treeNames=['MinBiasTree']),
        dpdType('NTUP_PROMPTPHOT', substeps=['e2d', 'a2d'], treeNames=["PAUReco", "HggUserData"]),
        dpdType('NTUP_MCPTP', substeps=['a2d'], help="Ntuple file for MCP Tag and Probe"),
        dpdType('NTUP_MCPScale', substeps=['a2d'], help="Ntuple file for MCP scale calibration"),
        dpdType('NTUP_FastCaloSim', substeps=['e2d']),
        dpdType('NTUP_PILEUP', substeps=['a2da']),
    ]

    # Trigger NTUPs (for merging only!)
    if NTUPOnly:
        extraDPDs.append(dpdType('NTUP_TRIGCOST'))
        extraDPDs.append(dpdType('NTUP_TRIGRATE', treeNames=['metadata']))
    else:
        extraDPDs.append(dpdType('DAOD_HSG2'))
        extraDPDs.append(dpdType('DESDM_ZMUMU'))

    return extraDPDs
433 
434 
def addExtraDPDTypes(parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs=False):
    """Add additional (extra) DPD file arguments to @c parser.

    @param parser The transform argument parser to populate
    @param pick Optional list of DPD names; only matching names are added
    @param transform If given, each DPD name is registered as output data
           of the executors whose substep or name matches
    @param multipleOK Passed through to the file argument classes
    @param NTUPMergerArgs If True add the NTUP merger input/output pairs
           instead of the normal output arguments
    """
    parser.defineArgGroup('Additional DPDs', 'Extra DPD file types')

    extraDPDs = getExtraDPDList(NTUPOnly=NTUPMergerArgs)

    if NTUPMergerArgs:
        # Merger mode: add an input (multi-file) and a merged output
        # argument for each NTUP type.
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                if dpd.name.startswith('NTUP'):
                    parser.add_argument('--input' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, multipleOK=True, io='input', type=dpd.type, treeNames=dpd.treeNames),
                                        group='Additional DPDs', metavar=dpd.name.upper(), nargs='+',
                                        help=dpd.help if dpd.help else 'DPD input {0} file'.format(dpd.name))
                    parser.add_argument('--output' + dpd.name + '_MRGFile',
                                        type=argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames),
                                        group='Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output merged {0} file'.format(dpd.name))
        # (A stray dead "pass" statement was removed here.)
    else:
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                msg.debug('Adding DPD {0} ({1}, {2}, {3}, {4})'.format(dpd.name, dpd.type, dpd.substeps, dpd.treeNames, dpd.argclass))
                # NTUPs are a bit special as they can take a treeName to count events;
                # the two former duplicated add_argument calls only differed in the
                # factory, so build the factory first and register once.
                if issubclass(dpd.argclass, trfArgClasses.argNTUPFile):
                    factory = argFactory(dpd.argclass, name=dpd.name.upper(), multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames)
                else:
                    factory = argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type)
                parser.add_argument('--output' + dpd.name + 'File',
                                    type=factory,
                                    group='Additional DPDs', metavar=dpd.name.upper(),
                                    help=dpd.help if dpd.help else 'DPD output {0} file'.format(dpd.name))
                if transform:
                    for executor in transform.executors:
                        if hasattr(executor, 'substep') and executor.substep in dpd.substeps:
                            executor.outDataUpdate([dpd.name])
                        if executor.name in dpd.substeps:
                            executor.outDataUpdate([dpd.name])
482 
483 
def addFileValidationArguments(parser):
    """Add standard file validation switches to @c parser.

    NOTE(review): the 'def' line had been dropped by the HTML extraction;
    restored from the module cross-reference (addFileValidationArguments(parser)).
    """
    parser.defineArgGroup('File Validation', 'Standard file validation switches')
    parser.add_argument('--fileValidation', type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                        group='File Validation', help='If FALSE skip both input and output file validation (default TRUE; warning - do not use this option in production jobs!)')
    parser.add_argument('--inputFileValidation', type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                        group='File Validation', help='If FALSE skip input file validation (default TRUE; warning - do not use this option in production jobs!)')
    parser.add_argument('--outputFileValidation', type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                        group='File Validation', help='If FALSE skip output file validation (default TRUE; warning - do not use this option in production jobs!)')

    parser.add_argument('--parallelFileValidation', type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                        nargs='?', const=trfArgClasses.argBool('True'),
                        group='File Validation', help='Parallelise file validation if True')
    parser.add_argument('--multithreadedFileValidation', type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                        nargs='?', const=trfArgClasses.argBool('True'),
                        group='File Validation', help='Use multithreaded ROOT file validation if True')
499 
def addParallelJobProcessorArguments(parser):
    """Add parallel job processor arguments to @c parser.

    NOTE(review): the 'def' line had been dropped by the HTML extraction and
    the option string was spelled '----parallelProcessPool' (four dashes),
    almost certainly a typo. The two-dash spelling is added as the primary
    option; the four-dash form is kept as a backwards-compatible alias
    (argparse derives the same dest 'parallelProcessPool' for both).
    """
    parser.defineArgGroup('pool', 'Parallel Job Processor arguments')
    parser.add_argument('--parallelProcessPool', '----parallelProcessPool', group='pool',
                        type=argFactory(trfArgClasses.argInt, runarg=False),
                        help='Number of processes in pool requested (int)')
503 
def addValidationArguments(parser):
    """Add standard job validation switches to @c parser.

    NOTE(review): the 'def' line had been dropped by the HTML extraction;
    restored from the module cross-reference (addValidationArguments(parser)).
    """
    parser.defineArgGroup('Validation', 'Standard job validation switches')
    parser.add_argument('--ignoreFiles', '--ignoreFilters', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Files containing error patterns to be ignored during logfile scans (will split on commas; use "None" to disable the standard "atlas_error_mask.db")', nargs='+')
    parser.add_argument('--ignorePatterns', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Regexp error patterns to be ignored during logfile scans (will be applied as a search against the whole logfile line)', nargs='+')
    # Fixed help text typo: "prefered" -> "preferred".
    parser.add_argument('--ignoreErrors', type=argFactory(trfArgClasses.argBool, runarg=False), metavar="BOOL", group='Validation',
                        help='Ignore ERROR lines in logfiles (use with care this can mask serious problems; --ignorePatterns is preferred)')
    parser.add_argument('--checkEventCount', type=trfArgClasses.argFactory(trfArgClasses.argSubstepBool, defaultSubstep='all', runarg=False),
                        help='Enable check of output events against input events (default: True)', group='Validation',
                        metavar="BOOL")
515 
516 
def addTriggerArguments(parser, addTrigFilter=True):
    """Add trigger related options to @c parser.

    @param parser The transform argument parser to populate
    @param addTrigFilter If True also register the --trigFilterList option
    """
    parser.defineArgGroup('Trigger', 'Trigger Related Options')
    # Substep-aware argument; note the unusual '=' separator between the
    # substep name and the value.
    triggerConfigHelp = ('Trigger configuration string (substep aware argument - default is to run trigger in RDOtoRDOTrigger step, '
                         'use syntax SUBSTEP=TRIGCONF if you want to run trigger somewhere else). '
                         'N.B. This argument uses EQUALS (=) to separate the substep name from the value.')
    parser.add_argument('--triggerConfig',
                        group='Trigger',
                        nargs='+',
                        metavar='substep=triggerConf',
                        type=argFactory(trfArgClasses.argSubstep, defaultSubstep="RDOtoRDOTrigger", separator='='),
                        help=triggerConfigHelp)
    if addTrigFilter:
        parser.add_argument('--trigFilterList',
                            group='Trigger',
                            nargs="+",
                            type=argFactory(trfArgClasses.argList),
                            help='Trigger filter list (multiple values can be given separately or split on commas; only understood in RAWtoALL)')
531 
532 
def addTeaArguments(parser):
    """Tea for two and two for tea - tea making options for transform testing."""
    parser.defineArgGroup('Tea', 'Tea Making Options (for testing)')
    # Data-driven registration: (option, argument class, help, extra kwargs).
    teaOptions = (
        ('--cupsOfTea', trfArgClasses.argInt, 'Number of cups of tea requested (int)', {}),
        ('--teaType', trfArgClasses.argString, 'Requested flavour of tea (string)', {}),
        ('--mugVolume', trfArgClasses.argFloat, 'How large a cup to use (float)', {}),
        ('--drinkers', trfArgClasses.argList, 'Who is drinking tea (list)', {'nargs': '+'}),
    )
    for option, argClass, helpText, extraKwargs in teaOptions:
        parser.add_argument(option, group='Tea', type=argFactory(argClass), help=helpText, **extraKwargs)
539 
python.trfArgs.addDetectorArguments
def addDetectorArguments(parser)
Options related to the setup of the ATLAS detector (used in simulation and digitisation as well as re...
Definition: trfArgs.py:231
python.trfArgs.addTriggerArguments
def addTriggerArguments(parser, addTrigFilter=True)
Add trigger related arguments.
Definition: trfArgs.py:517
python.trfArgs.dpdType._treeNames
_treeNames
Definition: trfArgs.py:376
python.trfArgs.addExtraDPDTypes
def addExtraDPDTypes(parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs=False)
Add additional DPD arguments.
Definition: trfArgs.py:442
python.trfArgs.addPerfMonArguments
def addPerfMonArguments(parser)
Options for PerfMon.
Definition: trfArgs.py:146
python.trfArgs.dpdType.help
def help(self)
Definition: trfArgs.py:395
python.trfArgClasses.argPOOLFile
POOL file class.
Definition: trfArgClasses.py:1440
vtune_athena.format
format
Definition: vtune_athena.py:14
python.trfArgClasses.argNTUPFile
NTUP (plain ROOT) file class.
Definition: trfArgClasses.py:1694
python.trfArgs.addValidationArguments
def addValidationArguments(parser)
Definition: trfArgs.py:504
python.trfArgs.addMetadataArguments
def addMetadataArguments(parser)
Options for passing metadata into the transform.
Definition: trfArgs.py:250
python.trfArgs.getExtraDPDList
def getExtraDPDList(NTUPOnly=False)
Definition: trfArgs.py:402
python.trfArgs.dpdType._name
_name
Definition: trfArgs.py:331
python.trfArgs.dpdType.substeps
def substeps(self)
Definition: trfArgs.py:387
PyJobTransforms.trfArgClasses
Transform argument class definitions.
python.trfArgs.addStandardTrfArgs
def addStandardTrfArgs(parser)
Add standard transform arguments to an argparse ArgumentParser.
Definition: trfArgs.py:16
python.trfArgs.dpdType._argclass
_argclass
Definition: trfArgs.py:365
python.trfArgClasses.argSubstepBool
Boolean substep argument.
Definition: trfArgClasses.py:2158
python.trfArgClasses.argFloat
Float type argument.
Definition: trfArgClasses.py:261
python.trfArgs.addParallelJobProcessorArguments
def addParallelJobProcessorArguments(parser)
Definition: trfArgs.py:500
python.trfArgClasses.argList
List of string arguments.
Definition: trfArgClasses.py:348
python.trfArgClasses.argFactory
Factory class used to generate argument class instances for argparse.
Definition: trfArgClasses.py:31
python.trfArgs.dpdType.argclass
def argclass(self)
Definition: trfArgs.py:391
python.trfArgs.addAthenaArguments
def addAthenaArguments(parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True)
Options related to running athena in general TODO: Some way to mask certain options (perExec,...
Definition: trfArgs.py:59
python.trfArgs.addValgrindArguments
def addValgrindArguments(parser)
Add Valgrind options.
Definition: trfArgs.py:155
python.trfArgs.dpdType.treeNames
def treeNames(self)
Definition: trfArgs.py:399
python.trfArgClasses.argBool
Boolean type argument.
Definition: trfArgClasses.py:317
python.trfArgs.dpdType._type
_type
Definition: trfArgs.py:338
python.trfArgClasses.argBSFile
ByteStream file class.
Definition: trfArgClasses.py:1373
python.trfArgClasses.argString
String type argument.
Definition: trfArgClasses.py:174
python.trfArgClasses.argSubstepList
Argument class for substep lists, suitable for preExec/postExec.
Definition: trfArgClasses.py:2056
python.trfArgClasses.argSubstepString
String substep argument.
Definition: trfArgClasses.py:2117
histSizes.list
def list(name, path='/')
Definition: histSizes.py:38
python.trfArgs.dpdType._substeps
_substeps
Definition: trfArgs.py:352
python.trfArgs.dpdType
Simple class to store information about extra DPD filetypes.
Definition: trfArgs.py:320
python.trfArgs.addFileValidationArguments
def addFileValidationArguments(parser)
Definition: trfArgs.py:484
python.trfArgs.dpdType._help
_help
Definition: trfArgs.py:375
python.trfArgClasses.argSubstepSteering
Special argument class to hold steering information.
Definition: trfArgClasses.py:2317
python.trfArgClasses.argSubstepFloat
Float substep argument.
Definition: trfArgClasses.py:2250
python.trfArgClasses.argKeyFloatValueList
Definition: trfArgClasses.py:458
python.trfArgClasses.argSubstepConditions
Substep class for conditionsTag.
Definition: trfArgClasses.py:2420
python.trfArgs.dpdType.__init__
def __init__(self, name, type=None, substeps=[], argclass=None, treeNames=None, help=None)
Class constructor for dpdType.
Definition: trfArgs.py:330
PyJobTransforms.trfLogger
Logging configuration for ATLAS job transforms.
python.trfArgs.addTeaArguments
def addTeaArguments(parser)
Tea for two and two for tea...
Definition: trfArgs.py:533
python.trfArgClasses.argSubstep
Base class for substep arguments.
Definition: trfArgClasses.py:1946
python.trfArgs.addVTuneArguments
def addVTuneArguments(parser)
Add VTune options.
Definition: trfArgs.py:192
pickleTool.object
object
Definition: pickleTool.py:29
python.trfArgClasses.argSubstepInt
Int substep argument.
Definition: trfArgClasses.py:2202
python.trfArgs.dpdType.type
def type(self)
Definition: trfArgs.py:383
python.trfArgClasses.argInt
Int type argument.
Definition: trfArgClasses.py:225
python.trfArgs.addPrimaryDPDArguments
def addPrimaryDPDArguments(parser, pick=None, transform=None, multipleOK=False)
Add primary DPD arguments.
Definition: trfArgs.py:270
python.trfArgs.dpdType.name
def name(self)
Definition: trfArgs.py:379