ATLAS Offline Software
Classes | Functions | Variables
python.trfArgs Namespace Reference

Classes

class  dpdType
 Simple class to store information about extra DPD filetypes. More...
 

Functions

def addStandardTrfArgs (parser)
 Add standard transform arguments to an argparse ArgumentParser. More...
 
def addAthenaArguments (parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True)
 Options related to running athena in general TODO: Some way to mask certain options (preExec, e.g.) More...
 
def addPerfMonArguments (parser)
 Options for PerfMon. More...
 
def addValgrindArguments (parser)
 Add Valgrind options. More...
 
def addVTuneArguments (parser)
 Add VTune options. More...
 
def addDetectorArguments (parser)
 Options related to the setup of the ATLAS detector (used in simulation and digitisation as well as reconstruction) More...
 
def addMetadataArguments (parser)
 Options for passing metadata into the transform. More...
 
def addPrimaryDPDArguments (parser, pick=None, transform=None, multipleOK=False)
 Add primary DPD arguments. More...
 
def getExtraDPDList (NTUPOnly=False)
 
def addExtraDPDTypes (parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs=False)
 Add additional DPD arguments. More...
 
def addFileValidationArguments (parser)
 
def addParallelJobProcessorArguments (parser)
 
def addValidationArguments (parser)
 
def addTriggerArguments (parser, addTrigFilter=True)
 Add trigger related arguments. More...
 
def addTeaArguments (parser)
 Tea for two and two for tea... More...
 

Variables

 msg
 

Function Documentation

◆ addAthenaArguments()

def python.trfArgs.addAthenaArguments (   parser,
  maxEventsDefaultSubstep = 'first',
  addValgrind = True,
  addPerfMon = True,
  addVTune = True 
)

Options related to running athena in general TODO: Some way to mask certain options (preExec, e.g.)

Add standard athena options

Parameters
parsertrfArgParser object
maxEventsDefaultSubstepSpecial option which can change the default substep for maxEvents (needed by some special transforms).

Definition at line 59 of file trfArgs.py.

def addAthenaArguments(parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True):
    """Add standard athena options.

    @param parser trfArgParser object
    @param maxEventsDefaultSubstep Special option which can change the default substep
           for maxEvents (needed by some special transforms)
    @param addValgrind If True, also add the Valgrind argument group
    @param addPerfMon If True, also add the PerfMon argument group
    @param addVTune If True, also add the VTune argument group
    """
    parser.defineArgGroup('Athena', 'General Athena Options')
    parser.add_argument('--athenaopts', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=' ', runarg=False), nargs="+", metavar='substep:ATHENAOPTS',
                        # Fixed help-string typo: 'sutbstep1' -> 'substep1'
                        help='Extra options to pass to athena. Opts will split on spaces. '
                        'Multiple substep options can be given with --athenaopts=\'substep1:--opt1 --opt2[=foo] ...\' \'substep2:--opt3\''
                        'Without substep specified, options will be used for all substeps.')
    parser.add_argument('--command', '-c', group = 'Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='COMMAND',
                        help='Run %(metavar)s before all else')
    parser.add_argument('--athena', group = 'Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='ATHENA',
                        help='Use %(metavar)s as the athena executable')
    parser.add_argument('--preExec', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+',
                        metavar='substep:PREEXEC',
                        help='Python code to execute before main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--preInclude', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:PREINCLUDE',
                        help='Python configuration fragment to include before main job options (can be optionally limited to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--postExec', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+',
                        metavar='substep:POSTEXEC',
                        help='Python code to execute after main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--postInclude', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:POSTINCLUDE',
                        help='Python configuration fragment to include after main job options (can be optionally limited '
                        'to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--splitConfig', group = 'Athena', type=argFactory(trfArgClasses.argSubstepString),
                        metavar='substep:SPLITCONFIG',
                        help='Configuration file to internally split job into multiple parts (can be optionally limited to a single substep)')
    parser.add_argument('--maxEvents', group='Athena', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep=maxEventsDefaultSubstep),
                        nargs='+', metavar='substep:maxEvents',
                        help='Set maximum events for each processing step (default substep is "{0}")'.format(maxEventsDefaultSubstep))
    parser.add_argument('--skipEvents', group='Athena', nargs='+', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep='first'),
                        help='Number of events to skip over in the first processing step (skipping substep can be overridden)')
    parser.add_argument('--asetup', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:ASETUP',
                        help='asetup command string to be run before this substep is executed')
    parser.add_argument('--runInContainer', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:CONTAINER_OS',
                        help='Set the substep to run in a container with the specified OS. Requires the --athena flag')
    parser.add_argument('--eventAcceptanceEfficiency', type=trfArgClasses.argFactory(trfArgClasses.argSubstepFloat, min=0.0, max=1.0, runarg=False),
                        help='Allowed "efficiency" for processing events - used to ensure output file has enough events (default 1.0)')
    parser.add_argument('--athenaMPMergeTargetSize', '--mts', type=trfArgClasses.argFactory(trfArgClasses.argKeyFloatValueList, runarg=False),
                        metavar='dataType:targetSizeInMegaBytes', nargs='+', group='Athena',
                        help='Set the target merge size for an AthenaMP output file type (give size in MB). '
                        'Note that the special value 0 means do not merge this output file; negative values mean '
                        'always merge to a single file. Globbing is supported, e.g. "DESD_*:500" is understood. '
                        'Special datatype "ALL" can be used as a default for all datatypes not explicitly '
                        'given their own value or glob matched.')
    parser.add_argument('--athenaMPStrategy', type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=False),
                        nargs='+', metavar='substep:Strategy', group='Athena',
                        help='Set the AthenaMP scheduling strategy for a particular substep. Default is unset.')
    parser.add_argument('--athenaMPUseEventOrders', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Change AthenaMP setup to read event numbers from event orders files')
    parser.add_argument('--athenaMPEventsBeforeFork', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=False),
                        metavar='N', group='Athena',
                        # Fixed: help string was missing its closing parenthesis
                        help='Set AthenaMP to fork after processing N events (default is to fork immediately after '
                        'initialisation)')
    parser.add_argument('--sharedWriter', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='SharedWriter mode active')
    parser.add_argument('--parallelCompression',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Delegate event data compression to the workers while using SharedWriter')
    parser.add_argument('--eventService', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Switch AthenaMP to the Event Service configuration')
    parser.add_argument('--multithreaded', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multithreaded mode active')
    parser.add_argument('--multiprocess', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multiprocess mode active')
    parser.add_argument('--deleteIntermediateOutputfiles', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Remove intermediate input/output files of multi step TRF')

    if addPerfMon:
        addPerfMonArguments(parser)

    if addValgrind:
        addValgrindArguments(parser)

    if addVTune:
        addVTuneArguments(parser)
141 

◆ addDetectorArguments()

def python.trfArgs.addDetectorArguments (   parser)

Options related to the setup of the ATLAS detector (used in simulation and digitisation as well as reconstruction)

Parameters
parsertrfArgParser object

Definition at line 229 of file trfArgs.py.

def addDetectorArguments(parser):
    """Add options related to the setup of the ATLAS detector (used in
    simulation and digitisation as well as reconstruction).

    @param parser trfArgParser object
    """
    parser.defineArgGroup('Detector', 'General detector configuration options, for simulation and reconstruction')
    parser.add_argument('--DBRelease', group = 'Detector', type=argFactory(trfArgClasses.argSubstep, runarg=False), metavar='substep:DBRelease', nargs='+',
                        # Fixed: help string was missing its closing parenthesis after 'current'
                        help='Use DBRelease instead of ORACLE. Give either a DBRelease tarball file (e.g., DBRelease-21.7.1.tar.gz) or cvmfs DBRelease directory (e.g., 21.7.1 or current)')
    parser.add_argument('--conditionsTag', group='Detector', type=argFactory(trfArgClasses.argSubstepConditions), metavar='substep:CondTag', nargs='+',
                        help='Conditions tag to set')
    parser.add_argument('--geometryVersion', group='Detector', type=argFactory(trfArgClasses.argSubstep), metavar='substep:GeoVersion', nargs='+',
                        help='ATLAS geometry version tag')
    parser.add_argument('--geometrySQLite', group='Detector', type=argFactory(trfArgClasses.argBool),
                        help='Switch to SQLite Geometry DB')
    parser.add_argument('--geometrySQLiteFullPath', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual setting of SQLite Geometry DB path. For testing purposes only')
    parser.add_argument('--beamType', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual beam type setting')
    parser.add_argument('--runNumber', '--RunNumber', group='Detector', type=argFactory(trfArgClasses.argInt),
                        help='Manual run number setting')

◆ addExtraDPDTypes()

def python.trfArgs.addExtraDPDTypes (   parser,
  pick = None,
  transform = None,
  multipleOK = False,
  NTUPMergerArgs = False 
)

Add additional DPD arguments.

Manually add DPDs that, for some reason, are not in any of the automated lists parsed by the companion functions above.

Parameters
parserArgument parser object to add arguments to
pickOptional list of DPD types to add (use short names, e.g., NTUP_EGAMMA)
transformTransform object. DPD data types will be added to the correct executor (by name or substep)
multipleOKIf the multipleOK flag should be set for this argument
NTUPMergerArgsIf True, add NTUP arguments as input/output types, suitable for NTUPMerge_tf

Definition at line 440 of file trfArgs.py.

def addExtraDPDTypes(parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs = False):
    """Add additional DPD arguments.

    Manually add DPDs that, for some reason, are not in any of the automated
    lists parsed by the companion functions above.

    @param parser Argument parser object to add arguments to
    @param pick Optional list of DPD types to add (use short names, e.g., NTUP_EGAMMA)
    @param transform Transform object. DPD data types will be added to the
           correct executor (by name or substep)
    @param multipleOK If the multipleOK flag should be set for this argument
    @param NTUPMergerArgs If True, add NTUP arguments as input/output types,
           suitable for NTUPMerge_tf
    """
    parser.defineArgGroup('Additional DPDs', 'Extra DPD file types')

    extraDPDs = getExtraDPDList(NTUPOnly=NTUPMergerArgs)

    if NTUPMergerArgs:
        # Merger mode: each NTUP type gets an input argument plus a merged output argument
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                if dpd.name.startswith('NTUP'):
                    parser.add_argument('--input' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, multipleOK=True, io='input', type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(), nargs='+',
                                        help=dpd.help if dpd.help else 'DPD input {0} file'.format(dpd.name))
                    parser.add_argument('--output' + dpd.name + '_MRGFile',
                                        type=argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output merged {0} file'.format(dpd.name))
        # (a redundant trailing 'pass' statement was removed here)
    else:
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                msg.debug('Adding DPD {0} ({1}, {2}, {3}, {4})'.format(dpd.name, dpd.type, dpd.substeps, dpd.treeNames, dpd.argclass))
                # NTUPs are a bit special as they can take a treeName to count events
                if issubclass(dpd.argclass, trfArgClasses.argNTUPFile):
                    parser.add_argument('--output' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, name=dpd.name.upper(), multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output {0} file'.format(dpd.name))
                else:
                    parser.add_argument('--output' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output {0} file'.format(dpd.name))
                # Automatically add the DPD as output data of its relevant executors
                if transform:
                    for executor in transform.executors:
                        if hasattr(executor, 'substep') and executor.substep in dpd.substeps:
                            executor.outDataUpdate([dpd.name])
                        if executor.name in dpd.substeps:
                            executor.outDataUpdate([dpd.name])

◆ addFileValidationArguments()

def python.trfArgs.addFileValidationArguments (   parser)

Definition at line 482 of file trfArgs.py.

def addFileValidationArguments(parser):
    """Add the standard file validation switches.

    @param parser trfArgParser object
    """
    parser.defineArgGroup('File Validation', 'Standard file validation switches')
    # Plain on/off switches controlling which validation stages run
    for switch, helpText in (
        ('--fileValidation', 'If FALSE skip both input and output file validation (default TRUE; warning - do not use this option in production jobs!)'),
        ('--inputFileValidation', 'If FALSE skip input file validation (default TRUE; warning - do not use this option in production jobs!)'),
        ('--outputFileValidation', 'If FALSE skip output file validation (default TRUE; warning - do not use this option in production jobs!)'),
    ):
        parser.add_argument(switch, type = argFactory(trfArgClasses.argBool), metavar='BOOL',
                            group='File Validation', help=helpText)

    # Switches that default to True when given with no explicit value
    for switch, helpText in (
        ('--parallelFileValidation', 'Parallelise file validation if True'),
        ('--multithreadedFileValidation', 'Use multithreaded ROOT file validation if True'),
    ):
        parser.add_argument(switch, type = argFactory(trfArgClasses.argBool), metavar='BOOL',
                            nargs='?', const=trfArgClasses.argBool('True'),
                            group='File Validation', help=helpText)

◆ addMetadataArguments()

def python.trfArgs.addMetadataArguments (   parser)

Options for passing metadata into the transform.

Parameters
parsertrfArgParser object

Definition at line 248 of file trfArgs.py.

def addMetadataArguments(parser):
    """Add options for passing metadata into the transform.

    @param parser trfArgParser object
    """
    parser.defineArgGroup('Metadata', 'Metadata arguments that will be passed into the transform')
    add = parser.add_argument
    add('--AMIConfig', '--AMI', type=argFactory(trfArgClasses.argString), help='Configure transform with AMI tag parameters', group="Metadata")
    add('--AMITag', type=argFactory(trfArgClasses.argString), metavar='TAG', group="Metadata",
        help='AMI tag from which this job was defined - this option simply writes the '
        'relevant AMI tag value into the output metadata, it does not configure the job (use --AMIConfig for that)')
    # Production-system bookkeeping identifiers (not run arguments)
    add('--taskid', type=argFactory(trfArgClasses.argString, runarg=False), help="Task identification number", group="Metadata")
    add('--jobid', type=argFactory(trfArgClasses.argString, runarg=False), help="Job identification number", group="Metadata")
    add('--attempt', type=argFactory(trfArgClasses.argString, runarg=False), help="Job attempt number", group="Metadata")
258 

◆ addParallelJobProcessorArguments()

def python.trfArgs.addParallelJobProcessorArguments (   parser)

Definition at line 498 of file trfArgs.py.

def addParallelJobProcessorArguments(parser):
    """Add Parallel Job Processor arguments.

    @param parser trfArgParser object
    """
    parser.defineArgGroup('Parallel Job Processor', 'Parallel Job Processor arguments')
    # Fixed: the option string had four leading dashes ('----parallelProcessPool')
    # and was attached to an undefined argument group ('pool') instead of the
    # 'Parallel Job Processor' group defined just above.
    parser.add_argument('--parallelProcessPool', group='Parallel Job Processor',
                        type=argFactory(trfArgClasses.argInt, runarg=False),
                        help='Number of processes in pool requested (int)')

◆ addPerfMonArguments()

def python.trfArgs.addPerfMonArguments (   parser)

Options for PerfMon.

Parameters
parsertrfArgParser object

Definition at line 144 of file trfArgs.py.

def addPerfMonArguments(parser):
    """Add options for PerfMon.

    @param parser trfArgParser object
    """
    parser.defineArgGroup('PerfMon', 'General PerfMon Options')
    # Monitoring is on by default in its lightweight 'fastmonmt' flavour
    parser.add_argument('--perfmon', group='PerfMon',
                        type=argFactory(trfArgClasses.argString),
                        default=trfArgClasses.argString('fastmonmt'),
                        help='Enable PerfMon (fastmonmt [default], fullmonmt, or none)')

◆ addPrimaryDPDArguments()

def python.trfArgs.addPrimaryDPDArguments (   parser,
  pick = None,
  transform = None,
  multipleOK = False 
)

Add primary DPD arguments.

Get the list of current primary DPDs and add them to the parser; optionally only some DPDs may be added, using the pick list. This function uses the silent decorator to suppress useless messages from ROOT

Parameters
parserArgument parser object to add arguments to
pickOptional list of DPD types to add (use short names, e.g., DESDM_MUON)
transformTransform object. DPD data types will be added to the correct executor (by name or substep)
multipleOKIf the multipleOK flag should be set for this argument @silent

Definition at line 268 of file trfArgs.py.

def addPrimaryDPDArguments(parser, pick = None, transform = None, multipleOK=False):
    """Add primary DPD arguments.

    Get the list of current primary DPDs and add them to the parser;
    optionally only some DPDs may be added, using the pick list.

    @param parser Argument parser object to add arguments to
    @param pick Optional list of DPD types to add (use short names, e.g., DESDM_MUON)
    @param transform Transform object. DPD data types will be added to the
           correct executor (by name or substep)
    @param multipleOK If the multipleOK flag should be set for this argument
    """
    parser.defineArgGroup('Primary DPDs', 'Primary DPD File Options')
    # list* really gives just a list of DPD names
    try:
        # TODO: do we actually need all of those?
        listRAWtoDPD = ['StreamDAOD_PIXELVALID', 'StreamDRAW_RPVLL', 'StreamDRAW_ZMUMU', 'StreamDRAW_DIMU', 'StreamDRAW_EGZ', 'StreamDRAW_TAULH', 'StreamDRAW_JET', 'StreamDRAW_EGJPSI', 'StreamDRAW_TAUMUH',
                        'StreamDRAW_EMU', 'StreamDRAW_BCID1', 'StreamDRAW_BCID2', 'StreamDRAW_BCID3', 'StreamDRAW_BCID4', 'StreamDRAW_TOPSLMU', 'StreamDAOD_IDNCB', 'StreamDAOD_SCTVALID']
        listESDtoDPD = ['StreamDESDM_ALLCELLS', 'StreamDESDM_EOVERP', 'StreamDESDM_IDALIGN', 'StreamDESDM_EGAMMA', 'StreamDESDM_MCP', 'StreamDESDM_TILEMU', 'StreamDESDM_PHOJET', 'StreamDESDM_SGLEL', 'StreamDESDM_SLTTMU', 'StreamDESDM_CALJET',
                        'StreamDESDM_EXOTHIP', 'StreamDAOD_IDTRKVALID', 'StreamDAOD_IDTIDE', 'StreamDAOD_IDTRKLUMI', 'StreamDAOD_IDPIXLUMI', 'StreamDAOD_L1CALO1', 'StreamDAOD_L1CALO2', 'StreamDAOD_L1CALO3', 'StreamDAOD_L1CALO4', 'StreamDAOD_L1CALO5', 'StreamDESD_DEDX']
        listAODtoDPD = []
        matchedOutputList = [(['r2a'], listRAWtoDPD + listESDtoDPD), (['a2d'], listAODtoDPD)]
        for substep, dpdList in matchedOutputList:
            for dpdName in [ dpd.replace('Stream', '') for dpd in dpdList ]:
                msg.debug('Handling {0}'.format(dpdName))
                if pick is None or dpdName in pick:
                    # The stream prefix (DRAW, DAOD, DESD, ...) decides the file class and type
                    streamPrefix = dpdName.split('_')[0]
                    if 'RAW' in streamPrefix:
                        fileArgClass, fileType = trfArgClasses.argBSFile, 'RAW'
                    elif 'AOD' in streamPrefix:
                        fileArgClass, fileType = trfArgClasses.argPOOLFile, 'AOD'
                    elif 'ESD' in streamPrefix:
                        fileArgClass, fileType = trfArgClasses.argPOOLFile, 'ESD'
                    else:
                        fileArgClass = None
                        msg.warning('Unrecognised primary DPD type: {0}'.format(dpdName))
                    if fileArgClass is not None:
                        parser.add_argument('--output' + dpdName + 'File',
                                            type=argFactory(fileArgClass, multipleOK=multipleOK, type=fileType),
                                            group = 'Primary DPDs', metavar=dpdName.upper(),
                                            help='DPD {0} output {1} file'.format(streamPrefix, dpdName))
                    # Automatically add DPD as output data arguments of their relevant executors
                    if transform:
                        for executor in transform.executors:
                            if hasattr(executor, 'substep') and executor.substep in substep:
                                executor.outDataUpdate([dpdName])
                            if executor.name in substep:
                                executor.outDataUpdate([dpdName])

    except ImportError:
        msg.warning('PrimaryDPDFlags not available - cannot add primary DPD arguments')

◆ addStandardTrfArgs()

def python.trfArgs.addStandardTrfArgs (   parser)

Add standard transform arguments to an argparse ArgumentParser.

Definition at line 16 of file trfArgs.py.

def addStandardTrfArgs(parser):
    """Add standard transform arguments to an argparse ArgumentParser.

    @param parser trfArgParser object to which the arguments are added
    """
    add = parser.add_argument
    add('--CA', action=argActionFactory(trfArgClasses.argSubstepBool, runarg=False), nargs='*',
        help='Use ComponentAccumulator base configuration')
    add('--verbose', '--debug', action='store_true', help='Set transform loglevel to DEBUG')
    add('--loglevel', choices=list(stdLogLevels), help='Set transform logging level')
    add('--argJSON', '--argjson', metavar='FILE', help='File containing JSON serialised argument dictionary')
    add('--dumpargs', action='store_true', help='Dump transform arguments and exit')
    add('--showGraph', action='store_true', help='Show multi-step transform graph, then exit')
    add('--showPath', action='store_true', help='Show execution path only, then exit')
    add('--showSteps', action='store_true', help='Show list of executor steps only, then exit')
    add('--dumpPickle', metavar='FILE', help='Interpret command line arguments and write them out as a pickle file')
    add('--dumpJSON', metavar='FILE', help='Interpret command line arguments and write them out as a JSON file')
    add('--reportName', type=argFactory(trfArgClasses.argString, runarg=False),
        help='Base name for job reports (default name is "jobReport" for most reports, but "metadata" for classic prodsys XML)')
    add('--reportType', type=argFactory(trfArgClasses.argList, runarg=False), nargs='+', metavar='TYPE',
        help='Job reports to produce: valid values are "text", "json", "classic", "pilotPickle" and "gpickle"')
    add('--execOnly', action='store_true',
        help='Exec the first substep only, replacing the transform process (no job reports and the return code will be from the substep process)')
    add('--env', type=argFactory(trfArgClasses.argSubstepList, runarg=False), metavar='substep:KEY=VALUE', nargs='+',
        help='Explicitly set environment variables for an executor (default is all substeps).'
        ' N.B. this setting is passed to the shell, so reference to shell variables is allowed, e.g.'
        ' KEY=VALUE:$KEY')
    add('--imf', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
        help='Manually include/exclude the Intel IMF maths library '
        '(otherwise this is disabled for base releases < 17.7, enabled otherwise)')
    add('--tcmalloc', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
        help='Switch preload of the tcmalloc library (disabled by default)')
    add('--steering', type=argFactory(trfArgClasses.argSubstepSteering, runarg=False), nargs='+', metavar='substep:{in/out}{+-}DATA',
        help='Steer the transform by manipulating the execution graph before the execution path is calculated. '
        'Format is substep:{in,out}{+-}DATA,{in,out}{+-}DATA,... to modify the substep\'s input/output '
        ' by adding/removing a data type. e.g. RAWtoALL:in-RDO,in+RDO_TRIG would remove RDO and add '
        'RDO_TRIG to the list of valid input datatypes for the RAWtoALL substep. See current version of '
        'trfArgClasses.argSubstepSteering.steeringAlises for supported aliases. '
        'https://twiki.cern.ch/twiki/bin/view/AtlasComputing/TransformSteering')
    # Metadata arguments are always part of the standard set
    addMetadataArguments(parser)
52 

◆ addTeaArguments()

def python.trfArgs.addTeaArguments (   parser)

Tea for two and two for tea...

these arguments are used for testing

Definition at line 531 of file trfArgs.py.

def addTeaArguments(parser):
    """Tea for two and two for tea...

    These arguments are used for testing.
    """
    parser.defineArgGroup('Tea', 'Tea Making Options (for testing)')
    add = parser.add_argument
    add('--cupsOfTea', group='Tea', type=argFactory(trfArgClasses.argInt), help='Number of cups of tea requested (int)')
    add('--teaType', group='Tea', type=argFactory(trfArgClasses.argString), help='Requested flavour of tea (string)')
    add('--mugVolume', group='Tea', type=argFactory(trfArgClasses.argFloat), help='How large a cup to use (float)')
    add('--drinkers', group='Tea', nargs='+', type=argFactory(trfArgClasses.argList), help='Who is drinking tea (list)')

◆ addTriggerArguments()

def python.trfArgs.addTriggerArguments (   parser,
  addTrigFilter = True 
)

Add trigger related arguments.

Definition at line 515 of file trfArgs.py.

def addTriggerArguments(parser, addTrigFilter=True):
    """Add trigger related arguments.

    @param parser trfArgParser object
    @param addTrigFilter If True, also add the trigger filter list argument
    """
    parser.defineArgGroup('Trigger', 'Trigger Related Options')
    # N.B. this argument uses '=' (not ':') as the substep separator
    parser.add_argument('--triggerConfig', group='Trigger', nargs='+',
                        metavar='substep=triggerConf',
                        type=argFactory(trfArgClasses.argSubstep, defaultSubstep="RDOtoRDOTrigger", separator='='),
                        help='Trigger configuration string (substep aware argument - default is to run trigger in RDOtoRDOTrigger step, '
                        'use syntax SUBSTEP=TRIGCONF if you want to run trigger somewhere else). '
                        'N.B. This argument uses EQUALS (=) to separate the substep name from the value.')
    if addTrigFilter:
        parser.add_argument('--trigFilterList', group='Trigger', nargs="+",
                            type=argFactory(trfArgClasses.argList),
                            help='Trigger filter list (multiple values can be given separately or split on commas; only understood in RAWtoALL)')

◆ addValgrindArguments()

def python.trfArgs.addValgrindArguments (   parser)

Add Valgrind options.

Definition at line 153 of file trfArgs.py.

def addValgrindArguments(parser):
    """Add Valgrind options.

    @param parser trfArgParser object
    """
    parser.defineArgGroup('Valgrind', 'General Valgrind Options')
    # Two simple boolean switches with identical argument shapes
    for switch, helpText in (
        ('--valgrind', 'Enable Valgrind'),
        ('--valgrindDefaultOpts', 'Enable default Valgrind options'),
    ):
        parser.add_argument(switch, group = 'Valgrind',
                            type = argFactory(trfArgClasses.argBool, runarg = False),
                            metavar = "substep:BOOL", help = helpText)
    parser.add_argument('--valgrindExtraOpts', group = 'Valgrind',
                        type = argFactory(trfArgClasses.argList, splitter = ',', runarg = False),
                        metavar = 'OPT1,OPT2,OPT3',
                        help = 'Extra options passed to Valgrind when running Athena. '
                        'Options starting with "-" must be given as '
                        '--valgrindExtraOpts=\'--opt1=foo,--opt2=bar,...\'')

◆ addValidationArguments()

def python.trfArgs.addValidationArguments (   parser)

Definition at line 502 of file trfArgs.py.

def addValidationArguments(parser):
    """Add standard job validation switches.

    @param parser trfArgParser object
    """
    parser.defineArgGroup('Validation', 'Standard job validation switches')
    parser.add_argument('--ignoreFiles', '--ignoreFilters', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Files containing error patterns to be ignored during logfile scans (will split on commas; use "None" to disable the standard "atlas_error_mask.db")', nargs='+')
    parser.add_argument('--ignorePatterns', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Regexp error patterns to be ignored during logfile scans (will be applied as a search against the whole logfile line)', nargs='+')
    # Fixed help-string typo: 'prefered' -> 'preferred'
    parser.add_argument('--ignoreErrors', type=argFactory(trfArgClasses.argBool, runarg=False), metavar="BOOL", group='Validation',
                        help='Ignore ERROR lines in logfiles (use with care this can mask serious problems; --ignorePatterns is preferred)')
    parser.add_argument('--checkEventCount', type=trfArgClasses.argFactory(trfArgClasses.argSubstepBool, defaultSubstep = 'all', runarg=False),
                        help='Enable check of output events against input events (default: True)', group='Validation',
                        metavar="BOOL")

◆ addVTuneArguments()

def python.trfArgs.addVTuneArguments (   parser)

Add VTune options.

Definition at line 190 of file trfArgs.py.

def addVTuneArguments(parser):
    """Add VTune options.

    @param parser trfArgParser object
    """
    parser.defineArgGroup('VTune', 'General VTune Options')
    # Two simple boolean switches with identical argument shapes
    for switch, helpText in (
        ('--vtune', 'Enable VTune'),
        ('--vtuneDefaultOpts', 'Enable default VTune options'),
    ):
        parser.add_argument(switch, group = 'VTune',
                            type = argFactory(trfArgClasses.argBool, runarg = False),
                            metavar = "substep:BOOL", help = helpText)
    parser.add_argument('--vtuneExtraOpts', group = 'VTune',
                        type = argFactory(trfArgClasses.argList, splitter = ',', runarg = False),
                        metavar = 'OPT1,OPT2,OPT3',
                        help = 'Extra options passed to VTune when running Athena. '
                        'Options starting with "-" must be given as '
                        '--vtuneExtraOpts=\'-opt1=foo,-opt2=bar,...\'')

◆ getExtraDPDList()

def python.trfArgs.getExtraDPDList (   NTUPOnly = False)

Definition at line 400 of file trfArgs.py.

def getExtraDPDList(NTUPOnly = False):
    """Return the list of extra DPD type descriptors.

    @param NTUPOnly If True, restrict the list to NTUP types and append the
           trigger NTUPs (for merging only); otherwise append the non-NTUP extras
    """
    extraDPDs = [
        dpdType('NTUP_SCT', substeps=['r2e']),
        dpdType('NTUP_MUONCALIB', substeps=['r2e','r2a'], treeNames=['PatternNtupleMaker/Segments']),
        dpdType('NTUP_TRKVALID', substeps=['r2e']),
        dpdType('NTUP_FASTMON', substeps=['a2t','a2d','e2a']),
        dpdType('NTUP_LARNOISE', substeps=['e2d'], treeNames=['CollectionTree']),
        dpdType('NTUP_WZ', substeps=['e2d'], treeNames=['physics']),
        dpdType('NTUP_TRT', substeps=['e2d'], treeNames=['MyCollectionTree']),
        dpdType('NTUP_HECNOISE', substeps=['e2d'], treeNames=['HECNoise']),
        dpdType('NTUP_ENHBIAS', substeps=['e2d','e2a'], treeNames=['vertices']),
        dpdType('NTUP_TRUTH', substeps=['a2d'], treeNames=['truth']),
        dpdType('NTUP_SUSYTRUTH', substeps=['a2d'], treeNames=['truth']),
        dpdType('NTUP_HIGHMULT', substeps=['e2a'], treeNames=['MinBiasTree']),
        dpdType('NTUP_PROMPTPHOT', substeps=['e2d', 'a2d'], treeNames=["PAUReco","HggUserData"]),
        dpdType('NTUP_MCPTP', substeps=['a2d'], help="Ntuple file for MCP Tag and Probe"),
        dpdType('NTUP_MCPScale', substeps=['a2d'], help="Ntuple file for MCP scale calibration"),
        dpdType('NTUP_FastCaloSim', substeps=['e2d']),
        dpdType('NTUP_PILEUP', substeps=['a2da']),
    ]

    if NTUPOnly:
        # Trigger NTUPs (for merging only!)
        extraDPDs.append(dpdType('NTUP_TRIGCOST'))
        extraDPDs.append(dpdType('NTUP_TRIGRATE', treeNames=['metadata']))
    else:
        extraDPDs.append(dpdType('DAOD_HSG2'))
        extraDPDs.append(dpdType('DESDM_ZMUMU'))

    return extraDPDs
431 

Variable Documentation

◆ msg

python.trfArgs.msg

Definition at line 8 of file trfArgs.py.

python.trfArgs.addDetectorArguments
def addDetectorArguments(parser)
Options related to the setup of the ATLAS detector (used in simulation and digitisation as well as re...
Definition: trfArgs.py:229
python.trfArgs.addTriggerArguments
def addTriggerArguments(parser, addTrigFilter=True)
Add trigger related arguments.
Definition: trfArgs.py:515
python.trfArgs.addExtraDPDTypes
def addExtraDPDTypes(parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs=False)
Add additional DPD arguments.
Definition: trfArgs.py:440
python.trfArgs.addPerfMonArguments
def addPerfMonArguments(parser)
Options for PerfMon.
Definition: trfArgs.py:144
vtune_athena.format
format
Definition: vtune_athena.py:14
python.trfArgs.addValidationArguments
def addValidationArguments(parser)
Definition: trfArgs.py:502
python.trfArgs.addMetadataArguments
def addMetadataArguments(parser)
Options for passing metadata into the transform.
Definition: trfArgs.py:248
python.trfArgs.getExtraDPDList
def getExtraDPDList(NTUPOnly=False)
Definition: trfArgs.py:400
python.trfArgs.addStandardTrfArgs
def addStandardTrfArgs(parser)
Add standard transform arguments to an argparse ArgumentParser.
Definition: trfArgs.py:16
python.trfArgs.addParallelJobProcessorArguments
def addParallelJobProcessorArguments(parser)
Definition: trfArgs.py:498
python.trfArgs.addAthenaArguments
def addAthenaArguments(parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True)
Options related to running athena in general TODO: Some way to mask certain options (preExec,...
Definition: trfArgs.py:59
python.trfArgs.addValgrindArguments
def addValgrindArguments(parser)
Add Valgrind options.
Definition: trfArgs.py:153
histSizes.list
def list(name, path='/')
Definition: histSizes.py:38
python.trfArgs.addFileValidationArguments
def addFileValidationArguments(parser)
Definition: trfArgs.py:482
python.trfArgs.addTeaArguments
def addTeaArguments(parser)
Tea for two and two for tea...
Definition: trfArgs.py:531
python.trfArgs.addVTuneArguments
def addVTuneArguments(parser)
Add VTune options.
Definition: trfArgs.py:190
python.trfArgs.addPrimaryDPDArguments
def addPrimaryDPDArguments(parser, pick=None, transform=None, multipleOK=False)
Add primary DPD arguments.
Definition: trfArgs.py:268