ATLAS Offline Software
Loading...
Searching...
No Matches
python.trfArgs Namespace Reference

Classes

class  dpdType
 Simple class to store information about extra DPD filetypes. More...

Functions

 addStandardTrfArgs (parser)
 Add standard transform arguments to an argparse ArgumentParser.
 addAthenaArguments (parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True)
 Options related to running athena in general TODO: Some way to mask certain options (perExec, e.g.)
 addPerfMonArguments (parser)
 Options for PerfMon.
 addValgrindArguments (parser)
 Add Valgrind options.
 addVTuneArguments (parser)
 Add VTune options.
 addDetectorArguments (parser)
 Options related to the setup of the ATLAS detector (used in simulation and digitisation as well as reconstruction)
 addMetadataArguments (parser)
 Options for passing metadata into the transform.
 addPrimaryDPDArguments (parser, pick=None, transform=None, multipleOK=False)
 Add primary DPD arguments.
 getExtraDPDList (NTUPOnly=False)
 addExtraDPDTypes (parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs=False)
 Add additional DPD arguments.
 addFileValidationArguments (parser)
 addParallelJobProcessorArguments (parser)
 addValidationArguments (parser)
 addTriggerArguments (parser, addTrigFilter=True)
 Add trigger related arguments.
 addTeaArguments (parser)
 Tea for two and two for tea... these arguments are used for testing.

Variables

 msg = logging.getLogger(__name__)

Function Documentation

◆ addAthenaArguments()

python.trfArgs.addAthenaArguments ( parser,
maxEventsDefaultSubstep = 'first',
addValgrind = True,
addPerfMon = True,
addVTune = True )

Options related to running athena in general TODO: Some way to mask certain options (perExec, e.g.)

Add standard athena options

Parameters
parser: trfArgParser object
maxEventsDefaultSubstep: Special option which can change the default substep for maxEvents (needed by some special transforms).

Definition at line 59 of file trfArgs.py.

def addAthenaArguments(parser, maxEventsDefaultSubstep='first', addValgrind=True, addPerfMon=True, addVTune=True):
    """Add standard athena options to the transform argument parser.

    TODO: Some way to mask certain options (perExec, e.g.)

    @param parser: trfArgParser object
    @param maxEventsDefaultSubstep: Special option which can change the default
           substep for maxEvents (needed by some special transforms)
    @param addValgrind: If True also add the Valgrind option group
    @param addPerfMon: If True also add the PerfMon option group
    @param addVTune: If True also add the VTune option group
    """
    parser.defineArgGroup('Athena', 'General Athena Options')
    parser.add_argument('--athenaopts', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=' ', runarg=False), nargs="+", metavar='substep:ATHENAOPTS',
                        help='Extra options to pass to athena. Opts will split on spaces. '
                             # Fixed typo ("sutbstep1" -> "substep1") and added the missing space before "Without"
                             'Multiple substep options can be given with --athenaopts=\'substep1:--opt1 --opt2[=foo] ...\' \'substep2:--opt3\' '
                             'Without substep specified, options will be used for all substeps.')
    parser.add_argument('--command', '-c', group = 'Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='COMMAND',
                        help='Run %(metavar)s before all else')
    parser.add_argument('--athena', group = 'Athena', type=argFactory(trfArgClasses.argString, runarg=False), metavar='ATHENA',
                        help='Use %(metavar)s as the athena executable')
    parser.add_argument('--preExec', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+', action='extend',
                        metavar='substep:PREEXEC',
                        help='Python code to execute before main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--preInclude', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:PREINCLUDE',
                        help='Python configuration fragment to include before main job options (can be optionally limited to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--postExec', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList), nargs='+', action='extend',
                        metavar='substep:POSTEXEC',
                        help='Python code to execute after main job options are included (can be optionally limited to a single substep)')
    parser.add_argument('--postInclude', group = 'Athena', type=argFactory(trfArgClasses.argSubstepList, splitter=','), nargs='+',
                        metavar='substep:POSTINCLUDE',
                        help='Python configuration fragment to include after main job options (can be optionally limited '
                             'to a single substep). Will split on commas: frag1.py,frag2.py is understood.')
    parser.add_argument('--splitConfig', group = 'Athena', type=argFactory(trfArgClasses.argSubstepString),
                        metavar='substep:SPLITCONFIG',
                        help='Configuration file to internally split job into multiple parts (can be optionally limited to a single substep)')
    parser.add_argument('--maxEvents', group='Athena', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep=maxEventsDefaultSubstep),
                        nargs='+', metavar='substep:maxEvents',
                        help='Set maximum events for each processing step (default substep is "{0}")'.format(maxEventsDefaultSubstep))
    parser.add_argument('--skipEvents', group='Athena', nargs='+', type=argFactory(trfArgClasses.argSubstepInt, defaultSubstep='first'),
                        help='Number of events to skip over in the first processing step (skipping substep can be overridden)')
    parser.add_argument('--asetup', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:ASETUP',
                        help='asetup command string to be run before this substep is executed')
    parser.add_argument('--runInContainer', group='Athena', type=argFactory(trfArgClasses.argSubstep, runarg=False), nargs='+', metavar='substep:CONTAINER_OS',
                        help='Set the substep to run in a container with the specified OS. Requires the --athena flag')
    parser.add_argument('--eventAcceptanceEfficiency', type=trfArgClasses.argFactory(trfArgClasses.argSubstepFloat, min=0.0, max=1.0, runarg=False),
                        help='Allowed "efficiency" for processing events - used to ensure output file has enough events (default 1.0)')
    parser.add_argument('--athenaMPMergeTargetSize', '--mts', type=trfArgClasses.argFactory(trfArgClasses.argKeyFloatValueList, runarg=False),
                        metavar='dataType:targetSizeInMegaBytes', nargs='+', group='Athena',
                        help='Set the target merge size for an AthenaMP output file type (give size in MB). '
                             'Note that the special value 0 means do not merge this output file; negative values mean '
                             'always merge to a single file. Globbing is supported, e.g. "DESD_*:500" is understood. '
                             'Special datatype "ALL" can be used as a default for all datatypes not explicitly '
                             'given their own value or glob matched.')
    parser.add_argument('--athenaMPStrategy', type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=False),
                        nargs='+', metavar='substep:Strategy', group='Athena',
                        help='Set the AthenaMP scheduling strategy for a particular substep. Default is unset.')
    parser.add_argument('--athenaMPUseEventOrders', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Change AthenaMP setup to read event numbers from event orders files')
    parser.add_argument('--athenaMPEventsBeforeFork', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=False),
                        metavar='N', group='Athena',
                        # Fixed help text: the opening parenthesis was never closed
                        help='Set AthenaMP to fork after processing N events (default is to fork immediately after '
                             'initialisation)')
    parser.add_argument('--sharedWriter', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='SharedWriter mode active')
    parser.add_argument('--parallelCompression',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Delegate event data compression to the workers while using SharedWriter')
    parser.add_argument('--eventService', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Switch AthenaMP to the Event Service configuration')
    parser.add_argument('--multithreaded', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multithreaded mode active')
    # NOTE(review): unlike its siblings, --mpi sets no group/metavar — confirm whether intentional
    parser.add_argument("--mpi", type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True), nargs="?",
                        const=trfArgClasses.argBool("True"), help="MPI mode active",)
    parser.add_argument('--multiprocess', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Multiprocess mode active')
    parser.add_argument('--deleteIntermediateOutputfiles', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=False),
                        metavar='BOOL', group='Athena', nargs='?', const=trfArgClasses.argBool('True'),
                        help='Remove intermediate input/output files of multi step TRF')

    if addPerfMon:
        addPerfMonArguments(parser)

    if addValgrind:
        addValgrindArguments(parser)

    if addVTune:
        addVTuneArguments(parser)

◆ addDetectorArguments()

python.trfArgs.addDetectorArguments ( parser)

Options related to the setup of the ATLAS detector (used in simulation and digitisation as well as reconstruction)

Parameters
parser: trfArgParser object

Definition at line 231 of file trfArgs.py.

def addDetectorArguments(parser):
    """Add options for the ATLAS detector setup.

    Used in simulation and digitisation as well as reconstruction.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('Detector', 'General detector configuration options, for simulation and reconstruction')
    parser.add_argument('--DBRelease', group = 'Detector', type=argFactory(trfArgClasses.argSubstep, runarg=False), metavar='substep:DBRelease', nargs='+',
                        # Fixed help text: the "(e.g., ..." parenthesis was never closed
                        help='Use DBRelease instead of ORACLE. Give either a DBRelease tarball file (e.g., DBRelease-21.7.1.tar.gz) or cvmfs DBRelease directory (e.g., 21.7.1 or current)')
    parser.add_argument('--conditionsTag', group='Detector', type=argFactory(trfArgClasses.argSubstepConditions), metavar='substep:CondTag', nargs='+',
                        help='Conditions tag to set')
    parser.add_argument('--geometryVersion', group='Detector', type=argFactory(trfArgClasses.argSubstep), metavar='substep:GeoVersion', nargs='+',
                        help='ATLAS geometry version tag')
    parser.add_argument('--geometrySQLite', group='Detector', type=argFactory(trfArgClasses.argBool),
                        help='Switch to SQLite Geometry DB')
    parser.add_argument('--geometrySQLiteFullPath', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual setting of SQLite Geometry DB path. For testing purposes only')
    parser.add_argument('--beamType', group='Detector', type=argFactory(trfArgClasses.argString),
                        help='Manual beam type setting')
    parser.add_argument('--runNumber', '--RunNumber', group='Detector', type=argFactory(trfArgClasses.argInt),
                        help='Manual run number setting')

◆ addExtraDPDTypes()

python.trfArgs.addExtraDPDTypes ( parser,
pick = None,
transform = None,
multipleOK = False,
NTUPMergerArgs = False )

Add additional DPD arguments.

Manually add DPDs that, for some reason, are not in any of the automated lists parsed by the companion functions above.

Parameters
parser: Argument parser object to add arguments to
pick: Optional list of DPD types to add (use short names, e.g., NTUP_EGAMMA)
transform: Transform object. DPD data types will be added to the correct executor (by name or substep)
multipleOK: If the multipleOK flag should be set for this argument
NTUPMergerArgs: If True, add NTUP arguments as input/output types, suitable for NTUPMerge_tf

Definition at line 442 of file trfArgs.py.

def addExtraDPDTypes(parser, pick=None, transform=None, multipleOK=False, NTUPMergerArgs = False):
    """Add additional DPD arguments.

    Manually add DPDs that, for some reason, are not in any of the automated
    lists parsed by the companion functions.

    @param parser: Argument parser object to add arguments to
    @param pick: Optional list of DPD types to add (use short names, e.g., NTUP_EGAMMA)
    @param transform: Transform object. DPD data types will be added to the correct
           executor (by name or substep)
    @param multipleOK: If the multipleOK flag should be set for this argument
    @param NTUPMergerArgs: If True, add NTUP arguments as input/output types,
           suitable for NTUPMerge_tf
    """
    parser.defineArgGroup('Additional DPDs', 'Extra DPD file types')

    extraDPDs = getExtraDPDList(NTUPOnly=NTUPMergerArgs)

    if NTUPMergerArgs:
        # Merger mode: each NTUP type gets an input argument plus a merged output argument
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                if dpd.name.startswith('NTUP'):
                    parser.add_argument('--input' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, multipleOK=True, io='input', type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(), nargs='+',
                                        help=dpd.help if dpd.help else 'DPD input {0} file'.format(dpd.name))
                    parser.add_argument('--output' + dpd.name + '_MRGFile',
                                        type=argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output merged {0} file'.format(dpd.name))
        # (removed a redundant trailing 'pass' that followed this loop)
    else:
        for dpd in extraDPDs:
            if pick is None or dpd.name in pick:
                msg.debug('Adding DPD {0} ({1}, {2}, {3}, {4})'.format(dpd.name, dpd.type, dpd.substeps, dpd.treeNames, dpd.argclass))
                # NTUPs are a bit special as they can take a treeName to count events
                if issubclass(dpd.argclass, trfArgClasses.argNTUPFile):
                    parser.add_argument('--output' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, name=dpd.name.upper(), multipleOK=multipleOK, type=dpd.type, treeNames=dpd.treeNames),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output {0} file'.format(dpd.name))
                else:
                    parser.add_argument('--output' + dpd.name + 'File',
                                        type=argFactory(dpd.argclass, multipleOK=multipleOK, type=dpd.type),
                                        group = 'Additional DPDs', metavar=dpd.name.upper(),
                                        help=dpd.help if dpd.help else 'DPD output {0} file'.format(dpd.name))
                # Register the DPD as output data on every executor matching its substeps
                if transform:
                    for executor in transform.executors:
                        if hasattr(executor, 'substep') and executor.substep in dpd.substeps:
                            executor.outDataUpdate([dpd.name])
                        if executor.name in dpd.substeps:
                            executor.outDataUpdate([dpd.name])

◆ addFileValidationArguments()

python.trfArgs.addFileValidationArguments ( parser)

Definition at line 484 of file trfArgs.py.

def addFileValidationArguments(parser):
    """Add the standard input/output file validation switches.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('File Validation', 'Standard file validation switches')
    # The three skip switches share the same shape, so register them in a loop
    for switch, scope in (('--fileValidation', 'both input and output'),
                          ('--inputFileValidation', 'input'),
                          ('--outputFileValidation', 'output')):
        parser.add_argument(switch, type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                            group='File Validation',
                            help='If FALSE skip {0} file validation (default TRUE; warning - do not use this option in production jobs!)'.format(scope))

    parser.add_argument('--parallelFileValidation', type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                        nargs='?', const=trfArgClasses.argBool('True'), group='File Validation',
                        help='Parallelise file validation if True')
    parser.add_argument('--multithreadedFileValidation', type=argFactory(trfArgClasses.argBool), metavar='BOOL',
                        nargs='?', const=trfArgClasses.argBool('True'), group='File Validation',
                        help='Use multithreaded ROOT file validation if True')

◆ addMetadataArguments()

python.trfArgs.addMetadataArguments ( parser)

Options for passing metadata into the transform.

Parameters
parser: trfArgParser object

Definition at line 250 of file trfArgs.py.

def addMetadataArguments(parser):
    """Add options for passing metadata into the transform.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('Metadata', 'Metadata arguments that will be passed into the transform')
    parser.add_argument('--AMIConfig', '--AMI', group="Metadata",
                        type=argFactory(trfArgClasses.argString),
                        help='Configure transform with AMI tag parameters')
    parser.add_argument('--AMITag', group="Metadata", metavar='TAG',
                        type=argFactory(trfArgClasses.argString),
                        help='AMI tag from which this job was defined - this option simply writes the '
                             'relevant AMI tag value into the output metadata, it does not configure the job (use --AMIConfig for that)')
    # Bookkeeping identifiers supplied by the production system (not run arguments)
    for switch, description in (('--taskid', "Task identification number"),
                                ('--jobid', "Job identification number"),
                                ('--attempt', "Job attempt number")):
        parser.add_argument(switch, group="Metadata",
                            type=argFactory(trfArgClasses.argString, runarg=False),
                            help=description)

◆ addParallelJobProcessorArguments()

python.trfArgs.addParallelJobProcessorArguments ( parser)

Definition at line 500 of file trfArgs.py.

def addParallelJobProcessorArguments(parser):
    """Add Parallel Job Processor arguments.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('pool', 'Parallel Job Processor arguments')
    # Fixed option string: was '----parallelProcessPool' (four leading dashes),
    # which made the documented '--parallelProcessPool' flag unusable
    parser.add_argument('--parallelProcessPool', group='pool', type=argFactory(trfArgClasses.argInt, runarg=False),
                        help='Number of processes in pool requested (int)')

◆ addPerfMonArguments()

python.trfArgs.addPerfMonArguments ( parser)

Options for PerfMon.

Parameters
parser: trfArgParser object

Definition at line 146 of file trfArgs.py.

def addPerfMonArguments(parser):
    """Add PerfMon monitoring options.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('PerfMon', 'General PerfMon Options')
    parser.add_argument('--perfmon', group='PerfMon',
                        type=argFactory(trfArgClasses.argString),
                        default=trfArgClasses.argString('fastmonmt'),
                        help='Enable PerfMon (fastmonmt [default], fullmonmt, or none)')

◆ addPrimaryDPDArguments()

python.trfArgs.addPrimaryDPDArguments ( parser,
pick = None,
transform = None,
multipleOK = False )

Add primary DPD arguments.

Get the list of current primary DPDs and add them to the parser; optionally only some DPDs may be added, using the pick list. This function uses the silent decorator to suppress useless messages from ROOT.

Parameters
parser: Argument parser object to add arguments to
pick: Optional list of DPD types to add (use short names, e.g., DESDM_MUON)
transform: Transform object. DPD data types will be added to the correct executor (by name or substep)
multipleOK: If the multipleOK flag should be set for this argument @silent

Definition at line 270 of file trfArgs.py.

def addPrimaryDPDArguments(parser, pick = None, transform = None, multipleOK=False):
    """Add primary DPD file arguments.

    @param parser: Argument parser object to add arguments to
    @param pick: Optional list of DPD types to add (use short names, e.g., DESDM_MUON)
    @param transform: Transform object; DPD data types will be added to the matching
           executor (by name or substep)
    @param multipleOK: If the multipleOK flag should be set for these arguments
    """
    parser.defineArgGroup('Primary DPDs', 'Primary DPD File Options')
    # Marker -> (argFile class, data type); checked in this order so that
    # DRAW matches RAW, DAOD matches AOD and DESD matches ESD
    fileTypeTable = (('RAW', trfArgClasses.argBSFile, 'RAW'),
                     ('AOD', trfArgClasses.argPOOLFile, 'AOD'),
                     ('ESD', trfArgClasses.argPOOLFile, 'ESD'))
    # list* really gives just a list of DPD names
    try:
        # TODO: do we actually need all of those?
        listRAWtoDPD = ['StreamDAOD_PIXELVALID', 'StreamDRAW_RPVLL', 'StreamDRAW_ZMUMU', 'StreamDRAW_DIMU', 'StreamDRAW_EGZ', 'StreamDRAW_TAULH', 'StreamDRAW_JET', 'StreamDRAW_EGJPSI', 'StreamDRAW_TAUMUH',
                        'StreamDRAW_EMU', 'StreamDRAW_BCID1', 'StreamDRAW_BCID2', 'StreamDRAW_BCID3', 'StreamDRAW_BCID4', 'StreamDRAW_TOPSLMU', 'StreamDAOD_IDNCB', 'StreamDAOD_SCTVALID']
        listESDtoDPD = ['StreamDESDM_ALLCELLS', 'StreamDESDM_EOVERP', 'StreamDESDM_IDALIGN', 'StreamDESDM_EGAMMA', 'StreamDESDM_MCP', 'StreamDESDM_TILEMU', 'StreamDESDM_PHOJET', 'StreamDESDM_SGLEL', 'StreamDESDM_SLTTMU', 'StreamDESDM_CALJET',
                        'StreamDESDM_EXOTHIP', 'StreamDAOD_IDTRKVALID', 'StreamDAOD_IDTIDE', 'StreamDAOD_IDTRKLUMI', 'StreamDAOD_IDPIXLUMI', 'StreamDAOD_L1CALO1', 'StreamDAOD_L1CALO2', 'StreamDAOD_L1CALO3', 'StreamDAOD_L1CALO4', 'StreamDAOD_L1CALO5', 'StreamDESD_DEDX']
        listAODtoDPD = []
        matchedOutputList = [(['r2a'], listRAWtoDPD + listESDtoDPD), (['a2d'], listAODtoDPD)]
        for substep, dpdList in matchedOutputList:
            for dpdName in (dpd.replace('Stream', '') for dpd in dpdList):
                msg.debug('Handling {0}'.format(dpdName))
                if pick is not None and dpdName not in pick:
                    continue
                # Decide which file type we actually have here
                dpdType = dpdName.split('_')[0]
                for marker, argClass, fileType in fileTypeTable:
                    if marker in dpdType:
                        parser.add_argument('--output' + dpdName + 'File',
                                            type=argFactory(argClass, multipleOK=multipleOK, type=fileType),
                                            group = 'Primary DPDs', metavar=dpdName.upper(),
                                            help='DPD {0} output {1} file'.format(dpdType, dpdName))
                        break
                else:
                    msg.warning('Unrecognised primary DPD type: {0}'.format(dpdName))
                # Automatically add DPD as output data arguments of their relevant executors
                if transform:
                    for executor in transform.executors:
                        if hasattr(executor, 'substep') and executor.substep in substep:
                            executor.outDataUpdate([dpdName])
                        if executor.name in substep:
                            executor.outDataUpdate([dpdName])

    except ImportError:
        msg.warning('PrimaryDPDFlags not available - cannot add primary DPD arguments')

◆ addStandardTrfArgs()

python.trfArgs.addStandardTrfArgs ( parser)

Add standard transform arguments to an argparse ArgumentParser.

Definition at line 16 of file trfArgs.py.

def addStandardTrfArgs(parser):
    """Add standard transform arguments to an argparse ArgumentParser."""
    parser.add_argument('--CA', action=argActionFactory(trfArgClasses.argSubstepBool, runarg=False), nargs='*',
                        help='Use ComponentAccumulator base configuration')
    parser.add_argument('--verbose', '--debug', action='store_true', help='Set transform loglevel to DEBUG')
    parser.add_argument('--loglevel', choices=list(stdLogLevels), help='Set transform logging level')
    parser.add_argument('--argJSON', '--argjson', metavar='FILE', help='File containing JSON serialised argument dictionary')
    parser.add_argument('--dumpargs', action='store_true', help='Dump transform arguments and exit')
    # Introspection switches that print graph information and exit
    for switch, description in (('--showGraph', 'Show multi-step transform graph, then exit'),
                                ('--showPath', 'Show execution path only, then exit'),
                                ('--showSteps', 'Show list of executor steps only, then exit')):
        parser.add_argument(switch, action='store_true', help=description)
    parser.add_argument('--dumpPickle', metavar='FILE', help='Interpret command line arguments and write them out as a pickle file')
    parser.add_argument('--dumpJSON', metavar='FILE', help='Interpret command line arguments and write them out as a JSON file')
    parser.add_argument('--reportName', type=argFactory(trfArgClasses.argString, runarg=False),
                        help='Base name for job reports (default name is "jobReport" for most reports, but "metadata" for classic prodsys XML)')
    parser.add_argument('--reportType', type=argFactory(trfArgClasses.argList, runarg=False), nargs='+', metavar='TYPE',
                        help='Job reports to produce: valid values are "text", "json", "classic", "pilotPickle" and "gpickle"')
    parser.add_argument('--execOnly', action='store_true',
                        help='Exec the first substep only, replacing the transform process (no job reports and the return code will be from the substep process)')
    parser.add_argument('--env', type=argFactory(trfArgClasses.argSubstepList, runarg=False), metavar='substep:KEY=VALUE', nargs='+',
                        help='Explicitly set environment variables for an executor (default is all substeps).'
                             ' N.B. this setting is passed to the shell, so reference to shell variables is allowed, e.g.'
                             ' KEY=VALUE:$KEY')
    parser.add_argument('--imf', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
                        help='Manually include/exclude the Intel IMF maths library '
                             '(otherwise this is disabled for base releases < 17.7, enabled otherwise)')
    parser.add_argument('--tcmalloc', type=argFactory(trfArgClasses.argSubstepBool, runarg=False), metavar="substep:BOOL", nargs='+',
                        help='Switch preload of the tcmalloc library (disabled by default)')
    # NOTE(review): "steeringAlises" below matches the (misspelled) attribute name in
    # trfArgClasses.argSubstepSteering — do not "correct" it independently of that class
    parser.add_argument('--steering', type=argFactory(trfArgClasses.argSubstepSteering, runarg=False), nargs='+', metavar='substep:{in/out}{+-}DATA',
                        help='Steer the transform by manipulating the execution graph before the execution path is calculated. '
                             'Format is substep:{in,out}{+-}DATA,{in,out}{+-}DATA,... to modify the substep\'s input/output '
                             ' by adding/removing a data type. e.g. RAWtoALL:in-RDO,in+RDO_TRIG would remove RDO and add '
                             'RDO_TRIG to the list of valid input datatypes for the RAWtoALL substep. See current version of '
                             'trfArgClasses.argSubstepSteering.steeringAlises for supported aliases. '
                             'https://twiki.cern.ch/twiki/bin/view/AtlasComputing/TransformSteering')
    # Metadata arguments are always part of the standard set
    addMetadataArguments(parser)

◆ addTeaArguments()

python.trfArgs.addTeaArguments ( parser)

Tea for two and two for tea... these arguments are used for testing.

Definition at line 533 of file trfArgs.py.

def addTeaArguments(parser):
    """Tea for two and two for tea... these arguments are used for testing.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('Tea', 'Tea Making Options (for testing)')
    teaOptions = (
        ('--cupsOfTea', trfArgClasses.argInt, 'Number of cups of tea requested (int)', {}),
        ('--teaType', trfArgClasses.argString, 'Requested flavour of tea (string)', {}),
        ('--mugVolume', trfArgClasses.argFloat, 'How large a cup to use (float)', {}),
        ('--drinkers', trfArgClasses.argList, 'Who is drinking tea (list)', {'nargs': '+'}),
    )
    for switch, argClass, description, extra in teaOptions:
        parser.add_argument(switch, group='Tea', type=argFactory(argClass), help=description, **extra)

◆ addTriggerArguments()

python.trfArgs.addTriggerArguments ( parser,
addTrigFilter = True )

Add trigger related arguments.

Definition at line 517 of file trfArgs.py.

def addTriggerArguments(parser, addTrigFilter=True):
    """Add trigger related arguments.

    @param parser: trfArgParser object
    @param addTrigFilter: If True also add the trigger filter list option
    """
    parser.defineArgGroup('Trigger', 'Trigger Related Options')
    parser.add_argument('--triggerConfig', group='Trigger', nargs='+',
                        metavar='substep=triggerConf',
                        type=argFactory(trfArgClasses.argSubstep, defaultSubstep="RDOtoRDOTrigger", separator='='),
                        help='Trigger configuration string (substep aware argument - default is to run trigger in RDOtoRDOTrigger step, '
                             'use syntax SUBSTEP=TRIGCONF if you want to run trigger somewhere else). '
                             'N.B. This argument uses EQUALS (=) to separate the substep name from the value.')
    if addTrigFilter:
        parser.add_argument('--trigFilterList', group='Trigger', nargs="+",
                            type=argFactory(trfArgClasses.argList),
                            help='Trigger filter list (multiple values can be given separately or split on commas; only understood in RAWtoALL)')

◆ addValgrindArguments()

python.trfArgs.addValgrindArguments ( parser)

Add Valgrind options.

Definition at line 155 of file trfArgs.py.

def addValgrindArguments(parser):
    """Add Valgrind profiling options.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('Valgrind', 'General Valgrind Options')
    parser.add_argument('--valgrind', group='Valgrind',
                        type=argFactory(trfArgClasses.argBool, runarg=False),
                        metavar="substep:BOOL",
                        help='Enable Valgrind')
    parser.add_argument('--valgrindDefaultOpts', group='Valgrind',
                        type=argFactory(trfArgClasses.argBool, runarg=False),
                        metavar="substep:BOOL",
                        help='Enable default Valgrind options')
    parser.add_argument('--valgrindExtraOpts', group='Valgrind',
                        type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        metavar='OPT1,OPT2,OPT3',
                        help='Extra options passed to Valgrind when running Athena. '
                             'Options starting with "-" must be given as '
                             '--valgrindExtraOpts=\'--opt1=foo,--opt2=bar,...\'')

◆ addValidationArguments()

python.trfArgs.addValidationArguments ( parser)

Definition at line 504 of file trfArgs.py.

def addValidationArguments(parser):
    """Add standard job validation switches.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('Validation', 'Standard job validation switches')
    parser.add_argument('--ignoreFiles', '--ignoreFilters', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Files containing error patterns to be ignored during logfile scans (will split on commas; use "None" to disable the standard "atlas_error_mask.db")', nargs='+')
    parser.add_argument('--ignorePatterns', group='Validation', type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        help='Regexp error patterns to be ignored during logfile scans (will be applied as a search against the whole logfile line)', nargs='+')
    parser.add_argument('--ignoreErrors', type=argFactory(trfArgClasses.argBool, runarg=False), metavar="BOOL", group='Validation',
                        # Fixed spelling in the user-facing help text ("prefered" -> "preferred")
                        help='Ignore ERROR lines in logfiles (use with care this can mask serious problems; --ignorePatterns is preferred)')
    parser.add_argument('--checkEventCount', type=trfArgClasses.argFactory(trfArgClasses.argSubstepBool, defaultSubstep = 'all', runarg=False),
                        help='Enable check of output events against input events (default: True)', group='Validation',
                        metavar="BOOL")

◆ addVTuneArguments()

python.trfArgs.addVTuneArguments ( parser)

Add VTune options.

Definition at line 192 of file trfArgs.py.

def addVTuneArguments(parser):
    """Add VTune profiling options.

    @param parser: trfArgParser object
    """
    parser.defineArgGroup('VTune', 'General VTune Options')
    parser.add_argument('--vtune', group='VTune',
                        type=argFactory(trfArgClasses.argBool, runarg=False),
                        metavar="substep:BOOL",
                        help='Enable VTune')
    parser.add_argument('--vtuneDefaultOpts', group='VTune',
                        type=argFactory(trfArgClasses.argBool, runarg=False),
                        metavar="substep:BOOL",
                        help='Enable default VTune options')
    parser.add_argument('--vtuneExtraOpts', group='VTune',
                        type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
                        metavar='OPT1,OPT2,OPT3',
                        help='Extra options passed to VTune when running Athena. '
                             'Options starting with "-" must be given as '
                             '--vtuneExtraOpts=\'-opt1=foo,-opt2=bar,...\'')

◆ getExtraDPDList()

python.trfArgs.getExtraDPDList ( NTUPOnly = False)

Definition at line 402 of file trfArgs.py.

def getExtraDPDList(NTUPOnly = False):
    """Return the list of extra DPD type descriptions.

    @param NTUPOnly: If True, append only the trigger NTUP merger types;
           otherwise append the extra DAOD/DESDM types
    """
    # (name, keyword arguments) pairs for the common extra DPD types
    dpdSpecs = [
        ('NTUP_SCT', {'substeps': ['r2e']}),
        ('NTUP_MUONCALIB', {'substeps': ['r2e', 'r2a'], 'treeNames': ['PatternNtupleMaker/Segments']}),
        ('NTUP_TRKVALID', {'substeps': ['r2e']}),
        ('NTUP_FASTMON', {'substeps': ['a2t', 'a2d', 'e2a']}),
        ('NTUP_LARNOISE', {'substeps': ['e2d'], 'treeNames': ['CollectionTree']}),
        ('NTUP_WZ', {'substeps': ['e2d'], 'treeNames': ['physics']}),
        ('NTUP_TRT', {'substeps': ['e2d'], 'treeNames': ['MyCollectionTree']}),
        ('NTUP_HECNOISE', {'substeps': ['e2d'], 'treeNames': ['HECNoise']}),
        ('NTUP_ENHBIAS', {'substeps': ['e2d', 'e2a'], 'treeNames': ['vertices']}),
        ('NTUP_TRUTH', {'substeps': ['a2d'], 'treeNames': ['truth']}),
        ('NTUP_SUSYTRUTH', {'substeps': ['a2d'], 'treeNames': ['truth']}),
        ('NTUP_HIGHMULT', {'substeps': ['e2a'], 'treeNames': ['MinBiasTree']}),
        ('NTUP_PROMPTPHOT', {'substeps': ['e2d', 'a2d'], 'treeNames': ["PAUReco", "HggUserData"]}),
        ('NTUP_MCPTP', {'substeps': ['a2d'], 'help': "Ntuple file for MCP Tag and Probe"}),
        ('NTUP_MCPScale', {'substeps': ['a2d'], 'help': "Ntuple file for MCP scale calibration"}),
        ('NTUP_FastCaloSim', {'substeps': ['e2d']}),
        ('NTUP_PILEUP', {'substeps': ['a2da']}),
    ]
    extraDPDs = [dpdType(name, **kwargs) for name, kwargs in dpdSpecs]

    # Trigger NTUPs (for merging only!)
    if NTUPOnly:
        extraDPDs.append(dpdType('NTUP_TRIGCOST'))
        extraDPDs.append(dpdType('NTUP_TRIGRATE', treeNames=['metadata']))
    else:
        extraDPDs.append(dpdType('DAOD_HSG2'))
        extraDPDs.append(dpdType('DESDM_ZMUMU'))

    return extraDPDs

Variable Documentation

◆ msg

python.trfArgs.msg = logging.getLogger(__name__)

Definition at line 8 of file trfArgs.py.