ATLAS Offline Software
Loading...
Searching...
No Matches
athenaEF Namespace Reference

Classes

class  ConfigRunner
class  MyHelp
class  RunParams

Functions

 get_trigconf_keys_from_oks (partition=None, webdaq_base=None, strict=False)
 get_run_params (args=None, from_is=False, partition=None, webdaq_base=None, strict=False, solenoid_current_override=None, toroids_current_override=None)
 load_from_json (json_file, run_params=None, num_threads=1, num_slots=1, ef_files=None)
 load_from_database (db_server, smk, l1psk=None, hltpsk=None, run_params=None, num_threads=1, num_slots=1, ef_files=None)
str arg_sor_time (s)
 The following arg_* methods are used as custom types in argparse.
 arg_detector_mask (s)
 arg_log_level (s)
 check_args (parser, args)
 update_run_params (args, flags)
 update_trigconf_keys (args, flags)
 main ()

Variables

 log = logging.getLogger('athenaEF')

Detailed Description

date"

# defaults
export USETCMALLOC=1
export USEIMF=1

# parse command line arguments
# NOTE: "$@" (quoted) preserves arguments containing whitespace;
# an unquoted ${@} would word-split them before the case match.
for a in "$@"
do
    case "$a" in
        --stdcmalloc)    USETCMALLOC=0;;
        --tcmalloc)      USETCMALLOC=1;;
        --stdcmath)      USEIMF=0;;
        --imf)           USEIMF=1;;
        --preloadlib*)   export ATHENA_ADD_PRELOAD=${a#*=};;
        --no-ers-signal-handlers)  export TDAQ_ERS_NO_SIGNAL_HANDLERS=1;;
    esac
done

# Do the actual preloading via LD_PRELOAD
source $(which athena_preload.sh)

# Now resurrect ourselves as python script
# (the quoted "exec" keeps this line valid as both shell and Python source)
python_path=$(which python)
"exec" "$python_path" "-tt" "$0" "$@";

Function Documentation

◆ arg_detector_mask()

athenaEF.arg_detector_mask ( s)
Convert detector mask to format expected by eformat

Definition at line 805 of file athenaEF.py.

def arg_detector_mask(s):
    """Convert detector mask to format expected by eformat

    Accepts either the string 'all' (returns the default mask enabling all
    detectors) or a hex string (with or without '0x' prefix, any case),
    which is normalized to a 32-character lowercase zero-padded hex string.

    Raises ValueError if s is neither 'all' nor a valid hex string.
    """
    if s == 'all':
        return RunParams.DEFAULT_DETECTOR_MASK
    # Normalize: parse as hex, re-emit as lowercase digits without the '0x'
    # prefix. (The former ``.replace('l','')`` was Python 2 residue -- hex()
    # never emits an 'L' suffix in Python 3.)
    dmask = format(int(s, 16), 'x')
    return dmask.rjust(32, '0')  # pad with leading zeros to 32 characters
std::string replace(std::string s, const std::string &s2, const std::string &s3)
Definition hcg.cxx:310

◆ arg_log_level()

athenaEF.arg_log_level ( s)
Argument handler for log levels

Definition at line 813 of file athenaEF.py.

def arg_log_level(s):
    """Argument handler for log levels

    Splits a comma-separated level string into a list; a single level is
    complemented with 'ERROR' as the second (POOL) level.
    """
    levels = s.split(',')
    return levels + ['ERROR'] if len(levels) == 1 else levels

◆ arg_sor_time()

str athenaEF.arg_sor_time ( s)

The following arg_* methods are used as custom types in argparse.

Convert possible SOR time arguments to an OWLTime compatible string

Definition at line 798 of file athenaEF.py.

def arg_sor_time(s) -> str:
    """Convert possible SOR time arguments to an OWLTime compatible string"""
    owl_format = '%d/%m/%y %H:%M:%S.%f'
    if s == 'now':
        # current local time
        return dt.now().strftime(owl_format)
    if s.isdigit():
        # nanoseconds since epoch -> seconds for fromtimestamp
        seconds = float(s) / 1e9
        return dt.fromtimestamp(seconds).strftime(owl_format)
    # assume the string is already in a human-readable OWLTime format
    return s

◆ check_args()

athenaEF.check_args ( parser,
args )
Consistency check of command line arguments (same as athenaHLT.py)

Definition at line 820 of file athenaEF.py.

def check_args(parser, args):
    """Consistency check of command line arguments (same as athenaHLT.py)

    Calls parser.error() (which exits) on the first inconsistency found.
    """
    # A configuration source is mandatory: either job options or the DB.
    have_config_source = args.jobOptions or args.use_database
    if not have_config_source:
        parser.error("No job options file specified")

    # Input files are mandatory unless we only dump the configuration.
    if not (args.file or args.dump_config_exit):
        parser.error("--file is required unless using --dump-config-exit")

    # CREST access only makes sense when configuring from the DB.
    if args.use_crest and not args.use_database:
        parser.error("--use-crest requires --use-database")

◆ get_run_params()

athenaEF.get_run_params ( args = None,
from_is = False,
partition = None,
webdaq_base = None,
strict = False,
solenoid_current_override = None,
toroids_current_override = None )
Get run parameters from the appropriate source.

This is the main entry point for obtaining run parameters. It provides
a single place to modify when adding new sources (like WEBDAQ).

Args:
   args: argparse Namespace with command-line arguments (optional)
   from_is: If True, try to read from WEBDAQ first
   partition: Partition name for IS access (defaults to TDAQ_PARTITION env var)
   webdaq_base: WEBDAQ base URL (defaults to TDAQ_WEBDAQ_BASE env var)
   strict: If True, raise an exception if IS read fails (for --online-environment)
   solenoid_current_override: Command-line override for solenoid current
   toroids_current_override: Command-line override for toroids current

Returns:
   RunParams instance

Raises:
   RuntimeError: If strict=True and IS read fails

Definition at line 487 of file athenaEF.py.

488 solenoid_current_override=None, toroids_current_override=None):
489 """
490 Get run parameters from the appropriate source.
491
492 This is the main entry point for obtaining run parameters. It provides
493 a single place to modify when adding new sources (like WEBDAQ).
494
495 Args:
496 args: argparse Namespace with command-line arguments (optional)
497 from_is: If True, try to read from WEBDAQ first
498 partition: Partition name for IS access (defaults to TDAQ_PARTITION env var)
499 webdaq_base: WEBDAQ base URL (defaults to TDAQ_WEBDAQ_BASE env var)
500 strict: If True, raise an exception if IS read fails (for --online-environment)
501 solenoid_current_override: Command-line override for solenoid current
502 toroids_current_override: Command-line override for toroids current
503
504 Returns:
505 RunParams instance
506
507 Raises:
508 RuntimeError: If strict=True and IS read fails
509 """
510 if from_is:
511 return RunParams.from_is(partition=partition, webdaq_base=webdaq_base, strict=strict,
512 solenoid_current_override=solenoid_current_override,
513 toroids_current_override=toroids_current_override)
514 elif args is not None:
515 return RunParams.from_args(args)
516 else:
517 return RunParams()
518
519

◆ get_trigconf_keys_from_oks()

athenaEF.get_trigconf_keys_from_oks ( partition = None,
webdaq_base = None,
strict = False )
Read trigger configuration keys (SMK, L1PSK, HLTPSK) and DB info from OKS via WEBDAQ REST API.

This reads the keys from the partition's TriggerConfiguration object and its
related L1TriggerConfiguration and TriggerDBConnection objects.

OKS Structure:
- Partition -> TriggerConfiguration -> L1TriggerConfiguration (Lvl1PrescaleKey)
- Partition -> TriggerConfiguration -> TriggerDBConnection (SuperMasterKey)
- Partition -> TriggerConfiguration -> HLTImplementationDB (hltPrescaleKey)

Args:
   partition: The partition name (default: from TDAQ_PARTITION env var)
   webdaq_base: Base URL for webis_server (default: from TDAQ_WEBDAQ_BASE env var)
   strict: If True, raise an exception if OKS read fails (for --online-environment)

Returns:
   dict with keys: SMK, L1PSK, HLTPSK, db_alias (values may be None if not found)

Raises:
   RuntimeError: If strict=True and OKS read fails

Definition at line 324 of file athenaEF.py.

def get_trigconf_keys_from_oks(partition=None, webdaq_base=None, strict=False):
    """
    Read trigger configuration keys (SMK, L1PSK, HLTPSK) and DB info from OKS via WEBDAQ REST API.

    This reads the keys from the partition's TriggerConfiguration object and its
    related L1TriggerConfiguration and TriggerDBConnection objects.

    OKS Structure:
    - Partition -> TriggerConfiguration -> L1TriggerConfiguration (Lvl1PrescaleKey)
    - Partition -> TriggerConfiguration -> TriggerDBConnection (SuperMasterKey)
    - Partition -> TriggerConfiguration -> HLTImplementationDB (hltPrescaleKey)

    Args:
       partition: The partition name (default: from TDAQ_PARTITION env var)
       webdaq_base: Base URL for webis_server (default: from TDAQ_WEBDAQ_BASE env var)
       strict: If True, raise an exception if OKS read fails (for --online-environment)

    Returns:
       dict with keys: SMK, L1PSK, HLTPSK, db_alias (values may be None if not found)

    Raises:
       RuntimeError: If strict=True and OKS read fails
    """
    # Imported locally so the module can be used without requests installed
    # when OKS access is not needed.
    import requests

    # Determine the base URL
    if webdaq_base is None:
        webdaq_base = os.environ.get('TDAQ_WEBDAQ_BASE')

    if not webdaq_base:
        msg = "TDAQ_WEBDAQ_BASE not set, cannot read from OKS"
        if strict:
            raise RuntimeError(msg + " (required for --online-environment)")
        log.warning(msg)
        return {'SMK': None, 'L1PSK': None, 'HLTPSK': None, 'db_alias': None}

    # Determine partition
    if partition is None:
        partition = os.environ.get('TDAQ_PARTITION', 'ATLAS')

    log.info("Reading trigger configuration keys from OKS via WEBDAQ: %s (partition=%s)",
             webdaq_base, partition)

    # All keys default to None; filled in as the nested fetches succeed.
    result = {'SMK': None, 'L1PSK': None, 'HLTPSK': None, 'db_alias': None}

    def extract_oks_data(response_json):
        """
        Extract data from OKS compact format: [name, type, attributes, relationships]
        Returns tuple (attributes_dict, relationships_dict)
        """
        if isinstance(response_json, list) and len(response_json) >= 4:
            return response_json[2], response_json[3]  # attributes, relationships
        elif isinstance(response_json, list) and len(response_json) >= 3:
            return response_json[2], {}  # attributes only
        return response_json, {}  # fallback

    def get_ref_id(ref):
        """Extract object ID from a relationship reference."""
        if isinstance(ref, list) and len(ref) >= 2:
            return ref[0]  # [id, class] format
        elif isinstance(ref, dict) and 'id' in ref:
            return ref['id']
        elif isinstance(ref, str):
            return ref
        return None

    # OKS API: GET /info/current/{partition}/oks/{class}/{name}?format=compact
    # Response format: [name, type, attributes, relationships]
    # - attributes: dict of simple values (strings, ints, etc.)
    # - relationships: dict of references to other objects
    try:
        url = f"{webdaq_base}/info/current/{partition}/oks/Partition/{partition}?format=compact"
        log.debug("Fetching Partition from OKS: %s", url)

        response = requests.get(url, timeout=10)
        if response.status_code == 200:
            part_attrs, part_rels = extract_oks_data(response.json())
            log.debug("Partition attributes: %s", part_attrs)
            log.debug("Partition relationships: %s", part_rels)

            # Get TriggerConfiguration reference from relationships
            trig_conf_id = None
            if 'TriggerConfiguration' in part_rels:
                trig_conf_id = get_ref_id(part_rels['TriggerConfiguration'])

            if trig_conf_id:
                log.debug("TriggerConfiguration ID: %s", trig_conf_id)

                # Get TriggerConfiguration object
                url = f"{webdaq_base}/info/current/{partition}/oks/TriggerConfiguration/{trig_conf_id}?format=compact"
                response = requests.get(url, timeout=10)
                # NOTE(review): a non-200 status on this or any of the nested
                # fetches below is silently ignored (the corresponding keys stay
                # None); only the initial Partition fetch reports a failure.
                if response.status_code == 200:
                    trig_attrs, trig_rels = extract_oks_data(response.json())
                    log.debug("TriggerConfiguration attributes: %s", trig_attrs)
                    log.debug("TriggerConfiguration relationships: %s", trig_rels)

                    # Get L1TriggerConfiguration for L1PSK (relationship 'l1')
                    if 'l1' in trig_rels:
                        l1_id = get_ref_id(trig_rels['l1'])
                        if l1_id:
                            url = f"{webdaq_base}/info/current/{partition}/oks/L1TriggerConfiguration/{l1_id}?format=compact"
                            resp = requests.get(url, timeout=10)
                            if resp.status_code == 200:
                                l1_attrs, _ = extract_oks_data(resp.json())
                                log.debug("L1TriggerConfiguration attributes: %s", l1_attrs)
                                if 'Lvl1PrescaleKey' in l1_attrs:
                                    result['L1PSK'] = int(l1_attrs['Lvl1PrescaleKey'])
                                    log.info("Got L1PSK=%d from OKS", result['L1PSK'])

                    # Get TriggerDBConnection for SMK and db_alias (relationship 'TriggerDBConnection')
                    if 'TriggerDBConnection' in trig_rels:
                        db_id = get_ref_id(trig_rels['TriggerDBConnection'])
                        if db_id:
                            url = f"{webdaq_base}/info/current/{partition}/oks/TriggerDBConnection/{db_id}?format=compact"
                            resp = requests.get(url, timeout=10)
                            if resp.status_code == 200:
                                db_attrs, _ = extract_oks_data(resp.json())
                                log.debug("TriggerDBConnection attributes: %s", db_attrs)
                                if 'SuperMasterKey' in db_attrs:
                                    result['SMK'] = int(db_attrs['SuperMasterKey'])
                                    log.info("Got SMK=%d from OKS", result['SMK'])
                                if 'Alias' in db_attrs:
                                    result['db_alias'] = db_attrs['Alias']
                                    log.info("Got db_alias=%s from OKS", result['db_alias'])

                    # Get HLTImplementationDB for HLTPSK (relationship 'hlt')
                    if 'hlt' in trig_rels:
                        hlt_id = get_ref_id(trig_rels['hlt'])
                        if hlt_id:
                            url = f"{webdaq_base}/info/current/{partition}/oks/HLTImplementationDB/{hlt_id}?format=compact"
                            resp = requests.get(url, timeout=10)
                            if resp.status_code == 200:
                                hlt_attrs, _ = extract_oks_data(resp.json())
                                log.debug("HLTImplementationDB attributes: %s", hlt_attrs)
                                if 'hltPrescaleKey' in hlt_attrs:
                                    result['HLTPSK'] = int(hlt_attrs['hltPrescaleKey'])
                                    log.info("Got HLTPSK=%d from OKS", result['HLTPSK'])
        else:
            msg = f"Failed to fetch Partition from OKS: HTTP {response.status_code}"
            if strict:
                raise RuntimeError(msg + " (required for --online-environment)")
            log.warning(msg)

    except requests.exceptions.RequestException as e:
        # Network-level failures (connection, timeout, ...)
        msg = f"Error fetching trigger keys from OKS: {e}"
        if strict:
            raise RuntimeError(msg + " (required for --online-environment)")
        log.warning(msg)
    except (ValueError, KeyError, TypeError) as e:
        # Malformed JSON / unexpected response structure
        msg = f"Error parsing trigger keys from OKS: {e}"
        if strict:
            raise RuntimeError(msg + " (required for --online-environment)")
        log.warning(msg)

    # In strict mode, verify we got the required keys from OKS
    if strict:
        missing = [k for k in ['SMK', 'L1PSK', 'HLTPSK'] if result.get(k) is None]
        if missing:
            raise RuntimeError(f"Failed to get {', '.join(missing)} from OKS (required for --online-environment)")

    return result
486

◆ load_from_database()

athenaEF.load_from_database ( db_server,
smk,
l1psk = None,
hltpsk = None,
run_params = None,
num_threads = 1,
num_slots = 1,
ef_files = None )
Load configuration from trigger database using the Super Master Key (SMK).

Returns a ConfigRunner that uses TrigConf::JobOptionsSvc with TYPE="DB"
to load configuration directly from the database, same as athenaHLT.

Definition at line 780 of file athenaEF.py.

781 num_threads=1, num_slots=1, ef_files=None):
782 """
783 Load configuration from trigger database using the Super Master Key (SMK).
784
785 Returns a ConfigRunner that uses TrigConf::JobOptionsSvc with TYPE="DB"
786 to load configuration directly from the database, same as athenaHLT.
787 """
788 log.info("Loading job options from database %s with SMK %d", db_server, smk)
789 return ConfigRunner.from_database(db_server, smk, l1psk, hltpsk, run_params,
790 num_threads=num_threads,
791 num_slots=num_slots,
792 ef_files=ef_files)
793
794

◆ load_from_json()

athenaEF.load_from_json ( json_file,
run_params = None,
num_threads = 1,
num_slots = 1,
ef_files = None )
Load configuration from a Gaudi joboptions JSON file.

Returns a ConfigRunner with a run() method that executes the configuration
using TrigConf::JobOptionsSvc with TYPE="FILE".

Definition at line 760 of file athenaEF.py.

def load_from_json(json_file, run_params=None, num_threads=1, num_slots=1, ef_files=None):
    """
    Load configuration from a Gaudi joboptions JSON file.

    Returns a ConfigRunner with a run() method that executes the configuration
    using TrigConf::JobOptionsSvc with TYPE="FILE".
    """
    # Read and validate the joboptions catalogue
    with open(json_file, 'r') as jo_file:
        catalogue = json.load(jo_file)

    if catalogue.get('filetype') != 'joboptions':
        raise ValueError(f"Invalid JSON file type: {catalogue.get('filetype')}, expected 'joboptions'")

    return ConfigRunner.from_json(json_file, run_params,
                                  catalogue.get('properties', {}),
                                  num_threads=num_threads,
                                  num_slots=num_slots,
                                  ef_files=ef_files)
778
779

◆ main()

athenaEF.main ( )

Definition at line 1008 of file athenaEF.py.

def main():
    """Command-line entry point for athenaEF.

    Parses arguments, sets up athena configuration flags, loads the job
    configuration (from a CA module, pickle, JSON file or the trigger
    database), optionally dumps it, and runs the event loop.
    """
    parser = argparse.ArgumentParser(prog='athenaEF.py', formatter_class=
                                     lambda prog : argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=32, width=100),
                                     usage = '%(prog)s [OPTION]... -f FILE jobOptions',
                                     add_help=False)
    parser.expert_groups = []   # Keep list of expert option groups

    g = parser.add_argument_group('Options')
    g.add_argument('jobOptions', nargs='?', help='job options: CA module (package.module:function), pickle file (.pkl), or JSON file (.json)')
    g.add_argument('--threads', metavar='N', type=int, default=1, help='number of threads')
    g.add_argument('--concurrent-events', metavar='N', type=int, help='number of concurrent events if different from --threads')
    g.add_argument('--log-level', '-l', metavar='LVL', type=arg_log_level, default='INFO,ERROR', help='OutputLevel of athena,POOL')
    g.add_argument('--precommand', '-c', metavar='CMD', action='append', default=[],
                   help='Python commands executed before job options')
    g.add_argument('--postcommand', '-C', metavar='CMD', action='append', default=[],
                   help='Python commands executed after job options')
    g.add_argument('--interactive', '-i', action='store_true', help='interactive mode')
    g.add_argument('--help', '-h', nargs='?', choices=['all'], action=MyHelp, help='show help')

    g = parser.add_argument_group('Input/Output')
    g.add_argument('--file', '--filesInput', '-f', action='append', help='input RAW file')
    g.add_argument('--save-output', '-o', metavar='FILE', help='output file name')
    g.add_argument('--number-of-events', '--evtMax', '-n', metavar='N', type=int, default=-1, help='processes N events (default: -1, means all)')
    g.add_argument('--skip-events', '--skipEvents', '-k', metavar='N', type=int, default=0, help='skip N first events')
    g.add_argument('--loop-files', action='store_true', help='loop over input files if no more events')
    g.add_argument('--efdf-interface-library', metavar='LIB', default='TrigDFEmulator',
                   help='name of the EFDF interface shared library to load')

    g = parser.add_argument_group('Performance and debugging')
    g.add_argument('--perfmon', action='store_true', help='enable PerfMon')
    # NOTE(review): the malloc/math options are acted on by the shell preamble
    # before Python starts; they are declared here so they appear in --help --
    # verify against the preamble in the module docstring.
    g.add_argument('--tcmalloc', action='store_true', default=True, help='use tcmalloc')
    g.add_argument('--stdcmalloc', action='store_true', help='use stdcmalloc')
    g.add_argument('--stdcmath', action='store_true', help='use stdcmath library')
    g.add_argument('--imf', action='store_true', default=True, help='use Intel math library')
    g.add_argument('--show-includes', '-s', action='store_true', help='show printout of included files')

    g = parser.add_argument_group('Conditions')
    g.add_argument('--run-number', '-R', metavar='RUN', type=int,
                   help='run number (if None, read from first event)')
    g.add_argument('--lb-number', '-L', metavar='LBN', type=int,
                   help='lumiblock number (if None, read from first event)')
    g.add_argument('--conditions-run', metavar='RUN', type=int, default=None,
                   help='reference run number for conditions lookup (use when IS run number has no COOL data)')
    g.add_argument('--sor-time', type=arg_sor_time,
                   help='The Start Of Run time. Three formats are accepted: '
                   '1) the string "now", for current time; '
                   '2) the number of nanoseconds since epoch (e.g. 1386355338658000000 or int(time.time() * 1e9)); '
                   '3) human-readable "20/11/18 17:40:42.3043". If not specified the sor-time is read from the conditions DB')
    g.add_argument('--detector-mask', metavar='MASK', type=arg_detector_mask,
                   help='detector mask (if None, read from the conditions DB), use string "all" to enable all detectors')

    g = parser.add_argument_group('Database')
    g.add_argument('--use-database', '-b', action='store_true',
                   help='configure from trigger database using SMK')
    g.add_argument('--db-server', metavar='DB', default='TRIGGERDB_RUN3', help='DB server name (alias)')
    g.add_argument('--smk', type=int, default=None, help='Super Master Key')
    g.add_argument('--l1psk', type=int, default=None, help='L1 prescale key')
    g.add_argument('--hltpsk', type=int, default=None, help='HLT prescale key')
    g.add_argument('--use-crest', action='store_true', default=False,
                   help='Use CREST for trigger configuration')
    g.add_argument('--crest-server', metavar='URL', default=None,
                   help='CREST server URL (defaults to flags.Trigger.crestServer)')
    g.add_argument('--dump-config', action='store_true', help='Dump joboptions JSON file')
    g.add_argument('--dump-config-exit', action='store_true', help='Dump joboptions JSON file and exit')

    g = parser.add_argument_group('Magnets')
    g.add_argument('--solenoid-current', type=float, default=None,
                   help='Solenoid current in Amperes (default: nominal current for offline running, required from IS online)')
    g.add_argument('--toroids-current', type=float, default=None,
                   help='Toroids current in Amperes (default: nominal current for offline running, required from IS online)')

    g = parser.add_argument_group('Online')
    g.add_argument('--online-environment', action='store_true',
                   help='Enable online environment: read run parameters from IS and trigger '
                   'configuration keys (SMK, L1PSK, HLTPSK) from OKS via WEBDAQ REST API')
    g.add_argument('--partition', metavar='NAME', default=None,
                   help='TDAQ partition name (defaults to TDAQ_PARTITION environment variable)')
    g.add_argument('--webdaq-base', metavar='URL', default=None,
                   help='WEBDAQ base URL (defaults to TDAQ_WEBDAQ_BASE environment variable)')

    g = parser.add_argument_group('Online Histogramming')
    g.add_argument('--oh-monitoring', '-M', action='store_true', default=False,
                   help='enable online histogram publishing via WebdaqHistSvc')

    g = parser.add_argument_group('Expert')
    parser.expert_groups.append(g)
    (args, unparsed_args) = parser.parse_known_args()
    check_args(parser, args)

    # set ROOT to batch mode (ATR-21890)
    from ROOT import gROOT
    gROOT.SetBatch()

    # set default OutputLevels and file inclusion
    import AthenaCommon.Logging
    AthenaCommon.Logging.log.setLevel(getattr(logging, args.log_level[0]))
    AthenaCommon.Logging.log.setFormat("%(asctime)s Py:%(name)-31s %(levelname)7s %(message)s")
    if args.show_includes:
        from AthenaCommon.Include import include
        include.setShowIncludes( True )

    # consistency checks for arguments
    if not args.concurrent_events:
        args.concurrent_events = args.threads

    # Update args and set athena flags
    from TrigPSC import PscConfig
    from TrigPSC.PscDefaultFlags import defaultOnlineFlags

    # Get flags with online defaults (same as athenaHLT)
    flags = defaultOnlineFlags()

    # Enable WebdaqHistSvc for online histogram publishing if requested
    if args.oh_monitoring:
        flags.Trigger.Online.useOnlineWebdaqHistSvc = True
        log.info("Enabled WebdaqHistSvc for online histogram publishing")

    # CREST configuration (same as athenaHLT)
    log.info("Using CREST for trigger configuration: %s", args.use_crest)
    if args.use_crest:
        flags.Trigger.useCrest = True
        if args.crest_server:
            flags.Trigger.crestServer = args.crest_server
        else:
            args.crest_server = flags.Trigger.crestServer

    update_run_params(args, flags)

    if args.use_database:
        # If HLTPSK was given on the command line OR from OKS (--online-environment),
        # we ignore what is stored in COOL and use the specified key directly from the DB.
        # This is needed because COOL may point to a different HLTPSK for the forced run number.
        PscConfig.forcePSK = (args.hltpsk is not None) or args.online_environment
        # Read trigger config keys from COOL/OKS if not specified
        update_trigconf_keys(args, flags)

    # Fill flags from command line (if not running from DB/JSON)
    if not args.use_database and args.jobOptions and not args.jobOptions.endswith('.json'):
        PscConfig.unparsedArguments = unparsed_args
        for flag_arg in unparsed_args:
            flags.fillFromString(flag_arg)

    PscConfig.interactive = args.interactive
    PscConfig.exitAfterDump = args.dump_config_exit

    # NOTE: Do NOT set flags.Input.Files here!
    # Like athenaHLT, we keep Input.Files=[] during configuration to ensure the
    # configuration is portable and doesn't depend on specific input file metadata.
    # Input files are passed to EFInterface for runtime use only.

    # Set conditions run number override (for test partitions with fake run numbers)
    if args.conditions_run is not None:
        log.info("Using conditions from reference run %d (overriding run %s for IOV lookup)",
                 args.conditions_run, args.run_number)
        flags.Input.ConditionsRunNumber = args.conditions_run

    # Set number of events
    if args.number_of_events > 0:
        flags.Exec.MaxEvents = args.number_of_events

    # Set skip events
    if args.skip_events > 0:
        flags.Exec.SkipEvents = args.skip_events

    # NOTE: Do NOT set flags.Concurrency.NumThreads or NumConcurrentEvents here.
    # Threading is set at runtime via iProperty after configure() - see ConfigRunner.run()

    # Enable PerfMon if requested
    flags.PerfMon.doFastMonMT = args.perfmon

    # Configure EF ByteStream services (mandatory to run without HLTMPPU)
    # This provides the data flow interface that would normally come from HLTMPPU
    flags.Trigger.Online.useEFByteStreamSvc = True
    ef = flags.Trigger.Online.EFInterface
    ef_files = args.file if args.file else []
    ef.Files = ef_files
    ef.LoopFiles = args.loop_files
    ef.NumEvents = args.number_of_events
    ef.SkipEvents = args.skip_events
    ef.RunNumber = args.run_number
    # NOTE(review): the following args attributes (T0_project_tag, beam_type,
    # beam_energy, trigger_type, stream, lumiblock, file_detector_mask) are not
    # parser options -- presumably set by update_run_params; verify there.
    ef.T0ProjectTag = args.T0_project_tag
    ef.BeamType = args.beam_type
    ef.BeamEnergy = args.beam_energy
    ef.TriggerType = args.trigger_type
    ef.Stream = args.stream
    ef.Lumiblock = args.lumiblock
    ef.DetMask = args.file_detector_mask
    ef.LibraryName = args.efdf_interface_library

    # Execute precommands
    if args.precommand:
        log.info("Executing precommand(s)")
        for cmd in args.precommand:
            log.info(" %s", cmd)
            exec(cmd, globals(), {'flags': flags})

    # Determine input type
    is_database = args.use_database
    is_pickle = False
    is_json = False

    if not is_database and args.jobOptions:
        jobOptions = args.jobOptions
        is_pickle = jobOptions.endswith('.pkl')
        is_json = jobOptions.endswith('.json')

    if is_database:
        # Load configuration from trigger database
        # Handle CREST vs standard DB access (same as athenaHLT)
        if args.use_crest:
            crestconn = TriggerCrestUtil.getCrestConnection(args.db_server)
            db_alias = f"{args.crest_server}/{crestconn}"
            log.info("Loading configuration via CREST from %s with SMK %d", db_alias, args.smk)
        else:
            db_alias = args.db_server
            log.info("Loading configuration from database %s with SMK %d", db_alias, args.smk)

        # Get run parameters for prepareForStart
        run_params = get_run_params(args).to_dict()
        acc = load_from_database(db_alias, args.smk, args.l1psk, args.hltpsk, run_params,
                                 num_threads=args.threads, num_slots=args.concurrent_events,
                                 ef_files=ef_files)
        log.info("Configuration loaded from database")

    elif is_pickle:
        # Load ComponentAccumulator from pickle file
        log.info("Loading configuration from pickle file: %s", jobOptions)
        with open(jobOptions, 'rb') as f:
            acc = pickle.load(f)
        log.info("Configuration loaded from pickle")

    elif is_json:
        # Load configuration from JSON file
        log.info("Loading configuration from JSON file: %s", jobOptions)
        # Get run parameters for prepareForStart
        run_params = get_run_params(args).to_dict()
        acc = load_from_json(jobOptions, run_params,
                             num_threads=args.threads, num_slots=args.concurrent_events,
                             ef_files=ef_files)
        log.info("Configuration loaded from JSON")

    else:
        # Load from CA module - follow the same pattern as athenaHLT/TrigPSCPythonCASetup:
        # 1. Build the full configuration with services
        # 2. Dump to JSON file
        # 3. Use TrigConf::JobOptionsSvc to load from JSON
        # This preserves the ability to use the same JobOptionsSvc as athenaHLT
        log.info("Loading CA configuration from: %s", jobOptions)

        # Clone and lock flags for services configuration (as done in TrigPSCPythonCASetup)
        from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
        from AthenaConfiguration.MainServicesConfig import addMainSequences
        from TrigServices.TriggerUnixStandardSetup import commonServicesCfg
        from AthenaConfiguration.ComponentFactory import CompFactory

        locked_flags = flags.clone()
        locked_flags.lock()

        # Create base CA with framework services (like TrigPSCPythonCASetup)
        cfg = ComponentAccumulator(CompFactory.AthSequencer("AthMasterSeq", Sequential=True))
        cfg.setAppProperty('ExtSvcCreates', False)
        cfg.setAppProperty("MessageSvcType", "TrigMessageSvc")
        cfg.setAppProperty("JobOptionsSvcType", "TrigConf::JobOptionsSvc")

        # Add main sequences and common services (includes TrigServicesCfg)
        addMainSequences(locked_flags, cfg)
        cfg.merge(commonServicesCfg(locked_flags))

        # Now merge user CA config (with unlocked flags, as in TrigPSCPythonCASetup)
        cfg_func = AthHLT.getCACfg(jobOptions)
        cfg.merge(cfg_func(flags))

        # Execute postcommands before dumping (like TrigPSCPythonCASetup)
        if args.postcommand:
            log.info("Executing postcommand(s)")
            for cmd in args.postcommand:
                log.info(" %s", cmd)
                exec(cmd, globals(), {'flags': flags, 'cfg': cfg})
            args.postcommand = []  # Clear so we don't run them again later

        # Dump configuration to JSON (like TrigPSCPythonCASetup)
        fname = "HLTJobOptions_EF"
        log.info("Dumping configuration to %s.pkl and %s.json", fname, fname)
        with open(f"{fname}.pkl", "wb") as f:
            cfg.store(f)

        from TrigConfIO.JsonUtils import create_joboptions_json
        create_joboptions_json(f"{fname}.pkl", f"{fname}.json")

        # Check for dump-and-exit
        if args.dump_config_exit:
            log.info("Configuration dumped to %s.json. Exiting...", fname)
            sys.exit(0)

        # Now load the JSON using JsonConfigRunner with TrigConf::JobOptionsSvc
        log.info("Loading configuration from %s.json via TrigConf::JobOptionsSvc", fname)
        # Get run parameters for prepareForStart
        run_params = get_run_params(args).to_dict()
        acc = load_from_json(f"{fname}.json", run_params,
                             num_threads=args.threads, num_slots=args.concurrent_events,
                             ef_files=ef_files)

    log.info("Configuration loaded with HLT online services")

    # Execute postcommands
    # (already cleared above in the CA-module branch, so no double execution)
    if args.postcommand:
        log.info("Executing postcommand(s)")
        for cmd in args.postcommand:
            log.info(" %s", cmd)
            exec(cmd, globals(), {'flags': flags, 'acc': acc})

    # Dump configuration if requested
    if args.dump_config or args.dump_config_exit:
        fname = "HLTJobOptions_EF"

        if is_database:
            # For DB mode, fetch properties via Python API
            from TrigConfIO.HLTTriggerConfigAccess import HLTJobOptionsAccess
            log.info("Fetching configuration from database for dump...")
            jo_access = HLTJobOptionsAccess(dbalias=acc.db_server, smkey=acc.smk)
            props = jo_access.algorithms()

            log.info("Dumping configuration to %s.json", fname)
            hlt_json = {'filetype': 'joboptions', 'properties': props}
            with open(f"{fname}.json", "w") as f:
                json.dump(hlt_json, f, indent=4, sort_keys=True, ensure_ascii=True)

        elif is_json:
            # For JSON mode, properties were already loaded
            props = acc.properties
            if props:
                log.info("Dumping configuration to %s.json", fname)
                hlt_json = {'filetype': 'joboptions', 'properties': props}
                with open(f"{fname}.json", "w") as f:
                    json.dump(hlt_json, f, indent=4, sort_keys=True, ensure_ascii=True)
            else:
                log.warning("No properties available to dump")

        elif is_pickle:
            # For pickle-loaded ComponentAccumulator, gather properties
            app_props, msg_props, comp_props = acc.gatherProps()
            props = {"ApplicationMgr": app_props, "MessageSvc": msg_props}
            for comp, name, value in comp_props:
                props.setdefault(comp, {})[name] = value

            log.info("Dumping configuration to %s.json", fname)
            hlt_json = {'filetype': 'joboptions', 'properties': props}
            with open(f"{fname}.json", "w") as f:
                json.dump(hlt_json, f, indent=4, sort_keys=True, ensure_ascii=True)

        # Note: For CA module, dumping is already handled earlier
        # before converting to ConfigRunner

        if args.dump_config_exit:
            log.info("Configuration dumped. Exiting...")
            sys.exit(0)

    # Run the application directly (like athena.py does)
    log.info("Starting Athena execution...")

    # Create worker directory structure that HLT services expect
    # (normally created by HLTMPPU/PSC). Worker ID 1 means single-worker, non-forked mode
    # and must match what we pass to hltUpdateAfterFork(worker_id=1) in ConfigRunner.run()
    worker_dir = os.path.join(os.getcwd(), "athenaHLT_workers", "athenaHLT-01")
    if not os.path.exists(worker_dir):
        log.info("Creating worker directory: %s", worker_dir)
        os.makedirs(worker_dir, exist_ok=True)

    if args.interactive:
        log.info("Interactive mode - call acc.run() to execute")
        import code
        code.interact(local={'acc': acc, 'flags': flags})
    else:
        # Run the application
        from AthenaCommon import ExitCodes
        exitcode = 0
        try:
            # Pass maxEvents if explicitly set (including -1 for all events)
            sc = acc.run(args.number_of_events)
            if sc.isFailure():
                exitcode = ExitCodes.EXE_ALG_FAILURE
        except SystemExit as e:
            exitcode = ExitCodes.EXE_ALG_FAILURE if e.code == 1 else e.code
        except Exception:
            traceback.print_exc()
            exitcode = ExitCodes.UNKNOWN_EXCEPTION

        log.info('Leaving with code %d: "%s"', exitcode, ExitCodes.what(exitcode))
        sys.exit(exitcode)
1405
1406
int main()
Definition hello.cxx:18

◆ update_run_params()

athenaEF.update_run_params ( args,
flags )
Update run parameters from IS, file, or conditions DB

Definition at line 833 of file athenaEF.py.

def update_run_params(args, flags):
    """Update run parameters from IS, input file, or the conditions DB.

    Fills in any of run_number, lb_number, sor_time, detector_mask and the
    magnet currents on `args` that were not given on the command line.
    Values explicitly given on the command line always take precedence.
    Exits the process if the parameters are inconsistent or cannot be
    determined (unknown run number).
    """

    # If --online-environment is specified, try to read from Information Service first
    if getattr(args, 'online_environment', False):
        log.info("Reading run parameters from Information Service via WEBDAQ")
        # Pass command-line magnet values as overrides (if provided).
        # strict=True ensures we fail if the IS read fails, rather than falling
        # back to defaults. Command-line magnet values take precedence over IS.
        solenoid_override = getattr(args, 'solenoid_current', None)
        toroids_override = getattr(args, 'toroids_current', None)

        run_params = get_run_params(from_is=True,
                                    partition=getattr(args, 'partition', None),
                                    webdaq_base=getattr(args, 'webdaq_base', None),
                                    strict=True,
                                    solenoid_current_override=solenoid_override,
                                    toroids_current_override=toroids_override)
        # Update args with values from IS (only if not already set on command line)
        if args.run_number is None and run_params.run_number is not None:
            args.run_number = run_params.run_number
            log.info("Using run_number=%d from IS", args.run_number)
        if args.lb_number is None and run_params.lb_number is not None:
            args.lb_number = run_params.lb_number
            log.info("Using lb_number=%d from IS", args.lb_number)
        if args.sor_time is None and run_params.sor_time is not None:
            args.sor_time = run_params.sor_time
            log.info("Using sor_time=%s from IS", args.sor_time)
        if args.detector_mask is None and run_params.detector_mask is not None:
            args.detector_mask = run_params.detector_mask
            log.info("Using detector_mask=%s from IS", args.detector_mask)
        # Update magnet currents from IS (run_params already has command-line overrides if provided)
        args.solenoid_current = run_params.solenoid_current
        args.toroids_current = run_params.toroids_current

    # -R and -L must be given together. Abort here instead of only logging:
    # continuing with a half-specified pair fails obscurely later (e.g. in the
    # trigger-key lookup). This matches the fatal handling of the unknown-run
    # error below.
    if (args.run_number is None) != (args.lb_number is None):
        log.error("Both or neither of the options -R (--run-number) and -L (--lb-number) have to be specified")
        sys.exit(1)

    # Read metadata from input file (like HLTMPPy/runner.py getRunParamsFromFile)
    if args.file:
        from eformat import EventStorage
        dr = EventStorage.pickDataReader(args.file[0])
        # Run/LB only if not already fixed on the command line (or via IS)
        if args.run_number is None:
            args.run_number = dr.runNumber()
            args.lb_number = dr.lumiblockNumber()
        args.T0_project_tag = dr.projectTag()
        args.beam_type = dr.beamType()
        args.beam_energy = dr.beamEnergy()
        args.trigger_type = dr.triggerType()
        args.stream = dr.stream()
        args.lumiblock = dr.lumiblockNumber()
        args.file_detector_mask = "{:032x}".format(dr.detectorMask())
    else:
        # No input file: fall back to neutral defaults for the file metadata
        args.T0_project_tag = getattr(args, 'T0_project_tag', '')
        args.beam_type = getattr(args, 'beam_type', 0)
        args.beam_energy = getattr(args, 'beam_energy', 0)
        args.trigger_type = getattr(args, 'trigger_type', 0)
        args.stream = getattr(args, 'stream', '')
        args.lumiblock = getattr(args, 'lumiblock', 0)
        args.file_detector_mask = getattr(args, 'file_detector_mask', '00000000000000000000000000000000')

    # Fetch Start-Of-Run parameters from the conditions DB only if still needed
    sor_params = None
    if (args.sor_time is None or args.detector_mask is None) and args.run_number is not None:
        sor_params = AthHLT.get_sor_params(args.run_number)
        log.debug('SOR parameters: %s', sor_params)
        if sor_params is None:
            log.error("Run %d does not exist. If you want to use this run-number specify "
                      "remaining run parameters, e.g.: --sor-time=now --detector-mask=all", args.run_number)
            sys.exit(1)

    if args.sor_time is None and sor_params is not None:
        args.sor_time = arg_sor_time(str(sor_params['SORTime']))

    if args.detector_mask is None and sor_params is not None:
        dmask = sor_params['DetectorMask']
        # Pre-Run-2 masks are stored as plain integers; normalize to hex string
        if args.run_number < AthHLT.CondDB._run2:
            dmask = hex(dmask)
        args.detector_mask = arg_detector_mask(dmask)

    # Apply defaults for magnet currents if not set (offline mode only).
    # In online mode, magnets must come from IS or command line (handled above).
    if getattr(args, 'solenoid_current', None) is None:
        args.solenoid_current = RunParams.DEFAULT_SOLENOID_CURRENT
        log.debug("Using default solenoid_current=%.1f", args.solenoid_current)
    if getattr(args, 'toroids_current', None) is None:
        args.toroids_current = RunParams.DEFAULT_TOROIDS_CURRENT
        log.debug("Using default toroids_current=%.1f", args.toroids_current)
920
921

◆ update_trigconf_keys()

athenaEF.update_trigconf_keys ( args,
flags )
Update trigger configuration keys from OKS, COOL, or CREST.

Priority order:
1. Command-line arguments (always take precedence)
2. OKS via WEBDAQ (if --online-environment is set)
3. CREST (if --use-crest is set)
4. COOL (default)

Definition at line 922 of file athenaEF.py.

def update_trigconf_keys(args, flags):
    """Update trigger configuration keys from OKS, COOL, or CREST.

    Priority order:
    1. Command-line arguments (always take precedence)
    2. OKS via WEBDAQ (if --online-environment is set)
    3. CREST (if --use-crest is set)
    4. COOL (default)
    """

    # All three keys supplied on the command line: nothing to look up.
    if args.smk is not None and args.l1psk is not None and args.hltpsk is not None:
        log.info("Using trigger configuration keys from command line: SMK=%d, L1PSK=%d, HLTPSK=%d",
                 args.smk, args.l1psk, args.hltpsk)
        return

    keys = None

    # OKS takes priority when running in the online environment
    if getattr(args, 'online_environment', False):
        log.info("Reading trigger configuration keys from OKS (online environment)")
        # strict=True: raise on OKS read failure instead of falling back to COOL
        oks = get_trigconf_keys_from_oks(partition=getattr(args, 'partition', None),
                                         webdaq_base=getattr(args, 'webdaq_base', None),
                                         strict=True)
        log.info("Retrieved trigger keys from OKS: %s", oks)
        # strict=True guarantees all keys are present (or an exception was raised)
        keys = {'SMK': oks.get('SMK'),
                'LVL1PSK': oks.get('L1PSK'),
                'HLTPSK': oks.get('HLTPSK')}
        # Adopt the OKS db alias unless a non-default server was given on the command line
        if oks.get('db_alias') and args.db_server == 'TRIGGERDB_RUN3':
            args.db_server = oks['db_alias']
            log.info("Using db_server=%s from OKS", args.db_server)

    # Outside the online environment, fall back to CREST or COOL
    if keys is None:
        if args.use_crest:
            crest_server = args.crest_server or flags.Trigger.crestServer
            log.info("Reading trigger configuration keys from CREST for run %s", args.run_number)
            keys = AthHLT.get_trigconf_keys_crest(args.run_number, args.lb_number, crest_server)
            log.info("Retrieved trigger keys from CREST: %s", keys)
        else:
            log.info("Reading trigger configuration keys from COOL for run %s", args.run_number)
            keys = AthHLT.get_trigconf_keys(args.run_number, args.lb_number)
            log.info("Retrieved trigger keys from COOL: %s", keys)

    # Fill in only the keys the user did not fix on the command line;
    # a missing entry in the lookup result is fatal.
    try:
        if args.smk is not None:
            log.debug("Using SMK=%d from command line (ignoring DB/OKS value %s)", args.smk, keys.get('SMK'))
        else:
            args.smk = keys['SMK']
            log.debug("Using SMK=%d from conditions DB/OKS", args.smk)
        if args.l1psk is not None:
            log.debug("Using L1PSK=%d from command line (ignoring DB/OKS value %s)", args.l1psk, keys.get('LVL1PSK'))
        else:
            args.l1psk = keys['LVL1PSK']
            log.debug("Using L1PSK=%d from conditions DB/OKS", args.l1psk)
        if args.hltpsk is not None:
            log.debug("Using HLTPSK=%d from command line (ignoring DB/OKS value %s)", args.hltpsk, keys.get('HLTPSK'))
        else:
            args.hltpsk = keys['HLTPSK']
            log.debug("Using HLTPSK=%d from conditions DB/OKS", args.hltpsk)
    except KeyError:
        log.error("Cannot read trigger configuration keys from the conditions database for run %d", args.run_number)
        sys.exit(1)
991
992

Variable Documentation

◆ log

athenaEF.log = logging.getLogger('athenaEF')

Definition at line 54 of file athenaEF.py.