def read_trigger_keys_from_oks(partition=None, webdaq_base=None, strict=False):
    """Read trigger configuration keys (SMK, L1PSK, HLTPSK) and DB info from OKS via WEBDAQ REST API.

    This reads the keys from the partition's TriggerConfiguration object and its
    related L1TriggerConfiguration and TriggerDBConnection objects.

    Traversal:
    - Partition -> TriggerConfiguration -> L1TriggerConfiguration (Lvl1PrescaleKey)
    - Partition -> TriggerConfiguration -> TriggerDBConnection (SuperMasterKey)
    - Partition -> TriggerConfiguration -> HLTImplementationDB (hltPrescaleKey)

    Args:
        partition: The partition name (default: from TDAQ_PARTITION env var)
        webdaq_base: Base URL for webis_server (default: from TDAQ_WEBDAQ_BASE env var)
        strict: If True, raise an exception if OKS read fails (for --online-environment)

    Returns:
        dict with keys: SMK, L1PSK, HLTPSK, db_alias (values may be None if not found)

    Raises:
        RuntimeError: If strict=True and OKS read fails

    NOTE(review): reconstructed from a whitespace-mangled source with some lines
    missing; the function name and the non-strict log.warning fallbacks are
    inferred from the docstring and surrounding fragments -- confirm against
    the original file.
    """
    def _strict_fail(msg):
        # In strict mode (--online-environment) any failure is fatal;
        # otherwise log and continue with whatever was collected so far.
        if strict:
            raise RuntimeError(msg + " (required for --online-environment)")
        log.warning(msg)

    if webdaq_base is None:
        webdaq_base = os.environ.get('TDAQ_WEBDAQ_BASE')
        if webdaq_base is None:
            _strict_fail("TDAQ_WEBDAQ_BASE not set, cannot read from OKS")
            return {'SMK': None, 'L1PSK': None, 'HLTPSK': None, 'db_alias': None}

    if partition is None:
        partition = os.environ.get('TDAQ_PARTITION', 'ATLAS')

    log.info("Reading trigger configuration keys from OKS via WEBDAQ: %s (partition=%s)",
             webdaq_base, partition)

    result = {'SMK': None, 'L1PSK': None, 'HLTPSK': None, 'db_alias': None}

    def extract_oks_data(response_json):
        """Extract data from OKS compact format: [name, type, attributes, relationships]
        Returns tuple (attributes_dict, relationships_dict)
        """
        if isinstance(response_json, list) and len(response_json) >= 4:
            return response_json[2], response_json[3]
        elif isinstance(response_json, list) and len(response_json) >= 3:
            return response_json[2], {}
        return response_json, {}

    def get_ref_id(ref):
        """Extract object ID from a relationship reference."""
        # A reference may be a [class, id] pair, a dict carrying an 'id'
        # key, or a bare id string.
        if isinstance(ref, list) and len(ref) >= 2:
            return ref[1]
        elif isinstance(ref, dict) and 'id' in ref:
            return ref['id']
        elif isinstance(ref, str):
            return ref
        return None

    try:
        # Entry point of the traversal: the Partition object itself.
        url = f"{webdaq_base}/info/current/{partition}/oks/Partition/{partition}?format=compact"
        log.debug("Fetching Partition from OKS: %s", url)
        response = requests.get(url, timeout=10)
        if response.status_code == 200:
            part_attrs, part_rels = extract_oks_data(response.json())
            log.debug("Partition attributes: %s", part_attrs)
            log.debug("Partition relationships: %s", part_rels)

            if 'TriggerConfiguration' in part_rels:
                trig_conf_id = get_ref_id(part_rels['TriggerConfiguration'])
                if trig_conf_id:
                    log.debug("TriggerConfiguration ID: %s", trig_conf_id)

                    url = f"{webdaq_base}/info/current/{partition}/oks/TriggerConfiguration/{trig_conf_id}?format=compact"
                    response = requests.get(url, timeout=10)
                    if response.status_code == 200:
                        trig_attrs, trig_rels = extract_oks_data(response.json())
                        log.debug("TriggerConfiguration attributes: %s", trig_attrs)
                        log.debug("TriggerConfiguration relationships: %s", trig_rels)

                        # L1 prescale key from the L1TriggerConfiguration object.
                        if 'l1' in trig_rels:
                            l1_id = get_ref_id(trig_rels['l1'])
                            if l1_id:
                                url = f"{webdaq_base}/info/current/{partition}/oks/L1TriggerConfiguration/{l1_id}?format=compact"
                                resp = requests.get(url, timeout=10)
                                if resp.status_code == 200:
                                    l1_attrs, _ = extract_oks_data(resp.json())
                                    log.debug("L1TriggerConfiguration attributes: %s", l1_attrs)
                                    if 'Lvl1PrescaleKey' in l1_attrs:
                                        result['L1PSK'] = int(l1_attrs['Lvl1PrescaleKey'])
                                        log.info("Got L1PSK=%d from OKS", result['L1PSK'])

                        # SMK and DB alias from the TriggerDBConnection object.
                        if 'TriggerDBConnection' in trig_rels:
                            db_id = get_ref_id(trig_rels['TriggerDBConnection'])
                            if db_id:
                                url = f"{webdaq_base}/info/current/{partition}/oks/TriggerDBConnection/{db_id}?format=compact"
                                resp = requests.get(url, timeout=10)
                                if resp.status_code == 200:
                                    db_attrs, _ = extract_oks_data(resp.json())
                                    log.debug("TriggerDBConnection attributes: %s", db_attrs)
                                    if 'SuperMasterKey' in db_attrs:
                                        result['SMK'] = int(db_attrs['SuperMasterKey'])
                                        log.info("Got SMK=%d from OKS", result['SMK'])
                                    if 'Alias' in db_attrs:
                                        result['db_alias'] = db_attrs['Alias']
                                        log.info("Got db_alias=%s from OKS", result['db_alias'])

                        # HLT prescale key from the HLTImplementationDB object.
                        if 'hlt' in trig_rels:
                            hlt_id = get_ref_id(trig_rels['hlt'])
                            if hlt_id:
                                url = f"{webdaq_base}/info/current/{partition}/oks/HLTImplementationDB/{hlt_id}?format=compact"
                                resp = requests.get(url, timeout=10)
                                if resp.status_code == 200:
                                    hlt_attrs, _ = extract_oks_data(resp.json())
                                    log.debug("HLTImplementationDB attributes: %s", hlt_attrs)
                                    if 'hltPrescaleKey' in hlt_attrs:
                                        result['HLTPSK'] = int(hlt_attrs['hltPrescaleKey'])
                                        log.info("Got HLTPSK=%d from OKS", result['HLTPSK'])
        else:
            _strict_fail(f"Failed to fetch Partition from OKS: HTTP {response.status_code}")

    except requests.exceptions.RequestException as e:
        # Network-level failure (connection refused, timeout, DNS, ...).
        _strict_fail(f"Error fetching trigger keys from OKS: {e}")
    except (ValueError, KeyError, TypeError) as e:
        # Malformed/unexpected payload shape while parsing the OKS response.
        _strict_fail(f"Error parsing trigger keys from OKS: {e}")

    if strict:
        # All three keys are mandatory for --online-environment.
        missing = [k for k in ['SMK', 'L1PSK', 'HLTPSK'] if result.get(k) is None]
        if missing:
            raise RuntimeError(f"Failed to get {', '.join(missing)} from OKS (required for --online-environment)")

    return result
# NOTE(review): whitespace-mangled fragment; the leading integers are line
# numbers from the original file, and gaps in that numbering mean lines are
# missing from this view (e.g. the enclosing 'def' before 834 and the call
# producing 'run_params' around 844-845). Comments only annotate the visible
# fragments.
834 """Update run parameters from IS, file, or conditions DB"""
# --- Online path: read run parameters from the Information Service (IS) ---
837 if getattr(args,
'online_environment',
False):
838 log.info(
"Reading run parameters from Information Service via WEBDAQ")
# Magnet currents given on the command line override whatever IS reports.
842 solenoid_override = getattr(args,
'solenoid_current',
None)
843 toroids_override = getattr(args,
'toroids_current',
None)
# Keyword arguments of the (not visible) IS read call around 844-845.
846 partition=getattr(args,
'partition',
None),
847 webdaq_base=getattr(args,
'webdaq_base',
None),
849 solenoid_current_override=solenoid_override,
850 toroids_current_override=toroids_override)
# Adopt IS values only where the command line left them unset.
852 if args.run_number
is None and run_params.run_number
is not None:
853 args.run_number = run_params.run_number
854 log.info(
"Using run_number=%d from IS", args.run_number)
855 if args.lb_number
is None and run_params.lb_number
is not None:
856 args.lb_number = run_params.lb_number
857 log.info(
"Using lb_number=%d from IS", args.lb_number)
858 if args.sor_time
is None and run_params.sor_time
is not None:
859 args.sor_time = run_params.sor_time
860 log.info(
"Using sor_time=%s from IS", args.sor_time)
861 if args.detector_mask
is None and run_params.detector_mask
is not None:
862 args.detector_mask = run_params.detector_mask
863 log.info(
"Using detector_mask=%s from IS", args.detector_mask)
865 args.solenoid_current = run_params.solenoid_current
866 args.toroids_current = run_params.toroids_current
# -R and -L must be given together (or both left unset).
868 if (args.run_number
is not None and args.lb_number
is None)
or (args.run_number
is None and args.lb_number
is not None):
869 log.error(
"Both or neither of the options -R (--run-number) and -L (--lb-number) have to be specified")
# --- File path: read run parameters from the first input RAW file ---
873 from eformat
import EventStorage
874 dr = EventStorage.pickDataReader(args.file[0])
875 if args.run_number
is None:
876 args.run_number = dr.runNumber()
877 args.lb_number = dr.lumiblockNumber()
878 args.T0_project_tag = dr.projectTag()
879 args.beam_type = dr.beamType()
880 args.beam_energy = dr.beamEnergy()
881 args.trigger_type = dr.triggerType()
882 args.stream = dr.stream()
883 args.lumiblock = dr.lumiblockNumber()
# Detector mask formatted as a zero-padded 32-hex-digit (128-bit) string.
884 args.file_detector_mask =
"{:032x}".format(dr.detectorMask())
# --- No input file: fall back to neutral defaults ---
886 args.T0_project_tag = getattr(args,
'T0_project_tag',
'')
887 args.beam_type = getattr(args,
'beam_type', 0)
888 args.beam_energy = getattr(args,
'beam_energy', 0)
889 args.trigger_type = getattr(args,
'trigger_type', 0)
890 args.stream = getattr(args,
'stream',
'')
891 args.lumiblock = getattr(args,
'lumiblock', 0)
892 args.file_detector_mask = getattr(args,
'file_detector_mask',
'00000000000000000000000000000000')
# --- Conditions DB: fill sor_time/detector_mask from SOR parameters ---
895 if (args.sor_time
is None or args.detector_mask
is None)
and args.run_number
is not None:
896 sor_params = AthHLT.get_sor_params(args.run_number)
897 log.debug(
'SOR parameters: %s', sor_params)
898 if sor_params
is None:
899 log.error(
"Run %d does not exist. If you want to use this run-number specify "
900 "remaining run parameters, e.g.: --sor-time=now --detector-mask=all", args.run_number)
903 if args.sor_time
is None and sor_params
is not None:
906 if args.detector_mask
is None and sor_params
is not None:
907 dmask = sor_params[
'DetectorMask']
# Pre-Run-2 masks presumably need widening/conversion -- body at 909-911
# is not visible here; TODO confirm against the original file.
908 if args.run_number < AthHLT.CondDB._run2:
912 if args.dump_config_exit
and not args.run_number:
# --- Default magnet currents when not supplied by CLI or IS ---
917 if getattr(args,
'solenoid_current',
None)
is None:
918 args.solenoid_current = RunParams.DEFAULT_SOLENOID_CURRENT
919 log.debug(
"Using default solenoid_current=%.1f", args.solenoid_current)
920 if getattr(args,
'toroids_current',
None)
is None:
921 args.toroids_current = RunParams.DEFAULT_TOROIDS_CURRENT
922 log.debug(
"Using default toroids_current=%.1f", args.toroids_current)
# Command-line interface definition. Groups mirror the functional areas of the
# application; the parser tracks "expert" groups separately so the custom help
# action can hide or show them.
parser = argparse.ArgumentParser(
    prog='athenaEF.py',
    formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
        prog, max_help_position=32, width=100),
    usage='%(prog)s [OPTION]... -f FILE jobOptions',
    # NOTE(review): closing arguments of this call were lost in the mangled
    # source (orig line 1015); add_help=False is implied by the custom
    # --help/-h argument below (argparse would otherwise raise a conflicting
    # option error) -- confirm against the original file.
    add_help=False)
parser.expert_groups = []

# General options.
g = parser.add_argument_group('Options')
g.add_argument('jobOptions', nargs='?',
               help='job options: CA module (package.module:function), pickle file (.pkl), or JSON file (.json)')
g.add_argument('--threads', metavar='N', type=int, default=1,
               help='number of threads')
g.add_argument('--concurrent-events', metavar='N', type=int,
               help='number of concurrent events if different from --threads')
g.add_argument('--log-level', '-l', metavar='LVL', type=arg_log_level, default='INFO,ERROR',
               help='OutputLevel of athena,POOL')
g.add_argument('--precommand', '-c', metavar='CMD', action='append', default=[],
               help='Python commands executed before job options')
g.add_argument('--postcommand', '-C', metavar='CMD', action='append', default=[],
               help='Python commands executed after job options')
g.add_argument('--interactive', '-i', action='store_true',
               help='interactive mode')
g.add_argument('--help', '-h', nargs='?', choices=['all'], action=MyHelp,
               help='show help')

# Input/output selection.
g = parser.add_argument_group('Input/Output')
g.add_argument('--file', '--filesInput', '-f', action='append',
               help='input RAW file')
g.add_argument('--save-output', '-o', metavar='FILE',
               help='output file name')
g.add_argument('--number-of-events', '--evtMax', '-n', metavar='N', type=int, default=-1,
               help='processes N events (default: -1, means all)')
g.add_argument('--skip-events', '--skipEvents', '-k', metavar='N', type=int, default=0,
               help='skip N first events')
g.add_argument('--loop-files', action='store_true',
               help='loop over input files if no more events')
g.add_argument('--efdf-interface-library', metavar='LIB', default='TrigDFEmulator',
               help='name of the EFDF interface shared library to load')

# Profiling and debugging aids.
g = parser.add_argument_group('Performance and debugging')
g.add_argument('--perfmon', action='store_true',
               help='enable PerfMon')
g.add_argument('--tcmalloc', action='store_true', default=True,
               help='use tcmalloc')
g.add_argument('--stdcmalloc', action='store_true',
               help='use stdcmalloc')
g.add_argument('--stdcmath', action='store_true',
               help='use stdcmath library')
g.add_argument('--imf', action='store_true', default=True,
               help='use Intel math library')
g.add_argument('--show-includes', '-s', action='store_true',
               help='show printout of included files')

# Run conditions (run/LB number, SOR time, detector mask).
g = parser.add_argument_group('Conditions')
g.add_argument('--run-number', '-R', metavar='RUN', type=int,
               help='run number (if None, read from first event)')
g.add_argument('--lb-number', '-L', metavar='LBN', type=int,
               help='lumiblock number (if None, read from first event)')
g.add_argument('--conditions-run', metavar='RUN', type=int, default=None,
               help='reference run number for conditions lookup (use when IS run number has no COOL data)')
g.add_argument('--sor-time', type=arg_sor_time,
               help='The Start Of Run time. Three formats are accepted: '
                    '1) the string "now", for current time; '
                    '2) the number of nanoseconds since epoch (e.g. 1386355338658000000 or int(time.time() * 1e9)); '
                    '3) human-readable "20/11/18 17:40:42.3043". If not specified the sor-time is read from the conditions DB')
g.add_argument('--detector-mask', metavar='MASK', type=arg_detector_mask,
               help='detector mask (if None, read from the conditions DB), use string "all" to enable all detectors')

# Trigger database configuration source.
g = parser.add_argument_group('Database')
g.add_argument('--use-database', '-b', action='store_true',
               help='configure from trigger database using SMK')
g.add_argument('--db-server', metavar='DB', default='TRIGGERDB_RUN3',
               help='DB server name (alias)')
g.add_argument('--smk', type=int, default=None,
               help='Super Master Key')
g.add_argument('--l1psk', type=int, default=None,
               help='L1 prescale key')
g.add_argument('--hltpsk', type=int, default=None,
               help='HLT prescale key')
g.add_argument('--use-crest', action='store_true', default=False,
               help='Use CREST for trigger configuration')
g.add_argument('--crest-server', metavar='URL', default=None,
               help='CREST server URL (defaults to flags.Trigger.crestServer)')
g.add_argument('--dump-config', action='store_true',
               help='Dump joboptions JSON file')
g.add_argument('--dump-config-exit', action='store_true',
               help='Dump joboptions JSON file and exit')

# Magnet currents.
g = parser.add_argument_group('Magnets')
g.add_argument('--solenoid-current', type=float, default=None,
               help='Solenoid current in Amperes (default: nominal current for offline running, required from IS online)')
g.add_argument('--toroids-current', type=float, default=None,
               help='Toroids current in Amperes (default: nominal current for offline running, required from IS online)')

# Online (IS/OKS) integration.
g = parser.add_argument_group('Online')
g.add_argument('--online-environment', action='store_true',
               help='Enable online environment: read run parameters from IS and trigger '
                    'configuration keys (SMK, L1PSK, HLTPSK) from OKS via WEBDAQ REST API')
g.add_argument('--partition', metavar='NAME', default=None,
               help='TDAQ partition name (defaults to TDAQ_PARTITION environment variable)')
g.add_argument('--webdaq-base', metavar='URL', default=None,
               help='WEBDAQ base URL (defaults to TDAQ_WEBDAQ_BASE environment variable)')

# Online histogram publishing.
g = parser.add_argument_group('Online Histogramming')
g.add_argument('--oh-monitoring', '-M', action='store_true', default=False,
               help='enable online histogram publishing via WebdaqHistSvc')

# Expert options (hidden from the default help output).
g = parser.add_argument_group('Expert')
parser.expert_groups.append(g)

# Unrecognised arguments are kept and later forwarded as athena flag strings.
(args, unparsed_args) = parser.parse_known_args()
# NOTE(review): whitespace-mangled fragment; the leading integers are line
# numbers from the original file, and gaps in that numbering mean lines are
# missing from this view (e.g. 'import ROOT' around 1111-1113, the HLT
# configuration loader calls around 1243-1245 / 1258-1262 / 1320-1325, and the
# try/except skeleton around 1399-1407). Comments only annotate the visible
# fragments.
# --- ROOT and logging setup ---
1109 from PyUtils.Helpers
import ROOTSetup
1110 ROOTSetup(batch=
True)
1114 ROOT.ROOT.EnableThreadSafety()
1117 import AthenaCommon.Logging
1118 AthenaCommon.Logging.log.setLevel(getattr(logging, args.log_level[0]))
1119 AthenaCommon.Logging.log.setFormat(
"%(asctime)s Py:%(name)-31s %(levelname)7s %(message)s")
1120 if args.show_includes:
1121 from AthenaCommon.Include
import include
1122 include.setShowIncludes(
True )
# Default the number of concurrent events to the number of threads.
1125 if not args.concurrent_events:
1126 args.concurrent_events = args.threads
# --- Trigger/PSC flags ---
1129 from TrigPSC
import PscConfig
1130 from TrigPSC.PscDefaultFlags
import defaultOnlineFlags
1133 flags = defaultOnlineFlags()
1136 if args.oh_monitoring:
1137 flags.Trigger.Online.useOnlineWebdaqHistSvc =
True
1138 log.info(
"Enabled WebdaqHistSvc for online histogram publishing")
# CREST configuration source (the guard condition is not visible here).
1141 log.info(
"Using CREST for trigger configuration: %s", args.use_crest)
1143 flags.Trigger.useCrest =
True
1144 if args.crest_server:
1145 flags.Trigger.crestServer = args.crest_server
1147 args.crest_server = flags.Trigger.crestServer
# Force prescale keys when given explicitly or when running online.
1151 if args.use_database:
1155 PscConfig.forcePSK = (args.hltpsk
is not None)
or args.online_environment
# Forward unparsed CLI args as athena flag assignments (not for DB/JSON jobs).
1160 if not args.use_database
and args.jobOptions
and not args.jobOptions.endswith(
'.json'):
1161 PscConfig.unparsedArguments = unparsed_args
1162 for flag_arg
in unparsed_args:
1163 flags.fillFromString(flag_arg)
1165 PscConfig.interactive = args.interactive
1166 PscConfig.exitAfterDump = args.dump_config_exit
# Conditions reference-run override for IOV lookup.
1174 if args.conditions_run
is not None:
1175 log.info(
"Using conditions from reference run %d (overriding run %s for IOV lookup)",
1176 args.conditions_run, args.run_number)
1177 flags.Input.ConditionsRunNumber = args.conditions_run
# Event-loop limits.
1180 if args.number_of_events > 0:
1181 flags.Exec.MaxEvents = args.number_of_events
1184 if args.skip_events > 0:
1185 flags.Exec.SkipEvents = args.skip_events
1191 flags.PerfMon.doFastMonMT = args.perfmon
# --- EF (Event Filter) interface configuration ---
1195 flags.Trigger.Online.useEFByteStreamSvc =
True
1196 ef = flags.Trigger.Online.EFInterface
1197 ef_files = args.file
if args.file
else []
1199 ef.OutputFileName = f
"athenaEF_{args.save_output}" if args.save_output
else ""
1200 ef.LoopFiles = args.loop_files
1201 ef.NumEvents = args.number_of_events
1202 ef.SkipEvents = args.skip_events
1203 ef.RunNumber = args.run_number
1204 ef.T0ProjectTag = args.T0_project_tag
1205 ef.BeamType = args.beam_type
1206 ef.BeamEnergy = args.beam_energy
1207 ef.TriggerType = args.trigger_type
1208 ef.Stream = args.stream
1209 ef.Lumiblock = args.lumiblock
1210 ef.DetMask = args.file_detector_mask
1211 ef.LibraryName = args.efdf_interface_library
# --- Precommands: user Python executed before job options are loaded ---
1215 log.info(
"Executing precommand(s)")
1216 for cmd
in args.precommand:
1217 log.info(
" %s", cmd)
1218 exec(cmd, globals(), {
'flags': flags})
# --- Configuration loading: database / pickle / JSON / CA module ---
1221 is_database = args.use_database
1225 if not is_database
and args.jobOptions:
1226 jobOptions = args.jobOptions
1227 is_pickle = jobOptions.endswith(
'.pkl')
1228 is_json = jobOptions.endswith(
'.json')
# Database path: resolve the DB alias (via CREST when enabled).
1234 crestconn = TriggerCrestUtil.getCrestConnection(args.db_server)
1235 db_alias = f
"{args.crest_server}/{crestconn}"
1236 log.info(
"Loading configuration via CREST from %s with SMK %d", db_alias, args.smk)
1238 db_alias = args.db_server
1239 log.info(
"Loading configuration from database %s with SMK %d", db_alias, args.smk)
# Loader call at ~1243-1245 not visible; only its kwargs line remains.
1244 num_threads=args.threads, num_slots=args.concurrent_events,
1246 log.info(
"Configuration loaded from database")
# Pickle path.
1250 log.info(
"Loading configuration from pickle file: %s", jobOptions)
1251 with open(jobOptions,
'rb')
as f:
1252 acc = pickle.load(f)
1253 log.info(
"Configuration loaded from pickle")
# JSON path (loader call around 1258-1262 not visible).
1257 log.info(
"Loading configuration from JSON file: %s", jobOptions)
1261 num_threads=args.threads, num_slots=args.concurrent_events,
1263 log.info(
"Configuration loaded from JSON")
# ComponentAccumulator (CA) path.
1271 log.info(
"Loading CA configuration from: %s", jobOptions)
1274 from AthenaConfiguration.ComponentAccumulator
import ComponentAccumulator
1275 from AthenaConfiguration.MainServicesConfig
import addMainSequences
1276 from TrigServices.TriggerUnixStandardSetup
import commonServicesCfg
1277 from AthenaConfiguration.ComponentFactory
import CompFactory
1279 locked_flags = flags.clone()
1283 cfg = ComponentAccumulator(CompFactory.AthSequencer(
"AthMasterSeq", Sequential=
True))
1284 cfg.setAppProperty(
'ExtSvcCreates',
False)
1285 cfg.setAppProperty(
"MessageSvcType",
"TrigMessageSvc")
1286 cfg.setAppProperty(
"JobOptionsSvcType",
"TrigConf::JobOptionsSvc")
1289 addMainSequences(locked_flags, cfg)
1290 cfg.merge(commonServicesCfg(locked_flags))
1293 cfg_func = AthHLT.getCACfg(jobOptions)
1294 cfg.merge(cfg_func(flags))
# Postcommands for the CA path; cleared afterwards so they don't run twice.
1297 if args.postcommand:
1298 log.info(
"Executing postcommand(s)")
1299 for cmd
in args.postcommand:
1300 log.info(
" %s", cmd)
1301 exec(cmd, globals(), {
'flags': flags,
'cfg': cfg})
1302 args.postcommand = []
# Dump the CA configuration to pickle + JSON job options.
1305 fname =
"HLTJobOptions"
1306 log.info(
"Dumping configuration to %s.pkl and %s.json", fname, fname)
1307 with open(f
"{fname}.pkl",
"wb")
as f:
1310 from TrigConfIO.JsonUtils
import create_joboptions_json
1311 create_joboptions_json(f
"{fname}.pkl", f
"{fname}.json")
1314 if args.dump_config_exit:
1315 log.info(
"Configuration dumped to %s.json. Exiting...", fname)
1319 log.info(
"Loading configuration from %s.json via TrigConf::JobOptionsSvc", fname)
1323 num_threads=args.threads, num_slots=args.concurrent_events,
1326 log.info(
"Configuration loaded with HLT online services")
# Postcommands for the non-CA paths (note: 'acc' in the locals here).
1329 if args.postcommand:
1330 log.info(
"Executing postcommand(s)")
1331 for cmd
in args.postcommand:
1332 log.info(
" %s", cmd)
1333 exec(cmd, globals(), {
'flags': flags,
'acc': acc})
# --- Optional dump of the final configuration to HLTJobOptions.json ---
1336 if args.dump_config
or args.dump_config_exit:
1337 fname =
"HLTJobOptions"
# Database-backed config: fetch the properties from the trigger DB.
1341 from TrigConfIO.HLTTriggerConfigAccess
import HLTJobOptionsAccess
1342 log.info(
"Fetching configuration from database for dump...")
1343 jo_access = HLTJobOptionsAccess(dbalias=acc.db_server, smkey=acc.smk)
1344 props = jo_access.algorithms()
1346 log.info(
"Dumping configuration to %s.json", fname)
1347 hlt_json = {
'filetype':
'joboptions',
'properties': props}
1348 with open(f
"{fname}.json",
"w")
as f:
1349 json.dump(hlt_json, f, indent=4, sort_keys=
True, ensure_ascii=
True)
# Config object that already carries its properties.
1353 props = acc.properties
1355 log.info(
"Dumping configuration to %s.json", fname)
1356 hlt_json = {
'filetype':
'joboptions',
'properties': props}
1357 with open(f
"{fname}.json",
"w")
as f:
1358 json.dump(hlt_json, f, indent=4, sort_keys=
True, ensure_ascii=
True)
# No dumpable properties available.
1360 log.warning(
"No properties available to dump")
# CA config: gather application/message/component properties.
1364 app_props, msg_props, comp_props = acc.gatherProps()
1365 props = {
"ApplicationMgr": app_props,
"MessageSvc": msg_props}
1366 for comp, name, value
in comp_props:
1367 props.setdefault(comp, {})[name] = value
1369 log.info(
"Dumping configuration to %s.json", fname)
1370 hlt_json = {
'filetype':
'joboptions',
'properties': props}
1371 with open(f
"{fname}.json",
"w")
as f:
1372 json.dump(hlt_json, f, indent=4, sort_keys=
True, ensure_ascii=
True)
1377 if args.dump_config_exit:
1378 log.info(
"Configuration dumped. Exiting...")
# --- Execution ---
1382 log.info(
"Starting Athena execution...")
# Worker directory apparently expected by downstream tooling -- TODO confirm.
1387 worker_dir = os.path.join(os.getcwd(),
"athenaHLT_workers",
"athenaHLT-01")
1388 if not os.path.exists(worker_dir):
1389 log.info(
"Creating worker directory: %s", worker_dir)
1390 os.makedirs(worker_dir, exist_ok=
True)
1392 if args.interactive:
1393 log.info(
"Interactive mode - call acc.run() to execute")
1395 code.interact(local={
'acc': acc,
'flags': flags})
# Run the event loop and map the outcome to a TDAQ exit code
# (try/except skeleton around 1399-1407 not visible here).
1398 from AthenaCommon
import ExitCodes
1402 sc = acc.run(args.number_of_events)
1404 exitcode = ExitCodes.EXE_ALG_FAILURE
1405 except SystemExit
as e:
1406 exitcode = ExitCodes.EXE_ALG_FAILURE
if e.code == 1
else e.code
1408 traceback.print_exc()
1409 exitcode = ExitCodes.UNKNOWN_EXCEPTION
1411 log.info(
'Leaving with code %d: "%s"', exitcode, ExitCodes.what(exitcode))