ATLAS Offline Software
makeDTCalibBlob_pickPhase.py
#!/usr/bin/env python3
# TO FIX: cali OFCs in final db are same as phys
# Step which makes picked phase root files puts same value in OFC_1ns and OFC_1ns_mu trees when Ncoll > 0

import os, sys, errno, glob, subprocess, pathlib, getpass, datetime
import argparse
import xmlrpc.client

# Dictionary with folder info used for LArCompleteToFlat
folderInfo = {}
folderInfo["/LAR/ElecCalibOflSC/OFC/PhysWave/RTM/4samples1phase"] = {"key": "LArOFC", "classtype": "LArOFCComplete"}
folderInfo["/LAR/ElecCalibOflSC/OFC/CaliWave1phase"] = {"key": "LArOFC", "classtype": "LArOFCComplete"}
folderInfo["/LAR/ElecCalibOflSC/Shape/RTM/4samples1phase"] = {"key": "LArShape", "classtype": "LArShapeComplete"}
folderInfo["/LAR/ElecCalibOflSC/Pedestals/Pedestal"] = {"key": "LArPedestal", "classtype": "LArPedestalComplete"}
folderInfo["/LAR/ElecCalibOflSC/Ramps/RampLinea"] = {"key": "LArRamp", "classtype": "LArRampComplete"}
folderInfo["/LAR/ElecCalibOflSC/MphysOverMcal/RTM"] = {"key": "LArMphysOverMcal", "classtype": "LArMphysOverMcalComplete"}


if "Athena_DIR" not in os.environ:
    print("It looks like Athena is not set up... this script will not work without it.")
    print("Please do:")
    print("setupATLAS; asetup Athena,24.0.54; source /eos/project-a/atlas-larcalib/public/build/x86_64-el9-gcc13-opt/setup.sh")
    sys.exit(1)


from PyCool import cool, coral

def getFolderList(dbstring, retdb=False, cleanlist=False):
    # get database service and open database
    dbSvc = cool.DatabaseSvcFactory.databaseService()
    # database accessed via logical name, can also use physical connections
    try:
        db = dbSvc.openDatabase(dbstring)
    except Exception as e:
        print(f"Problem opening database {e}")
        sys.exit(-1)
    print(f"Opened database {dbstring}")
    # now list the folders
    folderlist = [str(s) for s in db.listAllNodes()]
    if cleanlist:
        # Careful... some folders have similar names e.g. OFC and OFCCali
        # folderlist = [j for j in folderlist if all(j not in k if j!=k else True for k in folderlist)]
        folderlist = [f for f in folderlist if f.count("/") >= 3]

    if retdb:
        return folderlist, db
    else:
        return folderlist
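
# Example usage (a sketch; the connection string mirrors the one built in folderNamesTags below,
# and "mergeSC.db" is a hypothetical local sqlite file):
#   folders = getFolderList("sqlite://;schema=mergeSC.db;dbname=CONDBR2", cleanlist=True)
#   folders, db = getFolderList("sqlite://;schema=mergeSC.db;dbname=CONDBR2", retdb=True)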


#from mp import parallel_exec
#sys.path.append("/afs/cern.ch/user/l/larmon/public/prod/LArPage1/makeJIRA")
#import jiraComment as jc

class cfile():
    # subclass file to have a more convenient use of writeline
    def __init__(self, filename, mode='w'):
        self.file = open(filename, mode, buffering=1)
    def wl(self, string):
        self.file.writelines(string + '\n')
        self.file.flush()
        os.fsync(self.file)
        return None
    def close(self):
        self.file.close()

def chmkDir(path):
    os.umask(0)
    try:
        print("mkdir " + path)
        os.makedirs(path)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise
    pass

def get_latest_run():
    dqmsite = "atlasdqm.cern.ch"
    dqmpassfile = "/afs/cern.ch/user/l/larmon/public/atlasdqmpass.txt"
    if not os.path.isfile(dqmpassfile):
        return 555555
    dqmpass = None
    dqmapi = None
    with open(dqmpassfile, "r") as f:
        dqmpass = f.readlines()[0].strip()
    if ":" not in dqmpass:
        print("Problem reading dqmpass")
        sys.exit()
    try:
        dqmapi = None  #xmlrpc.client.ServerProxy("https://"+dqmpass+"@"+dqmsite)
    except:
        print("Failed to connect to atlasdqm, therefore giving a fake run number 555555")
        return 555555
    if dqmapi is not None:
        return dqmapi.get_latest_run()
    else:
        return 555555


class logFile():
    def __init__(self, logpath):
        self.logfile = open(logpath, "r", newline="\n")
        self.lines = self.logfile.readlines()
        self.runs = self.runsFromLog()
        self.name = pathlib.Path(logpath).stem

    def runsFromLog(self):
        runs = []
        for line in self.lines:
            if not line.startswith("Run "): continue
            runs.append(int(line.split("Run")[1].split(":")[0]))
        return runs
    def __str__(self):
        toprint = []
        for line in self.lines:
            if not line.startswith("Run "): continue
            line = line.strip("\n")
            toprint.append(line)
        return "\n".join(toprint)
    def getTopDirs(self, topdir):
        self.topdirs = []
        for run in self.runs:
            self.infiles = glob.glob(f"{topdir}/*{run}*/root_files/*{run}*")
            self.topdirs.extend(list(set([str(pathlib.Path(i).parents[1]) for i in self.infiles])))
        self.topdirs = list(set(self.topdirs))
        self.sqldbs = self.getDBfiles()
        self.bcsnapshots = self.getBCSnapshots()
        return self.topdirs

    def getDBfiles(self):
        indbs = [g for pd in self.topdirs for g in glob.glob(f"{pd}/*/mysql.db")]
        indbs = list(set(indbs))
        self.pooldirs = list(set([str(pathlib.Path(i).parents[0]) for i in indbs]))
        return indbs

    def getBCSnapshots(self):
        indbs = [g for pd in self.topdirs for g in glob.glob(f"{pd}/*/SnapshotBadChannel.db")]
        indbs = list(set(indbs))
        return indbs
        # e.g. /eos/project/a/atlas-larcalib/AP/00473386_00473391_00473394_FMhighEtaBack_HIGH_2/pool_files/SnapshotBadChannel.db


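# Note: runsFromLog above expects the calibration log to contain lines of the form
# "Run 473386: ..." (the run number is illustrative); only the text between "Run" and the
# first colon is parsed, the rest of the line is not used.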
def athListFormat(vals):
    if isinstance(vals, list):
        vals = ",".join(vals)
    return vals
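
# Example: athListFormat(["Pedestal", "Ramp"]) returns "Pedestal,Ramp";
# a plain string is passed through unchanged.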


def printAndRun(cmd, outlogpath=None, runNow=True):
    print("**** RUNNING THE FOLLOWING COMMAND ****")
    print(cmd)
    print("***************************************")
    if runNow:
        process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = process.communicate()
        retcode = process.returncode
        print("**** Return code:", retcode, "*"*10)
        log = None
        if outlogpath is not None:
            print(f"See log: {outlogpath}")
            log = cfile(outlogpath)
            log.wl("~~~~~~~~ OUTPUT ~~~~~~~~")
            log.wl(out.decode('ascii'))
            log.wl("~~~~~~~~~~~~~~~~~~~~~~~~")
            log.wl("~~~~~~~~ ERROR ~~~~~~~~")
            log.wl(err.decode('ascii'))
            log.wl("~~~~~~~~~~~~~~~~~~~~~~~~")
        else:
            print("~~~~~~~~ OUTPUT ~~~~~~~~")
            print(out.decode('ascii'))
            print("~~~~~~~~~~~~~~~~~~~~~~~~")
            print("~~~~~~~~ ERROR ~~~~~~~~")
            print(err.decode('ascii'))
            print("~~~~~~~~~~~~~~~~~~~~~~~~")
        if retcode != 0:
            print("!!!!!!!! DID NOT TERMINATE SUCCESSFULLY... exiting")
            sys.exit()
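
# Example (sketch): with runNow=False the command is only printed, which is what the
# --dryRun option below relies on, e.g. printAndRun("echo hello", runNow=False);
# the "echo hello" command is purely illustrative.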

def run_merge(insqlite, inkeys, outsqlite, outpool, Ncoll=0, poolcat="mergedPoolCat.xml", outlogpath=None, runNow=True):
    ''' Step 1 of the merging. '''
    insqlite = athListFormat(insqlite)
    inkeys = athListFormat(inkeys)
    cmd = f"python -m LArCalibProcessing.LArNewCalib_MergeDB --insqlite {insqlite} --inkeys {inkeys} --outsqlite {outsqlite} --poolfile {outpool} --isSC --poolcat {poolcat} --Ncoll {Ncoll}"
    printAndRun(cmd, outlogpath, runNow=runNow)
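
# Example (sketch, with hypothetical file names): insqlite may be a list of per-run sqlite
# files, which athListFormat joins into a comma-separated string:
#   run_merge(insqlite=["run1/mysql.db", "run2/mysql.db"], inkeys=["Pedestal", "Ramp"],
#             outsqlite="mergeSC.db", outpool="merged_SC.pool.root", runNow=False)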

def run_fillofcphase(phases_txt, outkey="LArSCOFCPhase", default_phase=22, folder="/LAR/ElecCalibOflSC/OFCBin/PhysShift", tag="LARElecCalibOflSCOFCBinPhysShift-10", outsql="SCOFCPhase.db", outpool="SC_OFC_Phase_10.pool.root", poolcat="mergedPoolCat.xml", outlogpath=None, runNow=True):
    ''' Step 2 *if needed* - make an sql file from picked OFC phase txt file '''
    cmd = f"LArNewCalib_FillOFCPhase.py --infile {phases_txt} --outkey {outkey} --isSC --hasid --default {default_phase} --folder {folder} --tag {tag} --outsql {outsql} --outp {outpool} --poolcat {poolcat}"
    printAndRun(cmd, outlogpath, runNow=runNow)


def run_ofcpick(insqlite, outsqlite, phase_sql, run, BCsnapshotDB, outpdir="./", outrdir="./", outname="Picked_phase24052024", tag="LARElecCalibOflSCOFCBinPhysShift-10", Ncoll=0, poolcat="mergedPoolCat.xml", isPhys=True, outlogpath=None, runNow=True):
    ''' Step 3, picking the phase in the DB.
        Typical inputs: insqlite mergeSC.db, outsqlite mergeSCOnl_1.5_phase24052024.db, subdet Picked_phase24052024, phase_sql SCOFCPhase.db '''
    cmd = f"LArNewCalib_PhysOFCPhasePicker.py --run {run} -b {BCsnapshotDB} --insqlite {insqlite} --poolcat {poolcat} --outsqlite {outsqlite} --isSC --outpdir {outpdir} --outrdir {outrdir} --subdet {outname} --ofcphasetag {tag} --ofcphasesqlite {phase_sql} --Ncoll {Ncoll}"
    if not isPhys:
        cmd += " --isCalib --outprefix LArOFCCaliOnePhase"

    printAndRun(cmd, outlogpath, runNow=runNow)


def run_toCoolInline(mergedDB, outDB, infolders="ConvertToInlineSC", globalTag="LARCALIB-RUN2-00", poolcat="mergedPoolCat.xml", outlogpath=None, runNow=True):
    ''' Step 4: flattening the DB (e.g. outDB freshConstantsOnl_1.5.db) '''
    # Connect all folder-level tags to a new global tag. In calibration processing this lets a
    # calibration global tag be defined in the sqlite file, so jobs do not need to know every
    # individual folder-level tag: Athena automatically uses the one connected to the global tag.
    cmd = f"/afs/cern.ch/user/l/larcalib/LArDBTools/python/BuildTagHierarchy.py {mergedDB} {globalTag}"
    printAndRun(cmd, outlogpath, runNow=runNow)
    cmd = f"LArCalib_ToCoolInlineConfig.py --insqlite {mergedDB} --infolders {infolders} --poolcat {poolcat} --outsqlite {outDB} --isSC"
    printAndRun(cmd, outlogpath, runNow=runNow)


def poolCatalog(poolDir, catalog=None, runNow=True):
    print(f"** Adding pool files from {poolDir} with pool_insertFileToCatalog **")
    for f in [os.path.join(poolDir, f) for f in os.listdir(poolDir) if f.endswith(".pool.root")]:
        cmd1 = "pool_extractFileIdentifier " + f
        process = subprocess.Popen(cmd1, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = process.communicate()
        retcode = process.returncode
        ident = out.decode('ascii').split(" ")[0]
        cmd = f"pool_insertFileToCatalog.py {f}"
        if catalog is not None:
            cmd += f" --catalog='xmlcatalog_file:{catalog}'"
        print(f"---- {cmd} ({ident})")
        if runNow:
            process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = process.communicate()
            retcode = process.returncode
            if retcode != 0:
                print(err.decode('ascii'))  #print("ERROR: inserting file to catalogue failed")
            #print(out.decode('ascii'))
            #print(err.decode('ascii'))

def folderNamesTags(sql, mustr="mu-60", dbname="CONDBR2", verbose=False):
    dbstring = f"sqlite://;schema={sql};dbname={dbname}"
    folderlist, db = getFolderList(dbstring, retdb=True, cleanlist=True)
    ftags = {}
    for f in folderlist:
        try:
            myfolder = db.getFolder(f)
            # list tags in folder
            if verbose: print("*"*20)
            if verbose: print(f"Reading folder {f}")
            if verbose: print("*"*20)
            if verbose: print("Checking tags...")
            taglist = myfolder.listTags()
            if verbose: print(f"{len(taglist)} Tags in folder:")
            if verbose:
                for i in taglist: print(i)
            thetag = ""
            if len(taglist) > 0:
                taglist = [str(t) for t in taglist]
                if mustr != "":
                    posstag = [t for t in taglist if mustr in t]
                else:
                    posstag = [t for t in taglist if "mu-" not in t]
                if len(posstag) == 1:
                    thetag = posstag[0]
                else:
                    if verbose: print(len(posstag), "Choosing first tag")
                    thetag = taglist[0]
            if verbose: print(f"Will use tag {thetag}")
            ftags[f] = thetag

        except Exception as e:
            continue
    return ftags
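
# Example (sketch, mirroring how __main__ calls it below; "mergeSC_mu60.db" is a hypothetical file):
#   foldersTags = folderNamesTags("mergeSC_mu60.db", mustr="mu-60", verbose=True)
# returns a dict mapping each COOL folder to the tag that will be used for flattening.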

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    #parser.add_argument( '-inputLog', '-l', type=str, required=True, dest='inputLog', help='Name of input log file which lists the runs' )
    parser.add_argument('inputLog', type=str, help='Name of input log file which lists the runs')
    parser.add_argument('-logDir', type=str, dest='logDir', default="/afs/cern.ch/user/l/lardaq/public/hfec-calibration-logs", help="Input path for log files. Default %(default)s.")
    parser.add_argument('-inDir', '-i', type=str, default='/eos/project-a/atlas-larcalib/AP', help="Input directory containing root files from the processing. Default %(default)s.")
    parser.add_argument('-r', '-run', dest='run', type=int, default=get_latest_run(), help='Run number - used for bad channels. Default is latest run.')
    # parser.add_argument('--online', dest="onlineFolders", nargs='+', default=["Pedestal", "MphysOverMcal", "Ramp"], help="List of folders to merge in a separate db 'mergeSCOnl.db'. Default %(default)s.")
    parser.add_argument('--folders', dest="folders", nargs='+', default=["Pedestal", "MphysOverMcal", "Ramp", "AutoCorr", "OFCCali", "LArOFCPhys4samples", "LArOFCPhys4samplesMu", "PhysWave", "PhysAutoCorr", "CaliPulseParams", "DetCellParams", "CaliWave", "LArShape4samples"], help="List of folders to merge in a separate db 'mergeSC.db'. Default %(default)s.")
    parser.add_argument('-o', '--outdir', dest='outdir', default=f'/tmp/{getpass.getuser()}', help="Output directory for running and producing merged files. Default %(default)s.")
    parser.add_argument('-Ncoll', dest='Ncoll', type=int, default=0, help="Pileup setting. Default %(default)s.")
    parser.add_argument('-phase_txt', dest="phase_txt", type=str, default=None, help="Full path to .txt file which contains picked phases. Provide either this or a .db version")
    parser.add_argument('-phase_db', dest="phase_db", type=str, default=None, help="Full path to .db file which contains picked phases. Provide either this or a .txt version")
    parser.add_argument('-t', '--tag', dest="out_tag", type=str, default=None, help="A tag to add to the name of output files, to help keep track of what was used. e.g. could use 'Onl' or 'Ofl' to separate blobs for different uploads. Default is empty, but the current date will be added.")
    parser.add_argument('-d', '--dryRun', dest='dryRun', action='store_true', help="Perform a dry run? This will print the commands to terminal without running them")
    args = parser.parse_args()
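
    # Example invocation (a sketch; the log name and phase file are hypothetical):
    #   python makeDTCalibBlob_pickPhase.py myCalibRuns -Ncoll 60 -phase_txt picked_phases.txt --dryRun
    # ".log" is appended to the log name automatically if missing.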

    runNow = not args.dryRun

    outtag = f"mu{args.Ncoll}_{datetime.datetime.now().strftime('%y%m%d')}"
    if args.out_tag is not None:
        outtag = f"{args.out_tag}_{outtag}"


    if not args.inputLog.endswith(".log"):
        args.inputLog += ".log"

    logPath = f"{args.logDir}/{args.inputLog}"

    if not os.path.isfile(logPath):
        print(f"Couldn't find file {logPath}")
        sys.exit()

    doPicking = True
    if args.phase_txt is None and args.phase_db is None:
        print("NO PICKED PHASE FILE PROVIDED!! Please supply either a .txt (-phase_txt) or .db (-phase_db) file so that the phases can be picked for the OFCs")
        print("** This means the picking part, and therefore the single-phase OFC folders, will be skipped **")
        doPicking = False
    if args.phase_txt is not None and args.phase_db is not None:
        print("BOTH .txt AND .db PICKED PHASE FILES WERE PROVIDED. Please only provide one, so there is no ambiguity")
        sys.exit()

    for f in ["phase_txt", "phase_db"]:
        if getattr(args, f) is not None:
            if not os.path.isfile(getattr(args, f)):
                print(f"PROVIDED {f} DOES NOT EXIST!! Please check the path")
                sys.exit()


    theLog = logFile(logPath)

    # Get the list of .db sql files & BadChannels snapshots
    theLog.getTopDirs(args.inDir)
    insqlite = theLog.sqldbs
    bcsnapshots = theLog.bcsnapshots

    # Make an output directory to run in
    args.outdir = f"{args.outdir}/{theLog.name}"
    args.outdir += f"_{outtag}"
    outdir_root = f"{args.outdir}/rootFiles"
    outdir_pool = f"{args.outdir}/poolFiles"
    outdir_logs = f"{args.outdir}/logs"
    print(f"Output (including root and pool files) will go to {args.outdir}")
    chmkDir(args.outdir)
    chmkDir(outdir_root)
    chmkDir(outdir_pool)
    chmkDir(outdir_logs)

    mergedDB = f"{outdir_pool}/mergeSC_{outtag}.db"
    mergedPool = f"{outdir_pool}/merged_SC_{outtag}.pool.root"
    finalDB_flat = f"{outdir_pool}/freshConstants_1.5_{outtag}.db"
    poolcat = f"{outdir_pool}/mergedPoolCat.xml"
    phase_pool = f"{outdir_pool}/SC_OFC_Phase_10_{outtag}.pool.root"

    for pd in theLog.pooldirs:
        poolCatalog(pd, poolcat, runNow=runNow)


    # Step 1, make the merged sql file(s)
    run_merge(insqlite=insqlite, inkeys=args.folders, outsqlite=mergedDB, outpool=mergedPool, Ncoll=args.Ncoll, poolcat=poolcat, outlogpath=f"{outdir_logs}/run_merge_nopick.txt", runNow=runNow)

    if doPicking:
        phaseDBname = f"SCOFCPhase_{outtag}.db"
        if args.phase_txt is not None:
            # Step 2 (if needed) - make the picked phase sql file
            args.phase_db = f"{args.outdir}/{phaseDBname}"
            run_fillofcphase(phases_txt=args.phase_txt, outkey="LArSCOFCPhase", default_phase=22, folder="/LAR/ElecCalibOflSC/OFCBin/PhysShift", tag="LARElecCalibOflSCOFCBinPhysShift-10", outsql=args.phase_db, outpool=phase_pool, outlogpath=f"{outdir_logs}/run_fillofcphase.txt", runNow=runNow, poolcat=poolcat)
            # Add new pool file to catalogue
            poolCatalog(outdir_pool, poolcat, runNow=runNow)

        # Step 3, apply the picking to the merged db file
        run_ofcpick(insqlite=mergedDB, outsqlite=mergedDB, phase_sql=args.phase_db, run=args.run, BCsnapshotDB=bcsnapshots[0], outpdir=outdir_pool, outrdir=outdir_root, outname=f"Picked_phase_{outtag}", tag="LARElecCalibOflSCOFCBinPhysShift-10", Ncoll=args.Ncoll, poolcat=poolcat, outlogpath=f"{outdir_logs}/run_ofcpick.txt", runNow=runNow, isPhys=True)
        # Also for cali OFCs
        run_ofcpick(insqlite=mergedDB, outsqlite=mergedDB, phase_sql=args.phase_db, run=args.run, BCsnapshotDB=bcsnapshots[0], outpdir=outdir_pool, outrdir=outdir_root, outname=f"Picked_phase_{outtag}", tag="LARElecCalibOflSCOFCBinPhysShift-10", Ncoll=args.Ncoll, poolcat=poolcat, outlogpath=f"{outdir_logs}/run_ofccalipick.txt", runNow=runNow, isPhys=False)

    else:
        print("**** NOTE: NO OFC PICKING WAS DONE, DUE TO LACK OF PICKING INPUT ****")

    if args.Ncoll == 0:
        foldersTags = folderNamesTags(mergedDB, mustr="", verbose=True)
    else:
        # get the list of folders; for folders with tags, pick the mu-specific one
        foldersTags = folderNamesTags(mergedDB, mustr=f"mu-{args.Ncoll}", verbose=True)

    print("-"*30)
    print("Folders and tags to be used for flattening:")
    for k in foldersTags.keys():
        print(k, ":", foldersTags[k])
    print("-"*30)

    folderScript = f"{args.outdir}/ConvertToInlineSC"  # OIOIOI has to be local for now
    #folderScript = f"ConvertToInlineSC"
    print(f"Writing folder info to {folderScript}.py")
    with open(f"{folderScript}.py", "w") as ffile:
        lines = ["inputFolders=[]"]
        for f in foldersTags.keys():
            if f in folderInfo.keys():
                theFolder = f
                theTag = foldersTags[f]
                theKey = folderInfo[f]["key"]
                theClassType = folderInfo[f]["classtype"]

                theline = f'inputFolders.append(("{theFolder}","{theTag}","{theKey}","{theClassType}"))'
                lines.append(theline)

            else:
                print(f"NOT SURE HOW TO FLATTEN {f}, so will skip it")
                continue
        ffile.write("\n".join(lines))


    # Step 4, flatten the db
    run_toCoolInline(mergedDB=mergedDB, infolders=folderScript, outDB=finalDB_flat, globalTag="LARCALIB-RUN2-00", poolcat=poolcat, outlogpath=f"{outdir_logs}/run_toCoolInline.txt", runNow=runNow)