ATLAS Offline Software
Classes | Functions | Variables
python.grid Namespace Reference

Classes

class  Config
 
class  Sample
 

Functions

def Add (name)
 
def AvailableDatasets ()
 
def Samples (names)
 
def basicInDSNameShortener (*args)
 
def checkMergeType (configuration)
 
def checkForFile (filename)
 
def checkForPrun ()
 
def findPackages ()
 
def submit (config, allSamples)
 
def makeDirectory (outputDirectory)
 
def convertAODtoTOPQ (derivationToUse, ptag, samples)
 
def checkForShowerAlgorithm (Samples, cutfile)
 
def isAF2 (dataset)
 
def isData (dataset)
 
def checkPRWFile (Samples, cutfile)
 
def getFirstTag (tags, letter)
 gets the first AMI tag of a kind More...
 
def getShortenedConcatenatedSample (sampleName)
 

Variables

dictionary availableDatasets = {}
 

Function Documentation

◆ Add()

def python.grid.Add (   name)

Definition at line 41 of file grid.py.

41 def Add(name):
42  availableDatasets[name] = Sample(name)
43  return availableDatasets[name]
44 

◆ AvailableDatasets()

def python.grid.AvailableDatasets ( )

Definition at line 45 of file grid.py.

45 def AvailableDatasets():
46  return availableDatasets
47 

◆ basicInDSNameShortener()

def python.grid.basicInDSNameShortener ( args)

Definition at line 57 of file grid.py.

57 def basicInDSNameShortener(*args):
58  # don't ask what args[0] is; just use args[1] (sorry for my lack of understanding of python)
59  # if you want to use a different function defined outside this module, you don't need to bother
60  inDSName = args[1]
61  splitted = inDSName.split('.')
62 
63  runNumber = splitted[1]
64  physicsName = splitted[2]
65  if splitted[0] == "user" or splitted[0] == "group": #this is in case we run on private derivations, either produced with user or group role
66  runNumber = splitted[2]
67  physicsName = splitted[3]
68  derivation = splitted[-2]
69  tags = splitted[-1].replace('/','')
70 
71  #grid complains dataset names are too long
72  #stupid grid
73  if len(physicsName) > 20:
74  physicsName = physicsName.split('_')[0]
75 
76  outDSName = runNumber + '.' + physicsName + '.' + derivation + '.' + tags
77  return outDSName
78 

◆ checkForFile()

def python.grid.checkForFile (   filename)

Definition at line 161 of file grid.py.

161 def checkForFile(filename):
162  return os.path.exists(filename)
163 

◆ checkForPrun()

def python.grid.checkForPrun ( )

Definition at line 164 of file grid.py.

164 def checkForPrun():
165  if distutils.spawn.find_executable('prun') == None:
166  print(logger.FAIL + 'DANGER DANGER DANGER' + logger.ENDC)
167  print('Could not find prun. If you use setupATLAS (you should) then')
168  print('"localSetupPandaClient --noAthenaCheck" and run this again')
169  sys.exit()
170 
171 #I see a crash if I setup with rcSetup Top,1.4.0
172 #and don't run rc find_packages
173 #so let's run this by default

◆ checkForShowerAlgorithm()

def python.grid.checkForShowerAlgorithm (   Samples,
  cutfile 
)

Definition at line 442 of file grid.py.

442 def checkForShowerAlgorithm(Samples, cutfile):
443  settingsFilePath = ROOT.PathResolver.find_file(cutfile, "DATAPATH", ROOT.PathResolver.LocalSearch)
444  noShowerDatasets = []
445  customTDPFile = None
446  tmp = open(settingsFilePath, "r")
447  for line in tmp.readlines():
448  if "TDPPath" not in line:
449  continue
450  else:
451  customTDPFile = line.strip().split("TDPPath")[1]
452  break
453  print(customTDPFile)
454  if customTDPFile:
455  tdpFile = ROOT.PathResolver.find_calib_file(customTDPFile)
456  else:
457  tdpFile = ROOT.PathResolver.find_file("dev/AnalysisTop/TopDataPreparation/XSection-MC16-13TeV.data", "CALIBPATH", ROOT.PathResolver.RecursiveSearch)
458  # Load the file
459  print(tdpFile)
460  tdp = analysis.TopDataPreparation(tdpFile)
461  for TopSample in availableDatasets.values():
462  for List in Samples:
463  SublistSamples = List.datasets
464  for sample_concatenated in SublistSamples:
465  sample=getShortenedConcatenatedSample(sample_concatenated) # in the case of comma-separated samples with same DSIDs and same first tags (it's the same sample)
466  scope = sample.split('.')[0]
467  if 'mc' not in scope:
468  continue
469  dsid = sample.split('.')[1]
470  dsid = int(dsid)
471  hasShower = tdp.getShower(dsid) in ['sherpa','sherpa21','pythia','pythia8','herwigpp','amcatnlopythia8', 'herwigpp713', 'sherpa228', 'sherpa2210', 'herwigpp721']
472  #print hasShower," ",sample
473  if not tdp.hasID(dsid) or not hasShower:
474  noShowerDatasets += [dsid]
475 
476  if len(noShowerDatasets) > 0:
477  print('TopDataPreparation .data file specified in '+settingsFilePath+' was checked.')
478  print('The following datasets do not have a showering algorithm defined in TopDataPreparation and will fail on the grid. Please ask for this to be fixed in TopDataPreparation!')
479  for ds in set(noShowerDatasets):
480  print(ds)
481  raise RuntimeError("Datasets without shower.")
482 

◆ checkMergeType()

def python.grid.checkMergeType (   configuration)

Definition at line 153 of file grid.py.

153 def checkMergeType(configuration):
154  type = configuration.mergeType
155  if type != "None" and type != "Default" and type != "xAOD":
156  print("MergeType must be set to either None, Default or xAOD")
157  print('Not', type, "Since that doesn't make sense")
158  sys.exit()
159 
160 #If settings.txt doesn't exist your grid job will fail

◆ checkPRWFile()

def python.grid.checkPRWFile (   Samples,
  cutfile 
)

Definition at line 498 of file grid.py.

498 def checkPRWFile(Samples, cutfile):
499  # Some imports
500  import subprocess, shlex
501 
502  # We need to find the PRW files being used and make use of the checkPRW
503  # checkPRW.py --inDsTxt=my.datasets.txt path/to/prwConfigs/*.root
504  # First, find the PRW names from cutfile
505  settingsFilePath = ROOT.PathResolver.find_file(cutfile, "DATAPATH", ROOT.PathResolver.LocalSearch)
506  print(logger.OKBLUE + " - Processing checks for PRWConfig in " + settingsFilePath + logger.ENDC)
507  tmp = open(settingsFilePath, "r")
508  PRWConfig = None
509  PRWConfig_FS = None
510  PRWConfig_AF = None
511  for line in tmp.readlines():
512  line = line.split('#', 1)[0].strip()
513  if line.startswith("PRWConfigFiles_AF"):
514  PRWConfig_AF = [ ROOT.PathResolver.find_file( x, "CALIBPATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ]
515  PRWConfig_AF.extend( [ ROOT.PathResolver.find_file( x, "DATAPATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ] )
516  PRWConfig_AF.extend( [ ROOT.PathResolver.find_file( x, "PATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ] )
517  elif line.startswith("PRWConfigFiles_FS"):
518  PRWConfig_FS = [ ROOT.PathResolver.find_file( x, "CALIBPATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ]
519  PRWConfig_FS.extend( [ ROOT.PathResolver.find_file( x, "DATAPATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ] )
520  PRWConfig_FS.extend( [ ROOT.PathResolver.find_file( x, "PATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ] )
521  elif line.startswith("PRWConfigFiles"):
522  PRWConfig = [ ROOT.PathResolver.find_file( x, "CALIBPATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ]
523  PRWConfig.extend( [ ROOT.PathResolver.find_file( x, "DATAPATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ] )
524  PRWConfig.extend( [ ROOT.PathResolver.find_file( x, "PATH", ROOT.PathResolver.RecursiveSearch ) for x in line.split()[1:] ] )
525 
526  if PRWConfig and PRWConfig_AF:
527  print(logger.FAIL + " - Problem in cutfile " + settingsFilePath + ": PRWConfigFiles is inconsistent with usage of PRWConfigFiles_AF" + logger.ENDC)
528  return
529  elif PRWConfig and PRWConfig_FS:
530  print(logger.FAIL + " - Problem in cutfile " + settingsFilePath + ": PRWConfigFiles is inconsistent with usage of PRWConfigFiles_FS" + logger.ENDC)
531  return
532  elif PRWConfig and not PRWConfig_FS and not PRWConfig_AF:
533  PRWConfig_FS = PRWConfig
534  PRWConfig_AF = PRWConfig
535  elif not PRWConfig and not PRWConfig_FS and not PRWConfig_AF:
536  print(logger.FAIL + " - Error reading PRWConfigFiles from cutfile" + logger.ENDC)
537  return
538  # else: we assume that PRWConfigFiles_FS and PRWConfigFiles_AF are set
539 
540  # Print the PRW files
541  print(logger.OKGREEN + "PRW files used for FS:" + logger.ENDC)
542  print(logger.OKGREEN + "\n".join(PRWConfig_FS) + logger.ENDC)
543  print(logger.OKGREEN + "PRW files used for AF2:" + logger.ENDC)
544  print(logger.OKGREEN + "\n".join(PRWConfig_AF) + logger.ENDC)
545 
546  # Create a temporary sample list
547  tmpFileNameFS = "samplesforprwcheck_FS.txt"
548  tmpOutFS = open(tmpFileNameFS,"w")
549  tmpFileNameAF = "samplesforprwcheck_AF.txt"
550  tmpOutAF = open(tmpFileNameAF,"w")
551  for List in Samples:
552  SublistSamples = List.datasets
553  for sample_concatenated in SublistSamples: # the listed samples may be comma-separated list of samples
554  for sample in sample_concatenated.split(','): # we need to check all of them, not just the first one
555  if (isData(sample)):
556  continue
557  else:
558  if not isAF2(sample):
559  tmpOutFS.write(sample+"\n")
560  else:
561  tmpOutAF.write(sample+"\n")
562  tmpOutFS.close()
563  tmpOutAF.close()
564 
565  # then do the check
566  if (os.path.getsize(tmpFileNameFS)): # what follows only makes sense if the file isn't empty
567  # Make the FS command
568  cmdFS = "checkPRW.py --inDsTxt %s %s"%(tmpFileNameFS, " ".join(PRWConfig_FS))
569  print(logger.OKBLUE + " - Running command : " + cmdFS + logger.ENDC)
570  # Run
571  procFS = subprocess.Popen(shlex.split(cmdFS))
572  procFS.wait()
573  else:
574  print(logger.OKBLUE + " - No PRWConfig check is needed for FS." + logger.ENDC)
575  if (os.path.getsize(tmpFileNameAF)): # what follows only makes sense if the file isn't empty
576  # Make the AF command
577  cmdAF = "checkPRW.py --inDsTxt %s %s"%(tmpFileNameAF, " ".join(PRWConfig_AF))
578  print(logger.OKBLUE + " - Running command : " + cmdAF + logger.ENDC)
579  # Run
580  procAF = subprocess.Popen(shlex.split(cmdAF))
581  procAF.wait()
582  else:
583  print(logger.OKBLUE + " - No PRWConfig check is needed for AF2." + logger.ENDC)
584  # At the moment, just print the output, but we need to learn what to catch also
585 

◆ convertAODtoTOPQ()

def python.grid.convertAODtoTOPQ (   derivationToUse,
  ptag,
  samples 
)

Definition at line 432 of file grid.py.

432 def convertAODtoTOPQ(derivationToUse, ptag, samples):
433  for sample in samples:
434  for i, ds in enumerate(sample.datasets):
435  sample.datasets[i] = ds.replace('AOD', derivationToUse).replace('/', '') + '_' + ptag
436 

◆ findPackages()

def python.grid.findPackages ( )

Definition at line 174 of file grid.py.

174 def findPackages():
175  cmd = 'cd $ROOTCOREBIN/../; rc find_packages'
176 
177  debug = False
178 
179  if debug:
180  print('finding packages')
181  print(cmd)
182 
183  for l in os.popen(cmd).readlines():
184  if debug:
185  print(l.strip())
186 
187  if debug:
188  print('done')
189 
190 #Given a list of datasets, the command to run and a mode (egamma, muons) this
191 #submits one prun job per run
192 #This is mostly for internal use of the code

◆ getFirstTag()

def python.grid.getFirstTag (   tags,
  letter 
)

gets the first AMI tag of a kind

Definition at line 587 of file grid.py.

587 def getFirstTag(tags,letter):
588  tagList = tags.split('_')
589  first = ''
590  for tag in tagList:
591  if tag.find(letter,0,1) != -1 and tag[1:].isdigit() and first == '':
592  first = tag
593  return first
594 
595 
596 # In MC16, a given DSID can have been "split" into several datasets, where some have more ami-tags
597 # This function takes as input a comma-separated list of datasets, and returns the name of the first sample if we are in such case
598 # It throws an error if the DSID of these datasets is different, or if the first ami-tag of each type is different for the different datasets

◆ getShortenedConcatenatedSample()

def python.grid.getShortenedConcatenatedSample (   sampleName)

Definition at line 599 of file grid.py.

599 def getShortenedConcatenatedSample(sampleName):
600  samples = sampleName.split(',')
601  if len(samples) == 1: # the simplest case
602  return samples[0]
603 
604  # check if the DSIDs are all the same
605  DSID = samples[0].split('.')[1]
606  firstTagOfFirstSample = { 'e':'', 's':'', 'a':'', 'r':'', 'f':'', 'm':'', 'p':'', }
607  isFirstSample = True
608  for s in samples:
609  if s.split('.')[1] != DSID:
610  print(logger.FAIL + " Issue with this concatenated sample: " + sampleName + logger.ENDC)
611  print(logger.FAIL + " This syntax can only work if all dataset containers have the same DSID " + logger.ENDC)
612  raise RuntimeError("Issue with contatenated samples.")
613  AmiTags = s.split('.')[-1]
614  for tagType in firstTagOfFirstSample:
615  if firstTagOfFirstSample[tagType] == '' and isFirstSample:
616  firstTagOfFirstSample[tagType] = getFirstTag(AmiTags,tagType)
617  elif firstTagOfFirstSample[tagType] != getFirstTag(AmiTags,tagType):
618  print(logger.FAIL + " Issue with this concatenated sample: " + sampleName + logger.ENDC)
619  print(logger.FAIL + " This syntax can only work if all dataset containers have the same first tag of each type " + logger.ENDC)
620  print(logger.FAIL + " And it seems there are two samples in this list, one with " + firstTagOfFirstSample[tagType] + " and one with " + getFirstTag(AmiTags,tagType) + " as first " + tagType + "-tag" + logger.ENDC)
621  raise RuntimeError("Issue with contatenated samples.")
622  isFirstSample = False
623  return samples[0] # if we survived all the tests, return the first of the list

◆ isAF2()

def python.grid.isAF2 (   dataset)

Definition at line 483 of file grid.py.

483 def isAF2(dataset):
484  tags = dataset.split('.')[-1]
485  tagList = tags.split('_')
486  for tag in tagList:
487  if tag.find('a')>-1:
488  return True
489  return False
490 

◆ isData()

def python.grid.isData (   dataset)

Definition at line 491 of file grid.py.

491 def isData(dataset):
492  scope = dataset.split('.')[0]
493  if scope.find('data')>-1:
494  return True
495  else:
496  return False
497 

◆ makeDirectory()

def python.grid.makeDirectory (   outputDirectory)

Definition at line 420 of file grid.py.

420 def makeDirectory(outputDirectory):
421  try:
422  os.makedirs(outputDirectory)
423  print('Made directory', outputDirectory)
424  except:
425  #directory already exists
426  pass
427 
428 #Loop through the samples and change the AOD-style name to a DAOD_TOPQ one
429 #The user needs to give derivationToUse (DAOD_TOPQ1 or DAOD_TOPQ2)
430 #The p-tag for the production
431 #And the list of samples

◆ Samples()

def python.grid.Samples (   names)

Definition at line 48 of file grid.py.

48 def Samples(names):
49  samples = []
50  for n in names:
51  #removing whitespaces from concatenated lines - ANALYSISTO-553
52  for ds in range(0,len(availableDatasets[n].datasets)):
53  availableDatasets[n].datasets[ds]=availableDatasets[n].datasets[ds].replace(' ','')
54  samples.append(availableDatasets[n])
55  return samples
56 

◆ submit()

def python.grid.submit (   config,
  allSamples 
)

Definition at line 193 of file grid.py.

193 def submit(config, allSamples):
194  checkForPrun()
195  checkMergeType(config)
196  config.details()
197  if not config.skipShowerCheck:
198  for configFile in config.settingsFile.split(','):
199  checkForShowerAlgorithm(allSamples, configFile)
200  if config.checkPRW:
201  for configFile in config.settingsFile.split(','):
202  checkPRWFile(allSamples, configFile)
203 
204  tarfile = 'top-xaod.tar.gz'
205 
206  # Delete the old tarball if requested
207  if not config.reuseTarBall:
208  try:
209  os.remove(tarfile)
210  except OSError as e:
211  #Number 2 is 'file doesn't exist' which is okay for us
212  if e.errno == 2:
213  pass
214  else:
215  raise
216 
217 
218  #Check for cuts file
219  for configFile in config.settingsFile.split(','):
220  if not checkForFile(configFile):
221  print(logger.WARNING + " WARNING - Did not find config file %s in this dir "%(configFile) + logger.ENDC)
222  print(logger.WARNING + " - Attempt to find this file in a sensible location using PathResolver... " + logger.ENDC)
223  settingsFilePath = ROOT.PathResolver.find_file(configFile, "DATAPATH", ROOT.PathResolver.LocalSearch)
224  if settingsFilePath == "":
225  print(logger.FAIL + "DANGER DANGER. HIGH VOLTAGE" + logger.ENDC)
226  print('%s does not exist in this directory and cannot be found using PathResolver, exiting!' % configFile)
227  sys.exit(1)
228  else:
229  print(logger.WARNING + " - Found an appropriate file %s "%(settingsFilePath) + logger.ENDC)
230 
231  outputFiles = []
232  for configFile in config.settingsFile.split(','):
233  settingsFilePath = ROOT.PathResolver.find_file(configFile, "DATAPATH", ROOT.PathResolver.LocalSearch)
234 
235  #Look in the cuts file for the output filename
236  outputFilename = 'EMPTY'
237  for l in open(settingsFilePath, "r"):
238  #ignore text after comments
239  if l.find('#') > -1:
240  l = l.split('#')[0]
241  if l.find('OutputFilename') > -1:
242  outputFilename = l.replace('OutputFilename', '').strip()
243  if outputFilename == 'EMPTY':
244  print(logger.FAIL + 'OutputFilename not found in ' + settingsFilePath + logger.ENDC)
245  sys.exit(1)
246  else:
247  outputFiles.append(outputFilename)
248 
249  outputFilenames='' # string defining the output file(s)
250  combineArgument='' # argument of command for combining the multiple output files, if need be
251  if len(outputFiles) == 0: # at this stage this shouldn't happen
252  print(logger.FAIL + 'No OutputFileName found' + logger.ENDC)
253  sys.exit(1)
254  elif len(outputFiles) ==1: # the simplest case
255  outF = outputFiles[0]
256  outputFilenames = outF.replace(".root","_root") + ":" + outF
257  else: # len(outputFiles) >=2: multiple output files, or a single combined one
258  # check if the output files are unique
259  if len(outputFiles) != len(set(outputFiles)):
260  print(logger.FAIL + 'Two of the output file names are identical. Check the config files (separated by commas).' + logger.ENDC)
261  sys.exit(1)
262  if config.combine_outputFile == None: # no combination asked
263  iconfigFile = 0
264  for configFile in outputFiles:
265  cFile = configFile.replace(".root","_root") + ":" + configFile
266  outputFilenames += cFile
267  if iconfigFile != len(outputFiles)-1:
268  outputFilenames += ','
269  iconfigFile += 1
270  else: # combination of the multiple output files
271  # retrieving the prefixes
272  prefixes = config.combine_prefixes.split(',')
273  # check if the prefixes are unique
274  if len(prefixes) != len(set(prefixes)):
275  print(logger.FAIL + 'Two of the prefixes (separated by commas) are identical. Check the combine_prefixes option.' + logger.ENDC)
276  sys.exit(1)
277  # check if the prefixes and the output files have same length
278  if len(prefixes) != len(outputFiles):
279  print(logger.FAIL + 'When combining the outputs, there should be as many prefixes as there are output files (separated by commas). Check the combine_prefixes and settingsFile options.' + logger.ENDC)
280  sys.exit(1)
281  # using combined file name
282  outputFilenames = config.combine_outputFile.replace(".root","_root") + ":" + config.combine_outputFile
283  # building the input file argument of the combination
284  iconfigFile = 0
285  for outFile in outputFiles:
286  combineArgument += outFile
287  combineArgument += ":"
288  combineArgument += prefixes[iconfigFile]
289  if iconfigFile != len(outputFiles)-1:
290  combineArgument += ','
291  iconfigFile += 1
292 
293 
294  #print outputFilenames
295 
296  these = []
297  print(logger.OKBLUE + 'For these samples' + logger.ENDC)
298 
299  for sample in allSamples:
300  currentDatasets = sample.datasets
301  actuallyExists = []
302  for ds in currentDatasets:
303  these.append(ds)
304 
305  #check if it's a release - which automatically would set rootVer and cmtConfig
306  if not config.CMake:
307  findPackages()
308 
309  plural = ''
310  if len(these) > 1:
311  plural = 's'
312 
313  print('')
314  print(logger.OKBLUE + 'Starting submission of %d sample%s' % (len(these), plural) + logger.ENDC)
315  print('')
316 
317  isfirst = True
318  for i, d_concatenated in enumerate(these):
319  d = getShortenedConcatenatedSample(d_concatenated) # in case of comma-separated list of samples with same DSID and same first tag of each type
320  print(logger.OKBLUE + 'Submitting %d of %d' % (i+1, len(these)) + logger.ENDC)
321 
322  #Make the output dataset name
323  #for group production it has to start with "group." and we assume that gridUsername is the name of the group (e.g. phys-top)
324  if config.groupProduction:
325  output = 'group.' + config.gridUsername + '.' + config.nameShortener(d) + '.' + config.suffix
326  else:
327  output = 'user.' + config.gridUsername + '.' + config.nameShortener(d) + '.' + config.suffix
328 
329  cmd = 'prun \\\n'
330  #special care for group production - we assume that gridUsername is the name of the group (e.g. phys-top)
331  if config.groupProduction:
332  cmd += '--official --voms atlas:/atlas/' + config.gridUsername + '/Role=production \\\n'
333  cmd += '--inDS=' + d_concatenated + ' \\\n' # the inDS may be a comma-separated list of samples with same DSID and same first tag of each type
334  cmd += '--outDS=' + output + ' \\\n'
335  if config.CMake:
336  CMTCONFIG = os.getenv("CMTCONFIG")
337  cmd += '--useAthenaPackages --cmtConfig=%s \\\n'%(CMTCONFIG)
338  else:
339  cmd += '--useRootCore \\\n'
340  cmd += '--writeInputToTxt=IN:in.txt \\\n'
341  cmd += '--outputs=%s \\\n' % outputFilenames
342  # write the --exec commands - will possibly AnalysisTop several times
343  cmd += '--exec="'
344  iconfigFile=0
345  for configFile in config.settingsFile.split(','):
346  cmd += '%s %s in.txt' % (config.code, configFile)
347  if iconfigFile!=len(config.settingsFile.split(','))-1:
348  cmd += '; '
349  iconfigFile += 1
350  elif combineArgument != '':
351  cmd += '; combineATOutputs ' + config.combine_outputFile + " " + combineArgument
352  cmd += '" \\\n'
353  else:
354  cmd += '" \\\n'
355 
356  #you might really hate a certain site
357  if len(config.excludedSites) > 0:
358  cmd += '--excludedSite=' + config.excludedSites + ' \\\n'
359 
360  #you might really like a certain site
361  if len(config.forceSite) > 0:
362  cmd += '--site ' + config.forceSite + ' \\\n'
363 
364  #tar-up the first time only, to save time when submitting
365  if isfirst:
366  if checkForFile(tarfile) and config.reuseTarBall:# reuse existing tarball if it already exists
367  print(logger.OKBLUE + 'Reusing existing tarball %s' % (tarfile) + logger.ENDC)
368  cmd += '--inTarBall=%s \\\n' % tarfile
369  elif config.reuseTarBall:# reuse existing tarball if it already exists
370  print(logger.WARNING + 'Tarball %s not found - will re-create it' % (tarfile) + logger.ENDC)
371  cmd += '--outTarBall=%s \\\n' % tarfile
372  else:
373  cmd += '--outTarBall=%s \\\n' % tarfile
374  isfirst = False
375  else:
376  cmd += '--inTarBall=%s \\\n' % tarfile
377 
378 
379  #maybe you don't want to submit the job?
380  if config.noSubmit:
381  cmd += '--noSubmit \\\n'
382 
383  #fewer files = happy grid
384  if config.mergeType != "None":
385  cmd += '--mergeOutput \\\n'
386 
387  #Write the output to a specific storage element?
388  if len(config.destSE) > 0:
389  cmd += '--destSE=\"%s\" \\\n' % config.destSE
390 
391  #xAOD merging - slide 9 https://indico.cern.ch/event/401703/contribution/2/2/material/slides/0.pdf
392  #Only add TriggerMenuMetaDataTool option when xAODTriggerCnv is in the release (not in 2.3.15)
393  #--mergeScript="xAODMerge -m xAODMaker::TriggerMenuMetaDataTool %OUT \`echo %IN | sed \'s/,/ /g\'\`" \\\n'
394  if config.mergeType == "xAOD":
395  cmd += '--mergeScript="xAODMerge %OUT \`echo %IN | sed \'s/,/ /g\'\`" \\\n'
396 
397  #how much memory, not sure what default is when not set
398  if len(config.memory) > 0:
399  cmd += '--memory=%s \\\n' % config.memory
400 
401  #how many files
402  if len(config.maxNFilesPerJob) > 0:
403  cmd += '--maxNFilesPerJob=%s \\\n' % config.maxNFilesPerJob
404 
405  #other options
406  if len(config.otherOptions) > 0:
407  cmd += '%s \\\n' % config.otherOptions
408 
409  #Make sure the command actually ends
410  cmd += '\n'
411 
412  #show people what you're about to do
413  print(cmd)
414 
415  #then do it
416  for l in os.popen(cmd).readlines():
417  print('>> ', l.strip())
418 
419 #Make a directory if it doesn't exist

Variable Documentation

◆ availableDatasets

dictionary python.grid.availableDatasets = {}

Definition at line 12 of file grid.py.

replace
std::string replace(std::string s, const std::string &s2, const std::string &s3)
Definition: hcg.cxx:307
CaloCellPos2Ntuple.int
int
Definition: CaloCellPos2Ntuple.py:24
python.grid.AvailableDatasets
def AvailableDatasets()
Definition: grid.py:45
python.grid.getShortenedConcatenatedSample
def getShortenedConcatenatedSample(sampleName)
Definition: grid.py:599
python.grid.checkMergeType
def checkMergeType(configuration)
Definition: grid.py:153
python.grid.submit
def submit(config, allSamples)
Definition: grid.py:193
python.grid.convertAODtoTOPQ
def convertAODtoTOPQ(derivationToUse, ptag, samples)
Definition: grid.py:432
python.grid.basicInDSNameShortener
def basicInDSNameShortener(*args)
Definition: grid.py:57
python.grid.checkForPrun
def checkForPrun()
Definition: grid.py:164
python.grid.checkForShowerAlgorithm
def checkForShowerAlgorithm(Samples, cutfile)
Definition: grid.py:442
python.grid.makeDirectory
def makeDirectory(outputDirectory)
Definition: grid.py:420
python.grid.isAF2
def isAF2(dataset)
Definition: grid.py:483
plotBeamSpotVxVal.range
range
Definition: plotBeamSpotVxVal.py:195
CxxUtils::set
constexpr std::enable_if_t< is_bitmask_v< E >, E & > set(E &lhs, E rhs)
Convenience function to set bits in a class enum bitmask.
Definition: bitmask.h:224
TCS::join
std::string join(const std::vector< std::string > &v, const char c=',')
Definition: Trigger/TrigT1/L1Topo/L1TopoCommon/Root/StringUtils.cxx:10
python.grid.Add
def Add(name)
Definition: grid.py:41
python.grid.checkForFile
def checkForFile(filename)
Definition: grid.py:161
Trk::open
@ open
Definition: BinningType.h:40
python.grid.findPackages
def findPackages()
Definition: grid.py:174
python.grid.getFirstTag
def getFirstTag(tags, letter)
gets the first AMI tag of a kind
Definition: grid.py:587
Muon::print
std::string print(const MuPatSegment &)
Definition: MuonTrackSteering.cxx:28
python.grid.isData
def isData(dataset)
Definition: grid.py:491
python.grid.Samples
def Samples(names)
Definition: grid.py:48
python.grid.checkPRWFile
def checkPRWFile(Samples, cutfile)
Definition: grid.py:498
Trk::split
@ split
Definition: LayerMaterialProperties.h:38