ATLAS Offline Software
htcondor_naf.py
Go to the documentation of this file.
1 # Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
2 
3 import subprocess,sys,os
4 
# Registry of every job queued via batchJob.submit(); finalizeJobs() turns it
# into the JOB lines of the DAG file.
joblist = []
# Dependency edges for the DAG: parent job id -> list of child job ids.
dependencelist = {}
7 
8 from . import batchJobBase
9 
11 
def submit(self):
    """Prepare this job for HTCondor at NAF: write its .sub file and register it.

    Method of batchJob (subclass of batchJobBase.batchJobBase, per the
    surrounding file).  It
      * maps the driver's sentinel memory values to real requests,
      * derives a DAGMan-safe job id from the wrapper-script path,
      * records the job and its parent->child edges in the module-level
        joblist / dependencelist for finalizeJobs(),
      * writes <basedir>/<name>.sub to disk.
    """
    # Sentinel memory values used by callers: 1 -> 2 GB, 2 -> 6 GB.
    if self.memMB == 1:
        self.memMB = 2000
    elif self.memMB == 2:
        self.memMB = 6000

    # Job id = script path relative to the parent directory; DAGMan node
    # names must not contain '.', so dots are replaced with dashes.
    self.id = os.path.relpath(self.basedir + "/" + self.name + ".sh", "../")
    self.id = self.id.replace(".", "-")

    # Register the job and its dependency edges for finalizeJobs().
    joblist.append(self)
    for parentid in self.dependsOnOk + self.dependsOnAny:
        dependencelist.setdefault(parentid, []).append(self.id)

    # Build the HTCondor submit description.  stdout and stderr share one
    # log file; the condor event log goes to outHTC.log in the work dir.
    # NOTE(review): memMB is only used to scale RequestCpus (presumably
    # ~2 GB of memory is granted per core at NAF -- confirm); no explicit
    # request_memory is written.
    submitfile = ""
    submitfile += "executable = " + self.basedir + "/" + self.name + ".sh\n"
    submitfile += "output = " + self.basedir + "/" + self.name + ".log\n"
    submitfile += "error = " + self.basedir + "/" + self.name + ".log\n"
    submitfile += "log = outHTC.log\n"
    submitfile += "+RequestRuntime = " + str(int(self.hours * 3600)) + "\n"
    submitfile += "RequestCpus = " + str(max(self.nCores, int(self.memMB / 2000.))) + "\n"
    submitfile += "queue\n"
    with open(self.basedir + "/" + self.name + ".sub", 'w') as f:
        f.write(submitfile)
    # Bug fix: the message previously read "(...sub" with no closing ')'.
    print("Created " + str(self.id) + " (" + self.basedir + "/" + self.name + ".sub)")
37 
def finalizeJobs(dryRun):
    """Assemble dagfile.dag from all registered jobs and hand it to DAGMan.

    Builds one JOB line per entry in the module-level joblist and one
    PARENT/CHILD line per entry in dependencelist, writes the result to
    dagfile.dag in the current directory, then runs condor_submit_dag.
    With dryRun true only the command and the DAG contents are printed.
    Exits the process with code 11 if the submission fails.
    """
    # JOB lines first, then the dependency edges.
    pieces = ["JOB " + job.id + " " + job.basedir + "/" + job.name + ".sub\n"
              for job in joblist]
    for parent, children in dependencelist.items():
        pieces.append("PARENT " + parent + " CHILD " + " ".join(children) + "\n")
    dagfile = "".join(pieces)

    with open("dagfile.dag", 'w') as f:
        f.write(dagfile)

    # Label the batch in condor_q with the name of the working directory.
    # NOTE(review): cmd is run through the shell, so a directory name with
    # spaces/metacharacters would break or alter the command -- verify
    # callers guarantee a benign cwd name.
    batchname = os.path.basename(os.path.normpath(os.getcwd()))
    cmd = "condor_submit_dag -force -batch-name " + batchname + " dagfile.dag"

    if dryRun:
        print (cmd + "\n")
        print ("dagfile: \n" + dagfile)
        return

    print (cmd)
    p = subprocess.Popen(cmd, shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    if len(out):
        print (out)
    if len(err):
        print (err)
    if p.returncode:
        print ("ERROR: error while submitting job")
        print ("return code: " + str(p.returncode))
        sys.exit(11)
74 
replace
std::string replace(std::string s, const std::string &s2, const std::string &s3)
Definition: hcg.cxx:307
max
#define max(a, b)
Definition: cfImp.cxx:41
sherpaTarCreator.batchJobBase.batchJobBase.name
name
Definition: batchJobBase.py:9
CaloCellPos2Ntuple.int
int
Definition: CaloCellPos2Ntuple.py:24
sherpaTarCreator.batchJobBase.batchJobBase.memMB
memMB
Definition: batchJobBase.py:13
sherpaTarCreator.htcondor_naf.batchJob.submit
def submit(self)
Definition: htcondor_naf.py:12
sherpaTarCreator.batchJobBase.batchJobBase.basedir
basedir
Definition: batchJobBase.py:19
sherpaTarCreator.htcondor_naf.batchJob
Definition: htcondor_naf.py:10
dumpHVPathFromNtuple.append
bool append
Definition: dumpHVPathFromNtuple.py:91
sherpaTarCreator.batchJobBase.batchJobBase.id
id
Definition: batchJobBase.py:20
sherpaTarCreator.batchJobBase.batchJobBase
Definition: batchJobBase.py:5
sherpaTarCreator.batchJobBase.batchJobBase.hours
hours
Definition: batchJobBase.py:11
sherpaTarCreator.batchJobBase.batchJobBase.nCores
nCores
Definition: batchJobBase.py:12
sherpaTarCreator.htcondor_naf.finalizeJobs
def finalizeJobs(dryRun)
Definition: htcondor_naf.py:38
sherpaTarCreator.batchJobBase.batchJobBase.dependsOnOk
dependsOnOk
Definition: batchJobBase.py:21
Trk::open
@ open
Definition: BinningType.h:40
sherpaTarCreator.batchJobBase.batchJobBase.dependsOnAny
dependsOnAny
Definition: batchJobBase.py:22
str
Definition: BTagTrackIpAccessor.cxx:11
dbg::print
void print(std::FILE *stream, std::format_string< Args... > fmt, Args &&... args)
Definition: SGImplSvc.cxx:70