ATLAS Offline Software
AtlRunQueryUtils.py
#!/usr/bin/env python

# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
#
# ----------------------------------------------------------------
# Script : AtlRunQueryUtils.py
# Project: AtlRunQuery
# Purpose: Utility functions for AtlRunQuery
# Authors: Andreas Hoecker (CERN), Joerg Stelzer (DESY)
# Created: Jan 20, 2009
# ----------------------------------------------------------------
#
from CoolRunQuery.utils.AtlRunQueryTimer import timer

import sys, os, time, re, calendar
from math import exp, sqrt, pi
from copy import copy
import cx_Oracle
import struct

import http.client
import urllib.parse

import DQDefects

def checkURL(url):
    """Return 1 if an HTTP HEAD request to url is answered with status 200, else 0."""
    p = urllib.parse.urlparse(url)
    conn = http.client.HTTPConnection(p.netloc)
    conn.request('HEAD', p.path or '/')
    if conn.getresponse().status == 200:
        return 1
    return 0

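# Usage sketch (illustration only, not part of the original module): checkURL()
# answers 1 only when the server replies to a HEAD request with HTTP 200; the
# URL below is just an example value and requires network access.
def _example_checkURL():
    return checkURL("http://cern.ch/")
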
class RunPeriods():
    def findPeriod( self, runno ):
        from CoolRunQuery.AtlRunQueryCOMA import ARQ_COMA
        s = ",".join( ARQ_COMA.get_periods_for_run(runno) )
        return s

class Enumerate(object):  # class header missing in the listing, name assumed
    """Simple enumeration helper: Enumerate("A B C").B == 1"""
    def __init__(self, names):
        for number, name in enumerate(names.split()):
            setattr(self, name, number)

def runsOnServer():  # def line missing in the listing, name assumed
    hostname = os.getenv('HOSTNAME')
    print ("Execution on host: %r" % hostname )
    if not hostname:
        onserver = False
    else:
        onserver = ( re.match(r'aiatlas.*\.cern\.ch', hostname) is not None )
    print ("Execution on server: %r" % onserver)
    return onserver

def importroot(batch=True):
    import sys
    cmdline_args = sys.argv
    sys.argv[1:] = []
    if batch:
        sys.argv += ['-b']
    import ROOT
    ROOT.gROOT.SetBatch(batch)
    if batch:
        del sys.argv[1]
    ROOT.gErrorIgnoreLevel = 2000  # to avoid printout when creating images
    sys.argv = cmdline_args
    return ROOT

def prettyNumber( n, width=-1, delim=',', decimal='.' ):
    """Converts a float to a string with appropriately placed commas"""
    if width >= 0:
        s = "%.*f" % (width, n)
    else:
        s = str(n)
    dec = s.find(decimal)
    if dec == -1:
        dec = len(s)
    threes = int((dec-1)/3)
    for i in range(threes):
        loc = dec-3*(i+1)
        s = s[:loc] + delim + s[loc:]
    return s

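# Usage sketch (illustration only, not part of the original module): shows the
# digit grouping performed by prettyNumber; the numbers are arbitrary examples.
def _example_prettyNumber():
    assert prettyNumber(1234567) == "1,234,567"
    assert prettyNumber(1234567.8912, width=2) == "1,234,567.89"
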
def durationInSeconds( duration_string ):
    """Decode a duration string such as '1w 2d 3h' (units y/w/d/h/m/s) into seconds."""
    lst = duration_string.split()
    sec = 0
    for entry in lst:
        if 's' in entry:
            sec += int(entry.replace('s',''))
        elif 'm' in entry:
            sec += int(entry.replace('m',''))*60
        elif 'h' in entry:
            sec += int(entry.replace('h',''))*3600
        elif 'd' in entry:
            sec += int(entry.replace('d',''))*3600*24
        elif 'w' in entry:
            sec += int(entry.replace('w',''))*3600*24*7
        elif 'y' in entry:
            sec += int(entry.replace('y',''))*3600*24*365
        else:
            print ('Big troubles... in function "AtlRunQueryUtils::durationInSeconds": cannot decode string "%s"' % entry)
            sys.exit(1)
    return sec

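# Usage sketch (illustration only, not part of the original module): the unit
# letters accepted by durationInSeconds, with an arbitrary example duration.
def _example_durationInSeconds():
    assert durationInSeconds("1d 2h 30m") == 24*3600 + 2*3600 + 30*60   # 95400 s
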
class coolDbConn(object):

    def __init__(self):
        self.openConn = {}
        self.pw = {}

    def GetDBConn(self, schema, db):
        """for example schema='COOLONL_TRIGGER', db='CONDBR2'"""
        if (schema,db) in self.openConn:
            return self.openConn[(schema,db)]
        try:
            if schema=="DEFECTS":
                #from AthenaCommon.Logging import logging # this is needed because of some problem in the DQUtils logger:
                # the logger there, if called first, makes the athena logger crash
                defdb = DQDefects.DefectsDB("COOLOFL_GLOBAL/CONDBR2", tag=db)
                defdb.closeDatabase = lambda: None
                self.openConn[(schema,db)] = defdb
            else:
                logging = False
                from CoolConvUtilities.AtlCoolLib import indirectOpen
                self.openConn[(schema,db)] = indirectOpen("%s/%s" % (schema,db), True, logging)
        except Exception:
            import traceback
            traceback.print_exc()
            sys.exit(-1)
        return self.openConn[(schema,db)]

    def get_auth(self, key):
        if key not in self.pw:
            from os import environ as env
            #lookup = XMLReader(env['CORAL_DBLOOKUP_PATH']+"/dblookup.xml")
            #for s in lookup.servicelist.logicalservices:
            #    print ("Service",s['name'])
            #    for p in s.services:
            #        print ("    name",p['name'])
            auth = XMLReader(env['CORAL_AUTH_PATH']+"/authentication.xml")
            for c in auth.connectionlist.connections:
                if key != c['name']:
                    continue
                self.pw[key] = dict([(p['name'],p['value']) for p in c.parameters])
                break
            if key not in self.pw:
                print ("Can not authenticate DB", key)
                sys.exit(0)
        return self.pw[key]

    def GetAtlasRunDBConnection(self):
        if 'run' not in self.openConn:
            auth = self.get_auth('oracle://ATLAS_COOLPROD/ATLAS_COOLOFL_TRIGGER')
            self.openConn['run'] = cx_Oracle.connect("%s/%s@ATLAS_COOLPROD" % (auth['user'],auth['password']))
        return self.openConn['run']

    def GetSFODBConnection(self):  # def line missing in the listing, name assumed
        if 'sfo' not in self.openConn:
            auth = self.get_auth('oracle://ATLAS_CONFIG/ATLAS_SFO_T0_R')
            with timer("Opening Connection to ATLAS_SFO_T0_R @ ATLAS_CONFIG"):
                self.openConn['sfo'] = cx_Oracle.connect("%s/%s@ATLAS_CONFIG" % (auth['user'],auth['password']))
        return self.openConn['sfo']

    def GetTier0DBConnection(self):  # def line missing in the listing, name assumed
        if 'tier0' not in self.openConn:
            auth = self.get_auth('oracle://ATLAS_T0/ATLAS_T0')
            self.openConn['tier0'] = cx_Oracle.connect("%s/%s@ATLAS_T0" % (auth['user'],auth['password']))
        return self.openConn['tier0']

    def GetPVSSDBConnection(self):  # def line missing in the listing, name assumed
        if 'pvss' not in self.openConn:
            auth = self.get_auth('oracle://ATLAS_PVSSPROD/ATLAS_PVSS_READER')
            self.openConn['pvss'] = cx_Oracle.connect("%s/%s@ATLAS_PVSSPROD" % (auth['user'],auth['password']))
        return self.openConn['pvss']

    def CloseAll(self):
        for (dbname, dbconn) in self.openConn.items():
            if isinstance(dbconn, cx_Oracle.Connection):
                dbconn.close()
            else:
                dbconn.closeDatabase()

def addKommaToNumber(no):  # def line missing in the listing, name assumed
    gr = [str(no)]
    if not gr[0].isdigit():
        return gr[0]
    while gr[0]:
        gr[0:1] = [gr[0][:-3], gr[0][-3:]]
    return ','.join(gr[1:])

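# Usage sketch (illustration only, not part of the original module): digit
# grouping done by the helper above; the input value is arbitrary.
def _example_addKommaToNumber():
    assert addKommaToNumber(1234567) == "1,234,567"
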
def filesize(no):
    if no is None:
        return "n.a."
    if no > 0x4000000000000:
        return "%1.1f PB" % (no*1.0/0x4000000000000)
    if no > 0x10000000000:
        return "%1.1f TB" % (no*1.0/0x10000000000)
    if no > 0x40000000:
        return "%1.1f GB" % (no*1.0/0x40000000)
    if no > 0x100000:
        return "%1.1f MB" % (no*1.0/0x100000)
    if no > 0x400:
        return "%1.1f kB" % (no*1.0/0x400)

    return "%i B" % no

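# Usage sketch (illustration only, not part of the original module): filesize()
# formats byte counts with binary (power-of-two) prefixes.
def _example_filesize():
    assert filesize(None) == "n.a."
    assert filesize(3000000000) == "2.8 GB"   # 3e9 / 2**30 = 2.79...
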
# a helper function to decode range lists and merge the intervals
# par: listOfRanges: [(b1,e1),(b2,e2),....,(bn,en)] -> [(b1,e2),....,(bn,en)] if b2-1<=e1
def MergeRanges(listOfRanges):
    listOfRanges.sort()
    newRR = []
    for rr in listOfRanges:
        if len(newRR)==0 or rr[0]-1 > newRR[-1][1]:
            newRR.append(copy(rr))
        else:
            newRR[-1] = [ newRR[-1][0], max(rr[1],newRR[-1][1]) ]
    return newRR

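# Usage sketch (illustration only, not part of the original module): adjacent or
# overlapping [begin,end] ranges are collapsed into one; the values are arbitrary.
def _example_MergeRanges():
    assert MergeRanges([[1, 5], [6, 8], [20, 25], [7, 12]]) == [[1, 12], [20, 25]]
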
# a helper function to decode range lists and merge the intervals (like above)
# par: listOfRanges: [(b1,e1],(b2,e2],....,(bn,en]] -> [(b1,e2],....,(bn,en]] if b2<=e1
def MergeLBRanges(listOfRanges):
    listOfRanges.sort()
    newRR = []
    for rr in listOfRanges:
        if len(newRR)==0 or rr[0] > newRR[-1][1]:
            newRR.append(copy(rr))
        else:
            newRR[-1] = ( newRR[-1][0], max(rr[1],newRR[-1][1]) )
    return newRR

class Matrix:  # class header missing in the listing, name assumed

    def __init__(self, cols, rows):
        self.cols = cols
        self.rows = rows
        # initialize matrix and fill with zeroes
        self.matrix = []
        for i in range(rows):
            ea_row = []
            for j in range(cols):
                ea_row.append(0)
            self.matrix.append(ea_row)

    def setitem(self, col, row, v):
        self.matrix[col-1][row-1] = v

    def getitem(self, col, row):
        return self.matrix[col-1][row-1]

    def __repr__(self):
        outStr = ""
        for i in range(self.rows):
            outStr += 'Row %s = %s\n' % (i+1, self.matrix[i])
        return outStr

    def __iter__(self):
        for row in range(self.rows):
            for col in range(self.cols):
                yield (self.matrix, row, col)

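# Usage sketch (illustration only, not part of the original module): setitem and
# getitem take 1-based (col, row) indices.
def _example_Matrix():
    m = Matrix(2, 2)
    m.setitem(1, 2, 5)
    assert m.getitem(1, 2) == 5
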
def stringToIntOrTime(s):
    if re.match(r"^\d+$", s):
        return int(s)
    # convert the string into nanoseconds since epoch

    # string is meant to be UTC time (hence we need calendar.timegm
    # and not time.mktime to convert the tuple into seconds)
    t = int(1E9*calendar.timegm(time.strptime(s,"%d.%m.%Y")))
    #print ("turning",s,"into time",t)
    #print ("cross-check:",time.strftime("%d.%m.%Y %H:%M:%S",time.gmtime(t/1E9)))
    return t

281 """ convert string into seconds since epoch
282
283 string is meant to be UTC time (hence we need calendar.timegm
284 and not time.mktime to convert the tuple into seconds)
285
286 format can be '1.5.2010_14:23:10', '1.5.2010 14:23:10', or '1.5.2010'
287 """
288 try:
289 t = time.strptime(t,"%d.%m.%Y")
290 except ValueError:
291 try:
292 t = time.strptime(t,"%d.%m.%Y %H:%M:%S")
293 except ValueError:
294 t = time.strptime(t,"%d.%m.%Y_%H:%M:%S")
295
296 return int(calendar.timegm(t))
297
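# Usage sketch (illustration only, not part of the original module): the date
# string is interpreted as UTC.
def _example_timeStringToSecondsUTC():
    assert timeStringToSecondsUTC("1.1.2010") == 1262304000            # 2010-01-01 00:00:00 UTC
    assert timeStringToSecondsUTC("1.1.2010_12:00:00") == 1262347200
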
299 """ convert string into seconds since epoch
300
301 string is meant to be in local time (hence we use time.mktime
302 and not calender.timegm to convert the tuple into seconds)
303
304 format can be '1.5.2010_14:23:10', '1.5.2010 14:23:10', or '1.5.2010'
305 """
306 try:
307 t = time.strptime(t,"%d.%m.%Y")
308 except ValueError:
309 try:
310 t = time.strptime(t,"%d.%m.%Y %H:%M:%S")
311 except ValueError:
312 t = time.strptime(t,"%d.%m.%Y_%H:%M:%S")
313
314 return int(time.mktime(t))
315
def secondsToTimeStringUTC(s):  # def line missing in the listing, name assumed
    # convert seconds since epoch into a UTC time string
    return time.strftime("%d.%m.%Y %H:%M:%S", time.gmtime(s))


def secondsToTimeStringLocalTime(s):  # def line missing in the listing, name assumed
    # convert seconds since epoch into a local-time string
    return time.strftime("%d.%m.%Y %H:%M:%S", time.localtime(s))

def GetRanges(rangestr, intRepFnc=stringToIntOrTime, maxval=1<<30):
    if type(rangestr)==list:
        ranges = rangestr
    else:
        if rangestr[:4]=='STR:':
            return [ [rangestr[4:], rangestr[4:]] ]
        ranges = rangestr.split(',')
    listOfRanges = []
    for r in ranges:
        if r == '-':
            listOfRanges += [[0,maxval]]
        elif not ('-' in r or '+' in r):
            # single value
            x = intRepFnc(r)
            listOfRanges += [[x,x]]
        elif r[-1] == '+':
            listOfRanges += [[intRepFnc(r[:-1]),maxval]]
        elif r[-1] == '-':
            listOfRanges += [[0,intRepFnc(r[:-1])]]
        else:
            startend = r.split('-')
            if len(startend)!=2:
                raise RuntimeError ("Range format error '%s'" % r)
            listOfRanges += [[intRepFnc(x) for x in startend]]
    return MergeRanges(listOfRanges)

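# Usage sketch (illustration only, not part of the original module): open-ended
# ranges use the default maxval=1<<30; plain integers pass through stringToIntOrTime.
def _example_GetRanges():
    assert GetRanges("100-200,300+,50") == [[50, 50], [100, 200], [300, 1 << 30]]
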
def full_time_string(s, startofday):
    try:
        time.strptime(s,"%d.%m.")
        year = str(time.gmtime().tm_year)
        return s+year+"_00:00:00" if startofday else s+year+"_23:59:59"
    except ValueError:
        try:
            time.strptime(s,"%d.%m.%Y")
            return s+"_00:00:00" if startofday else s+"_23:59:59"
        except ValueError:
            return s

def GetTimeRanges(timeranges, intRepFnc=timeStringToSecondsUTC, maxval=1<<30):
    if type(timeranges)==list:
        timeranges = ','.join(timeranges)

    timeranges = timeranges.split(',')
    listOfRanges = []
    listOfRangesHR = []

    for r in timeranges:
        start = 0
        end = 'inf'
        if r == '-':
            listOfRangesHR += [['0','inf']]
        elif not ('-' in r or '+' in r):
            # single value
            start = full_time_string( r, startofday=True )
            end = full_time_string( r, startofday=False )
        elif r[-1] == '+':
            start = full_time_string( r[:-1], startofday=True )
        elif r[-1] == '-':
            end = full_time_string( r[:-1], startofday=False )
        else:
            try:
                start, end = r.split('-')
            except ValueError:
                raise RuntimeError ("Time format '%s' wrong, should be 'from-until'" % r)
            start = full_time_string( start, startofday=True )
            end = full_time_string( end, startofday=False )

        listOfRangesHR += [[start,end]]
        start = 0 if start==0 else intRepFnc(start)
        end = maxval if end=='inf' else intRepFnc(end)
        listOfRanges += [[start,end]]

    return MergeRanges(listOfRanges), listOfRangesHR

def SmartRangeCalulator(runlist, singleRuns=True):
    if len(runlist) == 0:
        return []
    if isinstance(runlist[0], list):
        return runlist
    if singleRuns:
        rr = [[r.runNr,r.runNr] for r in runlist]
    else:
        rr = [[runlist[0].runNr,runlist[-1].runNr]]
    return rr

# code from Eric Torrence
# Optional argument nval to specify the number of values to read
def bConvertList(b, nbyte=1, nval=1):
    # unpack 'nval' values of 'nbyte' width each from the blob 'b'
    # (1/2 bytes: unsigned int, 4/8 bytes: float/double)
    packopt = dict([[1,'B'],[2,'H'],[4,'f'],[8,'d']])
    if nbyte in packopt:
        # print ('bConvert - b:[', b[0:nbyte], '] nbyte:', nbyte, ' fmt:', packopt[nbyte], type(b))
        fmt = '%d%s' % (nval, packopt[nbyte])
        ival = struct.unpack(fmt, b[0:nval*nbyte])
    else:
        print ('bConvertList: Unrecognized pack option')
        sys.exit()
    return list(ival)

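# Usage sketch (illustration only, not part of the original module): decoding
# three 2-byte unsigned values, the way the BCID-mask readers below use the helper.
def _example_bConvertList():
    data = struct.pack('3H', 1, 10, 3564)
    assert bConvertList(data, 2, 3) == [1, 10, 3564]
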
def unpackRun1BCIDMask(blob, nb1, nb2, nbc):
    size = nb1+nb2+nbc
    a = bConvertList(blob, 2, size)
    beam1 = a[0:nb1]
    beam2 = a[nb1:nb1+nb2]
    coll = a[nb1+nb2:]
    #print ('unpackRun1BCIDMask found:')
    #print ('  Beam1:', beam1)
    #print ('  Beam2:', beam2)
    #print ('  Coll: ', coll)
    return beam1, beam2, coll

def unpackRun2BCIDMask(blob):
    beam1 = []
    beam2 = []
    coll = []
    rawData = bConvertList(blob, 1, 3564)
    for i in range(3564):
        val = rawData[i]
        if val & 0x01:
            beam1.append(i)
        if val & 0x02:
            beam2.append(i)
        if (val & 0x03) == 0x03:
            coll.append(i)
    #print ('unpackRun2BCIDMask found:')
    #print ('  Beam1:', beam1)
    #print ('  Beam2:', beam2)
    #print ('  Coll: ', coll)
    return beam1, beam2, coll

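# Usage sketch (illustration only, not part of the original module): a Run-2
# style mask is one byte per BCID (bit 0 = beam 1 filled, bit 1 = beam 2 filled);
# the blob below is synthetic.
def _example_unpackRun2BCIDMask():
    blob = bytearray(3564)
    blob[1] = 0x01        # beam 1 only
    blob[2] = 0x02        # beam 2 only
    blob[10] = 0x03       # both beams -> colliding BCID
    beam1, beam2, coll = unpackRun2BCIDMask(bytes(blob))
    assert beam1 == [1, 10] and beam2 == [2, 10] and coll == [10]
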
def Poisson( n, mu ):
    # only valid for small mu and integer n
    if n < 0:
        return 0
    else:
        p = exp(-mu)
        for i in range(n):
            p *= mu
            p /= float(i+1)
        return p

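# Usage sketch (illustration only, not part of the original module):
# P(n=2 | mu=1.5) = exp(-1.5) * 1.5**2 / 2! ~= 0.251
def _example_Poisson():
    assert abs(Poisson(2, 1.5) - 0.251) < 1e-3
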
def ComputePileup( lumi_mb, sigma, nbunch, fOrbit ):
    print("Calling compute pileup with")
    print("   lumi_mb  :", lumi_mb)
    print("   sigma    :", sigma)
    print("   # bunches:", nbunch)
    print("   # fOrbit :", fOrbit)
    p = []
    # sanity
    if nbunch <= 0:
        return 0, None

    # first compute the average number of interactions per crossing
    nint = lumi_mb * sigma / fOrbit / nbunch

    # large
    if nint > 100:
        return nint, None

    # small 'nint', compute poisson probabilities
    for n in range(40):
        p.append(Poisson(n, nint))
        if n > 20 and p[-1] < 1e-40:   # stop once the tail probability is negligible
            break
    return nint, p

def Pileup( args ):
    # orbit revolution frequency
    fOrbit = 11245.511 # [Hz] given by: 40.0790e6 Hz (bunch frequency) / 3564 (number of bunch slots)

    if len(args) < 3:
        return '<font color="red">ERROR: need at least 3 arguments for pileup calculation: inst.-lumi cross-section-in-mb nbunch [nevents]</font>'
    try:
        fstarg = float(args[0])
        if fstarg < 1e20:
            # input expects first two arguments to be number of protons and transverse beam size
            nprotons = fstarg
            sigtrans = float(args[1]) # given in microns
            sigtrans *= 1e-4          # transform to cm

            sigma = float(args[2])
            nbunch = float(args[3])
            lumi = nprotons**2 * fOrbit * nbunch / (4.0 * pi * sigtrans**2)
            nevents = 1
            if len(args) == 5:
                nevents = float(args[4])
        else:
            # input expects first argument to be luminosity in cm-2s-1
            lumi = float(args[0])
            sigma = float(args[1])
            nbunch = int(args[2])
            nevents = 1
            if len(args) == 4:
                nevents = int(args[3])

        # compute pileup
        lumi_mb = lumi/1.e27 # luminosity in mb-1s-1
        nint, plist = ComputePileup( lumi_mb, sigma, nbunch, fOrbit )
        print("Result:")
        print("   nint ", nint)
        print("   plist", plist)
        referenceInfo = '7TeV: 60.3&plusmn;2.1 mb'
        referenceInfo += ' [<a href="https://arxiv.org/abs/1104.0326" target="_blank" title="Measurement of the Inelastic Proton-Proton Cross Section at &radic;s=7 TeV with the ATLAS Detector at the LHC">arXiv:1104.0326</a>], '
        referenceInfo += '13TeV: 78.1&plusmn;2.9 mb'
        referenceInfo += ' [<a href="https://arxiv.org/abs/1606.02625" target="_blank" title="Measurement of the Inelastic Proton-Proton Cross Section at &radic;s=13 TeV with the ATLAS Detector at the LHC">arXiv:1606.02625</a>]'
        # create output string
        s = ' '
        s += '<table class="datasettable" style="font-size: 140%">'
        s += '<tr><td>Instantaneous luminosity :</td><td> %g cm&minus;2s&minus;1 (= %g mb&minus;1s&minus;1)</td></tr>' % (lumi, lumi_mb)
        s += '<tr><td>Inelastic cross section :</td><td> %g mb &nbsp;(%s)</td></tr>' % (sigma, referenceInfo)
        s += '<tr><td>Number of colliding bunches :</td><td> %g</td></tr>' % nbunch
        s += '<tr><td colspan="2"><hr style="width:100%; #999999; background-color: #999999; height:1px; margin-left:0px; border:0"></td></tr>'
        s += '<tr><td>Inelastic interaction rate:</td><td>%g Hz</td></tr>' % (lumi_mb*sigma)
        s += '<tr><td>Average number of interactions per crossing:&nbsp;&nbsp;</td><td> %g</td></tr>' % nint
        s += '</table>'
        s += '<hr style="width:100%; #999999; background-color: #999999; height:0px; border:0">\n<p>\n'
        if nint > 100:
            s += 'Very large pileup probability (assume Gaussian distribution): %g +- %g' % (nint, sqrt(nint))
        else:
            s += '<table class="pileuptable">'
            s += '<tr><th>Num. of interactions per filled bunch crossing</th><th>Probability per filled bunch crossing</th><th>Probability per triggered minimum bias event*</th>'
            if nevents > 1:
                s += '<th>Expected number of events in sample*</th>'
            s += '</tr>'

            pref = 1.0-plist[0] # probability for a zero bias trigger to have triggered an event
            psum = 0
            for i,p in enumerate(plist):
                if i >= 1:
                    s += '<tr><td>&gt;= %i</td><td>%g</td><td>%g</td>' % (i, 1.0-psum, (1.0-psum)/pref)
                    if nevents > 1:
                        nevexp = (1.0-psum)/pref*nevents
                        s += '<td> %g</td>' % (nevexp)
                    s += '</tr>'
                    if p < 1e-15:
                        break
                psum += p
            s += '</table><p></p>'
            s += '<font size=-2>*assuming 100% trigger efficiency for inelastic events</font><br>'

        return s

    except ValueError:
        return '<font color="red">ERROR: only numerical arguments allowed</font>'

def get_run_range(start, end=None):
    runs = get_runs(start, end)
    if len(runs) == 0:
        return None
    if len(runs) == 1:
        return (runs[0], runs[0])
    return (runs[0], runs[-1])

def get_runs_last(last):  # def line missing in the listing, name and signature assumed
    # 'last' is a string such as "last 3d"; returns the runs started within that period
    lt = last.replace('last','').strip()
    nsec = durationInSeconds( lt )
    start = time.gmtime( time.mktime(time.gmtime()) - nsec )
    return get_runs(time.strftime("%d.%m.%Y_%H:%M:%S", start))

def get_runs(start, end=None):
    """start and end are given in the format '1.5.2010_14:23:10', '1.5.2010 14:23:10', or '1.5.2010'"""

    co = coolDbConn.GetAtlasRunDBConnection()
    cu = co.cursor()

    # available:
    # NAME,RUNNUMBER,STARTAT,DURATION,CREATEDBY,HOST,PARTITIONNAME,CONFIGSCHEMA,CONFIGDATA,COMMENTS
    records = 'RUNNUMBER'

    t = time.gmtime( timeStringToSecondsUTC(start) )
    starttime = time.strftime("%Y%m%dT%H%M%S", t)

    if not end:
        q = "SELECT %s FROM ATLAS_RUN_NUMBER.RUNNUMBER WHERE STARTAT>'%s' ORDER BY RUNNUMBER desc" % (records, starttime)
    else:
        t = time.gmtime( timeStringToSecondsUTC(end) )
        endtime = time.strftime("%Y%m%dT%H%M%S", t)
        q = "SELECT %s FROM ATLAS_RUN_NUMBER.RUNNUMBER WHERE STARTAT>'%s' and STARTAT<'%s' ORDER BY RUNNUMBER" % (records, starttime, endtime)

    cu.arraysize = 100
    cu.execute(q)

    res = cu.fetchall()
    return [r[0] for r in res]

def get_run_range2(start, end=None):
    """
    find all runs between two timestamps
    start and end are given in the format '1.5.2010_14:23:10', '1.5.2010 14:23:10', or '1.5.2010'
    """

    # first we find the last run before the specified start time
    # second we check if it spans over the specified start time
    # - if it does then we use this run number
    # - if it does not we find the first run after the specified start time and use this run number
    # if an end is given we search for the last run that starts before the specified end time

    co = coolDbConn.GetAtlasRunDBConnection()
    cu = co.cursor()

    # available:
    # NAME,RUNNUMBER,STARTAT,DURATION,CREATEDBY,HOST,PARTITIONNAME,CONFIGSCHEMA,CONFIGDATA,COMMENTS

    start_seconds_utc = timeStringToSecondsUTC(start)
    start_gmtime = time.gmtime( start_seconds_utc )
    start_fstring = time.strftime("%Y%m%dT%H%M%S", start_gmtime)

    # last run that started before the begin of the range
    subq = "SELECT MAX(RUNNUMBER) FROM ATLAS_RUN_NUMBER.RUNNUMBER WHERE STARTAT<'%s' AND PARTITIONNAME='ATLAS'" % start_fstring
    q = "select RUNNUMBER,STARTAT,DURATION from ATLAS_RUN_NUMBER.RUNNUMBER where RUNNUMBER=(%s)" % subq
    cu.execute(q)
    try:
        run1, startat, duration = cu.fetchone()

        startAtGmtime = time.strptime( startat, "%Y%m%dT%H%M%S" ) # parse the format from the RunNumber DB
        startAtUtcSecond = calendar.timegm( startAtGmtime )
        # startAtHuman = time.strftime("%Y%m%dT%H%M%S", startAtGmtime)

        endAtUtcSecond = startAtUtcSecond + duration
        # endAtGmtime = time.gmtime( endAtUtcSecond )
        # endAtHuman = time.strftime("%Y%m%dT%H%M%S", endAtGmtime)

        # print("start: ", start)
        # print("Last run before start: ", run1)
        # print("   starts at: ", startat)
        # print("   lasts for: ", duration)
        # print("start sec", startAtUtcSecond)
        # print("start gmt", startAtGmtime)
        # print("start hr ", startAtHuman)
        # print("end sec", endAtUtcSecond)
        # print("end gmt", endAtGmtime)
        # print("end hr ", endAtHuman)

        # note that duration is not exact, but always a bit larger than the length of the run

        if endAtUtcSecond < start_seconds_utc:
            print("Run started and ended before the specified start time, so take the first run that started after the specified start time")
            q = "SELECT MIN(RUNNUMBER) FROM ATLAS_RUN_NUMBER.RUNNUMBER WHERE STARTAT>'%s' AND PARTITIONNAME='ATLAS'" % start_fstring
            cu.execute(q)
            try:
                run1 = cu.fetchone()[0]
            except cx_Oracle.Error:
                run1 = None
    except cx_Oracle.Error:
        run1 = None

    run2 = None
    if end is not None:
        # last run that started before the end of the range
        start_gmtime = time.gmtime( timeStringToSecondsUTC(end) )
        endtime = time.strftime("%Y%m%dT%H%M%S", start_gmtime)
        q = "SELECT MAX(RUNNUMBER) FROM ATLAS_RUN_NUMBER.RUNNUMBER WHERE STARTAT<'%s'" % endtime
        cu.execute(q)
        run2 = cu.fetchone()[0]

    return (run1, run2)

class XMLReader(object):

    class XMLElement(object):
        def __init__(self, element):
            self.element = element
            self.tag = element.tag
            self.attributes = dict(element.items())
            self.children = list(element)
            self.readchildren()

        def items(self):
            return self.attributes.items()

        def __str__(self):
            return "<%s %s>" % (self.tag, " ".join(['%s="%s"' % x for x in self.items()]))

        def __repr__(self):
            return self.tag

        def __getitem__(self, k):
            if k not in self.attributes:
                raise KeyError ("'%s'. XML element '%s' has attributes %s" % (k, self.tag, self.attributes.keys()))
            return self.attributes[k]

        def readchildren(self):
            self.childtags = []
            self._childtagdict = {}
            for c in self.children:
                self._childtagdict.setdefault(c.tag, []).append(XMLReader.XMLElement(c))
                if c.tag not in self.childtags:
                    self.childtags += [c.tag]
            for t in self.childtags:
                self.__dict__['%ss' % t] = self._childtagdict[t]
                if len(self._childtagdict[t]) == 1:
                    self.__dict__['%s' % t] = self._childtagdict[t][0]

        def __getattr__(self, name):
            raise AttributeError ("'%s'. XML element '%s' has tags %s" % (name, self.tag, ["%ss" % t for t in self.childtags]))


    def __init__(self, filename):
        import xml.etree.ElementTree as ET
        self.doc = ET.parse(filename)
        root = XMLReader.XMLElement(self.doc.getroot())
        self.__filename = filename
        self.__root = root
        self.__dict__[root.tag] = root

    def __getattr__(self, name):
        raise AttributeError ("'%s'. XML document '%s' has root tag '%s'" % (name, self.__filename, self.__root.tag))

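# Usage sketch (illustration only, not part of the original module): mirrors how
# coolDbConn.get_auth() reads a CORAL-style authentication.xml; the file name
# below is a placeholder.
def _example_XMLReader():
    auth = XMLReader("authentication.xml")
    for c in auth.connectionlist.connections:
        # attributes are reached with [], child elements via the generated '<tag>s' lists
        print(c['name'], [(p['name'], p['value']) for p in c.parameters])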