ATLAS Offline Software
getMetadata.py
#!/usr/bin/env python

# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
from __future__ import print_function

__author__ = "Will Buttinger"
__doc__ = """Extract dataset parameters from AMI, and write them to a text file.\nExamples:\n\n\ngetMetadata.py --inDS="mc15_13TeV.361103%DAOD_TRUTH%" --fields=dataset_number,ldn,nfiles,events,crossSection,genFiltEff,generator_name"""

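# A further illustrative invocation (hedged sketch: the file names here are
# hypothetical, but every flag is defined by the argument parser below):
#
#   getMetadata.py --inDsTxt=my_datasets.txt --fields=ldn,crossSection_pb,genFiltEff \
#                  --physicsGroups=PMG,MCGN --outFile=metadata.txt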
import logging
import sys

from future import standard_library
standard_library.install_aliases()
import subprocess

# Python 2.x/3.x compatibility
if sys.version_info[0] >= 3:
    unicode = str  # strings are unicode in Python3
#pinched from pandatools!
def readDsFromFile(txtName):
    import re
    dsList = []
    try:
        # read lines
        txt = open(txtName)
        for tmpLine in txt:
            # remove \n
            tmpLine = re.sub('\n','',tmpLine)
            # remove white spaces
            tmpLine = tmpLine.strip()
            # skip comment or empty
            if tmpLine.startswith('#') or tmpLine == '':
                continue
            # append
            dsList += [tmpLine]
        # close file
        txt.close()
    except Exception:
        errType,errValue = sys.exc_info()[:2]
        logging.error("cannot read datasets from %s due to %s:%s",txtName,errType,errValue)
        sys.exit(-1)
    return dsList

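# A minimal sketch of the dataset-list file consumed by readDsFromFile (format
# inferred from the parsing above: one dataset or pattern per line, with '#'
# comments and blank lines skipped; the 361104 pattern is purely illustrative):
#
#   # my truth-level samples
#   mc15_13TeV.361103%DAOD_TRUTH%
#
#   mc15_13TeV.361104%DAOD_TRUTH%
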
def isfloat(x):
    try:
        float(x)
    except ValueError:
        return False
    else:
        return True


def isint(x):
    try:
        a = float(x)
        b = int(a)
    except ValueError:
        return False
    else:
        return a == b

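# Behaviour notes (hedged, derived from the definitions above):
#   isfloat("4.5") -> True      isfloat("abc") -> False
#   isint("4.0")   -> True      (since float("4.0") == int(4.0))
#   isint("4.5")   -> False
# These helpers are used later to choose the /D vs /C type suffixes when
# writing the output header.
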
def main():
    logging.basicConfig(format='%(levelname)s:%(message)s')

    import time,datetime
    from pytz import timezone
    import argparse

    try:
        import pyAMI.client
        import pyAMI.atlas.api as AtlasAPI
        import pyAMI.config
    except ImportError:
        logging.error("Unable to find pyAMI client. Please try this command first: lsetup pyAMI")
        return -1

    extraFieldDefaults = {} #{"approx_crossSection":None,"approx_GenFiltEff":1.0}

    fieldDefaults = {"subprocessID":0,"dataset_number":0}
    #populate the fieldDefaults ... for all, assume 'None'
    for field in pyAMI.config.tables['datasets'].keys():
        if str(field) == "cross_section": continue #special exception because this field only present in
        if str(field) in fieldDefaults.keys(): continue
        if str(field).startswith("@"): continue
        fieldDefaults[str(field)] = None

    #check the voms proxy
    status,out = subprocess.getstatusoutput("voms-proxy-info -fqan -exists")
    if status!=0:
        logging.error("Please renew your certificate with this command: voms-proxy-init -voms atlas")
        return -1

    try:
        client = pyAMI.client.Client('atlas')
        AtlasAPI.init()
    except Exception:
        logging.error("Could not establish pyAMI session. Are you sure you have a valid certificate? Do: voms-proxy-init -voms atlas")
        return -1

    #need to collect the AMI dataset parameter defaults
    paramExplains = [] #for the help message only

    paramUnits = dict()

    paramDefaults = {}

    res = client.execute('ListPhysicsParameterDefs',format='dom_object')
    for r in res.get_rows(): #r is an OrderedDict
        explainString = "%s: %s" % (r[u'PARAMNAME'],r[u'DESCRIPTION'])
        if r[u'UNITS']!=u'NULL':
            explainString += " (units: %s)" % r[u'UNITS']
            paramUnits[r[u'PARAMNAME']] = r[u'UNITS']
        if r[u'HASDEFAULT']==u'N': paramDefaults[str(r[u'PARAMNAME'])] = None
        else:
            explainString += " (default value = %s)" % r[u'DEFAULTVALUE']
            if r[u'PARAMTYPE']==u'number': paramDefaults[str(r[u'PARAMNAME'])] = float(r[u'DEFAULTVALUE']) #FIXME: Assumes all parameters are floats
            elif r[u'PARAMTYPE']==u'string': paramDefaults[str(r[u'PARAMNAME'])] = str(r[u'DEFAULTVALUE'])
        paramExplains += [explainString]

    paramDefaults["crossSection_pb"] = None
    paramUnits["crossSection_pb"] = "pb"
    paramExplains += ["crossSection_pb: Same as crossSection except in pb units (units: pb)"]

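    # Hedged note: the factor applied later (paramValue * 1000.0 for
    # crossSection_pb) implies that AMI stores crossSection in nb, so the
    # conversion 1 nb = 1000 pb produces the extra column registered above.
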
    cern_time = timezone('UCT')
    current_time = datetime.datetime.fromtimestamp(time.time(),cern_time).strftime('%Y-%m-%d %H:%M:%S')

    from argparse import RawTextHelpFormatter
    parser = argparse.ArgumentParser(description=__doc__,formatter_class=RawTextHelpFormatter)
    parser.add_argument('--inDS',nargs='+',default=[""],help="List of datasets to retrieve parameters for")
    parser.add_argument('--inDsTxt',default="",help="Alternative to --inDS: specify the datasets in an input text file")
    parser.add_argument('--fields',nargs='+',help="List of parameters to extract. Available parameters are: \n\n  %s\n\nYou can also include any from:\n  %s\nYou can also do keyword_xxx to add a bool branch for keywords" % ("\n  ".join(paramExplains),", ".join(list(fieldDefaults.keys())+list(extraFieldDefaults.keys()))),default=["dataset_number","crossSection","kFactor","genFiltEff"])
    parser.add_argument('--timestamp',default=current_time,help="The timestamp to query parameters at, specified in Coordinated Universal Time (UTC). If left blank, the current time is used")
    parser.add_argument('--physicsGroups',nargs='+',default=["PMG,MCGN"],help="Physics groups from which to retrieve parameters, listed in order of priority (highest first). Default value is 'PMG,MCGN' (i.e. try to use PMG values, falling back on MCGN values if unavailable). Allowed groups are:\n   PMG (this is the PMG's group name), BPHY, COSM, DAPR, EGAM, EXOT, FTAG, HIGG, HION, IDET, IDTR, JETM, LARG, MCGN (this is the AMI default group name), MDET, MUON, PHYS, REPR, SIMU, STDM, SUSY, TAUP, TCAL, TDAQ, THLT, TOPQ, TRIG, UPGR, VALI")

    parser.add_argument('--oldTimestamp',default="",help="If specified, will instead display a diff between the old and new timestamps, with an explanation of any changed parameters")

    parser.add_argument('--explainFields',nargs='+',default=[],help="The fields you would like explained; the explanations appear as comment lines after each row in the output")
    parser.add_argument('--explainInfo',nargs='+',default=[],help="Properties of the parameter to show in the explanation. Can list from: explanation, insert_time, physicsGroup, createdby")
    parser.add_argument('--outFile',default=sys.stdout,type=argparse.FileType('w'),help="Where to print the output to. Leave blank to print to stdout")
    parser.add_argument('--delim',default="",help="The delimiter character. Defaults to spaces, producing a nicely formatted table")
    parser.add_argument('-v',action='store_true',help="Verbose output for debugging")

    args = parser.parse_args()

    if args.v: logging.getLogger().setLevel(logging.DEBUG)
    else: logging.getLogger().setLevel(logging.INFO)
    logging.debug(args.inDS)
    logging.debug(args.fields)
    logging.debug(args.timestamp)

    if args.timestamp=="the dawn of time":
        logging.error("Unfortunately we don't know any parameters from this time period... but we're working on it!")
        return 9999

    #split elements of fields by comma to get the full list
    args.fields = sum((y.split(',') for y in args.fields),[])
    args.fields = [x.strip() for x in args.fields] #strip whitespace
    #look for keyword_ fields, these are special ...
    args.keywords = []
    for f in args.fields:
        if f.startswith("keyword_"):
            k = f[8:]
            #add each keyword to the extraFieldDefaults so it is recognised below
            extraFieldDefaults["keyword_%s"%k] = bool(False)
            args.keywords += [k]

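    # Worked example (hedged): --fields=crossSection,keyword_ttbar yields
    # args.keywords == ['ttbar'] and registers an extra boolean field
    # 'keyword_ttbar', filled later from the dataset's comma-separated 'keyword'
    # list; 'ttbar' is an illustrative keyword, not a guaranteed one.
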
    #do the same for physics groups
    args.physicsGroups = sum((y.split(',') for y in args.physicsGroups),[])
    args.physicsGroups = [x.strip() for x in args.physicsGroups] #strip whitespace

    #and the same for explainFields and explainInfo
    args.explainFields = sum((y.split(',') for y in args.explainFields),[])
    args.explainFields = [x.strip() for x in args.explainFields] #strip whitespace
    args.explainInfo = sum((y.split(',') for y in args.explainInfo),[])
    args.explainInfo = [x.strip() for x in args.explainInfo] #strip whitespace

    if args.inDsTxt != '': args.inDS = readDsFromFile(args.inDsTxt)

    #and the same for inDS
    args.inDS = sum((y.split(',') for y in args.inDS),[])
    args.inDS = [x.strip() for x in args.inDS] #strip whitespace


    #The plan:
    #1. check that the requested field values are allowed; we obtain the default field values at the same time
    #2. for each entry in inDS: if it contains a wildcard, obtain the list of matching datasets, otherwise check the dataset exists. During this we obtain the datasetid and numEvents properties, in case we need them
    #3. for each of these datasets, get the parameters from AMI matching the timestamp. Organize into fields and index by subprocessID
    #4. output a line to the output file

    #1.
    #before adding all the AMI parameters, identify which of the provided fields are: 1) obtained from the list_datasets command (dsFields), or 2) actual parameters
    dsFields = [ x for x in args.fields if x in fieldDefaults.keys() and x not in ["subprocessID","ldn"] ]
    extraFields = [ x for x in args.fields if x in extraFieldDefaults.keys() ]
    paramFields = [ x for x in args.fields if x in paramDefaults.keys() ]

    if len(paramFields)>0 and args.physicsGroups==[""]:
        logging.error("You must specify at least one physics group. See -h for allowed groups")
        return -1

    #combine paramDefaults with fieldDefaults
    fieldDefaults.update(paramDefaults)
    #and with the extra fields
    fieldDefaults.update(extraFieldDefaults)

    for field in args.fields:
        if field not in fieldDefaults:
            logging.error("%s is not a recognised field. Allowed fields are:", field)
            logging.error(fieldDefaults.keys())
            return -1

    if args.oldTimestamp!="":
        logging.info("oldTimestamp option specified. Running in diff mode...")
        args.explainFields = args.fields
        args.explainInfo = ["explanation","insert_time","physicsGroup","createdby"]

    #2.
    #replace all '*' with '%' (the AMI wildcard) and strip any trailing '/'
    args.inDS = [ds.replace("*","%") for ds in args.inDS]
    args.inDS = [ds.rstrip("/") for ds in args.inDS]

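    # Hedged example: an --inDS value of "mc15_13TeV.361103*DAOD_TRUTH*/" becomes
    # "mc15_13TeV.361103%DAOD_TRUTH%", the '%' wildcard form that the AMI
    # list_datasets pattern match below expects.
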
    if len(args.inDS)==0 or (len(args.inDS)==1 and args.inDS[0]==""):
        logging.error("No datasets provided. Please specify datasets with the --inDS or --inDsTxt options")
        return -1

    logging.info("Fetching list of datasets from AMI (this may take a few minutes)...")

    #obtain the list of datasets
    res = AtlasAPI.list_datasets(client,patterns=args.inDS,fields=dsFields+['ldn'],ami_status="VALID") #changed status from %, to only catch valid now: wb 08/2015

    logging.info("...Found %d datasets matching your selection", len(res))

    if len(res)==0:
        return 0

    #NOTE: Should we allow retrieval of the extra information: keyword, genFiltEff, approximate cross section, ...? These all come from the GetDatasetInfo AMI command

    dataset_values = dict()
    for r in res:
        mydict = dict()
        dataset_values[str(r['ldn'])] = mydict
        for field in r.items():
            if str(field[0]) == "ldn": continue
            if str(field[0]) not in args.fields: continue
            mydict[str(field[0])] = str(field[1])
        #also if we have the 'extra fields or keywords' we will need to execute AtlasAPI.get_dataset_info ..
        if len(extraFields)>0 or len(args.keywords)>0:
            info_res = AtlasAPI.get_dataset_info(client,str(r['ldn']))
            #print(info_res)
            if len(info_res)==0:
                logging.error("Unable to retrieve dataset info for %s", r['ldn'])
                return -1
            for field in extraFields:
                #ignore the keyword_ fields
                if field.startswith("keyword_"): continue
                mydict[field] = float(info_res[0][unicode(field)]) if isfloat(info_res[0][unicode(field)]) else extraFieldDefaults[field]
            for k in args.keywords:
                mydict["keyword_%s" % k] = int( (k in str(info_res[0][unicode('keyword')]).split(",")) )

    #sort dataset_values as well as possible
    from collections import OrderedDict
    sorted_values = OrderedDict()
    for ds in args.inDS:
        if ds in dataset_values.keys():
            sorted_values[ds] = dataset_values[ds]

    for ds in dataset_values.keys():
        if ds not in sorted_values.keys():
            sorted_values[ds] = dataset_values[ds]
    dataset_values = sorted_values

    logging.debug(dataset_values)

    for ds in args.inDS:
        if '%' not in ds and ds not in dataset_values.keys():
            logging.warning("Unknown dataset: %s", ds)

    datasetsToQuery = ",".join(dataset_values.keys())

    #if using inDsTxt, retain any comment or blank lines in the structure of the output
    complete_values = OrderedDict()
    if args.inDsTxt != "":
        # read lines
        commentcount = 0
        import re
        txt = open(args.inDsTxt)
        for tmpLine in txt:
            # remove \n
            tmpLine = re.sub('\n','',tmpLine)
            # remove white spaces
            tmpLine = tmpLine.strip()
            # keep comment or empty lines in place
            if tmpLine.startswith('#') or tmpLine == '':
                complete_values['comment%d'%(commentcount)] = tmpLine
                commentcount = commentcount+1
                continue
            # append
            tmpLine = tmpLine.rstrip("/")
            if tmpLine in dataset_values.keys():
                complete_values[tmpLine] = dataset_values[tmpLine]
            else:
                logging.warning("cannot find %s", tmpLine)
        # close file
        txt.close()
        dataset_values = complete_values

    logging.info("Obtaining %s for selected datasets at timestamp=%s... (please be patient)",
                 args.fields, args.timestamp)

    #do as one query, to be efficient
    if args.timestamp==current_time:
        res = client.execute(['GetPhysicsParamsForDataset',"--logicalDatasetName=%s"% datasetsToQuery,"--timestamp='%s'"%args.timestamp], format='dom_object')
    else:
        res = client.execute(['GetPhysicsParamsForDataset',"--logicalDatasetName=%s"% datasetsToQuery,"--timestamp='%s'"%args.timestamp,"--history=true"], format='dom_object')

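    # Note (hedged, inferred from the branch above): when querying at the current
    # time the latest values suffice, but for any earlier --timestamp the
    # --history=true flag is needed so AMI returns the values as they stood then.
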
    #organize results by dataset
    parameterQueryResults = dict()
    for r in res.get_rows():
        if r[u'logicalDatasetName'] not in parameterQueryResults.keys():
            parameterQueryResults[r[u'logicalDatasetName']] = []
        parameterQueryResults[r[u'logicalDatasetName']] += [r] #puts the row in the list for this dataset

    if args.oldTimestamp!="":
        logging.info("Obtaining %s for selected datasets at timestamp=%s... (please be patient)",
                     args.fields,args.oldTimestamp)
        res2 = client.execute(['GetPhysicsParamsForDataset',"--logicalDatasetName=%s"% datasetsToQuery,"--timestamp='%s'"%args.oldTimestamp,"--history=true"], format='dom_object')
        old_parameterQueryResults = dict()
        for r in res2.get_rows():
            if r[u'logicalDatasetName'] not in old_parameterQueryResults.keys():
                old_parameterQueryResults[r[u'logicalDatasetName']] = []
            old_parameterQueryResults[r[u'logicalDatasetName']] += [r] #puts the row in the list for this dataset

    headerString = ""
    doneHeader = False
    commentCache = ""
    commentCount = 0

    #the result is a list of lists (each inner list is one row)
    outputTable = []
    tableHeaders = []

    for ds in dataset_values.keys():
        if ds.startswith('comment'):
            if commentCount > 0: commentCache += "\n"
            commentCache += dataset_values[ds]
            commentCount = commentCount+1
            continue
        #obtain the list of parameters for this dataset (fetched in the batched query above)
        res = parameterQueryResults.get(ds,[])
        if args.oldTimestamp!="": res2 = old_parameterQueryResults.get(ds,[])

        #first we have to determine how many subprocesses this dataset has
        dsSubprocesses = [0] #always have the 0 subprocess
        for r in res:
            sp = int(r[u'subprocessID'])
            if sp not in dsSubprocesses: dsSubprocesses += [sp]

        #now for each subprocess we have to locate each required field value (in paramFields),
        #ranked by physicsGroup
        for sp in dsSubprocesses:
            paramVals = dict()
            paramVals2 = dict()
            groupsWithVals = dict() #held for helpful output
            #need to keep explanations for requested fields
            explainInfo = dict()
            for i in args.explainFields: explainInfo[i] = dict()

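            # Ranking sketch (hedged illustration of the loop below): with
            # --physicsGroups=PMG,MCGN, a row from PMG (priority index 0) wins
            # over one from MCGN (index 1); rows from groups not listed are only
            # remembered in groupsWithVals for the warning printed later.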
            for param in paramFields:
                groupsWithVals[param] = []
                bestGroupIndex = len(args.physicsGroups)
                import copy
                paramVals[param] = copy.copy(fieldDefaults[param])
                for r in res:
                    if int(r[u'subprocessID']) != sp: continue
                    if str(r[u'paramName']) != param and not (param=="crossSection_pb" and str(r[u'paramName'])=="crossSection"): continue
                    if str(r[u'physicsGroup']) not in args.physicsGroups:
                        groupsWithVals[param] += [(str(r[u'physicsGroup']),str(r[u'paramValue']))]
                        continue
                    if args.physicsGroups.index(str(r[u'physicsGroup'])) > bestGroupIndex: continue
                    if args.physicsGroups.index(str(r[u'physicsGroup'])) == bestGroupIndex: logging.warning("Duplicate parameter %s for group %s in dataset %s (subprocess %d). Please report this!", param, r[u'physicsGroup'], ds, sp)
                    paramVals[param] = str(r[u'paramValue'])
                    if param=="crossSection_pb": paramVals[param] = str(float(paramVals[param])*1000.0)
                    bestGroupIndex = args.physicsGroups.index(str(r[u'physicsGroup']))
                    #keep the explanation info
                    for e in args.explainInfo:
                        if unicode(e) not in r:
                            logging.error("Unrecognised explainInfo field: %s", e)
                            return -1
                        explainInfo[param][e] = str(r[unicode(e)])
                if args.oldTimestamp!="":
                    #repeat the ranking for the old timestamp
                    bestGroupIndex = len(args.physicsGroups)
                    paramVals2[param] = copy.copy(fieldDefaults[param])
                    for r in res2:
                        if int(r[u'subprocessID']) != sp: continue
                        if str(r[u'paramName']) != param and not (param=="crossSection_pb" and str(r[u'paramName'])=="crossSection"): continue
                        if str(r[u'physicsGroup']) not in args.physicsGroups: continue
                        if args.physicsGroups.index(str(r[u'physicsGroup'])) > bestGroupIndex: continue
                        if args.physicsGroups.index(str(r[u'physicsGroup'])) == bestGroupIndex: logging.warning("Duplicate parameter %s for group %s in dataset %s (subprocess %d). Please report this!", param, r[u'physicsGroup'], ds, sp)
                        paramVals2[param] = str(r[u'paramValue'])
                        if param=="crossSection_pb": paramVals2[param] = str(float(paramVals2[param])*1000.0)
                        bestGroupIndex = args.physicsGroups.index(str(r[u'physicsGroup']))
            #at this stage, parameters reside in the paramVals dict or the dataset_values[ds] dict
            #print them in the requested order .. if any is "None" then warn, because it has no default value and no value was found for it either
            rowString = ""
            rowList = []
            firstPrint = False
            for param in args.fields:
                val = None
                if param == "ldn": val = ds
                elif param == "subprocessID": val = sp
                elif param in dataset_values[ds].keys(): val = dataset_values[ds][param]
                else: val = paramVals.get(param,None)
                if val is None:
                    if args.outFile != sys.stdout: logging.warning("dataset %s (subprocess %d) does not have parameter %s, which has no default.",ds,sp,param)
                    if len(groupsWithVals.get(param,[]))>0:
                        logging.warning("The following physicsGroups have defined that parameter though:")
                        logging.warning(groupsWithVals[param])
                    val = "#UNKNOWN#"
                    #return -1
                #if isfloat(str(val)): val = "%.6g" % float(val)
                if args.oldTimestamp!="":
                    #diff the value against the old value
                    val2 = None
                    if param == "ldn": val2 = ds
                    elif param == "subprocessID": val2 = sp
                    elif param in dataset_values[ds].keys(): val2 = dataset_values[ds][param]
                    else: val2 = paramVals2.get(param,None)
                    if val2 is None: val2 = "#UNKNOWN#"
                    #if isfloat(str(val2)): val2 = "%.6g" % float(val2)
                    if str(val)!=str(val2):
                        if not firstPrint: print("%s:" % ds)
                        firstPrint = True
                        print("  %s : %s ---> %s" % (param,str(val2),str(val)))
                        print("      insert_time  : %s" % explainInfo[param]['insert_time'])
                        print("      explanation  : %s" % explainInfo[param]['explanation'])
                        print("      createdby    : %s" % explainInfo[param]['createdby'])
                        print("      physicsGroup : %s" % explainInfo[param]['physicsGroup'])
                    continue

                rowList += [str(val)]
                if rowString != "" and args.delim!="": rowString += args.delim
                rowString += str(val)
                #inspect the type of str(val) to build up the header
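                # Hedged note: the /O, /I, /D, /C suffixes written below follow
                # the ROOT branch-descriptor convention (as used by
                # TTree::ReadFile): O=boolean, I=integer, D=double, C=string.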
                if not doneHeader:
                    headerString += param
                    if args.outFile != sys.stdout:
                        if type(fieldDefaults[param])==bool: headerString += "/O:"
                        elif type(fieldDefaults[param])==int: headerString += "/I:"
                        elif type(fieldDefaults[param])==float: headerString += "/D:"
                        elif isfloat(str(val)): headerString += "/D:"
                        #elif isint(str(val)): headerString += "/I:" TO BE SAFE WE MAKE ALL NUMERIC FIELDS FLOATS, EXCEPT if the default value is of type int
                        else: headerString += "/C:"
                    else:
                        v = param
                        if param in paramUnits:
                            headerString += " [%s]" % paramUnits[param]
                            v += " [%s]" % paramUnits[param]
                        tableHeaders += [v]
                        headerString += " "
            if args.oldTimestamp!="": continue #print nothing more in diff mode
            if not doneHeader:
                doneHeader = True
                if args.outFile!=sys.stdout: print(headerString[:-1],file=args.outFile)
            if commentCount > 0:
                if args.outFile!=sys.stdout and args.delim!="": print(commentCache,file=args.outFile)
                outputTable += [["COMMENT",commentCache]]
                commentCache = ''
                commentCount = 0
            if args.outFile != sys.stdout and args.delim!="": print(rowString,file=args.outFile)
            outputTable += [rowList]
            #also print the required explanations
            for (field,expl) in explainInfo.items():
                outString = "#%s: { " % field
                doneFirst = False
                for eField in args.explainInfo:
                    if doneFirst: outString += " , "
                    if eField not in expl.keys(): outString += "%s: <NONE .. value is default>"%eField
                    else: outString += "%s: %s" % (eField,expl[eField])
                    doneFirst = True
                outString += " }"
                print(outString,file=args.outFile)

    if args.oldTimestamp!="":
        args.outFile.close()
        return 0

    #print the table in a nicely formatted state
    if args.outFile == sys.stdout or args.delim=="":
        #determine the column widths
        columnWidths = [0]*len(args.fields)
        for i in range(0,len(tableHeaders)):
            columnWidths[i] = len(tableHeaders[i])
        for r in outputTable:
            if len(r)>0 and r[0]=="COMMENT": continue
            for i in range(0,len(r)):
                if len(r[i])>columnWidths[i]: columnWidths[i] = len(r[i])
        lineout = ""
        for i in range(0,len(tableHeaders)):
            lineout += tableHeaders[i].ljust(columnWidths[i]) + " "
        print(lineout)
        for r in outputTable:
            lineout = ""
            if len(r)>0 and r[0]=="COMMENT": lineout = r[1]
            else:
                for i in range(0,len(r)):
                    lineout += r[i].ljust(columnWidths[i]) + " "
            print(lineout,file=args.outFile)

    #print the footer, which is the command to reproduce this output
    import os
    if args.outFile != sys.stdout:
        #remove the comment entries from dataset_values
        datasetss = [x for x in dataset_values.keys() if not x.startswith("comment")]

        print("",file=args.outFile)
        print("#lsetup \"asetup %s,%s\" pyAMI" % (os.environ.get('AtlasProject','UNKNOWN!'),os.environ.get('AtlasVersion','UNKNOWN!')),file=args.outFile)
        print("#getMetadata.py --timestamp=\"%s\" --physicsGroups=\"%s\" --fields=\"%s\" --inDS=\"%s\"" % (args.timestamp,",".join(args.physicsGroups),",".join(args.fields),",".join(datasetss)),file=args.outFile)
        logging.info("Results written to: %s", args.outFile.name)

    args.outFile.close()


if __name__ == "__main__":
    sys.exit(main())