# Mapping of internal metadata keys to the 'classic' report key names
_internalToClassicMap = {'conditions_tag' : 'conditionsTag',
                         'beam_type' : 'beamType',
                         'geometry' : 'geometryVersion',
                         'nentries' : 'events',
                         }
# Mapping of internal metadata keys to the Tier-0 gpickle key names
_internalToGpickleMap = {'file_guid' : 'GUID',
                         'checkSum' : 'checkSum',
                         'nentries' : 'events',
                         'file_size' : 'size',
                         }
# Keys that go into the 'more' -> 'metadata' section of the Tier-0 gpickle
_internalToGpickleMoreMap = {'beam_type' : 'beamType',
                             'conditions_tag' : 'conditionsTag',
                             'geometry' : 'geometryVersion',
                             }
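
# A rough illustration (not part of the transform code) of how these key maps are used:
# each internal metadata key is looked up per file and re-emitted under its report name,
# e.g. for a hypothetical metadata dictionary
#
#     metadata = {'conditions_tag': 'SOMETAG', 'nentries': 100}
#     {classicKey: metadata[myKey]
#      for myKey, classicKey in _internalToClassicMap.items() if myKey in metadata}
#     # -> {'conditionsTag': 'SOMETAG', 'events': 100}
#
# The methods below do the same translation one key at a time via getSingleMetadata().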

def __init__(self, fileArg):
    self._fileArg = fileArg

def python(self, fast = False, type = 'full'):
    # Properties shared by the whole file argument; per-file entries are collected in 'subFiles'
    if type == 'name':
        fileArgProps = {'dataset': self._fileArg.dataset,
                        'nentries': self._fileArg.getnentries(fast),
                        'subFiles': []}
    elif type == 'full':
        fileArgProps = {'dataset': self._fileArg.dataset,
                        'type': self._fileArg.type,
                        'subFiles': [],
                        'argName': self._fileArg.name,
                        }
    else:
        # Unknown report types are an internal error (raised via the module's trfExceptions/trfExit helpers)
        raise trfExceptions.TransformReportException(trfExit.nameToCode('TRF_INTERNAL_REPORT_ERROR'),
                                                     'Unknown file report type ({0}) in the file report for {1}'.format(type, self._fileArg))

    # Report against the plain basename only when that is unambiguous; fall back to the
    # (relative) path as the key if basenames clash or the files live in different directories
    uniqueBasenames = set([ os.path.basename(fname) for fname in self._fileArg.value ])
    uniqueDirectories = set([ os.path.dirname(os.path.relpath(os.path.normpath(fname))) for fname in self._fileArg.value ])
    if len(uniqueBasenames) != len(self._fileArg.value):
        msg.info('Detected two files with the same basename in a file argument - report for file {0} will be produced with the path as a key'.format(self._fileArg))
        basenameReport = False
    elif len(uniqueDirectories) > 1:
        msg.warning('Detected output files in different directories - report for file {0} will be produced with the path as a key'.format(self._fileArg))
        basenameReport = False
    else:
        basenameReport = True

    suppressed = []
    for fname in self._fileArg.value:
        if basenameReport:
            subFile = self.singleFilePython(fname, fast = fast, type = type)
        else:
            subFile = self.singleFilePython(fname, fast = fast, type = type, basename = False)
        if subFile is not None:
            # Suppress sub-files that report zero events
            if 'nentries' in subFile and subFile['nentries'] == 0:
                msg.info('Suppressing file {0}, nentries is 0'.format(subFile['name']))
                suppressed.append(subFile['name'])
            else:
                fileArgProps['subFiles'].append(subFile)

    return fileArgProps
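
# For illustration, a 'full' report from python() has roughly this shape (values hypothetical):
#
#     {'dataset': 'some.dataset.name',
#      'type': 'AOD',
#      'argName': 'outputAODFile',
#      'subFiles': [{'name': 'AOD.pool.root', 'file_guid': '...', 'nentries': 100, ...}]}
#
# while type='name' keeps only 'dataset', 'nentries' and the per-file name/GUID entries.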

def singleFilePython(self, filename, fast = False, type = 'full', basename = True):
    if filename not in self._fileArg.value:
        raise trfExceptions.TransformReportException(trfExit.nameToCode('TRF_INTERNAL_REPORT_ERROR'),
                                                     'Unknown file ({0}) in the file report for {1}'.format(filename, self._fileArg))
    # Key the entry on the basename unless the caller asked for the (relative) path
    if basename:
        entry = {'name': os.path.basename(filename)}
    else:
        entry = {'name': os.path.relpath(os.path.normpath(filename))}
    if type == 'name':
        # 'name' reports carry only the GUID
        entry.update(self._fileArg.getMetadata(files = filename, populate = not fast, metadataKeys = ['file_guid'])[filename])
    elif type == 'full':
        # 'full' reports carry all metadata except purely internal bookkeeping keys
        entry.update(self._fileArg.getMetadata(files = filename, populate = not fast, maskMetadataKeys = ['io', '_exists', 'integrity', 'file_type'])[filename])
    else:
        raise trfExceptions.TransformReportException(trfExit.nameToCode('TRF_INTERNAL_REPORT_ERROR'),
                                                     'Unknown file report type ({0}) in the file report for {1}'.format(type, self._fileArg))
    return entry

def classicEltreeList(self, fast = False):
    # One 'File' element per file in this argument, for the classic XML job report
    treeList = []
    for fname in self._fileArg.value:
        treeList.append(self.classicSingleEltree(fname, fast = fast))
    return treeList

def classicSingleEltree(self, filename, fast = False):
    if filename not in self._fileArg.value:
        raise trfExceptions.TransformReportException(trfExit.nameToCode('TRF_INTERNAL_REPORT_ERROR'),
                                                     'Unknown file ({0}) in the file report for {1}'.format(filename, self._fileArg))
    tree = ElementTree.Element('File', ID = str(self._fileArg.getSingleMetadata(fname = filename, metadataKey = 'file_guid', populate = not fast)))
    for myKey, classicKey in self._internalToClassicMap.items():
        if myKey == 'beam_type':
            # beam_type is normally a list - report only its first value, defensively
            beamType = self._fileArg.getSingleMetadata(fname = filename, metadataKey = myKey, populate = not fast)
            if isinstance(beamType, list):
                if len(beamType) == 0:
                    ElementTree.SubElement(tree, 'metadata', att_name = classicKey, att_value = '')
                else:
                    ElementTree.SubElement(tree, 'metadata', att_name = classicKey, att_value = str(beamType[0]))
            else:
                # Not the expected type, so a plain str() conversion is the best fallback
                ElementTree.SubElement(tree, 'metadata', att_name = classicKey, att_value = str(beamType))
        else:
            ElementTree.SubElement(tree, 'metadata', att_name = classicKey,
                                   att_value = str(self._fileArg.getSingleMetadata(fname = filename, metadataKey = myKey, populate = not fast)))
    # Metadata held at the level of the whole file argument
    ElementTree.SubElement(tree, 'metadata', att_name = 'fileType', att_value = str(self._fileArg.type))
    if self._fileArg.dataset is not None:
        ElementTree.SubElement(tree, 'metadata', att_name = 'dataset', att_value = self._fileArg.dataset)
    return tree
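
# Serialised, the element built above looks roughly like this (values hypothetical):
#
#     <File ID="some-guid">
#       <metadata att_name="conditionsTag" att_value="SOMETAG" />
#       <metadata att_name="beamType" att_value="collisions" />
#       ...
#       <metadata att_name="fileType" att_value="AOD" />
#       <metadata att_name="dataset" att_value="some.dataset.name" />
#     </File>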

def classicPython(self, fast = False):
    # One metadata dictionary per file, in the classic Tier-0 style
    fileList = []
    for fname in self._fileArg.value:
        fileList.append(self.classicSinglePython(fname, fast = fast))
    return fileList

def classicSinglePython(self, filename, fast = False):
    if filename not in self._fileArg.value:
        raise trfExceptions.TransformReportException(trfExit.nameToCode('TRF_INTERNAL_REPORT_ERROR'),
                                                     'Unknown file ({0}) in the file report for {1}'.format(filename, self._fileArg))
    # Keys we can fill in directly
    fileDict = {'lfn' : filename,
                'dataset' : self._fileArg.dataset,
                }
    # Primary metadata keys, translated via the gpickle map
    for myKey, classicKey in self._internalToGpickleMap.items():
        fileDict[classicKey] = self._fileArg.getSingleMetadata(fname = filename, metadataKey = myKey, populate = not fast)
        if classicKey == 'checkSum' and fileDict[classicKey] == 'UNDEFINED':
            # The classic convention is None for an unknown checksum
            fileDict[classicKey] = None
        elif fileDict[classicKey] == 'UNDEFINED':
            # Drop any other key whose value could not be determined
            del fileDict[classicKey]
    # Additional metadata goes into the 'more' section
    fileDict['more'] = {'metadata' : {'fileType' : self._fileArg.type}}
    for myKey, classicKey in self._internalToGpickleMoreMap.items():
        value = self._fileArg.getSingleMetadata(fname = filename, metadataKey = myKey, populate = not fast)
        if value != 'UNDEFINED':
            fileDict['more']['metadata'][classicKey] = value
    return fileDict
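
# For illustration, classicSinglePython() returns a per-file dictionary shaped like this
# (values hypothetical; keys that come back 'UNDEFINED' are dropped, except checkSum -> None):
#
#     {'lfn': 'AOD.pool.root',
#      'dataset': 'some.dataset.name',
#      'GUID': 'some-guid',
#      'checkSum': None,
#      'events': 100,
#      'size': 12345678,
#      'more': {'metadata': {'fileType': 'AOD',
#                            'beamType': 'collisions',
#                            'conditionsTag': 'SOMETAG',
#                            'geometryVersion': 'SOMEGEO'}}}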

# Per-executor resource summary, from a helper further down the module that has the
# executor ('exe') and the job report ('report') in scope.
# Note: the sub-dictionary grouping keys and the 'if' guards below reconstruct lines
# that are missing from this listing.
exeResource = {'cpuTime': report.roundoff(exe.cpuTime),
               'wallTime': report.roundoff(exe.wallTime),
               'preExe': {
                   'cpuTime': report.roundoff(exe.preExeCpuTime),
                   'wallTime': report.roundoff(exe.preExeWallTime),
                   },
               'postExe': {
                   'cpuTime': report.roundoff(exe.postExeCpuTime),
                   'wallTime': report.roundoff(exe.postExeWallTime),
                   },
               'validation': {
                   'cpuTime': report.roundoff(exe.validationCpuTime),
                   'wallTime': report.roundoff(exe.validationWallTime),
                   },
               'total': {
                   'cpuTime': report.roundoff(exe.cpuTimeTotal),
                   'wallTime': report.roundoff(exe.wallTimeTotal),
                   },
               }
# Optional entries, filled only when the executor can provide them
if exe.memStats:
    exeResource['memory'] = exe.memStats
if exe.memAnalysis:
    exeResource['memoryAnalysis'] = exe.memAnalysis
if exe.eventCount:
    exeResource['nevents'] = exe.eventCount
if exe.name == 'ReSim':
    exeResource['resimevents'] = exe.reSimEvent
if exe.athenaMP:
    exeResource['mpworkers'] = exe.athenaMP
    exeResource['cpuTimePerWorker'] = report.roundoff(exe.cpuTime/exe.athenaMP)
if exe.dbMonitor:
    exeResource['dbData'] = exe.dbMonitor['bytes']
    exeResource['dbTime'] = report.roundoff(exe.dbMonitor['time'])
    report._dbDataTotal += exeResource['dbData']
    report._dbTimeTotal += exeResource['dbTime']
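
# For illustration, a populated exeResource entry ends up shaped like this (values hypothetical):
#
#     {'cpuTime': 800, 'wallTime': 900,
#      'preExe': {'cpuTime': 1, 'wallTime': 1},
#      'postExe': {'cpuTime': 1, 'wallTime': 1},
#      'validation': {'cpuTime': 8, 'wallTime': 8},
#      'total': {'cpuTime': 810, 'wallTime': 910},
#      'nevents': 1000, 'mpworkers': 8, 'cpuTimePerWorker': 100,
#      'dbData': 123456, 'dbTime': 4}
#
# with 'memory', 'memoryAnalysis' and 'resimevents' added only when available.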