def _getProductionSummary():
  """Write an HTML summary (./tables.html) of ILC production statistics.

  Parses the command line for production IDs / statuses / types, collects
  per-production file counts, event numbers and cross sections from the
  DIRAC file catalog, and writes one summary table per detector and
  production type.  Exits the process through ``dexit(0)`` when done.
  """
  clip = _Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  # DIRAC convention: heavy imports only after parseCommandLine().
  from ILCDIRAC.Core.Utilities.HTML import Table
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC import gConfig, gLogger
  prod = clip.prod
  full_detail = clip.full_det
  fc = FileCatalogClient()
  processlist = gConfig.getValue('/LocalSite/ProcessListPath')
  prl = ProcessList(processlist)
  processesdict = prl.getProcessesDict()
  trc = TransformationClient()
  prodids = []
  if not prod:
    # No explicit production IDs: select transformations by status (and type).
    conddict = {}
    conddict['Status'] = clip.statuses
    if clip.ptypes:
      conddict['Type'] = clip.ptypes
    res = trc.getTransformations(conddict)
    if res['OK']:
      for transfs in res['Value']:
        prodids.append(transfs['TransformationID'])
  else:
    prodids.extend(prod)

  metadata = []
  gLogger.info("Will run on prods %s" % str(prodids))
  for prodID in prodids:
    if prodID < clip.minprod:
      continue
    meta = {}
    meta['ProdID'] = prodID
    res = trc.getTransformation(str(prodID))
    if not res['OK']:
      gLogger.error("Error getting transformation %s" % prodID)
      continue
    prodtype = res['Value']['Type']
    proddetail = res['Value']['Description']
    # Map the production type onto the catalog 'Datatype' used for the query.
    if prodtype == 'MCReconstruction' or prodtype == 'MCReconstruction_Overlay':
      meta['Datatype'] = 'DST'
    elif prodtype == 'MCGeneration':
      meta['Datatype'] = 'gen'
    elif prodtype == 'MCSimulation':
      meta['Datatype'] = 'SIM'
    elif prodtype in ['Split', 'Merge']:
      gLogger.warn("Invalid query for %s productions" % prodtype)
      continue
    else:
      gLogger.error("Unknown production type %s" % prodtype)
      continue
    res = fc.findFilesByMetadata(meta)
    if not res['OK']:
      gLogger.error(res['Message'])
      continue
    lfns = res['Value']
    nb_files = len(lfns)
    if not lfns:
      gLogger.warn("No files found for prod %s" % prodID)
      continue
    # All files of a production live under one directory; use its user metadata.
    path = os.path.dirname(lfns[0])
    res = fc.getDirectoryUserMetadata(path)
    if not res['OK']:
      gLogger.warn('No meta data found for %s' % path)
      continue
    dirmeta = {}
    dirmeta['proddetail'] = proddetail
    dirmeta['prodtype'] = prodtype
    dirmeta['nb_files'] = nb_files
    dirmeta.update(res['Value'])
    lumi = 0.
    nbevts = 0
    addinfo = None
    files = 0
    xsec = 0.0
    if not full_detail:
      # Fast path: inspect a single file and extrapolate to the production.
      lfn = lfns[0]
      info = _getFileInfo(lfn)
      nbevts = info[1] * len(lfns)
      lumi = info[0] * len(lfns)
      addinfo = info[2]
      if 'xsection' in addinfo:
        if 'sum' in addinfo['xsection']:
          if 'xsection' in addinfo['xsection']['sum']:
            xsec += addinfo['xsection']['sum']['xsection']
            files += 1
    else:
      # Full detail: accumulate luminosity/events/xsection over every file.
      for lfn in lfns:
        info = _getFileInfo(lfn)
        lumi += info[0]
        nbevts += info[1]
        addinfo = info[2]
        if 'xsection' in addinfo:
          if 'sum' in addinfo['xsection']:
            if 'xsection' in addinfo['xsection']['sum']:
              xsec += addinfo['xsection']['sum']['xsection']
              files += 1
    if not lumi:
      # The files carry no luminosity: fall back to their deepest ancestors.
      xsec = 0
      files = 0
      depthDict = {}
      depSet = set()
      res = fc.getFileAncestors(lfns, [1, 2, 3, 4])
      temp_ancestorlist = []
      if res['OK']:
        for lfn, ancestorsDict in res['Value']['Successful'].items():
          for ancestor, dep in ancestorsDict.items():
            depthDict.setdefault(dep, [])
            if ancestor not in temp_ancestorlist:
              depthDict[dep].append(ancestor)
              depSet.add(dep)
              temp_ancestorlist.append(ancestor)
      depList = sorted(depSet)
      # FIX: the original indexed depList[-1] unconditionally and raised
      # IndexError when no ancestors were found; guard the empty case.
      if depList:
        for ancestor in depthDict[depList[-1]]:
          info = _getFileInfo(ancestor)
          lumi += info[0]
          addinfo = info[2]
          if 'xsection' in addinfo:
            if 'sum' in addinfo['xsection']:
              if 'xsection' in addinfo['xsection']['sum']:
                xsec += addinfo['xsection']['sum']['xsection']
                files += 1
    if xsec and files:
      # Average the per-file cross sections.
      xsec /= files
      dirmeta['CrossSection'] = xsec
    else:
      dirmeta['CrossSection'] = 0.0
    if nbevts:
      dirmeta['NumberOfEvents'] = nbevts
    if 'NumberOfEvents' not in dirmeta:
      dirmeta['NumberOfEvents'] = 0
    dirmeta['detail'] = ''
    # FIX: 'detail' could be left unbound (NameError at _translate below) when
    # the process list had no usable 'Detail' entry; default to the event type.
    detail = dirmeta['EvtType']
    if dirmeta['EvtType'] in processesdict:
      if 'Detail' in processesdict[dirmeta['EvtType']]:
        detail = processesdict[dirmeta['EvtType']]['Detail']
    if not prodtype == 'MCGeneration':
      # Non-generation productions have a parent production to report.
      res = trc.getTransformationInputDataQuery(str(prodID))
      if res['OK']:
        if 'ProdID' in res['Value']:
          dirmeta['MomProdID'] = res['Value']['ProdID']
    if 'MomProdID' not in dirmeta:
      dirmeta['MomProdID'] = 0
    dirmeta['detail'] = _translate(detail)
    metadata.append(dirmeta)

  # Bucket the collected channels per detector and per production type.
  detectors = {}
  detectors['ILD'] = {}
  corres = {"MCGeneration": 'gen',
            "MCSimulation": 'SIM',
            "MCReconstruction": "REC",
            "MCReconstruction_Overlay": "REC"}
  detectors['ILD']['SIM'] = []
  detectors['ILD']['REC'] = []
  detectors['SID'] = {}
  detectors['SID']['SIM'] = []
  detectors['SID']['REC'] = []
  detectors['sid'] = {}
  detectors['sid']['SIM'] = []
  detectors['sid']['REC'] = []
  detectors['gen'] = []
  for channel in metadata:
    if 'DetectorType' not in channel:
      # Generator-level production: no detector attached.
      detectors['gen'].append((channel['detail'],
                               channel['Energy'],
                               channel['ProdID'],
                               channel['nb_files'],
                               channel['NumberOfEvents'] / channel['nb_files'],
                               channel['NumberOfEvents'],
                               channel['CrossSection'],
                               str(channel['proddetail'])))
    else:
      if not channel['DetectorType'] in detectors:
        gLogger.error("This is unknown detector", channel['DetectorType'])
        continue
      detectors[channel['DetectorType']][corres[channel['prodtype']]].append(
          (channel['detail'],
           channel['Energy'],
           channel['DetectorType'],
           channel['ProdID'],
           channel['nb_files'],
           channel['NumberOfEvents'] / channel['nb_files'],
           channel['NumberOfEvents'],
           channel['CrossSection'],
           channel['MomProdID'],
           str(channel['proddetail'])))

  with open("tables.html", "w") as of:
    of.write("""<!DOCTYPE html>
<html>
 <head>
<title> Production summary </title>
</head>
<body>
""")
    if len(detectors['gen']):
      of.write("<h1>gen prods</h1>\n")
      table = Table(header_row=('Channel', 'Energy', 'ProdID', 'Tasks', 'Average Evts/task',
                                'Statistics', 'Cross Section (fb)', 'Comment'))
      for item in detectors['gen']:
        table.rows.append(item)
      of.write(str(table))
      gLogger.info("Gen prods")
      gLogger.info(str(table))
    if len(detectors['ILD']):
      of.write("<h1>ILD prods</h1>\n")
      for ptype in detectors['ILD'].keys():
        if len(detectors['ILD'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files',
                                    'Events/File', 'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['ILD'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("ILC CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['SID']):
      of.write("<h1>SID prods</h1>\n")
      for ptype in detectors['SID'].keys():
        if len(detectors['SID'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files',
                                    'Events/File', 'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['SID'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("SID CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['sid']):
      of.write("<h1>sid dbd prods</h1>\n")
      for ptype in detectors['sid'].keys():
        if len(detectors['sid'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files',
                                    'Events/File', 'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['sid'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("sid DBD prods %s" % ptype)
          gLogger.info(str(table))
    of.write("""
</body>
</html>
""")
  gLogger.notice("Check ./tables.html in any browser for the results")
  dexit(0)
if __name__ == "__main__":
  # Script entry point: parse the command-line switches, then resolve the set
  # of production IDs to work on (explicit --prod list, or a query on the
  # transformation system by status/type).
  clip = Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  # DIRAC convention: heavy imports only after parseCommandLine().
  # NOTE(review): in this visible portion only ProcessList/TransformationClient/
  # FileCatalogClient are used; Table is presumably needed further down -- this
  # block looks truncated here, confirm against the full file.
  from ILCDIRAC.Core.Utilities.HTML import Table
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC import gConfig, gLogger
  prod = clip.prod
  full_detail = clip.full_det
  fc = FileCatalogClient()
  # Location of the process list is taken from the local DIRAC configuration.
  processlist = gConfig.getValue('/LocalSite/ProcessListPath')
  prl = ProcessList(processlist)
  processesdict = prl.getProcessesDict()
  trc = TransformationClient()
  prodids = []
  if not prod:
    # No explicit production IDs given: query transformations by status/type.
    conddict = {}
    conddict['Status'] = clip.statuses
    if clip.ptypes:
      conddict['Type'] = clip.ptypes
    res = trc.getTransformations(conddict)
    if res['OK']:
      for transfs in res['Value']:
        prodids.append(transfs['TransformationID'])
  else:
    prodids.extend(prod)
class ProcessListComplexTestCase(unittest.TestCase):
    """Test the different methods of the ProcessList class, providing a usable CFG."""

    def setUp(self):
        """Create a ProcessList backed by an in-memory CFG; no real file is read."""
        with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=False)):
            self.prol = ProcessList('myTestProcess.list')
        self.prol.cfg.createNewSection('Processes')

    def test_updateproclist_and_getters(self):
        """updateProcessList replaces existing entries; exercise the read accessors."""
        self.prol.cfg.createNewSection('Processes/myTestProcDeleteMe')
        self.prol.cfg.setOption('Processes/myTestProcDeleteMe/someRandomOption', True)
        dict_1 = {'CrossSection': 'someCross'}
        dict_1.update(STD_PROC_DICT)
        dict_2 = {'CrossSection': 'some_other_val'}
        dict_2.update(STD_PROC_DICT)
        process_dict = {}
        process_dict['MytestProcess'] = dict_1
        process_dict['myTestProcDeleteMe'] = dict_2
        result = self.prol.updateProcessList(process_dict)
        assertDiracSucceeds(result, self)
        conf = self.prol.cfg
        # The pre-existing extra option must have been dropped by the update.
        self.assertFalse(conf.existsKey('Processes/myTestProcDeleteMe/someRandomOption'))
        options = ['Processes/MytestProcess/CrossSection',
                   'Processes/myTestProcDeleteMe/CrossSection']
        assertEqualsImproved((map(conf.getOption, options)),
                             (['someCross', 'some_other_val']), self)
        assertEqualsImproved(
            (self.prol.getCSPath('myTestProcDeleteMe'),
             self.prol.getInFile('myTestProcDeleteMe'),
             self.prol.existsProcess('myTestProcDeleteMe'),
             self.prol.existsProcess(''),
             self.prol.existsProcess('invalidProcess'),
             self.prol.existsProcess('myTestProcDeleteMeToo')),
            ('/test/cs/path/ball.tar', 'my/file.in',
             S_OK(True), S_OK(True), S_OK(False), S_OK(False)), self)
        assertListContentEquals(self.prol.getProcesses(),
                                ['myTestProcDeleteMe', 'MytestProcess'], self)
        all_processes_dict = self.prol.getProcessesDict()
        assertEqualsImproved(len(all_processes_dict), 2, self)
        assertEqualsImproved(('myTestProcDeleteMe' in all_processes_dict,
                              'MytestProcess' in all_processes_dict), (True, True), self)
        # Smoke test: must not raise.
        self.prol.printProcesses()

    def test_writeproclist(self):
        """writeProcessList serialises the CFG to a temp file, then moves it into place."""
        # NOTE(review): the exact spacing inside this expected string could not be
        # recovered from the mangled source -- confirm against CFG serialisation.
        expected_write = 'Processes\n{\n mytestprocess123\n {\n TarBallCSPath = /test/cs/path/bal.tarr\n Detail = TestNoDetails\n Generator = mytestGen21\n Model = testmodel3001\n Restrictions = \n InFile = my/file.in\n CrossSection = 0\n }\n}\n'
        self.prol._addEntry(
            'mytestprocess123', {
                'TarBallCSPath': '/test/cs/path/bal.tarr',
                'Detail': 'TestNoDetails',
                'Generator': 'mytestGen21',
                'Model': 'testmodel3001',
                'Restrictions': '',
                'InFile': 'my/file.in'
            })
        exists_dict = {
            '/temp/dir': False,
            '/temp/dir/mytempfile.txt': True,
            '/my/folder/testpath.xml': True
        }
        fhandle_mock = Mock()
        # Patch the Python 2 'file' builtin so no real file is ever opened.
        with patch('tempfile.mkstemp', new=Mock(return_value=('handle', '/temp/dir/mytempfile.txt'))), \
                patch('__builtin__.file', new=Mock(return_value=fhandle_mock)) as file_mock, \
                patch('os.makedirs') as mkdir_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('shutil.move') as move_mock, \
                patch('os.close') as close_mock:
            assertDiracSucceedsWith_equals(
                self.prol.writeProcessList('/my/folder/testpath.xml'),
                '/my/folder/testpath.xml', self)
            mkdir_mock.assert_called_once_with('/temp/dir')
            file_mock.assert_called_once_with('/temp/dir/mytempfile.txt', 'w')
            fhandle_mock.write.assert_called_once_with(expected_write)
            close_mock.assert_called_once_with('handle')
            move_mock.assert_called_once_with('/temp/dir/mytempfile.txt',
                                              '/my/folder/testpath.xml')

    def test_writeproclist_notwritten(self):
        """A failing CFG.writeToFile fails the call and removes the temp file."""
        exists_dict = {'myTmpNameTestme': True}
        cfg_mock = Mock()
        cfg_mock.writeToFile.return_value = False
        self.prol.cfg = cfg_mock
        self.prol.location = '/my/folder/testpath2.txt'
        with patch('os.close') as close_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('os.remove') as remove_mock, \
                patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
            assertDiracFailsWith(self.prol.writeProcessList(), 'failed to write repo', self)
            close_mock.assert_called_once_with('myhandle')
            remove_mock.assert_called_once_with('myTmpNameTestme')

    def test_writeproclist_notwritten_noremove(self):
        """A failing write with a vanished temp file must not attempt os.remove."""
        exists_dict = {'myTmpNameTestme': False}
        cfg_mock = Mock()
        cfg_mock.writeToFile.return_value = False
        self.prol.cfg = cfg_mock
        with patch('os.close') as close_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('os.remove') as remove_mock, \
                patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
            assertDiracFailsWith(
                self.prol.writeProcessList('/my/folder/testpath2.txt'),
                'failed to write repo', self)
            close_mock.assert_called_once_with('myhandle')
            self.assertFalse(remove_mock.called)

    def test_writeproclist_move_fails(self):
        """An OSError from shutil.move is reported as a failed write."""
        exists_dict = {'/my/folder/testpath2.txt': False}
        cfg_mock = Mock()
        cfg_mock.writeToFile.return_value = True
        self.prol.cfg = cfg_mock
        with patch('os.close') as close_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('os.remove') as remove_mock, \
                patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))), \
                patch('shutil.move', new=Mock(side_effect=OSError('mytestErr_os'))):
            assertDiracFailsWith(
                self.prol.writeProcessList('/my/folder/testpath2.txt'),
                'failed to write repo', self)
            close_mock.assert_called_once_with('myhandle')
            self.assertFalse(remove_mock.called)

    def test_uploadproclist(self):
        """Happy path: upload succeeds, local/AFS copies made, svn commit issued."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_OK('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK('/local/path/proc.list')
        # Replace the DIRAC data-management modules before they get imported.
        mocked_modules = {
            'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
            'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock
        }
        module_patcher = patch.dict(sys.modules, mocked_modules)
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        with patch('shutil.copy') as copy_mock, \
                patch('subprocess.call') as proc_mock:
            self.prol.uploadProcessListToFileCatalog(
                '/my/secret/path/processlist.whiz', 'v120')
            assertMockCalls(
                copy_mock,
                [('myTestProcess.list', '/afs/cern.ch/eng/clic/software/whizard/whizard_195/'),
                 ('myTestProcess.list', '/local/path/proc.list')], self)
            proc_mock.assert_called_once_with([
                'svn', 'ci',
                '/afs/cern.ch/eng/clic/software/whizard/whizard_195/proc.list',
                "-m'Process list for whizard version v120'"
            ], shell=False)
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_remove_fails(self):
        """Failing removeFile aborts via DIRAC.exit (stubbed to raise KeyboardInterrupt)."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_ERROR('my_test_err')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK('somepath')
        mocked_modules = {
            'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
            'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock
        }
        module_patcher = patch.dict(sys.modules, mocked_modules)
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        with self.assertRaises(KeyboardInterrupt) as ki:
            self.prol.uploadProcessListToFileCatalog('asd', 'v1')
        key_interrupt = ki.exception
        assertEqualsImproved(key_interrupt.args, ('abort_my_test', ), self)
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_upload_fails(self):
        """Failing upload also aborts via the stubbed DIRAC.exit."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_ERROR('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK('somepath')
        mocked_modules = {
            'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
            'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock
        }
        module_patcher = patch.dict(sys.modules, mocked_modules)
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        with self.assertRaises(KeyboardInterrupt) as ki:
            self.prol.uploadProcessListToFileCatalog('asd', 'v1')
        key_interrupt = ki.exception
        assertEqualsImproved(key_interrupt.args, ('abort_my_test', ), self)
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_copy_and_commit_fail(self):
        """OSErrors from shutil.copy / subprocess.call are tolerated (best effort)."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_OK('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK('somepath')
        mocked_modules = {
            'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
            'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock
        }
        module_patcher = patch.dict(sys.modules, mocked_modules)
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        with patch('shutil.copy', new=Mock(side_effect=OSError('oserr_testme_keeprunning'))), \
                patch('subprocess.call', new=Mock(side_effect=OSError('subproc_test_err'))):
            # Must complete without raising despite both failures.
            self.prol.uploadProcessListToFileCatalog(
                '/my/secret/path/processlist.whiz', 'v120')
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_skip_copy(self):
        """An empty ProcessList CS option skips the copy/commit steps entirely."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_OK('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK('')
        mocked_modules = {
            'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
            'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock
        }
        module_patcher = patch.dict(sys.modules, mocked_modules)
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        # Copy/commit would raise if called -- proves they are skipped.
        with patch('shutil.copy', new=Mock(side_effect=IOError('dont_call_me'))), \
                patch('subprocess.call', new=Mock(side_effect=IOError('dont_call_me_either'))):
            self.prol.uploadProcessListToFileCatalog(
                '/my/secret/path/processlist.whiz', 'v120')
        DIRAC.gConfig = backup_conf
        module_patcher.stop()
# NOTE(review): this class redefines ProcessListComplexTestCase (an earlier
# definition with the same name exists above in this file); at import time this
# one shadows the earlier one, so only one set of tests actually runs.
# Likely a merge artifact -- confirm and remove one copy.
class ProcessListComplexTestCase( unittest.TestCase ):
    """Test the different methods of the ProcessList class, providing a usable CFG."""

    def setUp( self ):
        """Create a ProcessList backed by an in-memory CFG; no real file is read."""
        with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=False)):
            self.prol = ProcessList( 'myTestProcess.list' )
        self.prol.cfg.createNewSection( 'Processes' )

    def test_updateproclist_and_getters( self ):
        """updateProcessList replaces existing entries; exercise the read accessors."""
        self.prol.cfg.createNewSection( 'Processes/myTestProcDeleteMe' )
        self.prol.cfg.setOption( 'Processes/myTestProcDeleteMe/someRandomOption', True )
        dict_1 = { 'CrossSection' : 'someCross' }
        dict_1.update( STD_PROC_DICT )
        dict_2 = { 'CrossSection' : 'some_other_val' }
        dict_2.update( STD_PROC_DICT )
        process_dict = {}
        process_dict[ 'MytestProcess' ] = dict_1
        process_dict[ 'myTestProcDeleteMe' ] = dict_2
        result = self.prol.updateProcessList( process_dict )
        assertDiracSucceeds( result, self )
        conf = self.prol.cfg
        # The pre-existing extra option must have been dropped by the update.
        self.assertFalse( conf.existsKey( 'Processes/myTestProcDeleteMe/someRandomOption' ) )
        options = [ 'Processes/MytestProcess/CrossSection',
                    'Processes/myTestProcDeleteMe/CrossSection' ]
        assertEqualsImproved( ( map( conf.getOption, options ) ),
                              ( [ 'someCross', 'some_other_val' ] ), self )
        assertEqualsImproved( ( self.prol.getCSPath( 'myTestProcDeleteMe' ),
                                self.prol.getInFile( 'myTestProcDeleteMe' ),
                                self.prol.existsProcess( 'myTestProcDeleteMe' ),
                                self.prol.existsProcess( '' ),
                                self.prol.existsProcess( 'invalidProcess' ),
                                self.prol.existsProcess( 'myTestProcDeleteMeToo' ) ),
                              ( '/test/cs/path/ball.tar', 'my/file.in',
                                S_OK(True), S_OK(True), S_OK(False), S_OK(False) ), self )
        assertListContentEquals( self.prol.getProcesses(),
                                 [ 'myTestProcDeleteMe', 'MytestProcess' ], self )
        all_processes_dict = self.prol.getProcessesDict()
        assertEqualsImproved( len(all_processes_dict), 2, self )
        assertEqualsImproved( ('myTestProcDeleteMe' in all_processes_dict,
                               'MytestProcess' in all_processes_dict), ( True, True ), self )
        # Smoke test: must not raise.
        self.prol.printProcesses()

    def test_writeproclist( self ):
        """writeProcessList serialises the CFG to a temp file, then moves it into place."""
        # NOTE(review): the exact spacing inside this expected string could not be
        # recovered from the mangled source -- confirm against CFG serialisation.
        expected_write = 'Processes\n{\n mytestprocess123\n {\n TarBallCSPath = /test/cs/path/bal.tarr\n Detail = TestNoDetails\n Generator = mytestGen21\n Model = testmodel3001\n Restrictions = \n InFile = my/file.in\n CrossSection = 0\n }\n}\n'
        self.prol._addEntry( 'mytestprocess123', { 'TarBallCSPath' : '/test/cs/path/bal.tarr',
                                                   'Detail' : 'TestNoDetails',
                                                   'Generator' : 'mytestGen21',
                                                   'Model' : 'testmodel3001',
                                                   'Restrictions' : '',
                                                   'InFile' : 'my/file.in' } )
        exists_dict = { '/temp/dir' : False, '/temp/dir/mytempfile.txt' : True,
                        '/my/folder/testpath.xml' : True }
        fhandle_mock = Mock()
        file_mock = Mock(return_value=fhandle_mock)
        # Patch both the py2 'file' builtin and 'open' with the same mock so no
        # real file is ever created, whichever the implementation uses.
        with patch('tempfile.mkstemp', new=Mock(return_value=('handle', '/temp/dir/mytempfile.txt'))), \
                patch('__builtin__.file', new=file_mock), \
                patch('__builtin__.open', new=file_mock), \
                patch('os.makedirs') as mkdir_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('shutil.move') as move_mock, \
                patch('os.close') as close_mock:
            assertDiracSucceedsWith_equals( self.prol.writeProcessList( '/my/folder/testpath.xml' ),
                                            '/my/folder/testpath.xml', self )
            mkdir_mock.assert_called_once_with( '/temp/dir' )
            file_mock.assert_called_once_with( '/temp/dir/mytempfile.txt', 'w' )
            fhandle_mock.write.assert_called_once_with( expected_write )
            close_mock.assert_called_once_with( 'handle' )
            move_mock.assert_called_once_with( '/temp/dir/mytempfile.txt',
                                               '/my/folder/testpath.xml' )

    def test_writeproclist_notwritten( self ):
        """A failing CFG.writeToFile fails the call and removes the temp file."""
        exists_dict = { 'myTmpNameTestme' : True }
        cfg_mock = Mock()
        cfg_mock.writeToFile.return_value = False
        self.prol.cfg = cfg_mock
        self.prol.location = '/my/folder/testpath2.txt'
        with patch('os.close') as close_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('os.remove') as remove_mock, \
                patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
            assertDiracFailsWith( self.prol.writeProcessList(), 'failed to write repo', self )
            close_mock.assert_called_once_with( 'myhandle' )
            remove_mock.assert_called_once_with( 'myTmpNameTestme')

    def test_writeproclist_notwritten_noremove( self ):
        """A failing write with a vanished temp file must not attempt os.remove."""
        exists_dict = { 'myTmpNameTestme' : False }
        cfg_mock = Mock()
        cfg_mock.writeToFile.return_value = False
        self.prol.cfg = cfg_mock
        with patch('os.close') as close_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('os.remove') as remove_mock, \
                patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
            assertDiracFailsWith( self.prol.writeProcessList( '/my/folder/testpath2.txt' ),
                                  'failed to write repo', self )
            close_mock.assert_called_once_with( 'myhandle' )
            self.assertFalse( remove_mock.called )

    def test_writeproclist_move_fails( self ):
        """An OSError from shutil.move is reported as a failed write."""
        exists_dict = { '/my/folder/testpath2.txt' : False }
        cfg_mock = Mock()
        cfg_mock.writeToFile.return_value = True
        self.prol.cfg = cfg_mock
        with patch('os.close') as close_mock, \
                patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
                patch('os.remove') as remove_mock, \
                patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))), \
                patch('shutil.move', new=Mock(side_effect=OSError('mytestErr_os'))):
            assertDiracFailsWith( self.prol.writeProcessList( '/my/folder/testpath2.txt' ),
                                  'failed to write repo', self )
            close_mock.assert_called_once_with( 'myhandle' )
            self.assertFalse( remove_mock.called )

    def test_uploadproclist( self ):
        """Happy path: upload succeeds, local/AFS copies made, svn commit issued."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_OK('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK( '/local/path/proc.list' )
        # Replace the DIRAC data-management modules before they get imported.
        mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                           'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
        module_patcher = patch.dict( sys.modules, mocked_modules )
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        with patch('shutil.copy') as copy_mock, \
                patch('subprocess.call') as proc_mock:
            self.prol.uploadProcessListToFileCatalog( '/my/secret/path/processlist.whiz', 'v120' )
            assertMockCalls( copy_mock,
                             [ ( 'myTestProcess.list', '/afs/cern.ch/eng/clic/software/whizard/whizard_195/' ),
                               ( 'myTestProcess.list', '/local/path/proc.list' ) ], self )
            proc_mock.assert_called_once_with(
                [ 'svn', 'ci', '/afs/cern.ch/eng/clic/software/whizard/whizard_195/proc.list',
                  "-m'Process list for whizard version v120'" ], shell=False )
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_remove_fails( self ):
        """Failing removeFile aborts via DIRAC.exit (stubbed to raise KeyboardInterrupt)."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_ERROR('my_test_err')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK( 'somepath' )
        mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                           'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
        module_patcher = patch.dict( sys.modules, mocked_modules )
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        with self.assertRaises( KeyboardInterrupt ) as ki:
            self.prol.uploadProcessListToFileCatalog( 'asd', 'v1' )
        key_interrupt = ki.exception
        assertEqualsImproved( key_interrupt.args, ( 'abort_my_test', ), self )
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_upload_fails( self ):
        """Failing upload also aborts via the stubbed DIRAC.exit."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_ERROR('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK( 'somepath' )
        mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                           'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
        module_patcher = patch.dict( sys.modules, mocked_modules )
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        with self.assertRaises( KeyboardInterrupt ) as ki:
            self.prol.uploadProcessListToFileCatalog( 'asd', 'v1' )
        key_interrupt = ki.exception
        assertEqualsImproved( key_interrupt.args, ( 'abort_my_test', ), self )
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_copy_and_commit_fail( self ):
        """OSErrors from shutil.copy / subprocess.call are tolerated (best effort)."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_OK('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK( 'somepath' )
        mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                           'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
        module_patcher = patch.dict( sys.modules, mocked_modules )
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        with patch('shutil.copy', new=Mock(side_effect=OSError('oserr_testme_keeprunning'))), \
                patch('subprocess.call', new=Mock(side_effect=OSError('subproc_test_err'))):
            # Must complete without raising despite both failures.
            self.prol.uploadProcessListToFileCatalog( '/my/secret/path/processlist.whiz', 'v120' )
        DIRAC.gConfig = backup_conf
        module_patcher.stop()

    def test_uploadproclist_skip_copy( self ):
        """An empty ProcessList CS option skips the copy/commit steps entirely."""
        import sys
        import DIRAC
        datman_mock = Mock()
        datman_mock.removeFile.return_value = S_OK('something')
        datmodule_mock = Mock()
        datmodule_mock.DataManager.return_value = datman_mock
        fileutil_mock = Mock()
        fileutil_mock.upload.return_value = S_OK('something')
        conf_mock = Mock()
        conf_mock.getOption.return_value = S_OK('')
        mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                           'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
        module_patcher = patch.dict( sys.modules, mocked_modules )
        module_patcher.start()
        backup_conf = DIRAC.gConfig
        DIRAC.gConfig = conf_mock
        DIRAC.exit = abort_test
        # Copy/commit would raise if called -- proves they are skipped.
        with patch('shutil.copy', new=Mock(side_effect=IOError('dont_call_me'))), \
                patch('subprocess.call', new=Mock(side_effect=IOError('dont_call_me_either'))):
            self.prol.uploadProcessListToFileCatalog( '/my/secret/path/processlist.whiz', 'v120' )
        DIRAC.gConfig = backup_conf
        module_patcher.stop()
md5sum) result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version) gLogger.verbose("Done uploading the tar ball") os.remove(appTar) #Set for all new processes the TarBallURL for process in inputlist.keys(): inputlist[process]['TarBallCSPath'] = tarballurl['Value'] + os.path.basename(appTar) gLogger.verbose("Updating process list:") knownprocess = pl.getProcessesDict() knownprocess.update(inputlist) pl.updateProcessList(knownprocess) gLogger.verbose("Done Updating process list") #Return to initial location os.chdir(startdir) pl.writeProcessList() gLogger.verbose("Removing process list from storage") res = rm.removeFile(path_to_process_list) if not res['OK']: gLogger.error("Could not remove process list from storage, do it by hand") dexit(2)
def _getProductionSummary():
  """Build a summary of the selected productions and write it to ./tables.html.

  Command-line driven (via _Params / Script switches): collects the production
  IDs to report on (explicitly given, or discovered by querying the
  transformation system with status/type filters), pulls file metadata from
  the file catalog, accumulates statistics per production (number of files,
  events, luminosity, cross section), then renders one HTML table per
  detector model and production step, logging each table as well.

  Side effects: writes ./tables.html and terminates the process via dexit(0).
  """
  clip = _Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  from ILCDIRAC.Core.Utilities.HTML import Table
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC import gConfig, gLogger

  def _sumXSection(addinfo):
    """Return the cross section stored under addinfo['xsection']['sum'], or None."""
    if 'xsection' in addinfo and 'sum' in addinfo['xsection'] \
       and 'xsection' in addinfo['xsection']['sum']:
      return addinfo['xsection']['sum']['xsection']
    return None

  prod = clip.prod
  full_detail = clip.full_det
  fc = FileCatalogClient()
  processlist = gConfig.getValue('/LocalSite/ProcessListPath')
  prl = ProcessList(processlist)
  processesdict = prl.getProcessesDict()
  trc = TransformationClient()

  # Determine the production IDs to inspect: explicit list, or a
  # transformation-system query filtered by status (and optionally type).
  prodids = []
  if not prod:
    conddict = {'Status': clip.statuses}
    if clip.ptypes:
      conddict['Type'] = clip.ptypes
    res = trc.getTransformations(conddict)
    if res['OK']:
      for transfs in res['Value']:
        prodids.append(transfs['TransformationID'])
  else:
    prodids.extend(prod)

  metadata = []
  gLogger.info("Will run on prods %s" % str(prodids))
  for prodID in prodids:
    if prodID < clip.minprod:
      continue
    meta = {'ProdID': prodID}
    res = trc.getTransformation(str(prodID))
    if not res['OK']:
      gLogger.error("Error getting transformation %s" % prodID)
      continue
    prodtype = res['Value']['Type']
    proddetail = res['Value']['Description']
    # Map the production type onto the catalog Datatype to query for.
    if prodtype in ('MCReconstruction', 'MCReconstruction_Overlay'):
      meta['Datatype'] = 'DST'
    elif prodtype == 'MCGeneration':
      meta['Datatype'] = 'gen'
    elif prodtype == 'MCSimulation':
      meta['Datatype'] = 'SIM'
    elif prodtype in ['Split', 'Merge']:
      gLogger.warn("Invalid query for %s productions" % prodtype)
      continue
    else:
      gLogger.error("Unknown production type %s" % prodtype)
      continue
    res = fc.findFilesByMetadata(meta)
    if not res['OK']:
      gLogger.error(res['Message'])
      continue
    lfns = res['Value']
    nb_files = len(lfns)
    if not lfns:
      gLogger.warn("No files found for prod %s" % prodID)
      continue
    # All files of a production live under one directory; its user metadata
    # (Energy, EvtType, DetectorType, ...) describes the whole production.
    path = os.path.dirname(lfns[0])
    res = fc.getDirectoryUserMetadata(path)
    if not res['OK']:
      gLogger.warn('No meta data found for %s' % path)
      continue
    dirmeta = {'proddetail': proddetail, 'prodtype': prodtype, 'nb_files': nb_files}
    dirmeta.update(res['Value'])

    lumi = 0.
    nbevts = 0
    files = 0
    xsec = 0.0
    if not full_detail:
      # Fast mode: inspect a single file and scale by the number of files.
      info = _getFileInfo(lfns[0])
      nbevts = info[1] * len(lfns)
      lumi = info[0] * len(lfns)
      value = _sumXSection(info[2])
      if value is not None:
        xsec += value
        files += 1
    else:
      for lfn in lfns:
        info = _getFileInfo(lfn)
        lumi += info[0]
        nbevts += info[1]
        value = _sumXSection(info[2])
        if value is not None:
          xsec += value
          files += 1
    if not lumi:
      # The files carry no luminosity: fall back to the deepest generation of
      # ancestor files available in the catalog.
      xsec = 0
      files = 0
      depthDict = {}
      depSet = set()
      res = fc.getFileAncestors(lfns, [1, 2, 3, 4])
      temp_ancestorlist = []
      if res['OK']:
        for lfn, ancestorsDict in res['Value']['Successful'].items():
          for ancestor, dep in ancestorsDict.items():
            depthDict.setdefault(dep, [])
            if ancestor not in temp_ancestorlist:
              depthDict[dep].append(ancestor)
              depSet.add(dep)
              temp_ancestorlist.append(ancestor)
      depList = sorted(depSet)
      # Guard: without any ancestors, depList[-1] would raise IndexError.
      if depList:
        for ancestor in depthDict[depList[-1]]:
          info = _getFileInfo(ancestor)
          lumi += info[0]
          value = _sumXSection(info[2])
          if value is not None:
            xsec += value
            files += 1
    # Average cross section over the files that actually reported one.
    if xsec and files:
      dirmeta['CrossSection'] = xsec / files
    else:
      dirmeta['CrossSection'] = 0.0
    if nbevts:
      dirmeta['NumberOfEvents'] = nbevts
    if 'NumberOfEvents' not in dirmeta:
      dirmeta['NumberOfEvents'] = 0
    # Human-readable channel description; fall back to the raw event type.
    # (Previously 'detail' could be left unbound -> NameError when the event
    # type was known but carried no 'Detail' entry.)
    detail = processesdict.get(dirmeta['EvtType'], {}).get('Detail', dirmeta['EvtType'])
    # Everything but generator-level productions has a parent production.
    if prodtype != 'MCGeneration':
      res = trc.getTransformationInputDataQuery(str(prodID))
      if res['OK'] and 'ProdID' in res['Value']:
        dirmeta['MomProdID'] = res['Value']['ProdID']
    if 'MomProdID' not in dirmeta:
      dirmeta['MomProdID'] = 0
    dirmeta['detail'] = _translate(detail)
    metadata.append(dirmeta)

  # Sort the collected channels per detector model and per production step.
  corres = {"MCGeneration": 'gen', "MCSimulation": 'SIM',
            "MCReconstruction": "REC", "MCReconstruction_Overlay": "REC"}
  detectors = {'ILD': {'SIM': [], 'REC': []},
               'SID': {'SIM': [], 'REC': []},
               'sid': {'SIM': [], 'REC': []},
               'gen': []}
  for channel in metadata:
    if 'DetectorType' not in channel:
      detectors['gen'].append((channel['detail'],
                               channel['Energy'],
                               channel['ProdID'],
                               channel['nb_files'],
                               channel['NumberOfEvents'] / channel['nb_files'],
                               channel['NumberOfEvents'],
                               channel['CrossSection'],
                               str(channel['proddetail'])))
    else:
      if channel['DetectorType'] not in detectors:
        gLogger.error("This is unknown detector", channel['DetectorType'])
        continue
      detectors[channel['DetectorType']][corres[channel['prodtype']]].append(
          (channel['detail'],
           channel['Energy'],
           channel['DetectorType'],
           channel['ProdID'],
           channel['nb_files'],
           channel['NumberOfEvents'] / channel['nb_files'],
           channel['NumberOfEvents'],
           channel['CrossSection'],
           channel['MomProdID'],
           str(channel['proddetail'])))

  with open("tables.html", "w") as of:
    of.write("""<!DOCTYPE html>
<html>
<head>
<title> Production summary </title>
</head>
<body>
""")
    if len(detectors['gen']):
      of.write("<h1>gen prods</h1>\n")
      table = Table(header_row=('Channel', 'Energy', 'ProdID', 'Tasks', 'Average Evts/task',
                                'Statistics', 'Cross Section (fb)', 'Comment'))
      for item in detectors['gen']:
        table.rows.append(item)
      of.write(str(table))
      gLogger.info("Gen prods")
      gLogger.info(str(table))
    # One HTML section per detector model, one table per production step.
    header = ('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files', 'Events/File',
              'Statistics', 'Cross Section (fb)', 'Origin ProdID', 'Comment')
    for det, title, label in (('ILD', 'ILD prods', 'ILC CDR prods %s'),
                              ('SID', 'SID prods', 'SID CDR prods %s'),
                              ('sid', 'sid dbd prods', 'sid DBD prods %s')):
      if len(detectors[det]):
        of.write("<h1>%s</h1>\n" % title)
        for ptype in detectors[det].keys():
          if len(detectors[det][ptype]):
            of.write("<h2>%s</h2>\n" % ptype)
            table = Table(header_row=header)
            for item in detectors[det][ptype]:
              table.rows.append(item)
            of.write(str(table))
            gLogger.info(label % ptype)
            gLogger.info(str(table))
    of.write("""
</body>
</html>
""")
  gLogger.notice("Check ./tables.html in any browser for the results")
  dexit(0)