def testH_AnalyticsCompMethods_tests(self):
     """
     _testH_AnalyticsCompMethods_tests_
     Tests the Analytics component methods
     """
     self.createFileDocinFilesDB()
     # Disable retries so a failed transfer settles immediately.
     self.config.AsyncTransfer.max_retry = 0
     analytics = AnalyticsDaemon(config=self.config)
     analytics.updateWorkflowSummaries()
     states = self.monitoring_db.loadView('UserMonitoring', 'StatesByWorkflow', {})['rows'][0]['value']
     assert states["new"] == 1
     # Run the transfer daemon, then re-summarize: the file is expected
     # to end up in the 'failed' state.
     transfer = TransferDaemon(config=self.config)
     transfer.algorithm()
     analytics.updateWorkflowSummaries()
     states = self.monitoring_db.loadView('UserMonitoring', 'StatesByWorkflow', {})['rows'][0]['value']
     assert states["failed"] == 1
     # Add a done/published document and check the job summary counts it.
     self.createFileDocinFilesDB(state='done', publication_state='published')
     analytics.updateJobSummaries()
     workflow_count = self.monitoring_db.loadView('UserMonitoring', 'FilesByWorkflow', {'reduce': True})['rows'][0]['value']
     assert workflow_count == 1
     analytics.updateWorkflowSummaries()
     states = self.monitoring_db.loadView('UserMonitoring', 'StatesByWorkflow', {})['rows'][0]['value']
     assert states["published"] == 1
 def testH_AnalyticsCompMethods_tests(self):
      """
      _testH_AnalyticsCompMethods_tests_
      Tests the Analytics component methods

      NOTE(review): a method with this exact name is defined twice in this
      file; this later definition shadows the earlier one, so only one of
      them ever runs. Consider deleting the duplicate.
      """
      self.createFileDocinFilesDB()
      # No retries: a failing transfer moves straight to 'failed'.
      self.config.AsyncTransfer.max_retry = 0
      analytics_daemon = AnalyticsDaemon(config=self.config)
      analytics_daemon.updateWorkflowSummaries()
      summary = self.monitoring_db.loadView('UserMonitoring', 'StatesByWorkflow', {})['rows'][0]['value']
      assert summary["new"] == 1
      # Run the transfer daemon and refresh the workflow summaries.
      transfer_daemon = TransferDaemon(config=self.config)
      transfer_daemon.algorithm()
      analytics_daemon.updateWorkflowSummaries()
      summary = self.monitoring_db.loadView('UserMonitoring', 'StatesByWorkflow', {})['rows'][0]['value']
      assert summary["failed"] == 1
      # A published document should show up in the job summaries.
      self.createFileDocinFilesDB(state='done', publication_state='published')
      analytics_daemon.updateJobSummaries()
      total_workflows = self.monitoring_db.loadView('UserMonitoring', 'FilesByWorkflow', {'reduce': True})['rows'][0]['value']
      assert total_workflows == 1
      analytics_daemon.updateWorkflowSummaries()
      summary = self.monitoring_db.loadView('UserMonitoring', 'StatesByWorkflow', {})['rows'][0]['value']
      assert summary["published"] == 1
    def testD_InteractionWithTheSource_testUpdateFWJR(self):
        """
        _testD_InteractionWithTheSource_testUpdateFWJR_

        Tests the components: gets data from the DB source, duplicates
        it into files_db, and checks that the component can update the
        fwjr once the transfer is done.
        """
        self.createTestDocinDBSource()
        LFNDuplicator = LFNSourceDuplicator(config = self.config)
        LFNDuplicator.algorithm( )
        # Give the source -> files_db replication time to land.
        time.sleep(10)
        # Run the transfer daemon.
        Transfer = TransferDaemon(config = self.config)
        Transfer.algorithm( )
        query = {'reduce': False}
        files_acquired = self.db.loadView('monitor', 'filesAcquired', query)['rows']
        # Get the first acquired file document.
        document = self.db.document(files_acquired[0]['id'])
        site_tfc_map = {}
        for site in self.sites:
            site_tfc_map[site] = get_tfc_rules(site)
        # Mark the document as transferred successfully.
        worker = TransferWorker([document['user'], None, None], site_tfc_map, self.config.AsyncTransfer)
        worker.mark_good([document['lfn']])
        query = {'reduce': False, 'key': [document['jobid'], document['job_end_time']]}
        result = self.dbSource.loadView('FWJRDump', 'fwjrByJobIDTimestamp', query)['rows']
        docSource = self.dbSource.document(result[0]['id'])

        # FIX: dict.has_key() was removed in Python 3; the `in` operator
        # is the equivalent membership test on both Python 2 and 3, and
        # the redundant `== True` comparison is dropped.
        assert 'asyncStageOut1' in docSource['fwjr']['steps']
    def testD_InteractionWithTheSource_testUpdateFWJR(self):
        """
        _testD_InteractionWithTheSource_testUpdateFWJR_

        Tests the components: gets data from the DB source, duplicates
        it into files_db, and checks that the component can update the
        fwjr once the transfer is done.

        NOTE(review): a method with this exact name is defined twice in
        this file; this later definition shadows the earlier one, so only
        one of them ever runs. Consider deleting the duplicate.
        """
        self.createTestDocinDBSource()
        LFNDuplicator = LFNSourceDuplicator(config = self.config)
        LFNDuplicator.algorithm( )
        # Give the source -> files_db replication time to land.
        time.sleep(10)
        # Run the transfer daemon.
        Transfer = TransferDaemon(config = self.config)
        Transfer.algorithm( )
        query = {'reduce': False}
        files_acquired = self.db.loadView('monitor', 'filesAcquired', query)['rows']
        # Get the first acquired file document.
        document = self.db.document(files_acquired[0]['id'])
        site_tfc_map = {}
        for site in self.sites:
            site_tfc_map[site] = get_tfc_rules(site)
        # Mark the document as transferred successfully.
        worker = TransferWorker([document['user'], None, None], site_tfc_map, self.config.AsyncTransfer)
        worker.mark_good([document['lfn']])
        query = {'reduce': False, 'key': [document['jobid'], document['job_end_time']]}
        result = self.dbSource.loadView('FWJRDump', 'fwjrByJobIDTimestamp', query)['rows']
        docSource = self.dbSource.document(result[0]['id'])

        # FIX: dict.has_key() was removed in Python 3; the `in` operator
        # is the equivalent membership test on both Python 2 and 3, and
        # the redundant `== True` comparison is dropped.
        assert 'asyncStageOut1' in docSource['fwjr']['steps']
    def testA_BasicTest_testFileTransfer(self):
        """
        _BasicFunctionTest_

        Tests the components, by seeing if they can process documents.
        """
        self.createFileDocinFilesDB()
        daemon = TransferDaemon(config=self.config)
        daemon.algorithm()

        # The single seeded document must be either acquired or still new.
        acquired_rows = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        new_rows = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired_rows) + len(new_rows) == 1

        # Seed four more documents and verify all five are visible.
        for suffix in range(1, 5):
            self.createFileDocinFilesDB(str(suffix))
        acquired_rows = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        new_rows = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired_rows) + len(new_rows) == 5
    def testA_BasicTest_testFileTransfer(self):
        """
        _BasicFunctionTest_

        Tests the components, by seeing if they can process documents.

        NOTE(review): a method with this exact name is defined twice in
        this file; this later definition shadows the earlier one, so only
        one of them ever runs. Consider deleting the duplicate.
        """
        self.createFileDocinFilesDB()
        transfer_daemon = TransferDaemon(config=self.config)
        transfer_daemon.algorithm()

        # One document was seeded: it is either acquired or still new.
        acquired = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        pending = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired) + len(pending) == 1

        # Add four more documents; all five should now be accounted for.
        for doc_index in range(1, 5):
            self.createFileDocinFilesDB(str(doc_index))
        acquired = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        pending = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired) + len(pending) == 5
    def testB_InteractionWithTheSource_testDocumentDuplicationAndThenTransfer(self):
        """
        _testB_InteractionWithTheSource_testDocumentDuplication_

        Tests the components: gets data from the DB source, duplicates
        it into files_db, and checks the component can process it.
        """
        self.createTestDocinDBSource()
        duplicator = LFNSourceDuplicator(config=self.config)
        duplicator.algorithm()
        # Allow the replication into files_db to complete.
        time.sleep(10)
        pending_transfers = self.db.loadView('AsyncTransfer', 'ftscp', {'reduce': False})['rows']
        assert len(pending_transfers) == 1

        daemon = TransferDaemon(config=self.config)
        daemon.algorithm()
        acquired = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        fresh = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired) + len(fresh) == 1

        # Seed four more source documents and duplicate them as well.
        for doc_index in range(1, 5):
            self.createTestDocinDBSource(str(doc_index))
        second_duplicator = LFNSourceDuplicator(config=self.config)
        second_duplicator.algorithm()
        time.sleep(20)
        pending_transfers = self.db.loadView('AsyncTransfer', 'ftscp', {'reduce': False})['rows']
        assert len(pending_transfers) == 5

        second_daemon = TransferDaemon(config=self.config)
        second_daemon.algorithm()
        acquired = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        fresh = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired) + len(fresh) == 5
    def testB_InteractionWithTheSource_testDocumentDuplicationAndThenTransfer(self):
        """
        _testB_InteractionWithTheSource_testDocumentDuplication_

        Tests the components: gets data from the DB source, duplicates
        it into files_db, and checks the component can process it.

        NOTE(review): a method with this exact name is defined twice in
        this file; this later definition shadows the earlier one, so only
        one of them ever runs. Consider deleting the duplicate.
        """
        self.createTestDocinDBSource()
        lfn_duplicator = LFNSourceDuplicator(config=self.config)
        lfn_duplicator.algorithm()
        # Wait for the replication into files_db to finish.
        time.sleep(10)
        ftscp_rows = self.db.loadView('AsyncTransfer', 'ftscp', {'reduce': False})['rows']
        assert len(ftscp_rows) == 1

        transfer_daemon = TransferDaemon(config=self.config)
        transfer_daemon.algorithm()
        acquired_rows = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        new_rows = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired_rows) + len(new_rows) == 1

        # Seed four more documents at the source and duplicate again.
        for n in range(1, 5):
            self.createTestDocinDBSource(str(n))
        lfn_duplicator_again = LFNSourceDuplicator(config=self.config)
        lfn_duplicator_again.algorithm()
        time.sleep(20)
        ftscp_rows = self.db.loadView('AsyncTransfer', 'ftscp', {'reduce': False})['rows']
        assert len(ftscp_rows) == 5

        transfer_daemon_again = TransferDaemon(config=self.config)
        transfer_daemon_again.algorithm()
        acquired_rows = self.db.loadView('monitor', 'filesAcquired', {'reduce': False})['rows']
        new_rows = self.db.loadView('monitor', 'filesNew', {'reduce': False})['rows']
        assert len(acquired_rows) + len(new_rows) == 5