Example #1
    def setUp(self):
        """
        set up shared array of type int with given size
        """
        self.size = 10
        self.data = Array('i', range(self.size))

        # start TestDataService
        self.server = Root()
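
This fragment omits its imports; a minimal sketch of what it assumes (the Root import path is a guess and may differ in your checkout):

# Imports assumed by the fragment above (sketch).
from multiprocessing import Array   # shared, process-safe int array ('i' typecode)
# Root is the DAS test data service; hypothetical import path:
# from DAS.web.das_test_datasvc import Root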
Example #2
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
#        config        = deepcopy(das_readconfig())
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://localhost:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser',
                              'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.create_db()

        # Add fake mapping records
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
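
This setUp starts a test server but the fragment omits the matching teardown; a minimal sketch of it (Example #6 below shows the full class with its real add_service, clear_collections, and tearDown definitions):

    def tearDown(self):
        """Invoke after each test: stop the test data service (sketch)."""
        self.server.stop()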
Example #3
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://127.0.0.1:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser',
                              'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs3', 'phedex', 'google_maps', 'ip']
        # Do not perform the DAS map test, since we overwrite systems and URLs.
        # This is done to use the standard DAS maps with local URLs, which
        # would cause the DAS map hash to be wrong during a test.
        config['map_test'] = False

        # Add fake mapping records
        self.clear_collections()
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs3', 'dbs3.yml')
        self.add_service('phedex', 'phedex.yml')

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
Example #4
class testUtils(unittest.TestCase):
    """
    A test class for the DAS task manager class
    """
    def setUp(self):
        """
        set up shared array of type int with given size
        """
        self.size = 10
        self.data = Array('i', range(self.size))

        # start TestDataService
        self.server = Root()

    def test_task_manager(self):
        """Test task manager"""
        expect = [idx for idx in range(self.size)]
        mypool = TaskManager()
        tasks  = []
        for idx in expect:
            tasks.append(mypool.spawn(worker, idx, self.data))
        mypool.joinall(tasks)
        result = [idx for idx in self.data]
        self.assertEqual(result, expect)

    def test_plugin_task_manager(self):
        """Test plugin task manager"""
        mgr  = PluginTaskManager(bus=self.server.engine, debug=1)
        mgr.subscribe()
        self.server.start()
        jobs = []
        jobs.append(mgr.spawn(daemon))
        mgr.clear(jobs)
        print("\njoin the task at %s\n" % time.time())
        time.sleep(2)
        print("\nstop server at %s\n" % time.time())
        self.server.stop()
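
The tests above call worker and daemon helpers that the listing does not show. A minimal sketch of what they need to look like, assuming spawn forwards positional arguments to the callable:

import time

def worker(idx, data):
    """Hypothetical worker: write idx into its own slot of the shared
    array, so the joined result equals the input range."""
    data[idx] = idx

def daemon():
    """Hypothetical long-running job for the plugin task manager test."""
    while True:
        time.sleep(1)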
Example #5
class testUtils(unittest.TestCase):
    """
    A test class for the DAS task manager class
    """
    def setUp(self):
        """
        set up shared array of type int with given size
        """
        self.size = 10
        self.data = Array('i', range(self.size))

        # start TestDataService
        self.server = Root()

    def test_task_manager(self):
        """Test task manager"""
        expect = [idx for idx in range(self.size)]
        mypool = TaskManager()
        tasks  = []
        for idx in expect:
            tasks.append(mypool.spawn(worker, idx, self.data))
        mypool.joinall(tasks)
        result = [idx for idx in self.data]
        self.assertEqual(result, expect)

    def test_plugin_task_manager(self):
        """Test plugin task manager"""
        mgr  = PluginTaskManager(bus=self.server.engine, debug=1)
        mgr.subscribe()
        self.server.start()
        jobs = []
        jobs.append(mgr.spawn(daemon))
        mgr.clear(jobs)
        time.sleep(2) # let jobs finish
        self.server.stop()

    def test_uidset(self):
        """Test UidSet class"""
        tasks = UidSet()
        tasks.add(1)
        self.assertEqual(1 in tasks, True)
        self.assertEqual(2 in tasks, False)
        tasks.add(1)
        tasks.add(2)
        self.assertEqual(1 in tasks, True)
        self.assertEqual(2 in tasks, True)
        self.assertEqual(tasks.get(1), 2) # we should have 2 values of 1
        self.assertEqual(tasks.get(2), 1) # we should have 1 value of 2
        tasks.discard(2)
        self.assertEqual(2 in tasks, False)
        tasks.discard(1)
        self.assertEqual(tasks.get(1), 1) # now we should have 1 value of 1
        tasks.discard(1)
        self.assertEqual(1 in tasks, False)

    def test_assign_priority(self):
        """Test priority assignment"""
        tasks  = TaskManager(qtype='PriorityQueue')
        uid1   = '1.1.1.1'
        tasks._uids.add(uid1)
        uid2   = '2.2.2.2'
        tasks._uids.add(uid2)
        result = tasks.assign_priority(uid1) # no tasks in a queue
        self.assertEqual(result, 0)
        tasks._tasks = TestQueue(empty=False)
        res1   = [tasks._uids.add(uid1) for r in range(20)]
        self.assertEqual(tasks.assign_priority(uid1), 2)
        res2   = [tasks._uids.add(uid2) for r in range(50)]
        self.assertEqual(tasks.assign_priority(uid2), 5)

    def test_priority_task_manager(self):
        """Test priority task manager"""
        data   = [idx for idx in range(0, 100)]
        shared_data = Array('i', len(data))
        mypool = TaskManager(qtype='PriorityQueue')
        tasks  = []
        for idx in data:
            if  idx%2:
                tasks.append(mypool.spawn(worker, idx, shared_data, uid=1))
            else:
                tasks.append(mypool.spawn(worker, idx, shared_data, uid=2))
        mypool.joinall(tasks)
        result = [idx for idx in shared_data]
        self.assertEqual(result, data)
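
test_assign_priority swaps the manager's queue for a TestQueue stub whose definition the listing omits. A minimal sketch, assuming assign_priority only asks the queue whether it is empty:

class TestQueue(object):
    """Hypothetical stand-in queue: reports a fixed empty() state so the
    priority logic sees a non-empty queue."""
    def __init__(self, empty=True):
        self._empty = empty
    def empty(self):
        return self._empty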
Example #6
class testCMSFakeDataServices(unittest.TestCase):
    """
    A test class for the DAS core module
    """
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
#        config        = deepcopy(das_readconfig())
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://localhost:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser',
                              'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.create_db()

        # Add fake mapping records
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()

    def add_service(self, system, ymlfile):
        """
        Add fake data service mapping records. We provide a system name
        that matches the corresponding name in DASTestDataService and the
        YML map file associated with this system.
        """
        fname  = os.path.join(DASPATH, 'services/maps/%s' % ymlfile)
        url    = self.base + '/%s' % system
        for record in read_service_map(fname):
            record['url'] = url
            record['system'] = system
            self.mgr.add(record)
        for record in read_service_map(fname, 'notations'):
            record['system'] = system
            self.mgr.add(record)

    def clear_collections(self):
        """clean-up test collections"""
        conn = Connection(host=self.dburi)
        for dbname in ['mapping', 'analytics', 'das', 'parser', 'keylearning']:
            db = conn[dbname]
            if  dbname != 'das':
                db.drop_collection(self.collname)
            else:
                db.drop_collection(self.dascache)
                db.drop_collection(self.dasmerge)
                db.drop_collection(self.dasmr)
            

    def tearDown(self):
        """Invoke after each test"""
        self.server.stop()
#        self.mgr.delete_db_collection()
#        self.clear_collections()

    def testDBSService(self):
        """test DASCore with test DBS service"""
        query  = "primary_dataset=abc" # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "primary_dataset=abc" # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('primary_dataset.name')
        expect = 'abc'
        self.assertEqual(expect, result)

    def testPhedexAndSiteDBServices(self):
        """test DASCore with test PhEDEx and SiteDB services"""
        query  = "site=T3_US_Cornell" # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "site=T3_US_Cornell | grep site.name" # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))
        expect = ['_id', 'das_id', 'site', 'cache_id', 'das', 'qhash']
        expect.sort()
        rkeys  = sorted(result[0].keys())
        self.assertEqual(expect, rkeys)

    def testAggregators(self):
        """test DASCore aggregators via zip service"""
        query  = "zip=1000"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "zip=1000 | count(zip.place.city)"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = {"function": "count", "result": {"value": 2}, 
                  "key": "zip.place.city", "_id":0}
        self.assertEqual(expect, result[0])

    def testIPService(self):
        """test DASCore with IP service"""
        query  = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "ip=137.138.141.145 | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)

    def testRecords(self):
        """test records DAS keyword with all services"""
        query  = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "site=T3_US_Cornell"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "records | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)

        query  = "records | grep site.name"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))

        query  = "records"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        res    = []
        for row in result:
            if  'ip' in row:
                res.append(DotDict(row).get('ip.address'))
            if  'site' in row:
                for item in row['site']:
                    if  'name' in item and item['name'] not in res:
                        res.append(item['name'])
                        res.append(item['name'])
        res.sort()
        expect = ['137.138.141.145', 'T3_US_Cornell']
        self.assertEqual(expect, res)
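
clear_collections above relies on pymongo's legacy Connection class, which modern pymongo removed in favor of MongoClient. A hedged sketch of the same cleanup on current pymongo (the 'das' database's cache, merge, and mapreduce collections would be dropped the same way):

from pymongo import MongoClient

def clear_collections(dburi, collname='test_collection'):
    """Sketch: drop per-system test collections on modern pymongo."""
    conn = MongoClient(host=dburi)
    for dbname in ['mapping', 'analytics', 'parser', 'keylearning']:
        conn[dbname].drop_collection(collname)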
Example #7
class testUtils(unittest.TestCase):
    """
    A test class for the DAS task manager class
    """
    def setUp(self):
        """
        set up shared array of type int with given size
        """
        self.size = 10
        self.data = Array('i', range(self.size))

        # start TestDataService
        self.server = Root()

    def test_task_manager(self):
        """Test task manager"""
        expect = [idx for idx in range(self.size)]
        mypool = TaskManager()
        tasks = []
        for idx in expect:
            tasks.append(mypool.spawn(worker, idx, self.data))
        mypool.joinall(tasks)
        result = [idx for idx in self.data]
        self.assertEqual(result, expect)

    def test_plugin_task_manager(self):
        """Test plugin task manager"""
        mgr = PluginTaskManager(bus=self.server.engine, debug=1)
        mgr.subscribe()
        self.server.start()
        jobs = []
        jobs.append(mgr.spawn(daemon))
        mgr.clear(jobs)
        time.sleep(2)  # let jobs finish
        self.server.stop()

    def test_uidset(self):
        """Test UidSet class"""
        tasks = UidSet()
        tasks.add(1)
        self.assertEqual(1 in tasks, True)
        self.assertEqual(2 in tasks, False)
        tasks.add(1)
        tasks.add(2)
        self.assertEqual(1 in tasks, True)
        self.assertEqual(2 in tasks, True)
        self.assertEqual(tasks.get(1), 2)  # we should have 2 values of 1
        self.assertEqual(tasks.get(2), 1)  # we should have 1 value of 2
        tasks.discard(2)
        self.assertEqual(2 in tasks, False)
        tasks.discard(1)
        self.assertEqual(tasks.get(1), 1)  # now we should have 1 value of 1
        tasks.discard(1)
        self.assertEqual(1 in tasks, False)

    def test_assign_priority(self):
        """Test priority assignment"""
        tasks = TaskManager(qtype='PriorityQueue', qfreq=10)
        uid1 = '1.1.1.1'
        tasks._uids.add(uid1)
        uid2 = '2.2.2.2'
        tasks._uids.add(uid2)
        result = tasks.assign_priority(uid1)  # no tasks in a queue
        self.assertEqual(int(result), 0)
        tasks._tasks = TestQueue(empty=False)
        res1 = [tasks._uids.add(uid1) for r in range(20)]
        self.assertEqual(int(tasks.assign_priority(uid1)), 2)
        res2 = [tasks._uids.add(uid2) for r in range(50)]
        self.assertEqual(int(tasks.assign_priority(uid2)), 5)

    def test_priority_task_manager(self):
        """Test priority task manager"""
        data = [idx for idx in range(0, 30)]
        shared_data = Array('i', len(data))
        mypool = TaskManager(qtype='PriorityQueue', qfreq=10)
        tasks = []
        for idx in data:
            if idx % 2:
                tasks.append(mypool.spawn(worker, idx, shared_data, uid=1))
            else:
                tasks.append(mypool.spawn(worker, idx, shared_data, uid=2))
        mypool.joinall(tasks)
        result = [idx for idx in shared_data]
        self.assertEqual(result, data)
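
These classes follow the standard unittest pattern; a module like this would typically end with the usual runner hook (not shown in the listing):

# Standard unittest entry point (assumed; not part of the listing).
if __name__ == '__main__':
    unittest.main()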