class testDASMapping(unittest.TestCase):
    """
    A test class for the DAS mappingdb class
    """
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0
        self.db = "test_mapping.db"
        # work on a deep copy so the shared DAS config is not mutated
        config = deepcopy(das_readconfig())
        dburi = config["mongodb"]["dburi"]
        logger = PrintManager("TestDASMapping", verbose=debug)
        config["logger"] = logger
        config["verbose"] = debug
        dbname = "test_mapping"
        collname = "db"
        config["mappingdb"] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db
        # drop any leftover test database so each run starts clean
        conn = MongoClient(dburi)
        conn.drop_database(dbname)
        self.coll = conn[dbname][collname]
        # presentation map exercised by test_presentation below
        self.pmap = {
            "presentation": {
                "block": [{"ui": "Block name", "das": "block.name"},
                          {"ui": "Block size", "das": "block.size"}]
            },
            "type": "presentation",
        }
        self.coll.insert(self.pmap)
        # store a verification token over current collection content;
        # DASMapping presumably checks it on load — confirm against DASMapping impl
        ver_token = verification_token(self.coll.find(exhaust=True))
        rec = {"verification_token": ver_token, "type": "verification_token"}
        self.coll.insert(rec)
        self.mgr = DASMapping(config)

    def tearDown(self):
        """Invoke after each test"""
        self.mgr.delete_db()

    def test_api(self):
        """test methods for api table"""
        # start from an empty, freshly initialized mapping DB
        self.mgr.delete_db()
        self.mgr.init()
        apiversion = "DBS_2_0_8"
        url = "http://a.com"
        dformat = "JSON"
        expire = 100
        api = "listRuns"
        params = {"apiversion": apiversion, "path": "required", "api": api}
        # first service record: dbs/listRuns looking up run numbers
        rec = {
            "system": "dbs",
            "urn": api,
            "format": dformat,
            "url": url,
            "params": params,
            "expire": expire,
            "lookup": "run",
            "wild_card": "*",
            "das_map": [dict(das_key="run", rec_key="run.run_number", api_arg="path")],
            "type": "service",
        }
        self.mgr.add(rec)
        # expected servicemap entry for listRuns; extended with listBlocks below
        smap = {
            api: {
                "url": url,
                "expire": expire,
                "keys": ["run"],
                "format": dformat,
                "wild_card": "*",
                "cert": None,
                "ckey": None,
                "services": "",
                "lookup": "run",
                "params": {"path": "required", "api": api, "apiversion": "DBS_2_0_8"},
            }
        }
        # second service record: dbs/listBlocks with two das_map entries
        rec = {
            "system": "dbs",
            "urn": "listBlocks",
            "format": dformat,
            "url": url,
            "expire": expire,
            "lookup": "block",
            "params": {
                "apiversion": apiversion,
                "api": "listBlocks",
                "block_name": "*",
                "storage_element_name": "*",
                "user_type": "NORMAL",
            },
            "das_map": [
                {"das_key": "block", "rec_key": "block.name", "api_arg": "block_name"},
                {
                    "das_key": "site",
                    "rec_key": "site.se",
                    "api_arg": "storage_element_name",
                    "pattern": "re.compile('([a-zA-Z0-9]+\.){2}')",
                },
            ],
            "type": "service",
        }
        self.mgr.add(rec)
        system = "dbs"
        api = "listBlocks"
        daskey = "block"
        rec_key = "block.name"
        api_input = "block_name"
        # system/api listings reflect the two records added above
        res = self.mgr.list_systems()
        self.assertEqual(["dbs"], res)
        res = self.mgr.list_apis()
        # self.assertEqual([api], res)
        res.sort()
        self.assertEqual(["listBlocks", "listRuns"], res)
        res = self.mgr.lookup_keys(system, api, daskey)
        self.assertEqual([rec_key], res)
        value = ""
        # das2api maps a DAS record key to the API argument name
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)
        # adding another params which default is None
        res = self.mgr.das2api(system, api, rec_key, value)
        self.assertEqual([api_input], res)
        # api2das is the reverse mapping: API argument -> DAS key
        res = self.mgr.api2das(system, api_input)
        self.assertEqual([daskey], res)
        # adding notations
        notations = {
            "system": system,
            "type": "notation",
            "notations": [
                {"api_output": "storage_element_name", "rec_key": "se", "api": ""},
                {"api_output": "number_of_events", "rec_key": "nevents", "api": ""},
            ],
        }
        self.mgr.add(notations)
        res = self.mgr.notation2das(system, "number_of_events")
        self.assertEqual("nevents", res)
        # API keys
        res = self.mgr.api2daskey(system, api)
        self.assertEqual(["block", "site"], res)
        # build service map
        smap.update(
            {
                api: {
                    "url": url,
                    "expire": expire,
                    "cert": None,
                    "ckey": None,
                    "keys": ["block", "site"],
                    "format": dformat,
                    "wild_card": "*",
                    "services": "",
                    "lookup": daskey,
                    "params": {
                        "storage_element_name": "*",
                        "api": api,
                        "block_name": "*",
                        "user_type": "NORMAL",
                        "apiversion": "DBS_2_0_8",
                    },
                }
            }
        )
        res = self.mgr.servicemap(system)
        self.assertEqual(smap, res)

    def test_presentation(self):
        """test presentation method"""
        self.mgr.init()
        # presentation map was seeded into the collection in setUp (self.pmap)
        expect = self.pmap["presentation"]["block"]
        result = self.mgr.presentation("block")
        self.assertEqual(expect, result)

    def test_notations(self):
        """test notations method"""
        self.mgr.init()
        system = "test"
        rec = {
            "notations": [
                {"api_output": "site.resource_element.cms_name", "rec_key": "site.name", "api": ""},
                {"api_output": "site.resource_pledge.cms_name", "rec_key": "site.name", "api": ""},
                {"api_output": "admin.contacts.cms_name", "rec_key": "site.name", "api": ""},
            ],
            "system": system,
            "type": "notation",
        }
        self.mgr.add(rec)
        # notations(system) returns a dict keyed by system name
        expect = rec["notations"]
        result = self.mgr.notations(system)[system]
        self.assertEqual(expect, result)
class testCMSFakeDataServices(unittest.TestCase):
    """
    A test class for the DAS core module, run against fake/local data
    services (DASTestDataService) instead of the real CMS back-ends.
    """
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0
        # read DAS config and make fake Mapping DB entry
        collname = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr = 'test_mapreduce'
        self.collname = collname
        config = das_readconfig()
        dburi = config['mongodb']['dburi']
        self.dburi = dburi
        logger = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base = 'http://127.0.0.1:8080' # URL of DASTestDataService
        self.expire = 100
        config['logger'] = logger
        config['loglevel'] = debug
        config['verbose'] = debug
        # point every DAS sub-database at test-only collections
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser',
                              'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']
        # Do not perform DAS map test, since we overwrite system and urls.
        # This is done to use standard DAS maps, but use local URLs, which
        # cause DAS hash map to be be wrong during a test
        config['map_test'] = False
        # setup DAS mapper
        self.mgr = DASMapping(config)
        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.init()
        # Add fake mapping records
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')
        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser
        # create DAS handler
        self.das = DASCore(config)
        # start TestDataService
        self.server = Root(config)
        self.server.start()

    def add_service(self, system, ymlfile):
        """
        Add Fake data service mapping records. We provide system name
        which match corresponding name in DASTestDataService and
        associated with this system YML map file.
        """
        fname = os.path.join(DASPATH, 'services/maps/%s' % ymlfile)
        url = self.base + '/%s' % system
        # rewrite service URLs to point at the local test server
        for record in read_service_map(fname):
            record['url'] = url
            record['system'] = system
            self.mgr.add(record)
        for record in read_service_map(fname, 'notations'):
            record['system'] = system
            self.mgr.add(record)

    def clear_collections(self):
        """clean-up test collections"""
        # use MongoClient for consistency with the rest of this module;
        # pymongo's Connection class was deprecated and later removed
        conn = MongoClient(host=self.dburi)
        for dbname in ['mapping', 'analytics', 'das', 'parser', 'keylearning']:
            db = conn[dbname]
            if dbname != 'das':
                db.drop_collection(self.collname)
            else:
                # the 'das' database uses dedicated cache/merge/mapreduce collections
                db.drop_collection(self.dascache)
                db.drop_collection(self.dasmerge)
                db.drop_collection(self.dasmr)

    def tearDown(self):
        """Invoke after each test"""
        self.server.stop()
        # self.mgr.delete_db_collection()
        # self.clear_collections()

    def testDBSService(self):
        """test DASCore with test DBS service"""
        query = "primary_dataset=abc"
        # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)
        query = "primary_dataset=abc"
        # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('primary_dataset.name')
        expect = 'abc'
        self.assertEqual(expect, result)

    def testPhedexAndSiteDBServices(self):
        """test DASCore with test PhEDEx and SiteDB services"""
        query = "site=T3_US_Cornell"
        # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)
        query = "site=T3_US_Cornell | grep site.name"
        # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))
        expect = ['_id', 'das_id', 'site', 'cache_id', 'das', 'qhash']
        expect.sort()
        # sorted() works on both py2 lists and py3 dict views,
        # unlike keys().sort() which fails on a py3 view object
        rkeys = sorted(result[0].keys())
        self.assertEqual(expect, rkeys)

    def testAggregators(self):
        """test DASCore aggregators via zip service"""
        query = "zip=1000"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)
        query = "zip=1000 | count(zip.place.city)"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = result[0] # take first result
        result.pop('das', None) # strip off DAS info
        expect = {"function": "count", "result": {"value": 2},
                  "key": "zip.place.city", "_id": 0}
        self.assertEqual(expect, result)

    def testIPService(self):
        """test DASCore with IP service"""
        query = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)
        query = "ip=137.138.141.145 | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)

    def testRecords(self):
        """test records DAS keyword with all services"""
        query = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)
        query = "site=T3_US_Cornell"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)
        query = "records | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)
        query = "records | grep site.name"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))
        query = "records"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        res = []
        for row in result:
            # skip DAS placeholder ("empty") records
            # (use the 'in' operator; dict.has_key was removed in Python 3)
            if 'das' in row and 'empty_record' in row['das']:
                if row['das'].get('empty_record'):
                    continue
            if 'ip' in row:
                res.append(DotDict(row).get('ip.address'))
            if 'site' in row:
                for item in row['site']:
                    if 'name' in item and item['name'] not in res:
                        res.append(item['name'])
        res.sort()
        expect = ['137.138.141.145', 'T3_US_Cornell']
        self.assertEqual(expect, res)