コード例 #1
0
ファイル: das_handlers.py プロジェクト: dmwm/DAS
async def process(request):
    """Process a user request.

    Parses the query string for ``input`` (the DAS query text) and
    ``instance`` (DBS instance, default ``prod/global``).  If the query
    is already cached, the cached rows are returned immediately;
    otherwise a background workflow task is scheduled and only the
    request ``pid`` is returned.

    :param request: aiohttp-style request exposing ``query_string``
    :returns: dict with ``status`` plus either result payload or ``pid``
    :raises ValueError: when no ``input`` parameter is supplied
    """
    loop = asyncio.get_event_loop()
    params = dict(urllib.parse.parse_qsl(request.query_string))
    uinput = params.get('input', None)
    inst = params.get('instance', 'prod/global')
    if not uinput:
        # ValueError is more precise than a bare Exception and is still
        # caught by callers that catch Exception
        raise ValueError("No input query")
    try:
        dasquery = DASQuery(uinput, instance=inst)
    except Exception:
        # malformed DAS query: report failure back to the caller
        traceback.print_exc()
        return {'status': 'fail', 'query': uinput}
    pid = dasquery.qhash
    time0 = time.time()
    dascore = DASCore()
    print("### dascore", dascore, time.time()-time0)
    if dascore.incache(dasquery):
        # cache hit: return the data right away
        res = list(dascore.get_from_cache(dasquery))
        return {'status': 'ok', 'data': res, 'nresults': len(res),
                'mongo_query': dasquery.mongo_query}
    # Schedule the workflow in the background.  If we wanted to wait for
    # completion before returning we would `await` the task instead.
    asyncio.ensure_future(workflow(dascore, dasquery, loop))
    return {'status': 'ok', 'pid': pid}
コード例 #2
0
ファイル: DASCacheModel.py プロジェクト: ktf/DAS
def worker(query, expire):
    """
    Worker function: invoke the DAS core to refresh the cache for the
    given input query and return the call status.
    """
    return DASCore().call(query)
コード例 #3
0
ファイル: das_core_t.py プロジェクト: perrozzi/DAS
class testDASCore(unittest.TestCase):
    """
    A test class for the DAS core module
    """
    def setUp(self):
        """
        Set up the DAS core module against a freshly-dropped 'das' DB.
        """
        self.das = DASCore(debug=0, multitask=False)
        cfg = deepcopy(das_readconfig())
        conn = MongoClient(cfg['mongodb']['dburi'])
        conn.drop_database('das')

    def testAggregators(self):
        """test DASCore aggregators via zip service"""
        # run the full DAS workflow, then read the aggregated record back
        query = "file dataset=/ZMM/Summer11-DESIGN42_V11_428_SLHC1-v1/GEN-SIM | grep file.size | sum(file.size)"
        dquery = DASQuery(query)
        self.das.call(dquery)
        rows = list(self.das.get_from_cache(dquery))
        record = rows[0]
        record.pop('das', None)  # strip off DAS bookkeeping info
        expect = {"function": "sum", "result": {"value": 5658838455},
                  "key": "file.size", "_id": 0}
        # the result may have value == 'N/A' when the test runs without
        # certificates (travis); in that case just skip the comparison
        if record['result']['value'] != 'N/A':
            self.assertEqual(expect, record)
コード例 #4
0
ファイル: DASCacheModel.py プロジェクト: perrozzi/DAS
def worker(query, expire):
    """
    Worker function: ask the DAS core to update the cache for the given
    input query; the call status is handed back to the caller.
    """
    core = DASCore()
    status = core.call(query)
    return status
コード例 #5
0
ファイル: das_core_t.py プロジェクト: perrozzi/DAS
 def setUp(self):
     """
     Set up the DAS core module (single-task mode) and start each test
     from a clean 'das' database.
     """
     self.das = DASCore(debug=0, multitask=False)
     cfg = deepcopy(das_readconfig())
     MongoClient(cfg['mongodb']['dburi']).drop_database('das')
コード例 #6
0
    def __init__(self, config=None, query=None, sleep=600):
        """Initialize the robot: DAS core, pid file and std streams."""
        self.dascore = DASCore(config, nores=True)
        logdir = getarg(config, 'logdir', '/tmp')
        self.pidfile = os.path.join(logdir, 'robot-%s.pid' % genkey(query))

        # prefer the platform's os.devnull, fall back to /dev/null
        devnull = getattr(os, "devnull", "/dev/null")

        self.stdin = devnull  # we do not read from stdin
        self.stdout = getarg(config, 'stdout', devnull)
        self.stderr = getarg(config, 'stderr', devnull)
        self.query = query
        self.sleep = sleep
コード例 #7
0
ファイル: das_web_srv.py プロジェクト: dhootha/DAS
    def init(self):
        """Init DAS web server, connect to DAS Core"""
        try:
            self.reqmgr     = RequestManager(lifetime=self.lifetime)
            self.dasmgr     = DASCore(engine=self.engine)
            self.repmgr     = CMSRepresentation(self.dasconfig, self.dasmgr)
            self.daskeys    = self.dasmgr.das_keys()
            self.gfs        = db_gridfs(self.dburi)
            self.daskeys.sort()
            self.dasmapping = self.dasmgr.mapping
            self.dbs_url    = self.dasmapping.dbs_url()
            self.dbs_global = self.dasmapping.dbs_global_instance()
            self.dbs_instances = self.dasmapping.dbs_instances()
            self.dasmapping.init_presentationcache()
            # assign a stable color per data-service for the web UI
            self.colors = {'das':gen_color('das')}
            for system in self.dasmgr.systems:
                self.colors[system] = gen_color(system)
            # build the presentation key list once, sorted by DAS key
            if  not self.daskeyslist:
                keylist = [r for r in self.dasmapping.das_presentation_map()]
                keylist.sort(key=lambda r: r['das'])
                self.daskeyslist = keylist

        except ConnectionFailure as _err:
            # MongoDB is unreachable: log the failing thread and time
            tstamp = dastimestamp('')
            mythr  = threading.current_thread()
            print("### MongoDB connection failure thread=%s, id=%s, time=%s" \
                    % (mythr.name, mythr.ident, tstamp))
        except Exception as exc:
            # any other failure: reset all managers to a safe empty state
            print_exc(exc)
            self.dasmgr  = None
            self.reqmgr  = None
            self.dbs_url = None
            self.dbs_global = None
            self.dbs_instances = []
            self.daskeys = []
            self.colors  = {}
            self.q_rewriter = None
            return

        # KWS and Query Rewriting failures are not fatal
        try:
            # init query rewriter, if needed
            if self.dasconfig['query_rewrite']['pk_rewrite_on']:
                self.q_rewriter = CMSQueryRewrite(self.repmgr,
                                                  self.templatepage)
        except Exception as exc:
            print_exc(exc)
            self.q_rewriter = None
コード例 #8
0
ファイル: das_web_srv.py プロジェクト: zdenekmaxa/DAS
 def init(self):
     """Init DAS web server, connect to DAS Core"""
     try:
         self.logcol     = DASLogdb(self.dasconfig)
         self.reqmgr     = RequestManager(self.dburi, lifetime=self.lifetime)
         self.dasmgr     = DASCore(engine=self.engine)
         self.repmgr     = CMSRepresentation(self.dasconfig, self.dasmgr)
         self.daskeys    = self.dasmgr.das_keys()
         self.gfs        = db_gridfs(self.dburi)
         self.daskeys.sort()
         self.dasmapping = self.dasmgr.mapping
         self.dasmapping.init_presentationcache()
         # assign one color per data-service, used by the web UI
         self.colors = {}
         for system in self.dasmgr.systems:
             self.colors[system] = gen_color(system)
         self.sitedbmgr   = SiteDBService(self.dasconfig)
     except Exception as exc:
         # initialization failed: fall back to a safe empty state
         print_exc(exc)
         self.dasmgr = None
         self.daskeys = []
         self.colors = {}
         return
     # Start Onhold_request daemon
     if  self.dasconfig['web_server'].get('onhold_daemon', False):
         self.process_requests_onhold()
コード例 #9
0
ファイル: das_core_t.py プロジェクト: ktf/DAS
class testDASCore(unittest.TestCase):
    """
    A test class for the DAS core module
    """
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0
        self.das = DASCore(debug=debug, multitask=False)
        config = deepcopy(das_readconfig())
        dburi = config['mongodb']['dburi']
        connection = Connection(dburi)
        connection.drop_database('das')

    def testAggregators(self):
        """test DASCore aggregators via zip service"""
        # test DAS workflow
        query  = "zip=14850 | grep zip.code | count(zip.code)"
        dquery = DASQuery(query)
        result = self.das.call(dquery)
        result = self.das.get_from_cache(dquery)
        result = [r for r in result][0]
        # dict.has_key() was removed in Python 3; the `in` operator is
        # the portable spelling and works on Python 2 as well
        if  'das' in result:
            del result['das'] # strip off DAS info
        expect = {"function": "count", "result": {"value": 1},
                  "key": "zip.code", "_id":0}
        self.assertEqual(expect, result)

    def testIPService(self):
        """test DASCore with IP service"""
        ipaddr = socket.gethostbyname('cmsweb.cern.ch')
        # test DAS workflow
        query  = "ip=%s" % ipaddr
        dquery = DASQuery(query)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        # test results
        query  = "ip=%s | grep ip.address" % ipaddr
        dquery = DASQuery(query)
        result = self.das.get_from_cache(dquery)
        result = [r for r in result][0]
        result = DotDict(result).get('ip.address')
        expect = ipaddr
        self.assertEqual(expect, result)
コード例 #10
0
ファイル: das_services_t.py プロジェクト: zdenekmaxa/DAS
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
#        config        = deepcopy(das_readconfig())
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://localhost:8080' # URL of DASTestDataService
        self.expire   = 100
        # route every DAS component to the test collections defined above
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser', 
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.create_db()

        # Add fake mapping records (one YAML map per fake service)
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
コード例 #11
0
 def create(self, **kwargs):
     """Create a DASCore object, reusing a cached instance when the
     same kwargs were seen before."""
     key = genkey(str(kwargs))
     if key not in self.params:
         self.params[key] = DASCore(**kwargs)
     return self.params[key]
コード例 #12
0
ファイル: das_core_t.py プロジェクト: zdenekmaxa/DAS
class testDASCore(unittest.TestCase):
    """
    A test class for the DAS core module
    """
    def setUp(self):
        """
        Set up the DAS core module against a freshly-dropped 'das' DB.
        """
        self.das = DASCore(debug=0)
        cfg = deepcopy(das_readconfig())
        conn = Connection(cfg['mongodb']['dburi'])
        conn.drop_database('das')

    def testAggregators(self):
        """test DASCore aggregators via zip service"""
        # run the DAS workflow, then fetch the aggregated record
        query = "zip=14850 | grep zip.Placemark.address | count(zip.Placemark.address)"
        dquery = DASQuery(query)
        self.das.call(dquery)
        rows = list(self.das.get_from_cache(dquery))
        expect = {"function": "count", "result": {"value": 1},
                  "key": "zip.Placemark.address", "_id": 0}
        self.assertEqual(expect, rows[0])

    def testIPService(self):
        """test DASCore with IP service"""
        # run the DAS workflow
        dquery = DASQuery("ip=137.138.141.145")
        self.assertEqual("ok", self.das.call(dquery))

        # verify the cached result
        dquery = DASQuery("ip=137.138.141.145 | grep ip.City")
        rows = list(self.das.get_from_cache(dquery))
        city = DotDict(rows[0]).get('ip.City')
        self.assertEqual('Geneva', city)
コード例 #13
0
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://127.0.0.1:8080' # URL of DASTestDataService
        self.expire   = 100
        # route every DAS component to the test collections defined above
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser', 
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs3', 'phedex', 'google_maps', 'ip']
        # Do not perform DAS map test, since we overwrite system and urls.
        # This is done to use standard DAS maps, but use local URLs, which
        # cause DAS hash map to be be wrong during a test
        config['map_test'] = False

        # Add fake mapping records (one YAML map per fake service)
        self.clear_collections()
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs3', 'dbs3.yml')
        self.add_service('phedex', 'phedex.yml')

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
コード例 #14
0
ファイル: das_core_t.py プロジェクト: zdenekmaxa/DAS
 def setUp(self):
     """
     Set up the DAS core module and start from a clean 'das' database.
     """
     self.das = DASCore(debug=0)
     cfg = deepcopy(das_readconfig())
     Connection(cfg['mongodb']['dburi']).drop_database('das')
コード例 #15
0
 def __init__(self, config=None):
     """
     Initialize DAS web interface: resolve the cache-server URL and base
     path, then connect to the DAS core.

     :param config: optional configuration dict.  ``None`` (the default)
         avoids the shared-mutable-default-argument pitfall; callers
         that passed a dict before see identical behavior.
     """
     config = {} if config is None else config
     DASWebManager.__init__(self, config)
     try:
         # try what is supplied from WebTools framework
         cdict = self.config.dictionary_()
         self.cachesrv = cdict.get('cache_server_url',
                                   'http://localhost:8211')
         self.base = '/dascontrollers'
     except Exception:
         # stand-alone version; narrowed from a bare except so that
         # KeyboardInterrupt/SystemExit are not swallowed
         self.cachesrv = config.get('cache_server_url',
                                    'http://localhost:8211')
         self.base = '/das'
     self.dasmgr = DASCore()
     self.daskeys = self.dasmgr.das_keys()
     self.daskeys.sort()
     self.dasmapping = self.dasmgr.mapping
     self.daslogger = self.dasmgr.logger
     # output formats supported by the web interface
     self.pageviews = ['xml', 'list', 'json', 'yuijson']
     msg = "DASSearch::init is started with base=%s" % self.base
     self.daslogger.debug(msg)
     print(msg)
コード例 #16
0
ファイル: ir_entity_attributes.py プロジェクト: perrozzi/DAS
def manual_tests():
    """
    manual tests

    Builds a SimpleIREntityAttributeMatcher over the DAS schema fields
    and runs a series of keyword-search queries, printing the results
    for manual inspection.  Not part of the automated test suite.
    """
    from DAS.keywordsearch.metadata.schema_adapter_factory import get_schema
    from DAS.core.das_core import DASCore

    schema_adapter = get_schema(DASCore(multitask=False))
    fields_by_entity = schema_adapter.list_result_fields()
    ir_matcher = SimpleIREntityAttributeMatcher(fields_by_entity)

    def print_results(*args, **kwargs):
        """ run search and print results - used for testsing """
        ir_matcher.search_index(*args, **kwargs)

    # manual toggle: flip to True to run the extended query set
    if False:
        print_results(
            keywords=u'files of Zmm with number of events more than 10',
            result_type=u'dataset')
        print_results(keywords=u'number events', result_type=u'dataset')
        print_results(keywords=u'number evented', result_type=u'dataset')
        print_results(keywords=u'dataset.nevents', result_type=u'dataset')
        print_results(keywords=u'dataset.numevents', result_type=u'dataset')

        # block.replica.subscribed vs block.replica.custodial
        #  (the deepest name in here is the most important)
        print_results(keywords=u'replica fraction', result_type=u'block')
        print_results(keywords=u'replica fraction', result_type=u'site')
        print_results(keywords=u'custodial replica', result_type=u'block')
        print_results(keywords=u'replica_fraction', result_type=u'site')

        print('=========================================================')

        print_results(keywords=u'number', result_type=u'dataset')
        print_results(keywords=u'of', result_type=u'dataset')
        print_results(keywords=u'events', result_type=u'dataset')
        print_results(keywords=u'number of', result_type=u'dataset')
        print_results(keywords=u'of events', result_type=u'dataset')
        print_results(keywords=u'Number OF Events', result_type=u'dataset')
    print('Q: dataset_fraction')
    print_results(keywords=u'dataset_fraction', result_type=u'site')
    print('Q: dataset fraction')
    print_results(keywords=u'dataset fraction', result_type=u'site')
    print('Q: dataset part')
    print_results(keywords=u'dataset part', result_type=u'site')
    print('============================================')
    # NOTE(review): label says 'Q: file' but the query below is
    # 'file in', same as the next case — confirm intended
    print('Q: file')
    print_results(keywords=u'file in', result_type='file', limit=4)
    print('============================================')
    print('Q: file in')
    print_results(keywords=u'file in', result_type='file', limit=4)
コード例 #17
0
ファイル: das_web_srv.py プロジェクト: ktf/DAS
    def init(self):
        """Init DAS web server, connect to DAS Core"""
        try:
            self.reqmgr = RequestManager(lifetime=self.lifetime)
            self.dasmgr = DASCore(engine=self.engine)
            self.repmgr = CMSRepresentation(self.dasconfig, self.dasmgr)
            self.daskeys = self.dasmgr.das_keys()
            self.gfs = db_gridfs(self.dburi)
            self.daskeys.sort()
            self.dasmapping = self.dasmgr.mapping
            self.dbs_url = self.dasmapping.dbs_url()
            self.dbs_global = self.dasmapping.dbs_global_instance()
            self.dbs_instances = self.dasmapping.dbs_instances()
            self.dasmapping.init_presentationcache()
            # assign a stable color per data-service for the web UI
            self.colors = {"das": gen_color("das")}
            for system in self.dasmgr.systems:
                self.colors[system] = gen_color(system)
            # get SiteDB from global scope
            self.sitedbmgr = SERVICES.get("sitedb2", None)
            # Start DBS daemon
            if self.dataset_daemon:
                self.dbs_daemon(self.dasconfig["web_server"])
            # build the presentation key list once, sorted by DAS key
            if not self.daskeyslist:
                keylist = [r for r in self.dasmapping.das_presentation_map()]
                keylist.sort(key=lambda r: r["das"])
                self.daskeyslist = keylist

        except ConnectionFailure as _err:
            # MongoDB is unreachable: log the failing thread and time
            tstamp = dastimestamp("")
            mythr = threading.current_thread()
            print "### MongoDB connection failure thread=%s, id=%s, time=%s" % (mythr.name, mythr.ident, tstamp)
        except Exception as exc:
            # any other failure: reset all managers to a safe empty state
            print_exc(exc)
            self.dasmgr = None
            self.reqmgr = None
            self.dbs_url = None
            self.dbs_global = None
            self.dbs_instances = []
            self.daskeys = []
            self.colors = {}
            self.q_rewriter = None
            return

        # KWS and Query Rewriting failures are not fatal
        try:
            # init query rewriter, if needed
            if self.dasconfig["query_rewrite"]["pk_rewrite_on"]:
                self.q_rewriter = CMSQueryRewrite(self.repmgr, self.templatepage)
        except Exception as exc:
            print_exc(exc)
            self.q_rewriter = None
コード例 #18
0
ファイル: das_kwdsearch_t.py プロジェクト: perrozzi/DAS
    def __init__(cls, name, bases, d):
        """Metaclass initializer: build the shared KWS fixtures exactly
        once, on the first class construction."""
        type.__init__(cls, name, bases, d)

        # fixtures were already built by a previous class construction
        if getattr(cls, 'global_dbs_inst', None):
            return

        print('setUp in metaclass: getting dbs manager ' \
              '(and fetching datasets if needed)')
        cls.global_dbs_mngr = initialize_global_dbs_mngr(update_required=False)
        cls.global_dbs_inst = get_global_dbs_inst()
        cls.kws = KeywordSearch(dascore=DASCore(multitask=False))
        cls.timeout = das_readconfig()['keyword_search']['timeout']
コード例 #19
0
ファイル: das_robot.py プロジェクト: ktf/DAS
    def __init__(self, config=None, query=None, sleep=600):
        """Set up the robot: DAS core handle, pid file and std streams."""
        self.dascore = DASCore(config, nores=True)
        logdir = getarg(config, 'logdir', '/tmp')
        self.pidfile = os.path.join(logdir, 'robot-%s.pid' % genkey(query))

        # prefer the platform's os.devnull, fall back to /dev/null
        devnull = getattr(os, "devnull", "/dev/null")

        self.stdin = devnull  # we do not read from stdin
        self.stdout = getarg(config, 'stdout', devnull)
        self.stderr = getarg(config, 'stderr', devnull)
        self.query = query
        self.sleep = sleep
コード例 #20
0
ファイル: kws_web_srv.py プロジェクト: perrozzi/DAS
 def init(self):
     """Init DAS web server, connect to DAS Core"""
     try:
         self.dasmgr = DASCore(multitask=False)
         mapping = self.dasmgr.mapping
         self.dbs_instances = mapping.dbs_instances()
         self.dbs_global = mapping.dbs_global_instance()
         if KeywordSearchHandler:
             self.kws = KeywordSearchHandler(self.dasmgr)
     except ConnectionFailure:
         # MongoDB is unreachable: log the failing thread and time
         stamp = dastimestamp('')
         thr = threading.current_thread()
         print("### MongoDB connection failure thread=%s, id=%s, time=%s"
               % (thr.name, thr.ident, stamp))
     except Exception as exc:
         # any other failure: disable DAS core and keyword search
         print_exc(exc)
         self.dasmgr = None
         self.kws = None
コード例 #21
0
ファイル: DASSearch.py プロジェクト: zdenekmaxa/DAS
 def __init__(self, config={}):
     """
     Initialize DAS web interface: resolve the cache-server URL and
     base path, then connect to the DAS core.
     """
     DASWebManager.__init__(self, config)
     try:
         # try what is supplied from WebTools framework
         cdict = self.config.dictionary_()
         self.cachesrv = cdict.get("cache_server_url", "http://localhost:8211")
         self.base = "/dascontrollers"
     except:
         # stand-alone version
         self.cachesrv = config.get("cache_server_url", "http://localhost:8211")
         self.base = "/das"
     self.dasmgr = DASCore()
     self.daskeys = self.dasmgr.das_keys()
     self.daskeys.sort()
     self.dasmapping = self.dasmgr.mapping
     self.daslogger = self.dasmgr.logger
     # output formats supported by the web interface
     self.pageviews = ["xml", "list", "json", "yuijson"]
     msg = "DASSearch::init is started with base=%s" % self.base
     self.daslogger.debug(msg)
     print msg
コード例 #22
0
ファイル: das_robot.py プロジェクト: ktf/DAS
class Robot(object):
    """
    DAS Robot (daemon) class to fetch data from provided URL/API
    and store them into DAS cache.
    """
    def __init__(self, config=None, query=None, sleep=600):
        self.dascore = DASCore(config, nores=True)
        logdir       = getarg(config, 'logdir', '/tmp')
        self.pidfile = os.path.join(logdir, 'robot-%s.pid' % genkey(query))

        if (hasattr(os, "devnull")):
            devnull  = os.devnull
        else:
            devnull  = "/dev/null"

        self.stdin   = devnull # we do not read from stdinput
        self.stdout  = getarg(config, 'stdout', devnull)
        self.stderr  = getarg(config, 'stderr', devnull)
        self.query   = query
        self.sleep   = sleep

    def daemonize(self):
        """
        do the UNIX double-fork magic, see Stevens' "Advanced
        Programming in the UNIX Environment" for details (ISBN 0201563177)
        http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
        """
        try:
            pid = os.fork()
            if  pid > 0:
                # exit first parent
                sys.exit(0)
        except OSError as err:
            sys.stderr.write("fork #1 failed: %d (%s)\n" \
                % (err.errno, err.strerror))
            sys.exit(1)

        # decouple from parent environment
        os.chdir("/")
        os.umask(0)
        os.setsid()

        # do second fork
        try:
            pid = os.fork()
            if  pid > 0:
                # exit from second parent
                sys.exit(0)
        except OSError as err:
            sys.stderr.write("fork #2 failed: %d (%s)\n" \
                % (err.errno, err.strerror))
            sys.exit(1)

        # redirect standard file descriptors
        sys.stdout.flush()
        sys.stderr.flush()
        stdi = file(self.stdin, 'r')
        stdo = file(self.stdout, 'a+')
        stde = file(self.stderr, 'a+', 0)
        os.dup2(stdi.fileno(), sys.stdin.fileno())
        os.dup2(stdo.fileno(), sys.stdout.fileno())
        os.dup2(stde.fileno(), sys.stderr.fileno())

        # write pidfile
        atexit.register(self.delpid)
        pid = str(os.getpid())
        file(self.pidfile, 'w+').write("%s\n" % pid)

    def delpid(self):
        """Delete PID file"""
        os.remove(self.pidfile)

    def start(self):
        """
        Start the daemon
        """
        # Check for a pidfile to see if the daemon already runs
        try:
            pidf = file(self.pidfile,'r')
            pid  = int(pidf.read().strip())
            pidf.close()
        except IOError:
            pid = None

        if pid:
            message = "pidfile %s already exist. Daemon already running?\n"
            sys.stderr.write(message % self.pidfile)
            sys.exit(1)

        # Start the daemon
        self.daemonize()
        self.run()

    def stop(self):
        """
        Stop the daemon
        """
        # Get the pid from the pidfile
        try:
            pidf = file(self.pidfile, 'r')
            pid = int(pidf.read().strip())
            pidf.close()
        except IOError:
            pid = None

        if not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return # not an error in a restart

        # Try killing the daemon process
        try:
            while 1:
                os.kill(pid, SIGTERM)
                time.sleep(0.1)
        except OSError as err:
            if err.find("No such process") > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                print_exc(err)
                sys.exit(1)

    def restart(self):
        """
        Restart the daemon
        """
        self.stop()
        self.start()

    def status(self):
        """
        Return status information about Robot instance.
        """
        # Get the pid from the pidfile
        try:
            pidf  = file(self.pidfile, 'r')
            pid = int(pidf.read().strip())
            pidf.close()
        except IOError:
            pid = None

        if  not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return # not an error in a restart

        print "DAS populator information"
        print "PID    :", pid
        print "pidfile:", self.pidfile
        print "stdin  :", self.stdin
        print "stdout :", self.stdout
        print "stderr :", self.stderr
        print "sleep  :", self.sleep
        print "query  :", self.query


    def run(self):
        """
        Method which will be called after the process has been
        daemonized by start() or restart().
        """
        if  not self.query:
            print "DAS query is not provided"
            sys.exit(1)

        while True:
            self.dascore.call(self.query, add_to_analytics=False)
            time.sleep(self.sleep)
コード例 #23
0
# main
#
if __name__ == '__main__':
    optManager = DASOptionParser()
    (opts, args) = optManager.getOpt()

    if not len([val for val in opts.__dict__.values() if val]):
        print("Run with --help for more options")
        sys.exit(0)

    t0 = time.time()
    if opts.verbose:
        debug = opts.verbose
    else:
        debug = 0
    MGR = DASCore(debug=debug)
    DAS = DASCouchDB(MGR)
    if opts.listviews:
        for viewname, viewmap in DAS.views.items():
            print()
            print("DAS view:", viewname)
            print(viewmap['map'])
        sys.exit(0)

    if opts.delete:
        if opts.system:
            msg = "Delete '%s' docs in '%s' couch DB" % \
                (opts.system, opts.delete)
            DAS.delete(opts.delete, opts.system)
        else:
            msg = "Delete '%s' couch DB" % opts.delete
コード例 #24
0
ファイル: schema_adapter2.py プロジェクト: perrozzi/DAS
        print('entity_names')
        pprint.pprint(self.entity_names)
        print('search_field_names')
        pprint.pprint(self._lookup_keys)
        #print 'ENTITY FIELDS (BY LOOKUP):'
        #pprint.pprint(dict(self._fields_dict))
        print('ENTITY FIELDS (BY LOOKUP MULTI ENTITY):')
        pprint.pprint([
            "{0}: {1}".format(lookup, self._fields_dict[lookup].keys())
            for lookup in self._fields_dict.keys() if ',' in lookup
        ])


if __name__ == '__main__':
    from DAS.core.das_core import DASCore
    s = DasSchemaAdapter(DASCore(multitask=False))
    #pprint.pprint(s.list_result_fields())

    print('validate input params():', \
        s.validate_input_params(set(), entity='dataset.name'))
    print(
        'validate input params(dataset.name):',
        s.validate_input_params(set(['dataset.name']), entity='dataset.name'))
    print(
        'validate input params run(dataset.name):',
        s.validate_input_params(set(['dataset.name']),
                                entity='run.run_number',
                                final_step=True))

    # non related entity in input
    print(
コード例 #25
0
class DASSearch(DASWebManager):
    """
    DAS web interface.

    Serves the DAS search form and renders query results in several
    representations (list, table, plain text, XML, JSON, YUI JSON).
    All data access goes through the DAS cache server REST API
    (``self.cachesrv``); a local DASCore instance is used only for
    key/mapping introspection.
    """
    def __init__(self, config={}):
        # NOTE(review): mutable default argument kept for interface
        # compatibility; config is only read here, never mutated.
        DASWebManager.__init__(self, config)
        try:
            # try what is supplied from WebTools framework
            cdict = self.config.dictionary_()
            self.cachesrv = cdict.get('cache_server_url',
                                      'http://localhost:8211')
            self.base = '/dascontrollers'
        except Exception:
            # stand-alone version
            self.cachesrv = config.get('cache_server_url',
                                       'http://localhost:8211')
            self.base = '/das'
        self.dasmgr = DASCore()
        self.daskeys = self.dasmgr.das_keys()
        self.daskeys.sort()
        self.dasmapping = self.dasmgr.mapping
        self.daslogger = self.dasmgr.logger
        self.pageviews = ['xml', 'list', 'json', 'yuijson']
        msg = "DASSearch::init is started with base=%s" % self.base
        self.daslogger.debug(msg)
        print(msg)

    def top(self):
        """
        Define masthead for all DAS web pages
        """
        return self.templatepage('das_top', base=self.base)

    def bottom(self, response_div=True):
        """
        Define footer for all DAS web pages
        """
        return self.templatepage('das_bottom', div=response_div)

    def page(self, content, ctime=None, response_div=True):
        """
        Wrap given content into a full DAS web page (masthead + footer
        with call time, service list and timestamp).
        """
        page = self.top()
        page += content
        timestamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
        services = self.dasmgr.keys()
        srv = ""
        for key in services.keys():
            srv += "%s, " % key
        srv = srv[:-2]  # remove last comma
        page += self.templatepage('das_bottom',
                                  ctime=ctime,
                                  services=srv,
                                  timestamp=timestamp,
                                  div=response_div)
        return page

    @expose
    def faq(self, *args, **kwargs):
        """
        represent DAS FAQ.
        """
        page = self.templatepage('das_faq',
                                 operators=', '.join(das_operators()),
                                 aggregators=', '.join(das_aggregators()))
        return self.page(page, response_div=False)

    @expose
    def help(self, *args, **kwargs):
        """
        represent DAS help
        """
        page = self.templatepage('das_help')
        return self.page(page, response_div=False)

    @expose
    def cli(self, *args, **kwargs):
        """
        Serve DAS CLI file download.
        """
        clifile = os.path.join(os.environ['DAS_ROOT'],
                               'src/python/DAS/tools/das_cache_client.py')
        return serve_file(clifile, content_type='text/plain')

    @expose
    def services(self, *args, **kwargs):
        """
        represent DAS services: per-system DAS keys, lookup keys,
        APIs and registered map/reduce functions.
        """
        dasdict = {}
        daskeys = []
        for system, keys in self.dasmgr.mapping.daskeys().items():
            tmpdict = {}
            for key in keys:
                tmpdict[key] = self.dasmgr.mapping.lookup_keys(system, key)
                if key not in daskeys:
                    daskeys.append(key)
            dasdict[system] = dict(keys=dict(tmpdict),
                                   apis=self.dasmgr.mapping.list_apis(system))
        mapreduce = [r for r in self.dasmgr.rawcache.get_map_reduce()]
        page = self.templatepage('das_services',
                                 dasdict=dasdict,
                                 daskeys=daskeys,
                                 mapreduce=mapreduce)
        return self.page(page, response_div=False)

    @expose
    def api(self, name, **kwargs):
        """
        Return DAS mapping record about provided API.
        """
        record = self.dasmgr.mapping.api_info(name)
        show = kwargs.get('show', 'json')
        page = "<b>DAS mapping record</b>"
        if show == 'json':
            jsoncode = {'jsoncode': json2html(record, "")}
            page += self.templatepage('das_json', **jsoncode)
        elif show == 'code':
            code = pformat(record, indent=1, width=100)
            page += self.templatepage('das_code', code=code)
        else:
            code = yaml.dump(record,
                             width=100,
                             indent=4,
                             default_flow_style=False)
            page += self.templatepage('das_code', code=code)
        return self.page(page, response_div=False)

    @expose
    def default(self, *args, **kwargs):
        """
        Default method.
        """
        return self.index(args, kwargs)

    def check_input(self, uinput):
        """
        Check provided input for valid DAS keys.

        Return an error page snippet when the input is empty or refers
        to unknown DAS keys; return None when the input is acceptable.
        """
        error = self.templatepage('das_ambiguous',
                                  input=uinput,
                                  entities=', '.join(self.daskeys))
        if not uinput:
            return error
        # check provided input. If at least one word is not part of
        # self.daskeys return ambiguous template.
        mongo_query = self.dasmgr.mongoparser.parse(uinput)
        fields = mongo_query.get('fields', [])
        if not fields:
            fields = []
        spec = mongo_query.get('spec', {})
        # dict.keys() is a view in Python 3; convert to a list before
        # concatenating with the fields list
        words = fields + list(spec.keys())
        if not words:
            return error
        for word in words:
            found = 0
            # was a bare (undefined) das_keys name; use instance keys
            for key in self.daskeys:
                if word.find(key) != -1:
                    found = 1
            if not found:
                return error
        return

    @expose
    def index(self, *args, **kwargs):
        """
        represents DAS web interface. 
        It uses das_searchform template for
        input form and yui_table for output Table widget.
        """
        try:
            if not args and not kwargs:
                page = self.form()
                return self.page(page)
            uinput = getarg(kwargs, 'input', '')
            results = self.check_input(uinput)
            if results:
                return self.page(self.form() + results)
            view = getarg(kwargs, 'view', 'list')
            if args:
                # NOTE(review): args[0][0] is the first character of the
                # first positional arg -- looks suspicious, but kept as-is
                # since the dispatch convention is not visible here.
                return getattr(self, args[0][0])(args[1])
            if view not in self.pageviews:
                raise Exception("Page view '%s' is not supported" % view)
            return getattr(self, '%sview' % view)(kwargs)
        except Exception:
            return self.error(self.gen_error_msg(kwargs))

    @expose
    def form(self, uinput=None, msg=None):
        """
        provide input DAS search form
        """
        page = self.templatepage('das_searchform',
                                 input=uinput,
                                 msg=msg,
                                 base=self.base)
        return page

    def gen_error_msg(self, kwargs):
        """
        Generate standard error message.
        """
        self.daslogger.error(traceback.format_exc())
        error = "My request to DAS is failed\n\n"
        error += "Input parameters:\n"
        for key, val in kwargs.items():
            error += '%s: %s\n' % (key, val)
        error += "Exception type: %s\nException value: %s\nTime: %s" \
                    % (sys.exc_info()[0], sys.exc_info()[1], web_time())
        error = error.replace("<", "").replace(">", "")
        return error

    @expose
    def error(self, msg):
        """
        Show error message.
        """
        error = self.templatepage('das_error', msg=msg)
        page = self.page(self.form() + error)
        return page

    @exposedasjson
    def wrap2dasjson(self, data):
        """DAS JSON wrapper"""
        return data

    @exposedasplist
    def wrap2dasxml(self, data):
        """DAS XML wrapper"""
        return data

    @expose
    def records(self, *args, **kwargs):
        """
        Retrieve records by id (args[0]) or list all record ids,
        fetching them from the cache server REST API.
        """
        try:
            recordid = None
            format = ''
            if args:
                recordid = args[0]
                spec = {'_id': recordid}
                fields = None
                query = dict(fields=fields, spec=spec)
                if len(args) == 2:
                    format = args[1]
            elif kwargs and '_id' in kwargs:
                spec = {'_id': kwargs['_id']}
                fields = None
                query = dict(fields=fields, spec=spec)
            else:  # return all ids
                query = dict(fields=None, spec={})

            # NOTE(review): nresults() expects kwargs with an 'input' key,
            # here it receives a mongo query dict -- confirm against
            # the cache server API.
            nresults = self.nresults(query)
            time0 = time.time()
            url = self.cachesrv
            idx = getarg(kwargs, 'idx', 0)
            limit = getarg(kwargs, 'limit', 10)
            show = getarg(kwargs, 'show', 'json')
            coll = getarg(kwargs, 'collection', 'merge')
            params = {
                'query': json.dumps(query),
                'idx': idx,
                'limit': limit,
                'collection': coll
            }
            path = '/rest/records'
            headers = {"Accept": "application/json"}
            try:
                data = urllib2_request('GET',
                                       url + path,
                                       params,
                                       headers=headers)
                result = json.loads(data)
            except Exception:
                self.daslogger.error(traceback.format_exc())
                result = {'status': 'fail', 'reason': traceback.format_exc()}
            res = ""
            if result['status'] == 'success':
                if recordid:  # we got id
                    for row in result['data']:
                        if show == 'json':
                            jsoncode = {'jsoncode': json2html(row, "")}
                            res += self.templatepage('das_json', **jsoncode)
                        elif show == 'code':
                            code = pformat(row, indent=1, width=100)
                            res += self.templatepage('das_code', code=code)
                        else:
                            code = yaml.dump(row,
                                             width=100,
                                             indent=4,
                                             default_flow_style=False)
                            res += self.templatepage('das_code', code=code)
                else:
                    for row in result['data']:
                        rid = row['_id']
                        del row['_id']
                        record = dict(id=rid, daskeys=', '.join(row))
                        res += self.templatepage('das_record', **record)
            else:
                # failure: look up the reason in the result dict (the
                # original indexed the status *string*, which raises)
                if 'reason' in result:
                    return self.error(result['reason'])
                # include kwargs in the message (original format string
                # had no placeholder and silently dropped them)
                msg = 'Unknown error, kwargs=%s' % kwargs
                return self.error(msg)
            if recordid:
                if format:
                    if format == 'xml':
                        return self.wrap2dasxml(result['data'])
                    elif format == 'json':
                        return self.wrap2dasjson(result['data'])
                    else:
                        return self.error('Unsupported data format %s' %
                                          format)
                page = res
            else:
                url = '/das/records?'
                idict = dict(nrows=nresults,
                             idx=idx,
                             limit=limit,
                             results=res,
                             url=url)
                page = self.templatepage('das_pagination', **idict)

            form = self.form(uinput="")
            ctime = (time.time() - time0)
            page = self.page(form + page, ctime=ctime)
            return page
        except Exception:
            return self.error(self.gen_error_msg(kwargs))

    def nresults(self, kwargs):
        """
        Ask the cache server for the number of results of given query;
        return -1 on failure.
        """
        url = self.cachesrv
        uinput = getarg(kwargs, 'input', '')
        params = {'query': uinput}
        path = '/rest/nresults'
        headers = {"Accept": "application/json"}
        try:
            data = urllib2_request('GET', url + path, params, headers=headers)
            record = json.loads(data)
        except Exception:
            self.daslogger.error(traceback.format_exc())
            record = {'status': 'fail', 'reason': traceback.format_exc()}
        if record['status'] == 'success':
            return record['nresults']
        else:
            msg = "nresults returns status: %s" % str(record)
            self.daslogger.info(msg)
        return -1

    def send_request(self, method, kwargs):
        "Send POST request to server with provided parameters"
        url = self.cachesrv
        uinput = getarg(kwargs, 'input', '')
        format = getarg(kwargs, 'format', '')
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 10)
        skey = getarg(kwargs, 'sort', '')
        sdir = getarg(kwargs, 'dir', 'asc')
        params = {
            'query': uinput,
            'idx': idx,
            'limit': limit,
            'skey': skey,
            'order': sdir
        }
        if method == 'POST':
            path = '/rest/create'
        elif method == 'GET':
            path = '/rest/request'
        else:
            raise Exception('Unsupported method %s' % method)
        headers = {
            'Accept': 'application/json',
            'Content-type': 'application/json'
        }
        try:
            data = urllib2_request(method, url + path, params, headers=headers)
            result = json.loads(data)
        except Exception:
            self.daslogger.error(traceback.format_exc())
            result = {'status': 'fail', 'reason': traceback.format_exc()}
        return result

    def result(self, kwargs):
        """
        invoke DAS search call, parse results and return the list of
        result rows (empty list on failure)
        """
        result = self.send_request('GET', kwargs)
        res = []
        if type(result) is bytes:
            data = json.loads(result)
        else:
            data = result
        if data['status'] == 'success':
            res = data['data']
        return res

    @exposedasplist
    def xmlview(self, kwargs):
        """
        provide DAS XML
        """
        rows = self.result(kwargs)
        return rows

    @exposedasjson
    def jsonview(self, kwargs):
        """
        provide DAS JSON
        """
        rows = self.result(kwargs)
        return rows

    def convert2ui(self, idict):
        """
        Convert input row (dict) into UI presentation: yield
        (ui_key, value) pairs for every presentable DAS key.
        """
        for key in idict.keys():
            if key == 'das' or key == '_id' or key == 'das_id':
                continue
            for item in self.dasmapping.presentation(key):
                try:
                    daskey = item['das']
                    uikey = item['ui']
                    for value in access(idict, daskey):
                        yield uikey, value
                except Exception:
                    # fall back to raw key/value when presentation fails
                    yield key, idict[key]

    @expose
    def listview(self, kwargs):
        """
        provide DAS list view
        """
        # force to load the page all the time
        cherrypy.response.headers['Cache-Control'] = 'no-cache'
        cherrypy.response.headers['Pragma'] = 'no-cache'

        time0 = time.time()
        ajaxreq = getarg(kwargs, 'ajax', 0)
        uinput = getarg(kwargs, 'input', '')
        limit = getarg(kwargs, 'limit', 10)
        show = getarg(kwargs, 'show', 'json')
        form = self.form(uinput=uinput)
        # self.status sends request to Cache Server
        # Cache Server uses das_core to retrieve status
        status = self.status(input=uinput, ajax=0)
        if status == 'no data':
            # no data in raw cache, send POST request
            self.send_request('POST', kwargs)
            ctime = (time.time() - time0)
            page = self.status(input=uinput)
            page = self.page(form + page, ctime=ctime)
            return page
        elif status == 'fail':
            kwargs['reason'] = 'Unable to get status from data-service'
            return self.error(self.gen_error_msg(kwargs))

        total = self.nresults(kwargs)
        rows = self.result(kwargs)
        nrows = len(rows)
        page = ""
        ndict = {'nrows': total, 'limit': limit}
        page = self.templatepage('das_nrecords', **ndict)
        style = "white"
        for row in rows:
            id = row['_id']
            page += '<div class="%s"><hr class="line" />' % style
            gen = self.convert2ui(row)
            for uikey, value in [k for k, g in groupby(gen)]:
                page += "<b>%s</b>: %s<br />" % (uikey, value)
            pad = ""
            if show == 'json':
                jsoncode = {'jsoncode': json2html(row, pad)}
                jsonhtml = self.templatepage('das_json', **jsoncode)
                jsondict = dict(data=jsonhtml, id=id, rec_id=id)
                page += self.templatepage('das_row', **jsondict)
            elif show == 'code':
                code = pformat(row, indent=1, width=100)
                data = self.templatepage('das_code', code=code)
                # 'rec_id' must be a string key (bare rec_id is undefined)
                datadict = {'data': data, 'id': id, 'rec_id': id}
                page += self.templatepage('das_row', **datadict)
            else:
                code = yaml.dump(row,
                                 width=100,
                                 indent=4,
                                 default_flow_style=False)
                data = self.templatepage('das_code', code=code)
                # 'rec_id' must be a string key (bare rec_id is undefined)
                datadict = {'data': data, 'id': id, 'rec_id': id}
                page += self.templatepage('das_row', **datadict)
            page += '</div>'
        ctime = (time.time() - time0)
        return self.page(form + page, ctime=ctime)

    @exposetext
    def plainview(self, kwargs):
        """
        provide DAS plain view
        """
        # result() returns a flat list of rows; the previous 3-way
        # unpacking raised unless exactly three rows were returned
        rows = self.result(kwargs)
        page = ""
        for item in rows:
            item = str(item).replace('[', '').replace(']', '')
            page += "%s\n" % item.replace("'", "")
        return page

    @exposejson
    def yuijson(self, **kwargs):
        """
        Provide JSON in YUI compatible format to be used in DynamicData table
        widget, see
        http://developer.yahoo.com/yui/examples/datatable/dt_dynamicdata.html
        """
        rows = self.result(kwargs)
        rowlist = []
        id = 0
        for row in rows:
            das = row['das']
            if type(das) is dict:
                das = [das]
            resdict = {}
            for jdx in range(0, len(das)):
                item = das[jdx]
                resdict[id] = id
                for idx in range(0, len(item['system'])):
                    api = item['api'][idx]
                    system = item['system'][idx]
                    key = item['selection_keys'][idx]
                    data = row[key]
                    if type(data) is list:
                        data = data[jdx]
                    if type(data) is list:
                        data = data[idx]
                    # I need to extract from DAS object the values for UI keys
                    for item in self.dasmapping.presentation(key):
                        daskey = item['das']
                        uiname = item['ui']
                        if uiname not in resdict:
                            resdict[uiname] = ""
                        # look at key attributes, which may be compound as well
                        # e.g. block.replica.se
                        if type(data) is dict:
                            result = dict(data)
                        elif type(data) is list:
                            result = list(data)
                        else:
                            result = data
                        res = ""
                        try:
                            for elem in daskey.split('.')[1:]:
                                if elem in result:
                                    res = result[elem]
                                    resdict[uiname] = res
                        except Exception:
                            pass
            if resdict not in rowlist:
                rowlist.append(resdict)
            id += 1
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 10)
        total = len(rowlist)
        jsondict = {
            'recordsReturned': len(rowlist),
            'totalRecords': total,
            'startIndex': idx,
            'sort': 'true',
            'dir': 'asc',
            'pageSize': limit,
            'records': rowlist
        }
        return jsondict

    @expose
    def tableview(self, kwargs):
        """
        provide DAS table view
        """
        # urlencode moved to urllib.parse in Python 3
        try:
            from urllib.parse import urlencode
        except ImportError:  # Python 2
            from urllib import urlencode
        kwargs['format'] = 'html'
        uinput = getarg(kwargs, 'input', '')
        ajaxreq = getarg(kwargs, 'ajax', 0)
        form = self.form(uinput=uinput)
        time0 = time.time()
        total = self.nresults(kwargs)
        if not total:
            ctime = (time.time() - time0)
            form = self.form(uinput)
            page = self.templatepage('not_ready')
            page = self.page(form + page, ctime=ctime)
            return page

        # find out which selection keys were used
        selkeys = uinput.replace('find ', '').split(' where ')[0].split(',')
        uikeys = []
        for key in selkeys:
            res = self.dasmapping.presentation(key)
            uikeys += [item['ui'] for item in res]
        titles = ["id"] + uikeys
        coldefs = ""
        for title in titles:
            coldefs += '{key:"%s",label:"%s",sortable:true,resizeable:true},' \
                        % (title, title)
        coldefs = "[%s]" % coldefs[:-1]  # remove last comma
        coldefs = coldefs.replace("},{", "},\n{")
        limit = getarg(kwargs, 'limit', 10)
        names = {
            'titlelist': titles,
            'coldefs': coldefs,
            'rowsperpage': limit,
            'total': total,
            'tag': 'mytag',
            'ajax': ajaxreq,
            'input': urlencode(dict(input=uinput))
        }
        page = self.templatepage('das_table', **names)
        ctime = (time.time() - time0)
        page = self.page(form + page, ctime=ctime)
        return page

    @expose
    def status(self, **kwargs):
        """
        Place request to obtain status about given query
        """
        # unquote_plus moved to urllib.parse in Python 3
        try:
            from urllib.parse import unquote_plus
        except ImportError:  # Python 2
            from urllib import unquote_plus
        img = '<img src="%s/images/loading.gif" alt="loading"/>' % self.base
        req = """
        <script type="application/javascript">
        setTimeout('ajaxStatus()',3000)
        </script>"""

        def set_header():
            "Set HTTP header parameters"
            tstamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
            cherrypy.response.headers['Expire'] = tstamp
            cherrypy.response.headers['Cache-control'] = 'no-cache'

        uinput = kwargs.get('input', '')
        uinput = unquote_plus(uinput)
        ajax = kwargs.get('ajax', 1)
        view = kwargs.get('view', 'list')
        params = {'query': uinput}
        path = '/rest/status'
        url = self.cachesrv
        headers = {'Accept': 'application/json'}
        try:
            res = urllib2_request('GET', url + path, params, headers=headers)
            data = json.loads(res)
        except Exception:
            self.daslogger.error(traceback.format_exc())
            data = {'status': 'fail'}
        if ajax:
            cherrypy.response.headers['Content-Type'] = 'text/xml'
            if data['status'] == 'ok':
                page = '<script type="application/javascript">reload()</script>'
            elif data['status'] == 'fail':
                page = '<script type="application/javascript">reload()</script>'
                page += self.error(self.gen_error_msg(kwargs))
            else:
                page = img + ' ' + str(data['status']) + ', please wait...'
                img_stop = ''
                page += ', <a href="/das/">stop</a> request'
                page += req
                set_header()
            page = ajax_response(page)
        else:
            try:
                page = data['status']
            except Exception:
                page = traceback.format_exc()
        return page
コード例 #26
0
ファイル: das_services_t.py プロジェクト: zdenekmaxa/DAS
class testCMSFakeDataServices(unittest.TestCase):
    """
    A test class for the DAS core module.

    Spins up a local fake data service (DASTestDataService) with YML
    maps for a handful of systems and exercises DASCore end-to-end:
    fill the cache via das.call() and read back via get_from_cache().
    """
    def setUp(self):
        """
        set up DAS core module
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://localhost:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser', 
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.create_db()

        # Add fake mapping records
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()

    def add_service(self, system, ymlfile):
        """
        Add Fake data service mapping records. We provide system name
        which match corresponding name in DASTestDataService and
        associated with this system YML map file.
        """
        fname  = os.path.join(DASPATH, 'services/maps/%s' % ymlfile)
        url    = self.base + '/%s' % system
        for record in read_service_map(fname):
            record['url'] = url
            record['system'] = system
            self.mgr.add(record)
        for record in read_service_map(fname, 'notations'):
            record['system'] = system
            self.mgr.add(record)

    def clear_collections(self):
        """clean-up test collections"""
        conn = Connection(host=self.dburi)
        for dbname in ['mapping', 'analytics', 'das', 'parser', 'keylearning']:
            db = conn[dbname]
            if  dbname != 'das':
                db.drop_collection(self.collname)
            else:
                db.drop_collection(self.dascache)
                db.drop_collection(self.dasmerge)
                db.drop_collection(self.dasmr)
            

    def tearDown(self):
        """Invoke after each test"""
        self.server.stop()

    def testDBSService(self):
        """test DASCore with test DBS service"""
        query  = "primary_dataset=abc" # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "primary_dataset=abc" # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('primary_dataset.name')
        expect = 'abc'
        self.assertEqual(expect, result)

    def testPhedexAndSiteDBServices(self):
        """test DASCore with test PhEDEx and SiteDB services"""
        query  = "site=T3_US_Cornell" # invoke query to fill DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "site=T3_US_Cornell | grep site.name" # invoke query to get results from DAS cache
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))
        expect = ['_id', 'das_id', 'site', 'cache_id', 'das', 'qhash']
        expect.sort()
        # dict.keys() returns a view in Python 3 and has no sort();
        # use sorted() which works on both Python 2 and 3
        rkeys = sorted(result[0].keys())
        self.assertEqual(expect, rkeys)

    def testAggregators(self):
        """test DASCore aggregators via zip service"""
        query  = "zip=1000"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "zip=1000 | count(zip.place.city)"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = {"function": "count", "result": {"value": 2}, 
                  "key": "zip.place.city", "_id":0}
        self.assertEqual(expect, result[0])

    def testIPService(self):
        """test DASCore with IP service"""
        query  = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "ip=137.138.141.145 | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)

    def testRecords(self):
        """test records DAS keyword with all services"""
        query  = "ip=137.138.141.145"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "site=T3_US_Cornell"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.call(dquery)
        expect = "ok"
        self.assertEqual(expect, result)

        query  = "records | grep ip.address"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        result = DotDict(result[0]).get('ip.address')
        expect = '137.138.141.145'
        self.assertEqual(expect, result)

        query  = "records | grep site.name"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        result = [r for r in result]
        expect = 'T3_US_Cornell'
        self.assertEqual(expect, DotDict(result[0]).get('site.name'))

        query  = "records"
        dquery = DASQuery(query, mongoparser=self.mongoparser)
        result = self.das.get_from_cache(dquery, collection=self.dasmerge)
        res    = []
        for row in result:
            # dict.has_key() was removed in Python 3; use the 'in'
            # operator which is valid on both Python 2 and 3
            if  'ip' in row:
                res.append(DotDict(row).get('ip.address'))
            if  'site' in row:
                for item in row['site']:
                    if  'name' in item and item['name'] not in res:
                        res.append(item['name'])
        res.sort()
        expect = ['137.138.141.145', 'T3_US_Cornell']
        self.assertEqual(expect, res)
コード例 #27
0
class Robot(object):
    """
    DAS Robot (daemon) class to fetch data from a provided URL/API
    and store the results into the DAS cache.

    :param config: DAS configuration dictionary (passed to DASCore)
    :param query: DAS query string this robot keeps refreshing
    :param sleep: pause between consecutive cache updates, in seconds
    """
    def __init__(self, config=None, query=None, sleep=600):
        self.dascore = DASCore(config, nores=True)
        logdir       = getarg(config, 'logdir', '/tmp')
        # one pid file per query, keyed by the query hash
        self.pidfile = os.path.join(logdir, 'robot-%s.pid' % genkey(query))

        # os.devnull is the portable null-device name
        if (hasattr(os, "devnull")):
            devnull  = os.devnull
        else:
            devnull  = "/dev/null"

        self.stdin   = devnull # we do not read from stdinput
        self.stdout  = getarg(config, 'stdout', devnull)
        self.stderr  = getarg(config, 'stderr', devnull)
        self.query   = query
        self.sleep   = sleep

    def daemonize(self):
        """
        do the UNIX double-fork magic, see Stevens' "Advanced
        Programming in the UNIX Environment" for details (ISBN 0201563177)
        http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
        """
        try:
            pid = os.fork()
            if  pid > 0:
                # exit first parent
                sys.exit(0)
        except OSError as err:
            sys.stderr.write("fork #1 failed: %d (%s)\n" \
                % (err.errno, err.strerror))
            sys.exit(1)

        # decouple from parent environment
        os.chdir("/")
        os.umask(0)
        os.setsid()

        # do second fork
        try:
            pid = os.fork()
            if  pid > 0:
                # exit from second parent
                sys.exit(0)
        except OSError as err:
            sys.stderr.write("fork #2 failed: %d (%s)\n" \
                % (err.errno, err.strerror))
            sys.exit(1)

        # redirect standard file descriptors
        sys.stdout.flush()
        sys.stderr.flush()
        # BUGFIX: the file() builtin was removed in Python 3; use open().
        # Unbuffered text mode (buffering=0) is also invalid in Python 3,
        # so stderr is opened with default buffering.
        stdi = open(self.stdin, 'r')
        stdo = open(self.stdout, 'a+')
        stde = open(self.stderr, 'a+')
        os.dup2(stdi.fileno(), sys.stdin.fileno())
        os.dup2(stdo.fileno(), sys.stdout.fileno())
        os.dup2(stde.fileno(), sys.stderr.fileno())

        # write pidfile; close it deterministically via the context manager
        atexit.register(self.delpid)
        pid = str(os.getpid())
        with open(self.pidfile, 'w+') as pfile:
            pfile.write("%s\n" % pid)

    def delpid(self):
        """Delete PID file"""
        os.remove(self.pidfile)

    def _read_pid(self):
        """Return the pid stored in the pidfile, or None if unavailable."""
        try:
            with open(self.pidfile, 'r') as pidf:
                return int(pidf.read().strip())
        except IOError:
            return None

    def start(self):
        """
        Start the daemon
        """
        # Check for a pidfile to see if the daemon already runs
        if self._read_pid():
            message = "pidfile %s already exist. Daemon already running?\n"
            sys.stderr.write(message % self.pidfile)
            sys.exit(1)

        # Start the daemon
        self.daemonize()
        self.run()

    def stop(self):
        """
        Stop the daemon
        """
        # Get the pid from the pidfile
        pid = self._read_pid()
        if not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return # not an error in a restart

        # Try killing the daemon process
        try:
            while 1:
                os.kill(pid, SIGTERM)
                time.sleep(0.1)
        except OSError as err:
            # BUGFIX: OSError has no .find() method; inspect the string
            # representation of the error instead
            if str(err).find("No such process") > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                print_exc(err)
                sys.exit(1)

    def restart(self):
        """
        Restart the daemon
        """
        self.stop()
        self.start()

    def status(self):
        """
        Return status information about Robot instance.
        """
        # Get the pid from the pidfile
        pid = self._read_pid()
        if  not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return # not an error in a restart

        print("DAS populator information")
        print("PID    :", pid)
        print("pidfile:", self.pidfile)
        print("stdin  :", self.stdin)
        print("stdout :", self.stdout)
        print("stderr :", self.stderr)
        print("sleep  :", self.sleep)
        print("query  :", self.query)


    def run(self):
        """
        Method which will be called after the process has been
        daemonized by start() or restart().
        """
        if  not self.query:
            print("DAS query is not provided")
            sys.exit(1)

        while True:
            self.dascore.call(self.query)
            time.sleep(self.sleep)
コード例 #28
0
ファイル: das_web_srv.py プロジェクト: ktf/DAS
class DASWebService(DASWebManager):
    """
    DAS web service interface.
    """

    def __init__(self, dasconfig):
        """
        Configure the DAS web service from *dasconfig* and spawn the
        task manager plus the DASCore monitoring thread.

        :param dasconfig: full DAS configuration dictionary; the
            "web_server" and "mongodb" sections are read here
        :raises Exception: when the configured queue type is not one of
            Queue/PriorityQueue
        """
        DASWebManager.__init__(self, dasconfig)
        config = dasconfig["web_server"]
        # DAS process ids are 32-character hex hashes
        self.pid_pat = re.compile(r"^[a-z0-9]{32}")
        self.base = config["url_base"]
        # UI status-polling interval (milliseconds)
        self.interval = config.get("status_update", 2500)
        self.engine = config.get("engine", None)
        self.check_clients = config.get("check_clients", False)
        nworkers = config["web_workers"]
        self.hot_thr = config.get("hot_threshold", 3000)
        self.dasconfig = dasconfig
        self.dburi = self.dasconfig["mongodb"]["dburi"]
        self.lifetime = self.dasconfig["mongodb"]["lifetime"]
        self.queue_limit = config.get("queue_limit", 50)
        qtype = config.get("qtype", "Queue")
        if qtype not in ["Queue", "PriorityQueue"]:
            msg = "Wrong queue type, qtype=%s" % qtype
            raise Exception(msg)
        # with an engine the task manager plugs into that bus; otherwise a
        # plain thread-pool task manager is used
        if self.engine:
            thr_name = "DASWebService:PluginTaskManager"
            self.taskmgr = PluginTaskManager(bus=self.engine, nworkers=nworkers, name=thr_name, qtype=qtype)
            self.taskmgr.subscribe()
        else:
            thr_name = "DASWebService:TaskManager"
            self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name, qtype=qtype)
        self.adjust = config.get("adjust_input", False)
        self.dasmgr = None  # defined at run-time via self.init()
        self.reqmgr = None  # defined at run-time via self.init()
        self.daskeys = []  # defined at run-time via self.init()
        self.colors = {}  # defined at run-time via self.init()
        self.dbs_url = None  # defined at run-time via self.init()
        self.dbs_global = None  # defined at run-time via self.init()
        self.kws = None  # defined at run-time via self.init()
        self.q_rewriter = None  # defined at run-time via self.init()
        self.dataset_daemon = config.get("dbs_daemon", False)
        self.dbsmgr = {}  # dbs_urls vs dbs_daemons, defined at run-time
        self.daskeyslist = []  # list of DAS keys
        self.init()

        # Monitoring thread which performs auto-reconnection
        thname = "dascore_monitor"
        start_new_thread(thname, dascore_monitor, ({"das": self.dasmgr, "uri": self.dburi}, self.init, 5))

    def dbs_daemon(self, config):
        """Start DBS daemon if it is requested via DAS configuration"""
        try:
            main_dbs_url = self.dbs_url
            dbs_urls = []
            print "### DBS URL:", self.dbs_url
            print "### DBS instances:", self.dbs_instances
            if not self.dbs_url or not self.dbs_instances:
                return  # just quit
            # build one URL per DBS instance by swapping the global instance
            # name inside the main DBS URL
            for inst in self.dbs_instances:
                dbs_urls.append((main_dbs_url.replace(self.dbs_global, inst), inst))
            interval = config.get("dbs_daemon_interval", 3600)
            dbsexpire = config.get("dbs_daemon_expire", 3600)
            preserve_dbs_col = config.get("preserve_on_restart", False)
            dbs_config = {"expire": dbsexpire, "preserve_on_restart": preserve_dbs_col}
            if self.dataset_daemon:
                for dbs_url, inst in dbs_urls:
                    dbsmgr = DBSDaemon(dbs_url, self.dburi, dbs_config)
                    self.dbsmgr[(dbs_url, inst)] = dbsmgr

                    def dbs_updater(_dbsmgr, interval):
                        """DBS updater daemon"""
                        # best-effort refresh loop: errors are swallowed so a
                        # transient DBS failure does not kill the thread
                        while True:
                            try:
                                _dbsmgr.update()
                            except:
                                pass
                            time.sleep(interval)

                    print "### Start DBSDaemon for %s" % dbs_url
                    # one updater thread per DBS instance URL
                    thname = "dbs_updater:%s" % dbs_url
                    start_new_thread(thname, dbs_updater, (dbsmgr, interval))
        except Exception as exc:
            print_exc(exc)

    def init(self):
        """Init DAS web server, connect to DAS Core"""
        try:
            self.reqmgr = RequestManager(lifetime=self.lifetime)
            self.dasmgr = DASCore(engine=self.engine)
            self.repmgr = CMSRepresentation(self.dasconfig, self.dasmgr)
            self.daskeys = self.dasmgr.das_keys()
            self.gfs = db_gridfs(self.dburi)
            self.daskeys.sort()
            self.dasmapping = self.dasmgr.mapping
            self.dbs_url = self.dasmapping.dbs_url()
            self.dbs_global = self.dasmapping.dbs_global_instance()
            self.dbs_instances = self.dasmapping.dbs_instances()
            self.dasmapping.init_presentationcache()
            # one color per data-service, used by the web UI
            self.colors = {"das": gen_color("das")}
            for system in self.dasmgr.systems:
                self.colors[system] = gen_color(system)
            # get SiteDB from global scope
            self.sitedbmgr = SERVICES.get("sitedb2", None)
            # Start DBS daemon
            if self.dataset_daemon:
                self.dbs_daemon(self.dasconfig["web_server"])
            if not self.daskeyslist:
                keylist = [r for r in self.dasmapping.das_presentation_map()]
                keylist.sort(key=lambda r: r["das"])
                self.daskeyslist = keylist

        except ConnectionFailure as _err:
            # MongoDB is down; the dascore_monitor thread retries init() later
            tstamp = dastimestamp("")
            mythr = threading.current_thread()
            print "### MongoDB connection failure thread=%s, id=%s, time=%s" % (mythr.name, mythr.ident, tstamp)
        except Exception as exc:
            # any other failure leaves the server in a degraded, but
            # well-defined, state with all run-time attributes reset
            print_exc(exc)
            self.dasmgr = None
            self.reqmgr = None
            self.dbs_url = None
            self.dbs_global = None
            self.dbs_instances = []
            self.daskeys = []
            self.colors = {}
            self.q_rewriter = None
            return

        # KWS and Query Rewriting failures are not fatal
        try:
            # init query rewriter, if needed
            if self.dasconfig["query_rewrite"]["pk_rewrite_on"]:
                self.q_rewriter = CMSQueryRewrite(self.repmgr, self.templatepage)
        except Exception as exc:
            print_exc(exc)
            self.q_rewriter = None

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def redirect(self, **kwargs):
        """Render the DAS redirect (access denied) page."""
        default_reason = "You do not have permission to access the resource requested."
        reason = kwargs.get("reason", default_reason)
        if reason:
            reason = "Reason: " + reason
        content = self.templatepage("das_redirect", msg=reason)
        return self.page(content, response_div=False)

    def bottom(self, response_div=True):
        """Render the common footer shared by all DAS web pages."""
        return self.templatepage(
            "das_bottom", div=response_div,
            base=self.base, version=DAS.version)

    def page(self, content, ctime=None, response_div=True):
        """Wrap *content* with the standard DAS header and footer."""
        footer = self.templatepage(
            "das_bottom", ctime=ctime, base=self.base,
            version=DAS.version, div=response_div)
        return self.top() + content + footer

    @expose
    @checkargs(DAS_WEB_INPUTS + ["section", "highlight"])
    def faq(self, **kwargs):
        """Render the DAS FAQ page."""
        operators = ", ".join(das_operators())
        guide = self.templatepage("dbsql_vs_dasql", operators=operators)
        keys_page = self.templatepage("das_keys", daskeys=self.daskeyslist)
        content = self.templatepage(
            "das_faq",
            guide=guide,
            daskeys=keys_page,
            section=kwargs.get("section", None),
            highlight=kwargs.get("highlight", None),
            operators=operators,
            aggregators=", ".join(das_aggregators()),
        )
        return self.page(content, response_div=False)

    @expose
    def cli(self):
        """Serve the DAS CLI client script as a plain-text download."""
        # the DAS install root lies three levels above this module
        dasroot = "/".join(__file__.split("/")[:-3])
        script = os.path.join(dasroot, "DAS/tools/das_client.py")
        return serve_file(script, content_type="text/plain")

    @expose
    def movetodas(self):
        """Placeholder page pointing DBS Data Discovery users to DAS."""
        style = "width:600px;margin-left:auto;margin-right:auto;padding-top:20px"
        page = """<div style="%s">""" % style
        # BUGFIX: corrected spelling of "deprecated" in the user-facing text
        page += "Dear user,<br/>DBS Data Discovery page is deprecated.<br/>"
        page += "Please migrate to Data Aggregation Service located at"
        page += "<p>https://cmsweb.cern.ch/das/</p>"
        page += "<em>CMS HTTP group.</em>"
        # BUGFIX: dropped stray implicit concatenation of an empty string
        page += "</div>"
        return page

    @expose
    def opensearch(self):
        """Serve the DAS opensearch descriptor document."""
        # fall back to the public DAS URL when base is unset or relative
        if self.base and self.base.find("http://") != -1:
            base_url = self.base
        else:
            base_url = "http://cmsweb.cern.ch/das"
        descriptor = self.templatepage("das_opensearch", base=base_url)
        content_type = "application/opensearchdescription+xml"
        cherrypy.response.headers["Content-Type"] = content_type
        return descriptor

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def services(self):
        """
        represent DAS services
        """
        dasdict = {}
        daskeys = set()
        # NOTE: Python 2 idiom -- dict.keys() returns a sortable list here
        dasmapkeys = self.dasmgr.mapping.dasmapscache.keys()
        dasmapkeys.sort()
        for key in dasmapkeys:
            srv, urn = key
            if srv not in self.dasmgr.systems:
                continue
            entry = self.dasmgr.mapping.dasmapscache[key]
            tmpdict = {}
            # collect das_key -> rec_key mappings for this API entry,
            # de-duplicating the record keys
            for item in entry["das_map"]:
                dkey = item["das_key"]
                rkey = item["rec_key"]
                daskeys.add(dkey)
                vlist = tmpdict.get(dkey, []) + [rkey]
                tmpdict[dkey] = list(set(vlist))
            apis = []
            # merge with keys/apis already recorded for this service
            if srv in dasdict:
                vdict = dasdict[srv]
                okeys = vdict["keys"]
                apis = vdict["apis"] + [urn]
                for kkk, vvv in okeys.iteritems():
                    vlist = tmpdict.get(kkk, []) + vvv
                    tmpdict[kkk] = list(set(vlist))
            else:
                apis = [urn]
            vdict = dict(keys=dict(tmpdict), apis=apis)
            dasdict[srv] = vdict
        mapreduce = [r for r in self.dasmgr.rawcache.get_map_reduce()]
        page = self.templatepage("das_services", dasdict=dasdict, daskeys=list(daskeys), mapreduce=mapreduce)
        return self.page(page, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def api(self, system, name):
        """Render the DAS mapping record for the given service API."""
        record = self.dasmgr.mapping.api_info(system, name)
        content = "<b>DAS mapping record</b>" + das_json_full(record)
        return self.page(content, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def default(self, *args, **kwargs):
        """
        Default method.
        """
        # NOTE(review): args and kwargs are forwarded as two positional
        # values, so index() receives them inside its own *args and its
        # **kwargs stays empty; confirm this calling convention is intended
        # before changing it.
        return self.index(args, kwargs)

    def adjust_input(self, kwargs):
        """
        Rewrite the user's input in-place to match common DAS keyword
        patterns, e.g. /Zee/*/* -> dataset=*Zee*, T1_US -> site=T1_US.

        More ambiguous input (such as Zee -> dataset=*Zee*) is left to the
        keyword search. Active only when adjust_input is enabled in the
        DAS server configuration.
        """
        if not self.adjust:
            return
        user_query = kwargs.get("input", "")
        instance = kwargs.get("instance", self.dbs_global)
        kwargs["input"] = identify_apparent_query_patterns(user_query, instance)

    def _get_dbsmgr(self, inst):
        """
        Return the DBSDaemon registered for the given DBS instance name,
        or None when the dataset daemon is disabled or no match exists.
        """
        if not self.dataset_daemon:
            return None
        # keys of self.dbsmgr are (dbs_url, dbs_instance) pairs
        for (dbs_url, dbs_inst), daemon in self.dbsmgr.items():
            if dbs_inst == inst:
                return daemon
        return None

    def _get_kws_host(self):
        """Keyword-search host from configuration (default: same server)."""
        balance_cfg = self.dasconfig["load_balance"]
        return balance_cfg["kws_host"]

    def _get_autocompl_host(self):
        """Autocompletion host from configuration (default: same server)."""
        balance_cfg = self.dasconfig.get("load_balance", {})
        return balance_cfg.get("autocompletion_host", "")

    def is_kws_enabled(self):
        """Whether the keyword-search client (ajax request) is enabled."""
        kws_cfg = self.dasconfig["keyword_search"]
        return kws_cfg["kws_on"]

    def is_kws_service_enabled(self):
        """Whether the keyword-search service (ajax responder) is enabled."""
        kws_cfg = self.dasconfig["keyword_search"]
        return kws_cfg["kws_service_on"]

    def generate_dasquery(self, uinput, inst, html_mode=True):
        """
        Check provided input as valid DAS input query.
        Returns status and content (either error message or valid DASQuery)
        :param uinput: user's input
        :param inst: DBS instance
        :param html_mode: whether errors shall be output in html
        """

        def error_msg(msg, show_kws=False, tmpl="das_ambiguous", **kwargs):
            """
            Helper function which renders an error template, default is
            das_ambiguous, but can be overriden via tmpl param.
            Template has two versions: html and text for CLI.

            The template is passed with msg, base, guide, and **kwargs. """
            guide = self.templatepage("dbsql_vs_dasql", operators=", ".join(das_operators()))
            # render keyword search loader, if needed
            kws = ""
            if show_kws:
                kws = self.templatepage(
                    "kwdsearch_via_ajax", uinput=uinput, inst=inst or self.dbs_global, kws_host=self._get_kws_host()
                )
            # render the appropriate template (html vs text mode)
            page = self.templatepage(
                tmpl + ("_txt" if not html_mode else ""),
                msg=msg,
                base=self.base,
                guide=guide,
                kws_enabled=show_kws,
                kws=kws,
                **kwargs
            )
            return page

        if not uinput:
            return 1, error_msg("No input query")

        # Generate a DASQuery object, if it fails we catch the exception and
        # wrap it for upper layer (web interface)
        try:
            dasquery = DASQuery(uinput, instance=inst)
        except WildcardMultipleMatchesException as err:
            das_parser_error(uinput, str(err).replace("\n", ""))
            return 1, error_msg(str(err), tmpl="das_wildcard_err", suggest=err.options.values)
        except WildcardMatchingException as err:
            das_parser_error(uinput, str(type(err)) + " " + str(err))
            return 1, error_msg(str(err))
        except Exception as err:
            das_parser_error(uinput, str(type(err)) + " " + str(err))

            # show multiple dataset matches for 1 keyword queries
            if hasattr(response, "dataset_matches_msg"):
                return 1, error_msg(response.dataset_matches_msg, show_kws=self.is_kws_enabled())

            # for non Wildcard parsing errors, show the Keyword Search
            return 1, error_msg(str(err), show_kws=self.is_kws_enabled())

        # DAS query validation
        if isinstance(uinput, dict):  # DASQuery w/ {'spec':{'_id:id}}
            pass
        elif uinput.find("queries") != -1:
            pass
        elif uinput.find("records") != -1:
            pass
        else:  # normal user DAS query
            try:
                service_map = dasquery.service_apis_map()
            except Exception as exc:
                msg = "Fail to obtain service API map for this DASQuery"
                print msg
                print_exc(exc)
                return 1, error_msg(msg)
            if not service_map:
                return 1, error_msg("Unable to resolve the query over the " "available services: %s" % dasquery)
        return 0, dasquery

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def index(self, *args, **kwargs):
        """
        Render the main DAS web interface: the das_searchform template for
        the input form plus the yui_table output widget.
        """
        user_query = getarg(kwargs, "input", "")
        search_form = self.form(uinput=user_query, cards=True)
        return self.page(search_form)

    def form(self, uinput="", instance=None, view="list", cards=False):
        """
        Render the DAS search form.
        """
        # TODO: rename into search_form()? (template is also called like this

        # normalize quotes, e.g. file.creation_date>'20120101 12:01:01'
        if "'" in uinput:
            uinput = uinput.replace("'", '"')
        if not instance:
            instance = self.dbs_global
        cards_page = self.templatepage(
            "das_cards", base=self.base, show=cards,
            width=900, height=220, cards=help_cards(self.base))
        keys_page = self.templatepage("das_keys", daskeys=self.daskeyslist)
        return self.templatepage(
            "das_searchform",
            input=uinput,
            init_dbses=list(self.dbs_instances),
            daskeys=keys_page,
            base=self.base,
            instance=instance,
            view=view,
            cards=cards_page,
            autocompl_host=json.dumps(self._get_autocompl_host()),
        )

    @expose
    def error(self, msg, wrap=True):
        """Render an error message, optionally wrapped in the full page."""
        content = self.templatepage("das_error", msg=str(msg))
        if wrap:
            return self.page(self.form() + content)
        return content

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def gridfs(self, **kwargs):
        """
        Retrieve a single record from GridFS by its file id.

        :param fid: hex string of the GridFS ObjectId to fetch
        :raises HTTPError: 500 when fid is missing or the lookup fails
        """
        if "fid" not in kwargs:
            code = web_code("No file id")
            raise HTTPError(500, "DAS error, code=%s" % code)
        fid = kwargs.get("fid")
        try:
            fds = self.gfs.get(ObjectId(fid))
            return fds.read()
        except Exception as exc:
            print_exc(exc)
            code = web_code("Exception")
            raise HTTPError(500, "DAS error, code=%s" % code)
        # BUGFIX: removed unreachable trailing code -- both branches of the
        # try/except above either return or raise, so the former
        # data/ctime/json.dumps tail could never execute

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def records(self, *args, **kwargs):
        """
        Retrieve DAS records. A record id may be given as the first
        positional argument or via the _id parameter; otherwise all
        record ids are listed with pagination.
        """
        try:
            recordid = None
            if args:
                recordid = args[0]
                spec = {"_id": ObjectId(recordid)}
                fields = None
                query = dict(fields=fields, spec=spec)
            elif kwargs and "_id" in kwargs:
                spec = {"_id": ObjectId(kwargs["_id"])}
                fields = None
                query = dict(fields=fields, spec=spec)
            else:  # return all ids
                query = dict(fields=None, spec={})

            res = ""
            time0 = time.time()
            idx = getarg(kwargs, "idx", 0)
            limit = getarg(kwargs, "limit", 10)
            coll = kwargs.get("collection", "merge")
            view = kwargs.get("view", "")
            if view == "json":
                res = []  # the JSON view accumulates rows instead of HTML
            inst = kwargs.get("instance", self.dbs_global)
            form = self.form(uinput="")
            check, content = self.generate_dasquery(query, inst)
            if check:
                return self.page(form + content, ctime=time.time() - time0)
            dasquery = content  # returned content is valid DAS query
            nresults = self.dasmgr.rawcache.nresults(dasquery, coll)
            gen = self.dasmgr.rawcache.get_from_cache(dasquery, idx=idx, limit=limit, collection=coll)
            if recordid:  # we got id
                for row in gen:
                    if view == "json":
                        res.append(row)
                    else:
                        res += das_json(dasquery, row)
            else:
                # listing mode: render one das_record entry per row
                for row in gen:
                    rid = row["_id"]
                    del row["_id"]
                    res += self.templatepage("das_record", id=rid, collection=coll, daskeys=", ".join(row))
            if recordid:
                page = res
            else:
                url = "/das/records?"
                if nresults:
                    page = self.templatepage("das_pagination", nrows=nresults, idx=idx, limit=limit, url=url)
                else:
                    page = "No results found, nresults=%s" % nresults
                page += res

            ctime = time.time() - time0
            if view == "json":
                return json.dumps(res)
            page = self.page(form + page, ctime=ctime)
            return page
        except Exception as exc:
            print_exc(exc)
            return self.error(gen_error_msg(kwargs))

    @jsonstreamer
    def datastream(self, kwargs):
        """Stream a DAS (head, data) pair in JSON format."""
        head = kwargs.get("head", dict(timestamp=time.time()))
        # expose the mongo query before dropping the DASQuery object,
        # which is not JSON-serializable
        if "mongo_query" not in head:
            head["mongo_query"] = head["dasquery"].mongo_query if "dasquery" in head else {}
        for key in ("dasquery", "args"):
            if key in head:
                del head[key]
        data = kwargs.get("data", [])
        if self.check_clients:
            # update client version
            cli, cli_msg = check_client_version()
            head.update({"client": cli, "client_message": cli_msg})
            # for old clients setup appropriate status/reason
            if cli_msg:
                head.update({"status": "warning", "reason": cli_msg})
        return head, data

    def get_data(self, kwargs):
        """
        Invoke DAS workflow and get data from the cache.

        Returns a (head, data) pair where head carries status metadata
        and data the list of records.
        """
        head = dict(timestamp=time.time())
        head["args"] = kwargs
        uinput = kwargs.get("input", "")
        inst = kwargs.get("instance", self.dbs_global)
        idx = getarg(kwargs, "idx", 0)
        limit = getarg(kwargs, "limit", 0)  # do not impose limit
        coll = kwargs.get("collection", "merge")
        status = kwargs.get("status")
        error = kwargs.get("error")
        reason = kwargs.get("reason")
        dasquery = kwargs.get("dasquery", None)
        time0 = time.time()
        if dasquery:
            dasquery = DASQuery(dasquery, instance=inst)
        else:
            check, content = self.generate_dasquery(uinput, inst, html_mode=False)
            if check:
                head.update({"status": "fail", "reason": content, "ctime": time.time() - time0, "input": uinput})
                data = []
                return head, data
            dasquery = content  # returned content is valid DAS query
        try:
            nres = self.dasmgr.nresults(dasquery, coll)
            data = self.dasmgr.get_from_cache(dasquery, idx, limit)
            # check that we got what we expected
            data = [r for r in data]
            # NOTE(review): xrange(1, 3, 5) yields only [1], i.e. a single
            # retry after 1 second; confirm whether more retries were intended
            if nres and not len(data):
                for retry in xrange(1, 3, 5):
                    msg = "retry in %s sec" % retry
                    print dastimestamp("DAS WARNING "), msg, dasquery
                    time.sleep(retry)  # retry one more time
                    data = self.dasmgr.get_from_cache(dasquery, idx, limit)
                    data = [r for r in data]
                    if len(data):
                        break
            if nres and not len(data):
                msg = "fail to get all data for %s, nres=%s, len(data)=%s" % (dasquery, nres, len(data))
                print dastimestamp("DAS WARNING "), msg
                status = "fail"
                reason = "Fail to retrieve data from DAS cache, please retry"

            if dasquery.aggregators:
                # aggregators split DAS record into sub-system and then
                # apply aggregator functions, therefore we need to correctly
                # account for nresults. Resolve generator into list and take
                # its length as nresults value.
                data = [r for r in data]
                nres = len(data)
            if error:  # DAS record contains an error
                status = "error"
            head.update({"status": status, "nresults": nres, "ctime": time.time() - time0, "dasquery": dasquery})
        except Exception as exc:
            status = "fail"
            reason = str(exc)
            print_exc(exc)
            head.update({"status": status, "ctime": time.time() - time0, "dasquery": dasquery})
            data = []
        head.update({"incache": self.dasmgr.incache(dasquery, coll="cache"), "apilist": self.dasmgr.apilist(dasquery)})
        if reason:
            head.update({"reason": reason})
        if status != "ok":
            head.update(self.info())
        return head, data

    def info(self):
        """Return a status summary of the DAS server."""
        server_status = {
            "nrequests": self.reqmgr.size(),
            "nworkers": self.taskmgr.nworkers(),
            "dasweb": self.reqmgr.status(),
        }
        if self.dasmgr and self.dasmgr.taskmgr:
            server_status["dascore"] = self.dasmgr.taskmgr.status()
        return dict(das_server=server_status)

    def busy(self):
        """
        Check server load and report busy status if
        nrequests - nworkers > queue limit
        """
        nrequests = self.reqmgr.size()
        # busy when the request backlog exceeds the configured queue limit
        if (nrequests - self.taskmgr.nworkers()) > self.queue_limit:
            msg = "#request=%s, queue_limit=%s, #workers=%s" % (nrequests, self.taskmgr.nworkers(), self.queue_limit)
            print dastimestamp("DAS WEB SERVER IS BUSY "), msg
            return True
        return False

    def busy_page(self, uinput=None):
        """Render the 'server is busy' page with the search form on top."""
        notice = "<h3>DAS server is busy, please try later</h3>"
        return self.page(self.form(uinput) + notice)

    def _is_web_request(self, view):
        """
        Return False when the request asks for a non-HTML response
        (explicit view parameter or Accept header), True otherwise.
        """
        # explicit output type requested via the view parameter
        if view in ("json", "xml", "plain"):
            return False

        # check accept header - e.g. das client only provides accept header
        accepts = cherrypy.request.headers.elements("Accept")
        non_html_types = ["application/json"]
        html_accepted = [elem for elem in accepts if elem.value not in non_html_types]

        # if only non html content types are accepted we are in non html mode
        if accepts and not html_accepted:
            return False

        return True

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def cache(self, **kwargs):
        """
        DAS web cache interface. Fire up new process for new requests and
        record its pid. The client is in charge to keep track of pid.
        The new process uses DAS core call to request the data into cache.
        Since queries are cached, a repeated call with the same query
        has no cost to DAS core.
        """
        # do not allow caching
        set_no_cache_flags()

        # if busy return right away
        if self.busy():
            reason = "DAS server is busy"
            # BUGFIX: was self.taskmgr.nworkds() — an AttributeError raised
            # on every busy response; also dropped unused 'level' local.
            reason += ", #requests=%s, #workers=%s, queue size=%s" % (
                self.reqmgr.size(),
                self.taskmgr.nworkers(),
                self.queue_limit,
            )
            head = dict(timestamp=time.time())
            head.update({"status": "busy", "reason": reason, "ctime": 0})
            return self.datastream(dict(head=head, data=[]))

        uinput = kwargs.get("input", "").strip()
        if not uinput:
            head = {"status": "fail", "reason": "No input found", "args": kwargs, "ctime": 0, "input": uinput}
            return self.datastream(dict(head=head, data=[]))
        self.adjust_input(kwargs)
        pid = kwargs.get("pid", "")
        inst = kwargs.get("instance", self.dbs_global)
        uinput = kwargs.get("input", "")
        view = kwargs.get("view", "list")
        data = []

        # textual views need text only error messages...
        check, content = self.generate_dasquery(uinput, inst, html_mode=self._is_web_request(view))
        if check:
            head = dict(timestamp=time.time())
            head.update(
                {"status": "fail", "reason": "Can not interpret the query" + " (while creating DASQuery)", "ctime": 0}
            )
            if not self._is_web_request(view):
                head["error_details"] = content
                head["reason"] = head["reason"] + "\n\n" + content
            return self.datastream(dict(head=head, data=data))

        dasquery = content  # returned content is valid DAS query
        status, error, reason = self.dasmgr.get_status(dasquery)
        kwargs.update({"status": status, "error": error, "reason": reason})
        if not pid:
            pid = dasquery.qhash
        # use 'is None' for the sentinel comparison (was '== None')
        if status is None and not self.reqmgr.has_pid(pid):  # submit new request
            addr = cherrypy.request.headers.get("Remote-Addr")
            _evt, pid = self.taskmgr.spawn(self.dasmgr.call, dasquery, uid=addr, pid=dasquery.qhash)
            self.reqmgr.add(pid, kwargs)
            return pid
        if status == "ok":
            self.reqmgr.remove(pid)
            kwargs["dasquery"] = dasquery
            head, data = self.get_data(kwargs)
            return self.datastream(dict(head=head, data=data))
        kwargs["dasquery"] = dasquery.storage_query
        # pids are 32-character query hashes; reject anything else
        if not self.pid_pat.match(str(pid)) or len(str(pid)) != 32:
            self.reqmgr.remove(pid)
            head = {"status": "fail", "reason": "Invalid pid", "args": kwargs, "ctime": 0, "input": uinput}
            return self.datastream(dict(head=head, data=[]))
        elif self.taskmgr.is_alive(pid):
            return pid
        else:  # process is done, get data
            self.reqmgr.remove(pid)
            head, data = self.get_data(kwargs)
            return self.datastream(dict(head=head, data=data))

    def get_page_content(self, kwargs, complete_msg=True):
        """
        Retrieve page content for provided set of parameters.

        For textual views (json/xml/plain) with complete_msg set, returns a
        short "reload" message; otherwise fetches the data and renders it
        with the view-specific representation method. Any failure (other
        than HTTPError, which propagates) is converted into an error page.
        """
        page = ""
        try:
            view = kwargs.get("view", "list")
            if view == "plain":
                # plain view dumps everything; a row limit makes no sense
                if "limit" in kwargs:
                    del kwargs["limit"]
            if view in ["json", "xml", "plain"] and complete_msg:
                page = "Request completed. Reload the page ..."
            else:
                head, data = self.get_data(kwargs)

                allowed_views = ["list", "table", "plain", "xml", "json"]
                # BUGFIX: was a bare `raise` with no active exception, which
                # produced an unrelated RuntimeError; raise a meaningful
                # exception instead (still handled by the except below).
                if view not in allowed_views:
                    raise Exception("Unsupported view: %s" % view)

                func = getattr(self, view + "view")
                page = func(head, data)
        except HTTPError as _err:
            raise
        except Exception as exc:
            print_exc(exc)
            msg = gen_error_msg(kwargs)
            page = self.templatepage("das_error", msg=msg)
        return page

    @expose
    def download(self, lfn):
        """Serve the DAS download (filemover) page for the given LFN."""
        content = self.templatepage("filemover", lfn=lfn)
        return self.page(content, response_div=False)

    @expose
    def makepy(self, dataset, instance):
        """
        Build a CMSSW python configuration snippet for the given dataset.
        """
        # dataset names look like /primary/processed/tier
        if not re.compile("/.*/.*/.*").match(dataset):
            return self.error("Invalid dataset name")
        query = "file dataset=%s instance=%s | grep file.name" % (dataset, instance)
        try:
            data = self.dasmgr.result(query, idx=0, limit=0)
        except Exception as exc:
            print_exc(exc)
            msg = "Exception: %s\n" % str(exc)
            msg += "Unable to retrieve data for query=%s" % query
            return self.error(msg)
        # collect unique LFNs, preserving first-seen order
        lfns = []
        seen = set()
        for rec in data:
            filename = DotDict(rec).get("file.name")
            if filename not in seen:
                seen.add(filename)
                lfns.append(filename)
        page = self.templatepage("das_files_py", lfnList=lfns, pfnList=[])
        cherrypy.response.headers["Content-Type"] = "text/plain"
        return page

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def request(self, **kwargs):
        """
        Request data from DAS cache.

        Entry point for interactive queries: validates the input, spawns a
        DAS core task for new queries, and renders either the results page
        or a self-refreshing "check pid" page while a task is running.
        """
        # do not allow caching
        set_no_cache_flags()

        uinput = kwargs.get("input", "").strip()
        if not uinput:
            kwargs["reason"] = "No input found"
            return self.redirect(**kwargs)

        # if busy return right away
        if self.busy():
            return self.busy_page(uinput)

        time0 = time.time()
        self.adjust_input(kwargs)
        view = kwargs.get("view", "list")
        inst = kwargs.get("instance", self.dbs_global)
        uinput = kwargs.get("input", "")
        form = self.form(uinput=uinput, instance=inst, view=view)
        check, content = self.generate_dasquery(uinput, inst)
        if check:
            # content is an error page here
            if view == "list" or view == "table":
                return self.page(form + content, ctime=time.time() - time0)
            else:
                return content
        dasquery = content  # returned content is valid DAS query
        status, error, reason = self.dasmgr.get_status(dasquery)
        kwargs.update({"status": status, "error": error, "reason": reason})
        pid = dasquery.qhash
        # use 'is None' for the sentinel comparison (was '== None')
        if status is None:  # process new request
            kwargs["dasquery"] = dasquery.storage_query
            addr = cherrypy.request.headers.get("Remote-Addr")
            _evt, pid = self.taskmgr.spawn(self.dasmgr.call, dasquery, uid=addr, pid=dasquery.qhash)
            self.reqmgr.add(pid, kwargs)
        elif status == "ok" or status == "fail":
            self.reqmgr.remove(pid)

            # check if query can be rewritten via nested PK query
            rew_msg = self.q_rewriter and self.q_rewriter.check_fields(dasquery)
            if rew_msg:
                content = self.templatepage("das_error", msg=rew_msg)
                return self.page(form + content, ctime=time.time() - time0)

            kwargs["dasquery"] = dasquery
            page = self.get_page_content(kwargs, complete_msg=False)
            ctime = time.time() - time0
            if view == "list" or view == "table":
                return self.page(form + page, ctime=ctime)

            return page
        if self.taskmgr.is_alive(pid):
            # task still running: render the auto-refreshing pid-check page
            page = self.templatepage(
                "das_check_pid",
                method="check_pid",
                uinput=uinput,
                view=view,
                base=self.base,
                pid=pid,
                interval=self.interval,
            )
        else:
            self.reqmgr.remove(pid)
            page = self.get_page_content(kwargs)
        ctime = time.time() - time0
        return self.page(form + page, ctime=ctime)

    @expose
    def status(self):
        """Render the list of all requests currently queued in DAS."""
        pending = [req for req in self.reqmgr.items()]
        return self.page(self.templatepage("das_status", requests=pending))

    @expose
    @checkargs(["pid"])
    def check_pid(self, pid):
        """
        Check status of given pid. This is a server callback
        function for ajaxCheckPid, see js/ajax_utils.js

        Returns an HTML fragment describing the task state, or replays
        the original request when this host has no record of the pid.
        """
        # do not allow caching
        set_no_cache_flags()

        img = '<img src="%s/images/loading.gif" alt="loading"/>' % self.base
        page = ""
        try:
            if self.taskmgr.is_alive(pid):
                # task still running: show spinner + pid
                page = img + " processing PID=%s" % pid
            else:
                # at this point we don't know if request arrived to this host
                # or it was processed. To distinguish the case we'll ask
                # request manager for that pid
                if self.reqmgr.has_pid(pid):
                    self.reqmgr.remove(pid)
                    page = "Request PID=%s is completed" % pid
                    page += ", please wait for results to load"
                else:
                    # there're no request on this server, re-initiate it
                    # by replaying the query parsed from the Referer URL
                    ref = cherrypy.request.headers.get("Referer", None)
                    if ref:
                        url = urlparse(ref)
                        params = dict(parse_qsl(url.query))
                        return self.request(**params)
                    else:
                        msg = "No referer in cherrypy.request.headers"
                        msg += "\nHeaders: %s" % cherrypy.request.headers
                        # NOTE: Python 2 print statement (module predates py3)
                        print dastimestamp("DAS WEB ERROR "), msg
        except Exception as err:
            # on any failure clean up both managers and show an error page
            msg = "check_pid fails for pid=%s" % pid
            print dastimestamp("DAS WEB ERROR "), msg
            print_exc(err)
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)
            return self.error(gen_error_msg({"pid": pid}), wrap=False)
        return page

    def listview(self, head, data):
        """Delegate list-view rendering to the representation manager."""
        render = self.repmgr.listview
        return render(head, data)

    def tableview(self, head, data):
        """Delegate tabular-view rendering to the representation manager."""
        render = self.repmgr.tableview
        return render(head, data)

    def plainview(self, head, data):
        """Delegate plain-text rendering to the representation manager."""
        render = self.repmgr.plainview
        return render(head, data)

    def xmlview(self, head, data):
        """Delegate XML rendering to the representation manager."""
        render = self.repmgr.xmlview
        return render(head, data)

    def jsonview(self, head, data):
        """Delegate JSON rendering to the representation manager."""
        render = self.repmgr.jsonview
        return render(head, data)

    @exposedasjson
    @enable_cross_origin
    @checkargs(["query", "dbs_instance"])
    def autocomplete(self, **kwargs):
        """
        Provide autocomplete suggestions for the DAS web UI.
        """
        query = kwargs.get("query", "").strip()
        suggestions = autocomplete_helper(query, self.dasmgr, self.daskeys)
        # does any suggestion already mention a dataset?
        has_dataset = any("dataset=" in row["value"] for row in suggestions)
        dbsinst = kwargs.get("dbs_instance", self.dbs_global)
        if self.dataset_daemon and has_dataset:
            dbsmgr = self._get_dbsmgr(dbsinst)
            if "dataset=" in query:
                query = query.replace("dataset=", "")
            # extend the list with datasets known to the DBS daemon
            for name in dbsmgr.find(query):
                suggestions.append({"css": "ac-info", "value": "dataset=%s" % name, "info": "dataset"})
        return suggestions
Code example #29
0
File: das_web_srv.py  Project: dhootha/DAS
class DASWebService(DASWebManager):
    """
    DAS web service interface.
    """

    def __init__(self, dasconfig):
        """
        Initialize the DAS web service from the ``web_server`` section of
        the DAS configuration: set up the task manager, run-time defaults,
        DBS daemons, and a monitoring thread that auto-reconnects DAS core.
        """
        DASWebManager.__init__(self, dasconfig)
        config = dasconfig['web_server']
        # request pids are 32-character hashes (DASQuery.qhash)
        self.pid_pat     = re.compile(r'^[a-z0-9]{32}')
        # TODO: self.base shall be automatically included in all tmpls
        self.base        = config['url_base']
        # UI poll interval (ms) used by the check_pid page
        self.interval    = config.get('status_update', 2500)
        self.engine      = config.get('engine', None)
        self.check_clients = config.get('check_clients', False)
        nworkers         = config['web_workers']
        self.hot_thr     = config.get('hot_threshold', 3000)
        self.dasconfig   = dasconfig
        self.dburi       = self.dasconfig['mongodb']['dburi']
        self.lifetime    = self.dasconfig['mongodb']['lifetime']
        self.queue_limit = config.get('queue_limit', 50)
        qtype            = config.get('qtype', 'Queue')
        qfreq            = config.get('qfreq', 5)
        if  qtype not in ['Queue', 'PriorityQueue']:
            msg = 'Wrong queue type, qtype=%s' % qtype
            raise Exception(msg)
#         if  self.engine:
#             thr_name = 'DASWebService:PluginTaskManager'
#             self.taskmgr = PluginTaskManager(bus=self.engine, \
#                     nworkers=nworkers, name=thr_name, qtype=qtype, \
#                     qfreq=qfreq)
#             self.taskmgr.subscribe()
#         else:
#             thr_name = 'DASWebService:TaskManager'
#             self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name, \
#                     qtype=qtype, qfreq=qfreq)
        thr_name = 'DASWebService:TaskManager'
        self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name, \
                qtype=qtype, qfreq=qfreq)
        self.adjust      = config.get('adjust_input', False)
        # The attributes below are (re)defined at run-time via self.init(),
        # which is also re-invoked by the dascore_monitor thread on failures.
        self.dasmgr      = None # defined at run-time via self.init()
        self.reqmgr      = None # defined at run-time via self.init()
        self.daskeys     = []   # defined at run-time via self.init()
        self.colors      = {}   # defined at run-time via self.init()
        self.dbs_url     = None # defined at run-time via self.init()
        self.dbs_global  = None # defined at run-time via self.init()
        self.dbs_instances = [] # defined at run-time via self.init()
        self.kws         = None # defined at run-time via self.init()
        self.q_rewriter  = None # defined at run-time via self.init()
        self.dataset_daemon = None
        self.dbsmgr      = {} # dbs_urls vs dbs_daemons, defined at run-time
        self.daskeyslist = [] # list of DAS keys
        self.init()
        self.dbs_init(config)

        # Monitoring thread which performs auto-reconnection
        thname = 'dascore_monitor'
        start_new_thread(thname, dascore_monitor, \
                ({'das':self.dasmgr, 'uri':self.dburi}, self.init, 5))

    def dbs_init(self, config):
        """Spawn one DBSDaemon per known DBS instance."""
        print("### DBS URL:", self.dbs_url)
        print("### DBS global instance:", self.dbs_global)
        print("### DBS instances:", self.dbs_instances)
        # derive per-instance URLs from the main (global-instance) DBS URL
        dbs_urls = [(self.dbs_url.replace(self.dbs_global, inst), inst)
                    for inst in self.dbs_instances]
        # NOTE(review): 'interval' is read from config but never used here
        interval  = config.get('dbs_daemon_interval', 3600)
        dbs_config = {
            'expire': config.get('dbs_daemon_expire', 3600),
            'preserve_on_restart': config.get('preserve_on_restart', False),
        }
        for dbs_url, inst in dbs_urls:
            self.dbsmgr[(dbs_url, inst)] = \
                    DBSDaemon(dbs_url, self.dburi, dbs_config)

    def init(self):
        """
        Init DAS web server, connect to DAS Core.

        On MongoDB connection failure the run-time attributes are left as-is
        and the dascore_monitor thread will retry; on any other failure all
        run-time attributes are reset so the server degrades gracefully.
        """
        try:
            self.reqmgr     = RequestManager(lifetime=self.lifetime)
            self.dasmgr     = DASCore(engine=self.engine)
            self.repmgr     = CMSRepresentation(self.dasconfig, self.dasmgr)
            self.daskeys    = self.dasmgr.das_keys()
            self.gfs        = db_gridfs(self.dburi)
            self.daskeys.sort()
            self.dasmapping = self.dasmgr.mapping
            self.dbs_url    = self.dasmapping.dbs_url()
            self.dbs_global = self.dasmapping.dbs_global_instance()
            self.dbs_instances = self.dasmapping.dbs_instances()
            self.dasmapping.init_presentationcache()
            # a distinct color per DAS system, used by the web UI
            self.colors = {'das':gen_color('das')}
            for system in self.dasmgr.systems:
                self.colors[system] = gen_color(system)
            if  not self.daskeyslist:
                keylist = [r for r in self.dasmapping.das_presentation_map()]
                keylist.sort(key=lambda r: r['das'])
                self.daskeyslist = keylist

        except ConnectionFailure as _err:
            # MongoDB is down; log and let dascore_monitor re-invoke init()
            tstamp = dastimestamp('')
            mythr  = threading.current_thread()
            print("### MongoDB connection failure thread=%s, id=%s, time=%s" \
                    % (mythr.name, mythr.ident, tstamp))
        except Exception as exc:
            # reset all run-time attributes to a consistent "disabled" state
            print_exc(exc)
            self.dasmgr  = None
            self.reqmgr  = None
            self.dbs_url = None
            self.dbs_global = None
            self.dbs_instances = []
            self.daskeys = []
            self.colors  = {}
            self.q_rewriter = None
            return

        # KWS and Query Rewriting failures are not fatal
        try:
            # init query rewriter, if needed
            if self.dasconfig['query_rewrite']['pk_rewrite_on']:
                self.q_rewriter = CMSQueryRewrite(self.repmgr,
                                                  self.templatepage)
        except Exception as exc:
            print_exc(exc)
            self.q_rewriter = None

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def redirect(self, **kwargs):
        """
        Render the DAS redirect page with an optional reason message.
        """
        dmsg = 'You do not have permission to access the resource requested.'
        msg = kwargs.get('reason', dmsg)
        if msg:
            msg = 'Reason: ' + msg
        body = self.templatepage('das_redirect', msg=msg)
        return self.page(body, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def dumpthreads(self, **kwargs):
        """
        Dump all thread stacks and report when the dump was taken.
        """
        dumpstacks('web call', 'web frame')
        tstamp = time.strftime("%Y%m%d %H:%M:%S GMT", time.gmtime())
        return self.page('Thread dump performed: %s' % tstamp,
                         response_div=False)

    def bottom(self, response_div=True):
        """
        Define footer for all DAS web pages.

        :param response_div: whether the footer includes the response div
        """
        # NOTE: the original computed an unused timestamp on every call;
        # the template receives the `time` module and formats it itself.
        return self.templatepage('das_bottom', div=response_div, base=self.base,
                version=DAS.version, time=time)

    def page(self, content, ctime=None, response_div=True):
        """
        Wrap *content* with the standard DAS header and footer.
        """
        footer = self.templatepage('das_bottom', ctime=ctime, base=self.base,
                                   version=DAS.version, div=response_div,
                                   time=time)
        return self.top() + content + footer

    @expose
    @checkargs(DAS_WEB_INPUTS + ['section', 'highlight'])
    @tools.secmodv2()
    def faq(self, **kwargs):
        """
        Render the DAS FAQ page.
        """
        section = kwargs.get('section', None)
        highlight = kwargs.get('highlight', None)
        guide = self.templatepage('dbsql_vs_dasql',
                    operators=', '.join(das_operators()))
        daskeys = self.templatepage('das_keys', daskeys=self.daskeyslist)
        body = self.templatepage(
            'das_faq', guide=guide, daskeys=daskeys,
            section=section, highlight=highlight,
            operators=', '.join(das_operators()),
            aggregators=', '.join(das_aggregators()))
        return self.page(body, response_div=False)

    @expose
    @tools.secmodv2()
    def cli(self):
        """
        Serve DAS CLI file download.

        The standalone client was retired; users are pointed at dasgoclient.
        """
        msg = 'Please use dasgoclient which is available in any CMSSW releases'
        return self.page(msg)

    @expose
    @tools.secmodv2()
    def movetodas(self):
        """Placeholder page shown during the DBS to DAS migration."""
        style = \
            "width:600px;margin-left:auto;margin-right:auto;padding-top:20px"
        page  = """<div style="%s">""" % style
        # BUGFIX: user-facing typo "depricated" -> "deprecated"
        page += "Dear user,<br/>DBS Data Discovery page is deprecated.<br/>"
        page += "Please migrate to Data Aggregation Service located at"
        page += "<p>https://cmsweb.cern.ch/das/</p>"
        page += "<em>CMS HTTP group.</em>"
        # NOTE: original ended with '"</div>"""' (an implicitly concatenated
        # empty string literal); same rendered output, cleaner form here.
        page += "</div>"
        return page

    @expose
    @tools.secmodv2()
    def opensearch(self):
        """
        Serve DAS opensearch file.
        """
        # fall back to the public DAS URL when base is unset or relative
        if self.base and 'http://' in self.base:
            base = self.base
        else:
            base = 'http://cmsweb.cern.ch/das'
        desc = self.templatepage('das_opensearch', base=base)
        cherrypy.response.headers['Content-Type'] = \
                'application/opensearchdescription+xml'
        return desc

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def keys(self, **kwds):
        """
        Show DAS keys and their attributes.

        Aggregates keylearning attribute records into a mapping of
        {das_key: {attribute: [query patterns]}} and renders it either as
        JSON (view=json) or as an HTML page.
        """
        adict = {}
        for row in self.dasmgr.keylearning.attributes():
            try:
                qpat = row.get('query_pat', [])
                key, attr = row['member'].split('.', 1)
            except Exception:
                # skip malformed records; BUGFIX: was a bare `except:`,
                # which also swallowed KeyboardInterrupt/SystemExit
                continue
            vdict = adict.setdefault(key, {})
            # merge query patterns for repeated (key, attr) pairs
            vdict[attr] = vdict.get(attr, []) + qpat
        view = kwds.get('view', '')
        if  view == 'json':
            return json.dumps(adict)
        page = self.templatepage('das_keys_attrs', attrs=adict)
        return self.page(page, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def services(self):
        """
        Render the DAS services page: per-system DAS keys and APIs.
        """
        dasdict = {}
        daskeys = set()
        for key in sorted(self.dasmgr.mapping.dasmapscache.keys()):
            srv, urn = key
            # only show systems DAS actually participates in
            if srv not in self.dasmgr.systems:
                continue
            entry = self.dasmgr.mapping.dasmapscache[key]
            keymap = {}
            for item in entry['das_map']:
                dkey = item['das_key']
                rkey = item['rec_key']
                daskeys.add(dkey)
                keymap[dkey] = list(set(keymap.get(dkey, []) + [rkey]))
            if srv in dasdict:
                # merge with what we already collected for this system
                prev = dasdict[srv]
                apis = prev['apis'] + [urn]
                for okey, ovals in prev['keys'].items():
                    keymap[okey] = list(set(keymap.get(okey, []) + ovals))
            else:
                apis = [urn]
            dasdict[srv] = dict(keys=dict(keymap), apis=apis)
        mapreduce = [r for r in self.dasmgr.rawcache.get_map_reduce()]
        page = self.templatepage('das_services', dasdict=dasdict,
                        dbses=self.dbs_instances, dbs_global=self.dbs_global,
                        daskeys=list(daskeys), mapreduce=mapreduce,
                        urllib=urllib)
        return self.page(page, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def nsystems(self):
        """
        Report the data-services participating in DAS.
        """
        names = ','.join(self.dasmgr.mapping.list_systems())
        return "DAS systems %s" % names

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def api(self, system, name):
        """
        Show the DAS mapping record for the given system/API name.
        """
        record = self.dasmgr.mapping.api_info(system, name)
        body = "<b>DAS mapping record</b>" + das_json_full(record)
        return self.page(body, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def default(self, *args, **kwargs):
        """
        Default method: delegate any unmatched URL to the index page.
        """
        # BUGFIX: pass through the original positional/keyword arguments;
        # previously args and kwargs were forwarded as two positional
        # objects, so index() received an empty kwargs and never saw the
        # user's 'input' parameter.
        return self.index(*args, **kwargs)

    def adjust_input(self, kwargs):
        """
        Adjust user input wrt common DAS keyword patterns, e.g.
        /Zee/*/* -> dataset=*Zee*, T1_US -> site=T1_US.

        More ambiguous input (such as Zee -> dataset=*Zee*) is however left
        to be handled by the keyword search.

        This is active only if adjust_input is set in DAS server configuration.
        """
        if not self.adjust:
            return
        query = kwargs.get('input', '')
        instance = kwargs.get('instance', self.dbs_global)
        kwargs['input'] = identify_apparent_query_patterns(query, instance)

    def _get_dbsmgr(self, inst):
        """
        Map a DBS instance name onto its DBSDaemon manager; return None
        when the dataset daemon is disabled or the instance is unknown.
        """
        if not self.dataset_daemon:
            return None
        # keys of self.dbsmgr are (dbs_url, dbs_instance) tuples
        for (dbs_url, dbs_inst), mgr in self.dbsmgr.items():
            if dbs_inst == inst:
                return mgr
        return None


    def _get_kws_host(self):
        """
        Return the keyword-search host from config; default is same server.
        """
        balance = self.dasconfig['load_balance']
        return balance['kws_host']

    def _get_autocompl_host(self):
        """
        Return the autocompletion host from config; default is same server.
        """
        balance = self.dasconfig.get('load_balance', {})
        return balance.get('autocompletion_host', '')

    def is_kws_enabled(self):
        """
        True when the keyword-search client (ajax request) is enabled.
        """
        kws_cfg = self.dasconfig['keyword_search']
        return kws_cfg['kws_on']

    def is_kws_service_enabled(self):
        """
        True when the keyword-search service (response to ajax call) is enabled.
        """
        kws_cfg = self.dasconfig['keyword_search']
        return kws_cfg['kws_service_on']

    def generate_dasquery(self, uinput, inst, html_mode=True, qcache=0):
        """
        Check provided input as valid DAS input query.
        Returns status and content (either error message or valid DASQuery)
        :param uinput: user's input
        :param inst: DBS instance
        :param html_mode: whether errors shall be output in html
        :param qcache: query cache lifetime passed to DASQuery
        """

        def error_msg(msg, show_kws=False, tmpl='das_ambiguous', **kwargs):
            """
            Helper function which renders an error template, default is
            das_ambiguous, but can be overriden via tmpl param.
            Template has two versions: html and text for CLI.

            The template is passed with msg, base, guide, and **kwargs. """
            # TODO: this shall be done by inheriting a parent template
            # TODO: no header/footer?
            guide = self.templatepage('dbsql_vs_dasql',
                                      operators=', '.join(das_operators()))
            # render keyword search loader, if needed
            kws = ''
            if show_kws:
                kws = self.templatepage('kwdsearch_via_ajax',
                                        uinput=uinput,
                                        jsonize=jsonize,
                                        url_extend_params_as_dict=url_extend_params_as_dict,
                                        inst=inst or self.dbs_global,
                                        kws_host=self._get_kws_host())
            # render the appropriate template (html vs text mode)
            page = self.templatepage(tmpl + ('_txt' if not html_mode else ''),
                                     msg=msg, base=self.base, guide=guide,
                                     kws_enabled=show_kws, kws=kws, **kwargs)
            return page

        if not uinput:
            return 1, error_msg('No input query')

        # Generate a DASQuery object, if it fails we catch the exception and
        # wrap it for upper layer (web interface)
        try:
            dasquery = DASQuery(uinput, instance=inst, qcache=qcache)
        except WildcardMultipleMatchesException as err:
            # TODO: hints could be shown here also, but it makes no sense, as
            # they are shown only when no matches are found
            if isinstance(err.options.values, list) and err.options.values:
                return 1, error_msg(str(err), tmpl='das_wildcard_err',
                                    suggest=err.options.values,
                                    url_extend_params=url_extend_params)
            return 1, error_msg(str(err), tmpl='das_wildcard_err',
                                url_extend_params=url_extend_params)

        except WildcardMatchingException as err:
            kwds = {'input':uinput, 'instance':inst}
            hints = self.hint_datasets(kwds)
            page = error_msg(str(err))
            for hint in hints:
                page += self.templatepage('hint',
                        url_extend_params=url_extend_params,
                        hint=hint, base=self.base, dbs=self.dbs_global)
            return 1, page
        except Exception as err:
            # show multiple dataset matches for 1 keyword queries.
            # BUGFIX: was `hasattr(response, ...)` where `response` is
            # undefined in this scope (raised NameError inside the except
            # handler); the attribute lives on the exception object.
            if hasattr(err, 'dataset_matches_msg'):
                return 1, error_msg(err.dataset_matches_msg,
                                    show_kws=self.is_kws_enabled())

            # for non Wildcard parsing errors, show the Keyword Search
            return 1, error_msg(str(err), show_kws=self.is_kws_enabled())

        if dasquery.error:
            return 1, error_msg(dasquery.error)

        # DAS query validation
        if isinstance(uinput, dict):  # DASQuery w/ {'spec':{'_id:id}}
            pass
        elif uinput.find('queries') != -1:
            pass
        elif uinput.find('records') != -1:
            pass
        else:  # normal user DAS query
            try:
                service_map = dasquery.service_apis_map()
            except Exception as exc:
                msg = 'Fail to obtain service API map for this DASQuery'
                print(msg)
                print_exc(exc)
                return 1, error_msg(msg)
            if not service_map:
                return 1, error_msg('Unable to resolve the query over the '
                                    'available services: %s' % dasquery)
        return 0, dasquery

    @expose
    @checkargs(DAS_WEB_INPUTS)
#    @tools.secmodv2()
    def index(self, *args, **kwargs):
        """
        represents DAS web interface.
        It uses das_searchform template for
        input form and yui_table for output Table widget.
        """
        uinput = getarg(kwargs, 'input', '')
        return self.page(self.form(uinput=uinput, cards=True))


    def form(self, uinput='', instance=None, view='list', cards=False):
        """
        Render the DAS search form.
        """
        # e.g. file.creation_date>'20120101 12:01:01' -- normalize quotes
        if "'" in uinput:
            uinput = uinput.replace("'", '"')
        instance = instance or self.dbs_global
        hcards = help_cards(self.base)
        width, height = 900, 220
        cards_page = self.templatepage(
            'das_cards', base=self.base, show=cards, width=width,
            height=height, max_width=len(hcards) * width,
            cards=hcards, enumerate=enumerate)
        daskeys = self.templatepage('das_keys', daskeys=self.daskeyslist)
        return self.templatepage(
            'das_searchform', input=uinput,
            init_dbses=list(self.dbs_instances), daskeys=daskeys,
            base=self.base, instance=instance, view=view, cards=cards_page,
            autocompl_host=json.dumps(self._get_autocompl_host()))

    @expose
    @tools.secmodv2()
    def error(self, msg, wrap=True):
        """
        Render an error page for the given message.

        :param wrap: when True, embed the error in a full page with the
            search form; otherwise return the bare error fragment
        """
        content = self.templatepage('das_error', msg=str(msg))
        if wrap:
            return self.page(self.form() + content)
        return content

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def gridfs(self, **kwargs):
        """
        Retrieve a record from MongoDB GridFS.

        :param fid: GridFS ObjectId of the requested record (required)
        :return: raw file content read from GridFS
        :raises HTTPError: 500 when fid is missing or the lookup fails
        """
        # bug fix: the original kept dead code (a `data` dict with a ctime
        # field and a final json.dumps) after paths that always return/raise;
        # it has been removed since it was unreachable
        if  'fid' not in kwargs:
            code = web_code('No file id')
            raise HTTPError(500, 'DAS error, code=%s' % code)
        fid  = kwargs.get('fid')
        try:
            fds = self.gfs.get(ObjectId(fid))
            return fds.read()
        except Exception as exc:
            print_exc(exc)
            code = web_code('Exception')
            raise HTTPError(500, 'DAS error, code=%s' % code)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def records(self, *args, **kwargs):
        """
        Retrieve DAS records from the raw cache.

        With a record id (positional arg or '_id' kwarg) show that single
        record; otherwise list all record ids with pagination.
        Recognized kwargs: idx, limit, collection, view, instance.
        """
        try:
            recordid = None
            if  args:
                # single record requested via URL path component
                recordid = args[0]
                spec = {'_id':ObjectId(recordid)}
                fields = None
                query = dict(fields=fields, spec=spec)
            elif  kwargs and '_id' in kwargs:
                # single record requested via _id query parameter
                spec = {'_id': ObjectId(kwargs['_id'])}
                fields = None
                query = dict(fields=fields, spec=spec)
            else: # return all ids
                query = dict(fields=None, spec={})

            res      = ''
            time0    = time.time()
            idx      = getarg(kwargs, 'idx', 0)
            limit    = getarg(kwargs, 'limit', 50)
            coll     = kwargs.get('collection', 'merge')
            view     = kwargs.get('view', '')
            if  view == 'json':
                # json view accumulates records in a list instead of HTML text
                res  = []
            inst     = kwargs.get('instance', self.dbs_global)
            form     = self.form(uinput="")
            check, content = self.generate_dasquery(query, inst)
            if  check:
                # non-zero check means failure; content is an error fragment
                return self.page(form + content, ctime=time.time()-time0)
            dasquery = content # returned content is valid DAS query
            nresults = self.dasmgr.rawcache.nresults(dasquery, coll)
            gen      = self.dasmgr.rawcache.get_from_cache\
                (dasquery, idx=idx, limit=limit, collection=coll)
            if  recordid: # we got id
                for row in gen:
                    if  view == 'json':
                        res.append(row)
                    else:
                        res += das_json(dasquery, row)
            else:
                # NOTE(review): if view == 'json' here, res is a list and
                # `res += <str>` extends it character by character — confirm
                # the json view is only ever used together with a record id
                for row in gen:
                    rid  = row['_id']
                    del row['_id']
                    res += self.templatepage('das_record', \
                            id=rid, collection=coll, daskeys=', '.join(row))
            if  recordid:
                page  = res
            else:
                # listing mode: prepend pagination controls when possible
                url   = '/das/records?'
                if  nresults:
                    page = self.templatepage('das_pagination', \
                        nrows=nresults, idx=idx, limit=limit, url=url, \
                        cgi=cgi, str=str)
                else:
                    page = 'No results found, nresults=%s' % nresults
                page += res

            ctime   = (time.time()-time0)
            if  view == 'json':
                return json.dumps(res)
            page = self.page(form + page, ctime=ctime)
            return page
        except Exception as exc:
            print_exc(exc)
            return self.error(gen_error_msg(kwargs))

    @jsonstreamer
    def datastream(self, kwargs):
        """Prepare (head, data) for streaming DAS data as JSON."""
        head = kwargs.get('head', dict(timestamp=time.time()))
        # expose the mongo query while stripping internal-only keys
        if  'mongo_query' not in head:
            head['mongo_query'] = \
                head['dasquery'].mongo_query if 'dasquery' in head else {}
        for internal_key in ('dasquery', 'args'):
            if internal_key in head:
                del head[internal_key]
        data = kwargs.get('data', [])
        if  self.check_clients:
            # update client version
            cli, cli_msg = check_client_version()
            head.update({'client': cli, 'client_message': cli_msg})
        return head, data

    def hint_datasets(self, kwargs):
        "Run dataset hint helpers to look up datasets in other DBS instances"
        query = kwargs.get('input', '').strip()
        dbsinst = kwargs.get('instance', self.dbs_global)
        found = []
        for hint_func in (hint_dataset_case_insensitive,
                          hint_dataset_in_other_insts):
            outcome = hint_func(query, dbsinst)
            # keep only hints which actually carry results
            if outcome and outcome.get('results'):
                found.append(outcome)
        return found

    def get_data(self, kwargs):
        """
        Invoke DAS workflow and get data from the cache.

        :param kwargs: request parameters (input, instance, idx, limit,
            collection, status, error, reason, dasquery)
        :return: (head, data) tuple; head carries status/metadata and
            data is a list of DAS records
        """
        head   = dict(timestamp=time.time())
        head['args'] = kwargs
        uinput = kwargs.get('input', '')
        inst   = kwargs.get('instance', self.dbs_global)
        idx    = getarg(kwargs, 'idx', 0)
        limit  = getarg(kwargs, 'limit', 0) # do not impose limit
        coll   = kwargs.get('collection', 'merge')
        status = kwargs.get('status')
        error  = kwargs.get('error')
        reason = kwargs.get('reason')
        dasquery = kwargs.get('dasquery', None)
        time0  = time.time()
        if  dasquery:
            dasquery = DASQuery(dasquery, instance=inst)
            if  dasquery.error:
                # bug fix: previously referenced an undefined `form` variable
                # and returned an HTML page instead of the (head, data) tuple
                head.update({'status': 'fail', 'reason': dasquery.error,
                             'ctime': time.time()-time0, 'input': uinput})
                return head, []
        else:
            check, content = \
                    self.generate_dasquery(uinput, inst, html_mode=False)
            if  check:
                head.update({'status': 'fail', 'reason': content,
                             'ctime': time.time()-time0, 'input': uinput})
                data = []
                return head, data
            dasquery = content # returned content is valid DAS query
        try:
            nres = self.dasmgr.nresults(dasquery, coll)
            data = \
                self.dasmgr.get_from_cache(dasquery, idx, limit)
            # check that we got what we expected
            data = [r for r in data]
            if  nres and not len(data):
                # cache may lag behind the merge step; retry with
                # increasing delays
                # bug fix: range(1, 3, 5) yielded a single retry of 1 sec,
                # although the warning message promised 1, 3 and 5 seconds
                for retry in (1, 3, 5):
                    msg = 'retry in %s sec' % retry
                    dasprint(dastimestamp('DAS WARNING '), msg, dasquery)
                    time.sleep(retry) # retry one more time
                    data = \
                        self.dasmgr.get_from_cache(dasquery, idx, limit)
                    data = [r for r in data]
                    if  len(data):
                        break
            if  nres and not len(data):
                msg = 'fail to get all data for %s, nres=%s, len(data)=%s' \
                        % (dasquery, nres, len(data))
                dasprint(dastimestamp('DAS WARNING '), msg)
                status = 'fail'
                reason = 'Fail to retrieve data from DAS cache, please retry'

            if  dasquery.aggregators:
                # aggregators split DAS record into sub-system and then
                # apply aggregator functions, therefore we need to correctly
                # account for nresults. Resolve generator into list and take
                # its length as nresults value.
                data = [r for r in data]
                nres = len(data)
            if  error: # DAS record contains an error
                status = 'error'
            head.update({'status':status, 'nresults':nres,
                         'ctime': time.time()-time0, 'dasquery': dasquery})
        except Exception as exc:
            status = 'fail'
            reason = str(exc)
            print_exc(exc)
            head.update({'status': status,
                         'ctime': time.time()-time0, 'dasquery': dasquery})
            data = []
        head.update({'incache':self.dasmgr.incache(dasquery, coll='cache'),
                     'apilist':self.dasmgr.apilist(dasquery)})
        if  reason:
            head.update({'reason': reason})
        if  status != 'ok':
            head.update(self.info())

        # check if query had dataset input and returned no results
        # then run hint functions to find dataset in other DBS instances
        mquery = dasquery.mongo_query
        empty = False
        for item in data:
            # robustness: mquery['fields'] may be None, guard the lookup
            if  'dataset.name' in mquery['spec'] and \
                    'dataset' in (mquery['fields'] or []) and \
                    'result' not in item:
                if  not item['dataset']:
                    empty = True
                    break
        if  empty: # if no results found add dataset from other DBS instances
            hints = self.hint_datasets(kwargs)
            for item in data:
                item.update({'hints': hints})

        return head, data

    def info(self):
        "Return status of DAS server"
        server_info = {'nrequests': self.reqmgr.size(),
                       'nworkers': self.taskmgr.nworkers(),
                       'dasweb': self.reqmgr.status()}
        # include core task manager status when DAS core is available
        if self.dasmgr and self.dasmgr.taskmgr:
            server_info['dascore'] = self.dasmgr.taskmgr.status()
        return dict(das_server=server_info)

    def busy(self):
        """
        Check server load and report busy status if
        nrequests - nworkers > queue limit.

        :return: True when the server is overloaded, False otherwise
        """
        nrequests = self.reqmgr.size()
        nworkers  = self.taskmgr.nworkers()
        if  (nrequests - nworkers) > self.queue_limit:
            # bug fix: the queue_limit/#workers values were swapped in the
            # formatted message (queue_limit showed nworkers and vice versa)
            msg = '#request=%s, queue_limit=%s, #workers=%s' \
                    % (nrequests, self.queue_limit, nworkers)
            dasprint(dastimestamp('DAS WEB SERVER IS BUSY '), msg)
            return True
        return False

    def busy_page(self, uinput=None):
        """Render the DAS-server-is-busy page."""
        notice = "<h3>DAS server is busy, please try later</h3>"
        return self.page(self.form(uinput) + notice)

    def _is_web_request(self, view):
        """
        Return True when the client expects an HTML (web) response,
        False for programmatic/non-HTML clients.
        """
        # explicit non-HTML output type requested via the view parameter
        if view in ('json', 'xml', 'plain'):
            return False

        # check accept header - e.g. das client only provides accept header
        accepts = cherrypy.request.headers.elements('Accept')
        non_html_accepts = ['application/json']
        wants_other = any(a.value not in non_html_accepts for a in accepts)

        # if only non html content types are accepted we are in non html mode
        if accepts and not wants_other:
            return False

        return True

    def empty_return(self, dasquery, status='busy', reason=None):
        "Return header/data stream when DAS server cannot serve the request"
        if  not reason:
            reason = 'DAS server is busy' + \
                ', #requests=%s, #workers=%s, queue size=%s' \
                % (self.reqmgr.size(), self.taskmgr.nworkers(), self.queue_limit)
        head = dict(timestamp=time.time(),
                    status=status, reason=reason, ctime=0)
        dasprint(dastimestamp('DAS INFO '), dasquery,
                 'server status=%s'%status, reason)
        return self.datastream(dict(head=head, data=[]))

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def cache(self, **kwargs):
        """
        DAS web cache interface. Fire up new process for new requests and
        record its pid. The client is in charge to keep track of pid.
        The new process uses DAS core call to request the data into cache.
        Since query are cached the repeated call with the same query
        has no cost to DAS core.

        Returns either a pid string (work in progress) or a JSON data
        stream (finished/failed requests).
        """
        # do not allow caching
        set_no_cache_flags()

        # if busy return right away
        if  self.busy():
            return self.empty_return(kwargs)

        uinput = kwargs.get('input', '').strip()
        check_query(uinput)
        if  not uinput:
            head = {'status': 'fail', 'reason': 'No input found',
                    'args': kwargs, 'ctime': 0, 'input': uinput}
            data = []
            return self.datastream(dict(head=head, data=data))
        self.adjust_input(kwargs)
        pid    = kwargs.get('pid', '')
        inst   = kwargs.get('instance', self.dbs_global)
        uinput = kwargs.get('input', '')
        view   = kwargs.get('view', 'list')
        qcache = kwargs.get('qcache', 0)
        data   = []

        # textual views need text only error messages...
        check, content = self.generate_dasquery(uinput, inst,
                              html_mode=self._is_web_request(view),
                              qcache=qcache)
        if  check:
            # query could not be parsed into a DASQuery
            head = dict(timestamp=time.time())
            head.update({'status': 'fail',
                         'reason': 'Can not interpret the query'+ \
                                   ' (while creating DASQuery)',
                         'ctime': 0})
            if not self._is_web_request(view):
                head['error_details'] = content
                head['reason'] = head['reason'] + '\n\n' + content
            return self.datastream(dict(head=head, data=data))

        dasquery = content # returned content is valid DAS query
        status, error, reason = self.dasmgr.get_status(dasquery)
        kwargs.update({'status':status, 'error':error, 'reason':reason})
        if  not pid:
            # the query hash doubles as the process id
            pid = dasquery.qhash
        if  status == None and not self.reqmgr.has_pid(pid): # submit new request
            # identify requestor by DN when authenticated, else by IP
            uid = cherrypy.request.headers.get('Remote-Addr')
            if  hasattr(cherrypy.request, 'user'):
                uid = cherrypy.request.user.get('dn', None)
            _evt, pid = self.taskmgr.spawn(\
                self.dasmgr.call, dasquery, uid=uid, pid=dasquery.qhash)
            self.reqmgr.add(pid, kwargs)
            return pid
        if  status == 'ok':
            # data are ready in the cache; clean up bookkeeping and stream
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)
            kwargs['dasquery'] = dasquery
            head, data = self.get_data(kwargs)
            return self.datastream(dict(head=head, data=data))
        kwargs['dasquery'] = dasquery.storage_query
        if  not self.pid_pat.match(str(pid)) or len(str(pid)) != 32:
            # pid must be a 32-character hex hash; reject anything else
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)
            return self.empty_return(dasquery, 'fail', 'Invalid pid')
        elif self.taskmgr.is_alive(pid):
            # still processing; client should poll again with this pid
            return pid
        elif status == None:
            # DAS was busy and query expired since status==None
            if  not self.taskmgr.is_alive(pid) and self.reqmgr.has_pid(pid):
                self.reqmgr.remove(pid)
                self.taskmgr.remove(pid)
                return self.empty_return(dasquery, 'fail', 'request expired')
            return pid
        else: # process is done, get data
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)
            head, data = self.get_data(kwargs)
            return self.datastream(dict(head=head, data=data))

    def get_page_content(self, kwargs, complete_msg=True):
        """
        Retrieve page content for provided set of parameters.

        :param kwargs: request parameters; 'view' selects the renderer
        :param complete_msg: when True, textual views get a short
            "request completed" message instead of the full data
        :return: rendered page string (an error page on failure)
        """
        page = ''
        try:
            view = kwargs.get('view', 'list')
            if  view == 'plain':
                # plain view dumps everything, so drop any limit
                if  'limit' in kwargs:
                    del kwargs['limit']
            if  view in ['json', 'xml', 'plain'] and complete_msg:
                page = 'Request completed. Reload the page ...'
            else:
                head, data = self.get_data(kwargs)

                allowed_views = ['list', 'table', 'plain', 'xml', 'json']
                if view not in allowed_views:
                    # bug fix: a bare `raise` here had no active exception
                    # and produced "RuntimeError: No active exception to
                    # re-raise"; raise a descriptive error instead (it is
                    # handled by the generic except clause below, as before)
                    raise Exception('unsupported view: %s' % view)

                # dispatch to listview/tableview/plainview/xmlview/jsonview
                func = getattr(self, view + "view")
                page = func(head, data)
        except HTTPError as _err:
            raise
        except Exception as exc:
            print_exc(exc)
            msg  = gen_error_msg(kwargs)
            page = self.templatepage('das_error', msg=msg)
        return page

    @expose
    @tools.secmodv2()
    def download(self, lfn):
        "Render the DAS download (filemover) page for the given LFN"
        return self.page(self.templatepage('filemover', lfn=lfn),
                         response_div=False)

    @expose
    @tools.secmodv2()
    def makepy(self, dataset, instance):
        """
        Create a CMSSW python configuration snippet listing the files
        of the given dataset.
        """
        # a dataset name must have three slash-separated parts
        if not re.compile('/.*/.*/.*').match(dataset):
            return self.error('Invalid dataset name')
        query = "file dataset=%s instance=%s | grep file.name" \
                % (dataset, instance)
        try:
            data = self.dasmgr.result(query, idx=0, limit=0)
        except Exception as exc:
            print_exc(exc)
            msg = 'Exception: %s\n' % str(exc)
            msg += 'Unable to retrieve data for query=%s' % query
            return self.error(msg)
        # collect unique LFNs, preserving the order in which they appear
        lfns = []
        for rec in data:
            lfn = DotDict(rec).get('file.name')
            if lfn not in lfns:
                lfns.append(lfn)
        page = self.templatepage('das_files_py', lfnList=lfns, pfnList=[],
                                 isinstance=isinstance, list=list)
        cherrypy.response.headers['Content-Type'] = "text/plain"
        return page

    @expose
    @checkargs(DAS_WEB_INPUTS)
    @tools.secmodv2()
    def request(self, **kwargs):
        """
        Request data from DAS cache.

        Main HTML request handler: parses the user query, submits a new
        DAS core task when needed, and either renders the results page
        or a check_pid polling page while the task is running.
        """
        # do not allow caching
        set_no_cache_flags()

        uinput  = kwargs.get('input', '').strip()
        check_query(uinput)
        if  not uinput:
            kwargs['reason'] = 'No input found'
            return self.redirect(**kwargs)

        # if busy return right away
        if  self.busy():
            return self.busy_page(uinput)

        time0   = time.time()
        self.adjust_input(kwargs)
        view    = kwargs.get('view', 'list')
        qcache  = kwargs.get('qcache', 0)
        if  'instance' in uinput:
            # instance must come from the drop-down menu, not the query text
            form     = self.form(uinput=uinput, view=view)
            content  = 'On DAS web UI please use drop-down menu to specify DBS'
            content += ' instance to avoid ambiguity. '
            content += 'To proceed please clear your input query.'
            return self.page(form + '<div class="box_red">%s</div>' % content)
        else:
            inst = kwargs.get('instance', self.dbs_global)
        uinput  = kwargs.get('input', '')
        form    = self.form(uinput=uinput, instance=inst, view=view)
        check, content = self.generate_dasquery(uinput, inst, qcache=qcache)
        if  check:
            # query parsing failed; content is an error fragment/message
            if  view == 'list' or view == 'table':
                return self.page(form + content, ctime=time.time()-time0)
            else:
                return content
        dasquery = content # returned content is valid DAS query
        status, error, reason = self.dasmgr.get_status(dasquery)
        kwargs.update({'status':status, 'error':error, 'reason':reason})
        pid = dasquery.qhash
        if  status is None: # process new request
            kwargs['dasquery'] = dasquery.storage_query
            # identify requestor by DN when authenticated, else by IP
            uid = cherrypy.request.headers.get('Remote-Addr')
            if  hasattr(cherrypy.request, 'user'):
                uid = cherrypy.request.user.get('dn', None)
            _evt, pid = self.taskmgr.spawn(self.dasmgr.call, dasquery,
                    uid=uid, pid=dasquery.qhash)
            self.reqmgr.add(pid, kwargs)
        elif status == 'ok' or status == 'fail':
            # request already processed; clean up bookkeeping
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)

            # check if query can be rewritten via nested PK query
            rew_msg = self.q_rewriter and self.q_rewriter.check_fields(dasquery)
            if rew_msg:
                content =  self.templatepage('das_error', msg=rew_msg)
                return self.page(form + content, ctime=time.time()-time0)

            kwargs['dasquery'] = dasquery
            page = self.get_page_content(kwargs, complete_msg=False)
            ctime = (time.time()-time0)
            if  view == 'list' or view == 'table':
                return self.page(form + page, ctime=ctime)

            return page
        if  self.taskmgr.is_alive(pid):
            # task still running: render self-refreshing polling page
            page = self.templatepage('das_check_pid', method='check_pid',
                    uinput=uinput, view=view, urllib=urllib,
                    base=self.base, pid=pid, interval=self.interval)
        elif status == None:
            # DAS was busy and query expired since status==None
            if  not self.taskmgr.is_alive(pid) and self.reqmgr.has_pid(pid):
                self.reqmgr.remove(pid)
                self.taskmgr.remove(pid)
                return self.empty_return(dasquery, 'fail', 'request expired')
            page = self.templatepage('das_check_pid', method='check_pid',
                    uinput=uinput, view=view, urllib=urllib,
                    base=self.base, pid=pid, interval=self.interval)
        else:
            # task finished between get_status and here; render results
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)
            page = self.get_page_content(kwargs)
        ctime = (time.time()-time0)
        return self.page(form + page, ctime=ctime)

    @expose
    @tools.secmodv2()
    def status(self):
        """Render the status page with all current requests in DAS queue"""
        page = self.templatepage('das_status',
                requests=list(self.reqmgr.items()), time=time)

        sdict = self.dasmgr.status()
        sdict['web'] = self.taskmgr.status()
        dasprint(dastimestamp('DAS INFO '), "web TaskManager", sdict['web'])
        for key, val in sdict.items():
            dasprint(dastimestamp('DAS INFO '), "%s TaskManager %s" % (key, val))
        page += '<h3>Services</h3>'
        def dump(idict):
            "Format a dict as comma-separated key/value HTML snippets"
            return ', '.join(['<em>%s:</em> %s' % (k, idict[k]) for k in sorted(idict)])
        for key, val in sdict.items():
            stats = ', '.join(dump(v) for v in val.values())
            page += '<div>' + '<b>%s</b>: %s' % (key, stats) + '</div>'
        return self.page(page)

    @expose
    @checkargs(['pid'])
    @tools.secmodv2()
    def check_pid(self, pid):
        """
        Check status of given pid. This is a server callback
        function for ajaxCheckPid, see js/ajax_utils.js

        Returns an HTML fragment describing the request state, or
        re-dispatches the original request when this server has no
        record of the pid.
        """
        # do not allow caching
        set_no_cache_flags()

        img  = '<img src="%s/images/loading.gif" alt="loading"/>' % self.base
        page = ''
        try:
            if  self.taskmgr.is_alive(pid):
                # task still running, show spinner
                page = img + " processing PID=%s" % pid
            else:
                # at this point we don't know if request arrived to this host
                # or it was processed. To distinguish the case we'll ask
                # request manager for that pid
                if  self.reqmgr.has_pid(pid):
                    self.reqmgr.remove(pid)
                    self.taskmgr.remove(pid)
                    page  = 'Request PID=%s is completed' % pid
                    page += ', please wait for results to load'
                else:
                    # there're no request on this server, re-initiate it
                    # using the query parameters from the Referer URL
                    ref = cherrypy.request.headers.get('Referer', None)
                    if  ref:
                        url = urlparse(ref)
                        params = dict(parse_qsl(url.query))
                        return self.request(**params)
                    else:
                        msg  = 'No referer in cherrypy.request.headers'
                        msg += '\nHeaders: %s' % cherrypy.request.headers
                        dasprint(dastimestamp('DAS WEB ERROR '), msg)
        except Exception as err:
            msg = 'check_pid fails for pid=%s' % pid
            dasprint(dastimestamp('DAS WEB ERROR '), msg)
            print_exc(err)
            # best-effort cleanup of bookkeeping before reporting the error
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)
            return self.error(gen_error_msg({'pid':pid}), wrap=False)
        return page

    def listview(self, head, data):
        """Render DAS results in the list view (delegates to repmgr)."""
        return self.repmgr.listview(head, data)

    def tableview(self, head, data):
        """Render DAS results in the tabular view (delegates to repmgr)."""
        return self.repmgr.tableview(head, data)

    def plainview(self, head, data):
        """Render DAS results in the plain-text view (delegates to repmgr)."""
        return self.repmgr.plainview(head, data)

    def xmlview(self, head, data):
        """Render DAS results as XML (delegates to repmgr)."""
        return self.repmgr.xmlview(head, data)

    def jsonview(self, head, data):
        """Render DAS results as JSON (delegates to repmgr)."""
        return self.repmgr.jsonview(head, data)

    @exposedasjson
    @enable_cross_origin
    @checkargs(['query', 'dbs_instance'])
    @tools.secmodv2()
    def autocomplete(self, **kwargs):
        """
        Provide autocomplete suggestions for the DAS web UI.

        :param query: partial user query string
        :param dbs_instance: DBS instance used for dataset lookups
        :return: list of suggestion dicts (css/value/info keys)
        """
        query = kwargs.get("query", "").strip()
        result = autocomplete_helper(query, self.dasmgr, self.daskeys)
        dataset = [r for r in result if r['value'].find('dataset=')!=-1]
        dbsinst = kwargs.get('dbs_instance', self.dbs_global)
        if  self.dataset_daemon and len(dataset):
            dbsmgr = self._get_dbsmgr(dbsinst)
            # we shall autocomplete the last token so queries like
            # file dataset=/ZMM/.. are autocompleted
            prefix = ''
            if ' ' in query:
                # bug fix: tokens were re-joined with a double space
                # ('  '.join), which corrupted the suggested query;
                # also removed the stray debug print of the prefix
                prefix = ' '.join(query.split()[:-1]) + ' '
                query = query.split()[-1]
            if  query.find('dataset=') != -1:
                query = query.replace('dataset=', '')
            for row in dbsmgr.find(query):
                result.append({'css': 'ac-info',
                               'value': prefix + 'dataset=%s' % row,
                               'info': 'dataset'})
        return result
# Code example #30
# Score: 0
# File: das_web_srv.py  Project: zdenekmaxa/DAS
class DASWebService(DASWebManager):
    """
    DAS web service interface.
    """
    def __init__(self, dasconfig):
        """Initialize the DAS web service from the *dasconfig* dict.

        Reads the 'web_server' section, creates a task manager (plugin
        based when a cherrypy engine is supplied), connects to DAS core
        via init(), starts a monitor thread for auto-reconnection and,
        optionally, the DBS dataset daemon.
        """
        DASWebManager.__init__(self, dasconfig)
        config = dasconfig['web_server']
        # request pids are 32-character hex hashes (qhash)
        self.pid_pat     = re.compile(r'^[a-z0-9]{32}')
        self.base        = config['url_base']
        self.interval    = config.get('status_update', 2500)
        self.engine      = config.get('engine', None)
        nworkers         = config['number_of_workers']
        self.hot_thr     = config.get('hot_threshold', 3000)
        self.dasconfig   = dasconfig
        self.dburi       = self.dasconfig['mongodb']['dburi']
        self.lifetime    = self.dasconfig['mongodb']['lifetime']
        self.queue_limit = config.get('queue_limit', 50)
        # with a cherrypy engine we use the bus-subscribed task manager,
        # otherwise a plain thread-pool task manager
        if  self.engine:
            thr_name = 'DASWebService:PluginTaskManager'
            self.taskmgr = PluginTaskManager(\
                        bus=self.engine, nworkers=nworkers, name=thr_name)
            self.taskmgr.subscribe()
        else:
            thr_name = 'DASWebService:TaskManager'
            self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        self.adjust      = config.get('adjust_input', False)

        # must run before the monitor thread below: init() sets
        # self.dasmgr which the monitor dict references
        self.init()

        # Monitoring thread which performs auto-reconnection
        thread.start_new_thread(dascore_monitor, \
                ({'das':self.dasmgr, 'uri':self.dburi}, self.init, 5))

        # Obtain DBS global instance or set it as None
        if  self.dasconfig.has_key('dbs'):
            self.dbs_global = \
                self.dasconfig['dbs'].get('dbs_global_instance', None)
            self.dbs_instances = \
                self.dasconfig['dbs'].get('dbs_instances', [])
        else:
            self.dbs_global = None
            self.dbs_instances = []

        # Start DBS daemon
        self.dataset_daemon = config.get('dbs_daemon', False)
        if  self.dataset_daemon:
            self.dbs_daemon(config)

    def process_requests_onhold(self):
        """Spawn a background thread that serves requests placed on hold."""
        try:
            # hand half of the queue capacity to the onhold worker
            # (py2 integer division)
            thread.start_new_thread(
                onhold_worker,
                (self.dasmgr, self.taskmgr, self.reqmgr, self.queue_limit/2))
        except Exception as exc:
            print_exc(exc)

    def dbs_daemon(self, config):
        """Start DBS daemon if it is requested via DAS configuration.

        Builds one DBSDaemon per configured DBS instance (deriving each
        URL from the global DBS URL) and starts a dedicated updater
        thread per daemon which refreshes its cache every
        'dbs_daemon_interval' seconds.
        """
        try:
            main_dbs_url = self.dasconfig['dbs']['dbs_global_url']
            self.dbs_urls = []
            # derive per-instance URLs by substituting the instance name
            # into the global DBS URL
            for inst in self.dbs_instances:
                self.dbs_urls.append(\
                        main_dbs_url.replace(self.dbs_global, inst))
            interval  = config.get('dbs_daemon_interval', 3600)
            dbsexpire = config.get('dbs_daemon_expire', 3600)
            self.dbsmgr = {} # dbs_urls vs dbs_daemons
            if  self.dataset_daemon:
                for dbs_url in self.dbs_urls:
                    dbsmgr = DBSDaemon(dbs_url, self.dburi, expire=dbsexpire)
                    self.dbsmgr[dbs_url] = dbsmgr
                    def dbs_updater(_dbsmgr, interval):
                        """DBS updater daemon"""
                        while True:
                            try:
                                # best-effort refresh: errors are
                                # deliberately swallowed so the updater
                                # thread never dies
                                _dbsmgr.update()
                            except:
                                pass
                            time.sleep(interval)
                    print "Start DBSDaemon for %s" % dbs_url
                    # daemon is bound explicitly via the args tuple, so
                    # each thread gets its own dbsmgr (no late binding)
                    thread.start_new_thread(dbs_updater, (dbsmgr, interval, ))
        except Exception as exc:
            print_exc(exc)

    def init(self):
        """Init DAS web server, connect to DAS Core.

        Creates logging DB, request manager, DAS core, representation
        manager and GridFS handles.  On any failure the server degrades
        gracefully: dasmgr is set to None and daskeys left empty.
        Also called periodically by dascore_monitor for reconnection.
        """
        try:
            self.logcol     = DASLogdb(self.dasconfig)
            self.reqmgr     = RequestManager(self.dburi, lifetime=self.lifetime)
            self.dasmgr     = DASCore(engine=self.engine)
            self.repmgr     = CMSRepresentation(self.dasconfig, self.dasmgr)
            self.daskeys    = self.dasmgr.das_keys()
            self.gfs        = db_gridfs(self.dburi)
            self.daskeys.sort()
            self.dasmapping = self.dasmgr.mapping
            self.dasmapping.init_presentationcache()
            # per-system colors used by the web UI
            self.colors = {}
            for system in self.dasmgr.systems:
                self.colors[system] = gen_color(system)
            self.sitedbmgr   = SiteDBService(self.dasconfig)
        except Exception as exc:
            print_exc(exc)
            # degrade gracefully; dascore_monitor will retry init()
            self.dasmgr = None
            self.daskeys = []
            self.colors = {}
            return
        # Start Onhold_request daemon
        if  self.dasconfig['web_server'].get('onhold_daemon', False):
            self.process_requests_onhold()

    def logdb(self, query):
        """Record the current web request and *query* hash in the logging DB."""
        args   = cherrypy.request.params
        remote = cherrypy.request.remote
        # date encoded as integer YYYYMMDD
        today  = int(str(date.fromtimestamp(time.time())).replace('-', ''))
        entry  = dict(qhash=genkey(query),
                      date=today,
                      headers=cherrypy.request.headers,
                      method=cherrypy.request.method,
                      path=cherrypy.request.path_info,
                      args=args, ahash=genkey(args),
                      ip=remote.ip,
                      hostname=remote.name,
                      port=remote.port)
        self.logcol.insert('web', entry)

    def get_nhits(self):
        "Return number of hits per day client made"
        tsec  = time.mktime(date.timetuple(date.today()))
        spec  = {'ip': cherrypy.request.remote.ip, 'ts': {'$gte': tsec},
                 'args.pid': {'$exists': False}, # do not count pid requests
                 'path': '/cache'} # requests from das_client calls
        nhits = self.logcol.find(spec, count=True)
        return nhits

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def redirect(self, **kwargs):
        """Render the DAS redirect (access-denied) page."""
        default = 'You do not have permission to access the resource requested.'
        reason  = kwargs.get('reason', default)
        if  reason:
            reason = 'Reason: ' + reason
        content = self.templatepage('das_redirect', msg=reason)
        return self.page(content, response_div=False)

    def bottom(self, response_div=True):
        """Render the common footer used by all DAS web pages."""
        return self.templatepage('das_bottom',
                                 div=response_div, version=DAS.version)

    def page(self, content, ctime=None, response_div=True):
        """Wrap *content* with the common DAS header and footer."""
        footer = self.templatepage('das_bottom', ctime=ctime,
                                   version=DAS.version, div=response_div)
        return self.top() + content + footer

    @expose
    @checkargs(DAS_WEB_INPUTS + ['section', 'highlight'])
    def faq(self, *args, **kwargs):
        """Render the DAS FAQ page."""
        ops     = ', '.join(das_operators())
        guide   = self.templatepage('dbsql_vs_dasql', operators=ops)
        content = self.templatepage('das_faq',
                                    guide=guide,
                                    section=kwargs.get('section', None),
                                    highlight=kwargs.get('highlight', None),
                                    operators=ops,
                                    aggregators=', '.join(das_aggregators()))
        return self.page(content, response_div=False)

    @expose
    def cli(self):
        """Serve the DAS command-line client script as plain text."""
        dasroot = '/'.join(__file__.split('/')[:-3])
        script  = os.path.join(dasroot, 'DAS/tools/das_client.py')
        return serve_file(script, content_type='text/plain')

    @expose
    def movetodas(self):
        """Placeholder page shown during the DBS -> DAS migration.

        Fixes the misspelled user-facing word 'depricated' and removes
        the stray adjacent empty-string literal after the closing div.
        """
        style = "width:600px;margin-left:auto;margin-right:auto;padding-top:20px"
        page  = """<div style="%s">""" % style
        page += "Dear user,<br/>DBS Data Discovery page is deprecated.<br/>"
        page += "Please migrate to Data Aggregation Service located at"
        page += "<p>https://cmsweb.cern.ch/das/</p>"
        page += "<em>CMS HTTP group.</em>"
        page += "</div>"
        return page

    @expose
    def opensearch(self):
        """Serve the DAS OpenSearch description document."""
        fallback = 'http://cmsweb.cern.ch/das'
        # use the configured base only when it is a full http URL
        base = self.base if self.base and self.base.find('http://') != -1 \
                else fallback
        desc = self.templatepage('das_opensearch', base=base)
        cherrypy.response.headers['Content-Type'] = \
                'application/opensearchdescription+xml'
        return desc

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def services(self, *args, **kwargs):
        """Render the DAS services page (keys and APIs per data-service)."""
        dasdict = {}
        daskeys = []
        for system, keys in self.dasmgr.mapping.daskeys().iteritems():
            if  system not in self.dasmgr.systems:
                continue
            keymap = dict((key, self.dasmgr.mapping.lookup_keys(system, key))
                          for key in keys)
            for key in keys:
                if  key not in daskeys:
                    daskeys.append(key)
            dasdict[system] = dict(keys=keymap,
                    apis=self.dasmgr.mapping.list_apis(system))
        mapreduce = list(self.dasmgr.rawcache.get_map_reduce())
        content = self.templatepage('das_services', dasdict=dasdict,
                        daskeys=daskeys, mapreduce=mapreduce)
        return self.page(content, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def api(self, name, **kwargs):
        """Render the DAS mapping record for the API called *name*."""
        record  = self.dasmgr.mapping.api_info(name)
        content = "<b>DAS mapping record</b>" + das_json(record)
        return self.page(content, response_div=False)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def default(self, *args, **kwargs):
        """
        Default method.

        NOTE(review): args and kwargs are forwarded as two positional
        values, not unpacked with */** — index() receives them inside
        its own *args and sees an empty **kwargs. Looks intentional
        (renders the plain search page for unknown URLs) — confirm
        before changing.
        """
        return self.index(args, kwargs)

    def adjust_input(self, kwargs):
        """
        Adjust user input wrt common DAS keyword patterns, e.g.
        Zee -> dataset=*Zee*, T1_US -> site=T1_US*. This method
        only works if self.adjust is set in configuration of DAS server.
        This method can be customized for concrete DAS applications via
        external free_text_parser function (part of DAS.web.utils module).
        Mutates kwargs['input'] in place; returns None.
        """
        if  not self.adjust:
            return
        uinput = kwargs.get('input', '')
        # do not touch special 'queries'/'records' lookups
        query_part = uinput.split('|')[0]
        if  query_part == 'queries' or query_part == 'records':
            return
        new_input = free_text_parser(uinput, self.daskeys)
        # if the parser made no change, try to prepend a select key
        # (default 'dataset') chosen from the known DAS keys
        if  uinput and new_input == uinput:
            selkey = choose_select_key(uinput, self.daskeys, 'dataset')
            if  selkey and len(new_input) > len(selkey) and \
                new_input[:len(selkey)] != selkey:
                new_input = selkey + ' ' + new_input
        kwargs['input'] = new_input

    def generate_dasquery(self, uinput, inst, html_error=True):
        """
        Check provided input as valid DAS input query.
        Returns status and content: (1, error_page_or_message) on
        failure, (0, DASQuery) on success. When html_error is set the
        error content is a rendered HTML page, otherwise a bare string.
        """
        def helper(msg, html_error=None):
            """Helper function which provide error template"""
            if  not html_error:
                return msg
            guide = self.templatepage('dbsql_vs_dasql', 
                        operators=', '.join(das_operators()))
            page = self.templatepage('das_ambiguous', msg=msg, base=self.base,
                        guide=guide)
            return page
        if  not uinput:
            return 1, helper('No input query')
        # Generate DASQuery object, if it fails we catch the exception and
        # wrap it for upper layer (web interface)
        try:
            dasquery = DASQuery(uinput, instance=inst)
        except Exception as err:
            return 1, helper(das_parser_error(uinput, str(err)), html_error)
        fields = dasquery.mongo_query.get('fields', [])
        if  not fields:
            fields = []
        spec   = dasquery.mongo_query.get('spec', {})
        # every selected field / spec key must be a DAS DB keyword or
        # contain one of the registered DAS keys
        for word in fields+spec.keys():
            found = 0
            if  word in DAS_DB_KEYWORDS:
                found = 1
            for key in self.daskeys:
                if  word.find(key) != -1:
                    found = 1
            if  not found:
                msg = 'Provided input does not contain a valid DAS key'
                return 1, helper(msg, html_error)
        if  isinstance(uinput, dict): # DASQuery w/ {'spec':{'_id:id}}
            pass
        elif uinput.find('queries') != -1:
            pass
        elif uinput.find('records') != -1:
            pass
        else: # normal user DAS query
            # a normal query must be resolvable by at least one
            # registered data-service API
            try:
                service_map = dasquery.service_apis_map()
            except Exception as exc:
                msg = 'Fail to lookup DASQuery service API map'
                print msg
                print_exc(exc)
                return 1, helper(msg, html_error)
            if  not service_map:
                msg  = "None of the API's registered in DAS "
                msg += "can resolve this query"
                return 1, helper(msg, html_error)
        return 0, dasquery

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def index(self, *args, **kwargs):
        """Render the main DAS search page (form plus help cards)."""
        uinput = getarg(kwargs, 'input', '')
        form   = self.form(uinput=uinput, cards=True)
        return self.page(form)

    def form(self, uinput='', instance=None, view='list', cards=False):
        """Build the DAS search form, optionally showing the help cards."""
        instance  = instance or self.dbs_global
        card_html = self.templatepage('das_cards', base=self.base, show=cards,
                                      width=900, height=220,
                                      cards=help_cards(self.base))
        return self.templatepage('das_searchform', input=uinput,
                                 init_dbses=list(self.dbs_instances),
                                 base=self.base, instance=instance,
                                 view=view, cards=card_html)

    @expose
    def error(self, msg, wrap=True):
        """Render an error page; wrap it in the full layout when requested."""
        content = self.templatepage('das_error', msg=str(msg))
        if  not wrap:
            return content
        return self.page(self.form() + content)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def gridfs(self, *args, **kwargs):
        """
        Retrieve a record from GridFS by its file id ('fid').

        Returns the raw file content; raises HTTPError(500) when no
        fid is supplied or the GridFS lookup fails.

        Cleanup: the original carried unreachable code after the
        try/except (every path returned or raised, so the trailing
        ctime/json.dumps lines never executed) plus the dead time0/data
        locals that only fed it; both removed, behavior unchanged.
        """
        if  not kwargs.has_key('fid'):
            code = web_code('No file id')
            raise HTTPError(500, 'DAS error, code=%s' % code)
        fid  = kwargs.get('fid')
        try:
            fds = self.gfs.get(ObjectId(fid))
            return fds.read()
        except Exception as exc:
            print_exc(exc)
            code = web_code('Exception')
            raise HTTPError(500, 'DAS error, code=%s' % code)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def records(self, *args, **kwargs):
        """
        Retrieve DAS records by id, or list all record ids.

        A record id may come either as a positional arg or as the
        '_id' keyword; otherwise all records are paginated.
        """
        try:
            recordid = None
            # build a Mongo query for one record (by id) or for all
            if  args:
                recordid = args[0]
                spec = {'_id':ObjectId(recordid)}
                fields = None
                query = dict(fields=fields, spec=spec)
            elif  kwargs and kwargs.has_key('_id'):
                spec = {'_id': ObjectId(kwargs['_id'])}
                fields = None
                query = dict(fields=fields, spec=spec)
            else: # return all ids
                query = dict(fields=None, spec={})

            res      = ''
            time0    = time.time()
            idx      = getarg(kwargs, 'idx', 0)
            limit    = getarg(kwargs, 'limit', 10)
            coll     = kwargs.get('collection', 'merge')
            inst     = kwargs.get('instance', self.dbs_global)
            form     = self.form(uinput="")
            # validate the dict query through the standard path; a
            # non-zero check means 'content' is an error page
            check, content = self.generate_dasquery(query, inst)
            if  check:
                return self.page(form + content, ctime=time.time()-time0)
            dasquery = content # returned content is valid DAS query
            nresults = self.dasmgr.rawcache.nresults(dasquery, coll)
            gen      = self.dasmgr.rawcache.get_from_cache\
                (dasquery, idx=idx, limit=limit, collection=coll)
            if  recordid: # we got id
                for row in gen:
                    res += das_json(row)
            else:
                for row in gen:
                    rid  = row['_id']
                    del row['_id']
                    res += self.templatepage('das_record', \
                            id=rid, collection=coll, daskeys=', '.join(row))
            # single record: raw JSON; listing: pagination + records
            if  recordid:
                page  = res
            else:
                url   = '/das/records?'
                if  nresults:
                    page = self.templatepage('das_pagination', \
                        nrows=nresults, idx=idx, limit=limit, url=url)
                else:
                    page = 'No results found, nresults=%s' % nresults
                page += res

            ctime   = (time.time()-time0)
            page = self.page(form + page, ctime=ctime)
            return page
        except Exception as exc:
            print_exc(exc)
            return self.error(gen_error_msg(kwargs))

    @jsonstreamer
    def datastream(self, kwargs):
        """Prepare (head, data) for JSON streaming; strip non-serializable keys."""
        head = kwargs.get('head', dict(timestamp=time.time()))
        if  not head.has_key('mongo_query'):
            mquery = {}
            if  head.has_key('dasquery'):
                mquery = head['dasquery'].mongo_query
            head['mongo_query'] = mquery
        # DASQuery objects and raw args are not JSON serializable
        for key in ['dasquery', 'args']:
            if  head.has_key(key):
                del head[key]
        return head, kwargs.get('data', [])

    def get_data(self, kwargs):
        """
        Invoke DAS workflow and get data from the cache.
        Returns (head, data) where head carries status/nresults/ctime
        and data is the list of records (empty on failure).
        """
        head   = dict(timestamp=time.time())
        head['args'] = kwargs
        uinput = kwargs.get('input', '')
        inst   = kwargs.get('instance', self.dbs_global)
        idx    = getarg(kwargs, 'idx', 0)
        limit  = getarg(kwargs, 'limit', 0) # do not impose limit
        coll   = kwargs.get('collection', 'merge')
        dasquery = kwargs.get('dasquery', None)
        time0  = time.time()
        # either rebuild a DASQuery from the stored one, or parse and
        # validate the raw user input
        if  dasquery:
            dasquery = DASQuery(dasquery, instance=inst)
        else:
            check, content = \
                    self.generate_dasquery(uinput, inst, html_error=False)
            if  check:
                head.update({'status': 'fail', 'reason': content,
                             'ctime': time.time()-time0, 'input': uinput})
                data = []
                return head, data
            dasquery = content # returned content is valid DAS query
        try:
            nres = self.dasmgr.nresults(dasquery, coll)
            data = \
                self.dasmgr.get_from_cache(dasquery, idx, limit)
            head.update({'status':'ok', 'nresults':nres,
                         'ctime': time.time()-time0, 'dasquery': dasquery})
        except Exception as exc:
            print_exc(exc)
            head.update({'status': 'fail', 'reason': str(exc),
                         'ctime': time.time()-time0, 'dasquery': dasquery})
            data = []
        # report whether the raw cache still holds this query
        head.update({'incache':self.dasmgr.incache(dasquery, coll='cache')})
        return head, data

    def busy(self):
        """Report whether the server load exceeds its capacity threshold."""
        backlog = self.reqmgr.size() - self.taskmgr.nworkers()
        return backlog > self.queue_limit

    def busy_page(self, uinput=None):
        """Render the 'server busy' page with the search form on top."""
        notice = "<h3>DAS server is busy, please try later</h3>"
        return self.page(self.form(uinput) + notice)

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def cache(self, **kwargs):
        """
        DAS web cache interface. Fire up new process for new requests and
        record its pid. The client is in charge to keep track of pid.
        The new process uses DAS core call to request the data into cache.
        Since query are cached the repeated call with the same query
        has no cost to DAS core.

        Returns either a JSON data stream (query done/failed) or a bare
        pid string (query still being processed).
        """
        # remove expires records from merge collection
        self.dasmgr.rawcache.remove_expired('merge')

        # do not allow caching
        cherrypy.response.headers['Cache-Control'] = 'no-cache'
        cherrypy.response.headers['Pragma'] = 'no-cache'
        uinput = kwargs.get('input', '').strip()
        if  not uinput:
            head = {'status': 'fail', 'reason': 'No input found',
                    'args': kwargs, 'ctime': 0, 'input': uinput}
            data = []
            return self.datastream(dict(head=head, data=data))
        # normalize free-text input before parsing
        self.adjust_input(kwargs)
        pid    = kwargs.get('pid', '')
        inst   = kwargs.get('instance', self.dbs_global)
        uinput = kwargs.get('input', '')
        data   = []
        check, content = self.generate_dasquery(uinput, inst)
        if  check:
            head = dict(timestamp=time.time())
            head.update({'status': 'fail',
                         'reason': 'Fail to create DASQuery object',
                         'ctime': 0})
            return self.datastream(dict(head=head, data=data))
        dasquery = content # returned content is valid DAS query
        # query already completed: clean up and stream the results
        status, qhash = self.dasmgr.get_status(dasquery)
        if  status == 'ok':
            self.reqmgr.remove(dasquery.qhash)
            head, data = self.get_data(kwargs)
            return self.datastream(dict(head=head, data=data))
        kwargs['dasquery'] = dasquery.storage_query
        # refuse brand new requests while the server is overloaded
        if  not pid and self.busy():
            head = dict(timestamp=time.time())
            head.update({'status': 'busy', 'reason': 'DAS server is busy',
                         'ctime': 0})
            return self.datastream(dict(head=head, data=data))
        if  pid:
            # status poll for an existing request
            if  not self.pid_pat.match(str(pid)) or len(str(pid)) != 32:
                head = {'status': 'fail', 'reason': 'Invalid pid',
                        'args': kwargs, 'ctime': 0, 'input': uinput}
                data = []
                return self.datastream(dict(head=head, data=data))
            elif self.taskmgr.is_alive(pid):
                return pid
            else: # process is done, get data
                self.reqmgr.remove(pid)
                head, data = self.get_data(kwargs)
                return self.datastream(dict(head=head, data=data))
        else:
            # brand new request: apply per-client rate threshold
            config = self.dasconfig.get('cacherequests', {})
            thr = threshold(self.sitedbmgr, self.hot_thr, config)
            nhits = self.get_nhits()
            if  nhits > thr: # exceed threshold
                if  self.busy(): # put request onhold, server is busy
                    # defer proportionally to how far over threshold
                    tstamp = time.time() + 60*(nhits/thr) + (nhits%thr)
                    pid  = dasquery.qhash
                    self.reqmgr.add_onhold(\
                        pid, uinput, cherrypy.request.remote.ip, tstamp)
                    head = {'status':'onhold',
                            'mongo_query':dasquery.mongo_query,
                            'pid':pid, 'nresults':0, 'ctime':0,
                            'timestamp':time.time()}
                    data = []
                    return self.datastream(dict(head=head, data=data))
            # spawn the DAS core workflow and hand the pid back
            addr = cherrypy.request.headers.get('Remote-Addr')
            _evt, pid = self.taskmgr.spawn(\
                self.dasmgr.call, dasquery, addr, pid=dasquery.qhash)
            self.logdb(uinput) # put entry in log DB once we place a request
            self.reqmgr.add(pid, kwargs)
            return pid

    def get_page_content(self, kwargs, complete_msg=True):
        """Produce page content for the given request parameters."""
        try:
            view = kwargs.get('view', 'list')
            # plain view ignores pagination limits
            if  view == 'plain' and kwargs.has_key('limit'):
                del kwargs['limit']
            if  view in ['json', 'xml', 'plain'] and complete_msg:
                return 'Request comlpeted. Reload the page ...'
            head, data = self.get_data(kwargs)
            renderer = getattr(self, view + "view")
            return renderer(head, data)
        except HTTPError as _err:
            raise 
        except Exception as exc:
            print_exc(exc)
            msg = gen_error_msg(kwargs)
            return self.templatepage('das_error', msg=msg)

    @expose
    def makepy(self, dataset, instance):
        """Generate a CMSSW python snippet listing the files of *dataset*."""
        if  not re.compile('/.*/.*/.*').match(dataset):
            return self.error('Invalid dataset name')
        query = "file dataset=%s instance=%s | grep file.name" \
                % (dataset, instance)
        try:
            data = self.dasmgr.result(query, idx=0, limit=0)
        except Exception as exc:
            print_exc(exc)
            msg  = 'Exception: %s\n' % str(exc)
            msg += 'Unable to retrieve data for query=%s' % query
            return self.error(msg)
        # collect unique LFNs preserving the order of first appearance
        lfns = []
        for rec in data:
            lfn = DotDict(rec).get('file.name')
            if  lfn not in lfns:
                lfns.append(lfn)
        cherrypy.response.headers['Content-Type'] = "text/plain"
        return self.templatepage('das_files_py', lfnList=lfns, pfnList=[])

    @expose
    @checkargs(DAS_WEB_INPUTS)
    def request(self, **kwargs):
        """
        Request data from DAS cache.
        Serves the interactive web UI: either renders results right
        away (query already cached) or spawns the DAS workflow and
        returns a polling page driven by check_pid.
        """
        # remove expires records from merge collection
        self.dasmgr.rawcache.remove_expired('merge')

        # do not allow caching
        cherrypy.response.headers['Cache-Control'] = 'no-cache'
        cherrypy.response.headers['Pragma'] = 'no-cache'

        uinput  = kwargs.get('input', '').strip()
        if  not uinput:
            kwargs['reason'] = 'No input found'
            return self.redirect(**kwargs)

        time0   = time.time()
        # normalize free-text input before parsing
        self.adjust_input(kwargs)
        view    = kwargs.get('view', 'list')
        inst    = kwargs.get('instance', self.dbs_global)
        uinput  = kwargs.get('input', '')
        if  self.busy():
            return self.busy_page(uinput)
        # ahash lets check_pid recover the request args from the log DB
        ahash    = genkey(cherrypy.request.params)
        self.logdb(uinput)
        form    = self.form(uinput=uinput, instance=inst, view=view)
        check, content = self.generate_dasquery(uinput, inst)
        if  check:
            if  view == 'list' or view == 'table':
                return self.page(form + content, ctime=time.time()-time0)
            else:
                return content
        dasquery = content # returned content is valid DAS query
        status, qhash = self.dasmgr.get_status(dasquery)
        if  status == 'ok':
            # results already in cache; render them directly
            page = self.get_page_content(kwargs, complete_msg=False)
            ctime = (time.time()-time0)
            if  view == 'list' or view == 'table':
                return self.page(form + page, ctime=ctime)
            return page
        else:
            # launch the workflow and show either the polling page or,
            # if it finished immediately, the results
            kwargs['dasquery'] = dasquery.storage_query
            addr = cherrypy.request.headers.get('Remote-Addr')
            _evt, pid = self.taskmgr.spawn(self.dasmgr.call, dasquery, addr,
                                pid=dasquery.qhash)
            self.reqmgr.add(pid, kwargs)
            if  self.taskmgr.is_alive(pid):
                page = self.templatepage('das_check_pid', method='check_pid',
                        uinput=uinput, view=view, ahash=ahash,
                        base=self.base, pid=pid, interval=self.interval)
            else:
                page = self.get_page_content(kwargs)
                self.reqmgr.remove(pid)
        ctime = (time.time()-time0)
        return self.page(form + page, ctime=ctime)

    @expose
    def requests(self):
        """Return list of all current requests in DAS queue"""
        rows = ['<li>%s placed at %s<br/>%s</li>'
                    % (item['_id'], item['timestamp'], item['kwds'])
                for item in self.reqmgr.items()]
        if  rows:
            body  = "<ul>%s</ul>" % ''.join(rows)
            body += '<div>Total: %s requests</div>' % len(rows)
        else:
            body = "The request queue is empty"
        return self.page(body)

    @expose
    @checkargs(['pid', 'ahash'])
    def check_pid(self, pid, ahash):
        """
        Check status of given pid and return appropriate page content.
        This is a server callback function for ajaxCheckPid, see
        js/ajax_utils.js
        """
        cherrypy.response.headers['Cache-Control'] = 'no-cache'
        cherrypy.response.headers['Pragma'] = 'no-cache'
        img  = '<img src="%s/images/loading.gif" alt="loading"/>' % self.base
        page = ''
        try:
            if  self.taskmgr.is_alive(pid):
                # still running: show the spinner
                page = img + " processing PID=%s" % pid
            else:
                kwargs = self.reqmgr.get(pid)
                # stored DASQuery objects are not usable by
                # get_page_content; drop them
                if  kwargs and kwargs.has_key('dasquery'):
                    del kwargs['dasquery']
                # if no kwargs (another request delete it)
                # use logging DB to look-up user request via ahash
                if  not kwargs:
                    spec = {'ahash':ahash}
                    skey = [('ts', DESCENDING)]
                    res  = [r for r in self.logcol.find(spec).sort(skey)]
                    kwargs = res[0]['args']
                    self.adjust_input(kwargs)
                self.reqmgr.remove(pid)
                page = self.get_page_content(kwargs)
        except Exception as err:
            # on any failure clean up the request/task records and
            # report a generic error page
            msg = 'check_pid fails for pid=%s' % pid
            print dastimestamp('DAS WEB ERROR '), msg
            print_exc(err)
            self.reqmgr.remove(pid)
            self.taskmgr.remove(pid)
            return self.error(gen_error_msg({'pid':pid}), wrap=False)
        return page

    def listview(self, head, data):
        """Render results as the DAS list view."""
        return self.repmgr.listview(head=head, data=data)

    def tableview(self, head, data):
        """Render results as the DAS tabular view."""
        return self.repmgr.tableview(head=head, data=data)

    def plainview(self, head, data):
        """Render results as the DAS plain-text view."""
        return self.repmgr.plainview(head=head, data=data)

    def xmlview(self, head, data):
        """Render results as the DAS XML view."""
        return self.repmgr.xmlview(head=head, data=data)

    def jsonview(self, head, data):
        """Render results as the DAS JSON view."""
        return self.repmgr.jsonview(head=head, data=data)

    @exposedasjson
    @checkargs(['query', 'dbs_instance'])
    def autocomplete(self, **kwargs):
        """Autocomplete backend for the DAS web UI search box."""
        query   = kwargs.get("query", "").strip()
        result  = autocomplete_helper(query, self.dasmgr, self.daskeys)
        hints   = [r for r in result if r['value'].find('dataset=')!=-1]
        dbsinst = kwargs.get('dbs_instance', self.dbs_global)
        if  self.dataset_daemon and len(hints):
            # pick the daemon whose URL matches the requested instance;
            # only proceed on an unambiguous match
            urls = [u for u in self.dbsmgr.keys() if u.find(dbsinst) != -1]
            if  len(urls) == 1:
                daemon = self.dbsmgr[urls[0]]
                if  query.find('dataset=') != -1:
                    query = query.replace('dataset=', '')
                for name in daemon.find(query):
                    result.append({'css': 'ac-info',
                                   'value': 'dataset=%s' % name,
                                   'info': 'dataset'})
        return result
コード例 #31
0
ファイル: DASCacheModel.py プロジェクト: ktf/DAS
    def __init__(self, config):
        """Initialize the DAS cache REST model.

        Registers REST methods per HTTP verb, attaches to the
        WMCore/WebTools RESTModel handler when available, connects to
        DAS core and MongoDB (creating the capped 'logging.db'
        collection if missing) and starts the cache-manager worker
        thread.
        """
        self.config  = config 
        DASWebManager.__init__(self, config)
        self.version = __version__
        # method dispatch tables, keyed by HTTP verb
        self.methods = {}
        self.methods['GET']= {
            'request':
                {'args':['idx', 'limit', 'query', 'skey', 'order'],
                 'call': self.request, 'version':__version__},
            'nresults':
                {'args':['query'],
                 'call': self.nresults, 'version':__version__},
            'records':
                {'args':['query', 'count', 'collection'],
                 'call': self.records, 'version':__version__},
            'status':
                {'args':['query'],
                 'call': self.status, 'version':__version__},
        }
        self.methods['POST']= {'create':
                {'args':['query', 'expire'],
                 'call': self.create, 'version':__version__}}
        self.methods['PUT']= {'replace':
                {'args':['query', 'expire'],
                 'call': self.replace, 'version':__version__}}
        self.methods['DELETE']= {'delete':
                {'args':['query'],
                 'call': self.delete, 'version':__version__}}

        try:
            # WMCore/WebTools
            rest  = RESTModel(config)
            rest.methods = self.methods # set RESTModel methods
            self.model = self # re-reference model to my class
            self.model.handler = rest.handler # reference handler to RESTModel
            cdict = self.config.dictionary_()
            self.base = '/rest'
        except:
            # best-effort: run without the WMCore REST layer
            cdict = {}
            self.base = ''

        self.dascore  = DASCore()
        dbhost        = self.dascore.dasconfig['mongocache_dbhost']
        dbport        = self.dascore.dasconfig['mongocache_dbport']
        capped_size   = self.dascore.dasconfig['mongocache_capped_size']
        self.con      = Connection(dbhost, dbport)
        # create the capped logging collection on first run
        if  'logging' not in self.con.database_names():
            db = self.con['logging']
            options = {'capped':True, 'size': capped_size}
            db.create_collection('db', options)
            self.warning('Created logging.db, size=%s' % capped_size)
        self.col      = self.con['logging']['db']
        sleep         = cdict.get('sleep', 2)
        verbose       = cdict.get('verbose', None)
        iconfig       = {'sleep':sleep, 'verbose':verbose, 
                         'logger':self.dascore.logger}
        self.cachemgr = DASCacheMgr(iconfig)
        # worker thread drains the cache-manager queue via worker()
        thread.start_new_thread(self.cachemgr.worker, (worker, ))
        msg = 'DASCacheMode::init, host=%s, port=%s, capped_size=%s' \
                % (dbhost, dbport, capped_size)
        self.dascore.logger.debug(msg)
        print msg
コード例 #32
0
ファイル: das_cli.py プロジェクト: ktf/DAS
def main():
    """
    DAS command-line interface entry point (Python 2 variant).

    Parses command-line options and dispatches to one of several actions:
    --hash (print query decomposition), --services / --service (list DAS
    services or the keys of one service), a plain DAS-QL query (optionally
    run under the profiler), or --dasconfig (dump the DAS configuration).
    Always prints execution timing at the end.
    """
    optmgr = DASOptionParser()
    opts, _ = optmgr.getOpt()

    t0 = time.time()
    query = opts.query
    # every DAS query needs a data instance; default to cms_dbs_prod_global
    if  'instance' not in query:
        query += ' instance=cms_dbs_prod_global'
    debug = opts.verbose
    dascore = DASCore(debug=debug, nores=opts.noresults)
    if  opts.hash:
        # show how the DAS-QL query decomposes (mongo/storage queries and
        # which data-service APIs would serve it), then exit
        dasquery = DASQuery(query)
        mongo_query = dasquery.mongo_query
        service_map = dasquery.service_apis_map()
        str_query   = dasquery.storage_query
        print "---------------"
        print "DAS-QL query  :", query
        print "DAS query     :", dasquery
        print "Mongo query   :", mongo_query
        print "Storage query :", str_query
        print "Services      :\n"
        for srv, val in service_map.items():
            print "%s : %s\n" % (srv, ', '.join(val))
        sys.exit(0)
    sdict = dascore.keys()
    if  opts.services:
        # list all data-services known to DAS
        msg = "DAS services:"
        print msg
        print "-"*len(msg)
        keys = sdict.keys()
        keys.sort()
        for key in keys:
            print key
    elif  opts.service:
        # list the DAS keys served by one particular data-service
        msg = "DAS service %s:" % opts.service
        print msg
        print "-"*len(msg)
        keys = sdict[opts.service]
        keys.sort()
        for key in keys:
            print key
    elif query:

        idx    = opts.idx
        limit  = opts.limit
        output = opts.nooutput
        plain  = opts.plain

        if  opts.profile:
            # run the query under the Python profiler and print
            # cumulative-time statistics
            import cProfile # python profiler
            import pstats   # profiler statistics
            cmd  = 'run(dascore,query,idx,limit,output,plain)'
            cProfile.runctx(cmd, globals(), locals(), 'profile.dat')
            info = pstats.Stats('profile.dat')
            info.sort_stats('cumulative')
            info.print_stats()
        else:
            run(dascore, query, idx, limit, output, plain)
    elif opts.dasconfig:
        print pformat(dascore.dasconfig)
    else:
        print
        print "DAS CLI interface, no actions found,"
        print "please use --help for more options."
    timestamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    timer = get_das_timer()
    print "\nDAS execution time:\n"
    if  debug:
        # in verbose mode print per-call timing, ordered by the counter
        # recorded at invocation time
        timelist = []
        for _, timerdict in timer.items():
            counter = timerdict['counter']
            tag = timerdict['tag']
            exetime = timerdict['time']
            timelist.append((counter, tag, exetime))
        timelist.sort()
        for _, tag, exetime in timelist:
            print "%s %s sec" % (tag, round(exetime, 2))
    print "Total %s sec, %s" % (round(time.time()-t0, 2), timestamp)
コード例 #33
0
ファイル: das_cli.py プロジェクト: perrozzi/DAS
def main():
    """
    DAS command-line interface entry point.

    Parses command-line options and dispatches to one of several actions:
    --hash (print query decomposition), --services / --service (list DAS
    services or the keys of one service), --jsfile / --kfile (dump
    keyword-search / key-learning data), a plain DAS-QL query (optionally
    run under the profiler), or --dasconfig (dump the DAS configuration).
    Always prints execution timing at the end.
    """
    optmgr  = DASOptionParser()
    opts = optmgr.parser.parse_args()

    t0 = time.time()
    query = opts.query
    # every DAS query needs a data instance; default to prod/global
    if  'instance' not in query:
        query = ' instance=prod/global ' + query
    debug = opts.verbose
    dascore = DASCore(debug=debug, nores=opts.noresults)
    if  opts.hash:
        # show how the DAS-QL query decomposes (mongo/storage queries and
        # which data-service APIs would serve it), then exit
        dasquery = DASQuery(query)
        mongo_query = dasquery.mongo_query
        service_map = dasquery.service_apis_map()
        str_query   = dasquery.storage_query
        print("---------------")
        print("DAS-QL query  :", query)
        print("DAS query     :", dasquery)
        print("Mongo query   :", mongo_query)
        print("Storage query :", str_query)
        print("Services      :\n")
        for srv, val in service_map.items():
            print("%s : %s\n" % (srv, ', '.join(val)))
        sys.exit(0)
    sdict = dascore.keys()
    if  opts.services:
        # list all data-services known to DAS
        msg = "DAS services:"
        print(msg)
        print("-"*len(msg))
        for key in sorted(sdict.keys()):
            print(key)
    elif  opts.service:
        # list the DAS keys served by one particular data-service;
        # sorted() copes with any iterable (the previous in-place
        # .sort() required a list)
        msg = "DAS service %s:" % opts.service
        print(msg)
        print("-"*len(msg))
        for key in sorted(sdict[opts.service]):
            print(key)
    elif opts.jsfile:
        kws_js(dascore, query, opts.idx, opts.limit, opts.jsfile, debug)
        sys.exit(0)
    elif opts.kfile:
        keylearning_js(dascore, query, opts.kfile, debug)
        sys.exit(0)
    elif query:

        idx    = opts.idx
        limit  = opts.limit
        output = opts.nooutput
        plain  = opts.plain
        # NOTE: opts.qcache was read into a local here but never used;
        # dropped as dead code

        if  opts.profile:
            # run the query under the Python profiler and print
            # cumulative-time statistics
            import cProfile # python profiler
            import pstats   # profiler statistics
            cmd  = 'run(dascore,query,idx,limit,output,plain)'
            cProfile.runctx(cmd, globals(), locals(), 'profile.dat')
            info = pstats.Stats('profile.dat')
            info.sort_stats('cumulative')
            info.print_stats()
        else:
            run(dascore, query, idx, limit, output, plain)
    elif opts.dasconfig:
        print(pformat(dascore.dasconfig))
    else:
        print()
        print("DAS CLI interface, no actions found,")
        print("please use --help for more options.")
    timestamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    timer = get_das_timer()
    print("\nDAS execution time:\n")
    if  debug:
        # in verbose mode print per-call timing, ordered by the counter
        # recorded at invocation time
        timelist = []
        for _, timerdict in timer.items():
            counter = timerdict['counter']
            tag = timerdict['tag']
            exetime = timerdict['time']
            timelist.append((counter, tag, exetime))
        timelist.sort()
        for _, tag, exetime in timelist:
            print("%s %s sec" % (tag, round(exetime, 2)))
    print("Total %s sec, %s" % (round(time.time()-t0, 2), timestamp))
コード例 #34
0
ファイル: DASCacheModel.py プロジェクト: ktf/DAS
class DASCacheModel(DASWebManager):
    """
    DASCacheModel represents DAS cache RESTful interface.
    It supports POST/GET/DELETE/UPDATE methods who communicate with
    DAS caching systems. The input queries are placed into DAS cache
    queue and served via FIFO mechanism. 
    """
    def __init__(self, config):
        self.config  = config 
        DASWebManager.__init__(self, config)
        self.version = __version__
        self.methods = {}
        self.methods['GET']= {
            'request':
                {'args':['idx', 'limit', 'query', 'skey', 'order'],
                 'call': self.request, 'version':__version__},
            'nresults':
                {'args':['query'],
                 'call': self.nresults, 'version':__version__},
            'records':
                {'args':['query', 'count', 'collection'],
                 'call': self.records, 'version':__version__},
            'status':
                {'args':['query'],
                 'call': self.status, 'version':__version__},
        }
        self.methods['POST']= {'create':
                {'args':['query', 'expire'],
                 'call': self.create, 'version':__version__}}
        self.methods['PUT']= {'replace':
                {'args':['query', 'expire'],
                 'call': self.replace, 'version':__version__}}
        self.methods['DELETE']= {'delete':
                {'args':['query'],
                 'call': self.delete, 'version':__version__}}

        try:
            # WMCore/WebTools
            rest  = RESTModel(config)
            rest.methods = self.methods # set RESTModel methods
            self.model = self # re-reference model to my class
            self.model.handler = rest.handler # reference handler to RESTModel
            cdict = self.config.dictionary_()
            self.base = '/rest'
        except:
            cdict = {}
            self.base = ''

        self.dascore  = DASCore()
        dbhost        = self.dascore.dasconfig['mongocache_dbhost']
        dbport        = self.dascore.dasconfig['mongocache_dbport']
        capped_size   = self.dascore.dasconfig['mongocache_capped_size']
        self.con      = Connection(dbhost, dbport)
        if  'logging' not in self.con.database_names():
            db = self.con['logging']
            options = {'capped':True, 'size': capped_size}
            db.create_collection('db', options)
            self.warning('Created logging.db, size=%s' % capped_size)
        self.col      = self.con['logging']['db']
        sleep         = cdict.get('sleep', 2)
        verbose       = cdict.get('verbose', None)
        iconfig       = {'sleep':sleep, 'verbose':verbose, 
                         'logger':self.dascore.logger}
        self.cachemgr = DASCacheMgr(iconfig)
        thread.start_new_thread(self.cachemgr.worker, (worker, ))
        msg = 'DASCacheMode::init, host=%s, port=%s, capped_size=%s' \
                % (dbhost, dbport, capped_size)
        self.dascore.logger.debug(msg)
        print msg

    def logdb(self, query):
        """
        Make entry in Logging DB
        """
        qhash = genkey(query)
        headers = cherrypy.request.headers
        doc = dict(qhash=qhash, timestamp=time.time(),
                headers=cherrypy.request.headers,
                method=cherrypy.request.method,
                path=cherrypy.request.path_info,
                args=cherrypy.request.params,
                ip=cherrypy.request.remote.ip, 
                hostname=cherrypy.request.remote.name,
                port=cherrypy.request.remote.port)
        self.col.insert(doc)

    @checkargs
    def records(self, *args, **kwargs):
        """
        HTTP GET request.
        Retrieve records from provided collection.
        """
        data  = {'server_method':'request'}
        if  not kwargs.has_key('query'):
            data['status'] = 'fail'
            data['reason'] = 'no query is provided'
            return data
        # input query in JSON format, we should decode it using json.
        query = json.loads(kwargs.get('query'))
        coll  = kwargs.get('collection', 'merge')
        idx   = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 10) # getarg perfrom type convertion
        count = kwargs.get('count', 0)
        data.update({'status':'requested', 'query':kwargs['query'], 
                 'collection':coll, 'count': count})
        if  query['spec'].has_key('_id'):
            recid = query['spec']['_id']
            ids   = []
            if  type(recid) is types.StringType:
                ids = [ObjectId(recid)]
            elif type(recid) is types.ListType:
                ids = [ObjectId(r) for r in recid]
            spec = {'spec':{'_id':{'$in':ids}}}
        else: # look-up all records
            spec = {}
        self.logdb(query)
        try:
            gen = self.dascore.rawcache.get_from_cache\
                (spec, idx=idx, limit=limit, collection=coll, adjust=False)
            data['status'] = 'success'
            data['data']   = [r for r in gen]
        except:
            self.debug(traceback.format_exc())
            data['status'] = 'fail'
            data['reason'] =  sys.exc_type
        return data

    @checkargs
    def status(self, *args, **kwargs):
        """
        HTTP GET request. Check status of the input query in DAS.
        """
        data = {'server_method':'status'}
        if  kwargs.has_key('query'):
            query  = kwargs['query']
            self.logdb(query)
            query  = self.dascore.mongoparser.parse(query)
            status = self.dascore.get_status(query)
            if  not status:
                status = 'no data' 
            data.update({'status':status})
        else:
            data.update({'status': 'fail', 
                    'reason': 'Unsupported keys %s' % kwargs.keys() })
        return data

    @checkargs
    def nresults(self, *args, **kwargs):
        """
        HTTP GET request. Ask DAS for total number of records
        for provided query.
        """
        data = {'server_method':'nresults'}
        if  kwargs.has_key('query'):
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            data.update({'status':'success'})
            res = self.dascore.in_raw_cache_nresults(query)
            data.update({'status':'success', 'nresults':res})
        else:
            data.update({'status': 'fail', 
                    'reason': 'Unsupported keys %s' % kwargs.keys() })
        return data

    @checkargs
    def request(self, *args, **kwargs):
        """
        HTTP GET request.
        Retrieve results from DAS cache.
        """
        data = {'server_method':'request'}
        if  kwargs.has_key('query'):
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            idx   = getarg(kwargs, 'idx', 0)
            limit = getarg(kwargs, 'limit', 0)
            skey  = getarg(kwargs, 'skey', '')
            order = getarg(kwargs, 'order', 'asc')
            data.update({'status':'requested', 'idx':idx, 
                     'limit':limit, 'query':query,
                     'skey':skey, 'order':order})
#            if  self.dascore.in_raw_cache(query):
            res = self.dascore.result(query, idx, limit)
            if  type(res) is types.GeneratorType:
                result = []
                for item in res:
                    if  item not in result:
                        result.append(item)
                data['data'] = result
                tot = len(data['data'])
            else:
                data['data'] = res
                tot = 1
            data['status'] = 'success'
            data['nresults'] = tot
#            else:
#                data['status'] = 'not found'
        else:
            data.update({'status': 'fail', 
                    'reason': 'Unsupported keys %s' % kwargs.keys() })
        return data

    @checkargs
    def create(self, *args, **kwargs):
        """
        HTTP POST request. 
        Requests the server to create a new resource
        using the data enclosed in the request body.
        Creates new entry in DAS cache for provided query.
        """
        data = {'server_method':'create'}
        if  kwargs.has_key('query'):
            query  = kwargs['query']
            self.logdb(query)
            query  = self.dascore.mongoparser.parse(query)
            expire = getarg(kwargs, 'expire', 600)
            try:
                status = self.cachemgr.add(query, expire)
                data.update({'status':status, 'query':query, 'expire':expire})
            except:
                data.update({'exception':traceback.format_exc(), 
                             'status':'fail'})
        else:
            data.update({'status': 'fail', 
                    'reason': 'Unsupported keys %s' % kwargs.keys() })
        return data

    @checkargs
    def replace(self, *args, **kwargs):
        """
        HTTP PUT request.
        Requests the server to replace an existing
        resource with the one enclosed in the request body.
        Replace existing query in DAS cache.
        """
        data = {'server_method':'replace'}
        if  kwargs.has_key('query'):
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            try:
                self.dascore.remove_from_cache(query)
            except:
                msg  = traceback.format_exc()
                data.update({'status':'fail', 'query':query, 'exception':msg})
                return data
            expire = getarg(kwargs, 'expire', 600)
            try:
                status = self.cachemgr.add(query, expire)
                data.update({'status':status, 'query':query, 'expire':expire})
            except:
                data.update({'status':'fail', 'query':query,
                        'exception':traceback.format_exc()})
        else:
            data.update({'status': 'fail', 
                    'reason': 'Unsupported keys %s' % kwargs.keys() })
        return data

    @checkargs
    def delete(self, *args, **kwargs):
        """
        HTTP DELETE request.
        Delete input query in DAS cache
        """
        data = {'server_method':'delete'}
        if  kwargs.has_key('query'):
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            data.update({'status':'requested', 'query':query})
            try:
                self.dascore.remove_from_cache(query)
                data.update({'status':'success'})
            except:
                msg  = traceback.format_exc()
                data.update({'status':'fail', 'exception':msg})
        else:
            data.update({'status': 'fail', 
                    'reason': 'Unsupported keys %s' % kwargs.keys() })
        return data

    @exposejson
    def rest(self, *args, **kwargs):
        """
        RESTful interface. We use args tuple as access method(s), e.g.
        args = ('method',) and kwargs to represent input parameters.
        """
        request = cherrypy.request.method
        if  request not in self.methods.keys():
            msg = "Usupported request '%s'" % requset
            return {'error': msg}
        method  = args[0]
        if  method not in self.methods[request].keys():
            msg  = "Unsupported method '%s'" % method
            return {'error': msg}
        if  request == 'POST':
            if  cherrypy.request.body:
                body = cherrypy.request.body.read()
                try:
                    kwargs = json.loads(body)
                except:
                    msg = "Unable to load body request"
                    return {'error': msg}
        return getattr(self, method)(kwargs)
コード例 #35
0
ファイル: DASCacheModel.py プロジェクト: perrozzi/DAS
class DASCacheModel(DASWebManager):
    """
    DASCacheModel represents DAS cache RESTful interface.
    It supports POST/GET/DELETE/UPDATE methods who communicate with
    DAS caching systems. The input queries are placed into DAS cache
    queue and served via FIFO mechanism.
    """
    def __init__(self, config):
        self.config = config
        DASWebManager.__init__(self, config)
        self.version = __version__
        # map each HTTP verb to the REST methods (and their arguments)
        # it exposes; consumed by WMCore's RESTModel below
        self.methods = {}
        self.methods['GET'] = {
            'request': {
                'args': ['idx', 'limit', 'query', 'skey', 'order'],
                'call': self.request,
                'version': __version__
            },
            'nresults': {
                'args': ['query'],
                'call': self.nresults,
                'version': __version__
            },
            'records': {
                'args': ['query', 'count', 'collection'],
                'call': self.records,
                'version': __version__
            },
            'status': {
                'args': ['query'],
                'call': self.status,
                'version': __version__
            },
        }
        self.methods['POST'] = {
            'create': {
                'args': ['query', 'expire'],
                'call': self.create,
                'version': __version__
            }
        }
        self.methods['PUT'] = {
            'replace': {
                'args': ['query', 'expire'],
                'call': self.replace,
                'version': __version__
            }
        }
        self.methods['DELETE'] = {
            'delete': {
                'args': ['query'],
                'call': self.delete,
                'version': __version__
            }
        }

        try:
            # WMCore/WebTools
            rest = RESTModel(config)
            rest.methods = self.methods  # set RESTModel methods
            self.model = self  # re-reference model to my class
            self.model.handler = rest.handler  # reference handler to RESTModel
            cdict = self.config.dictionary_()
            self.base = '/rest'
        except:
            # stand-alone mode: no WebTools framework available
            cdict = {}
            self.base = ''

        self.dascore = DASCore()
        dbhost = self.dascore.dasconfig['mongocache_dbhost']
        dbport = self.dascore.dasconfig['mongocache_dbport']
        capped_size = self.dascore.dasconfig['mongocache_capped_size']
        self.con = Connection(dbhost, dbport)
        # create the capped logging collection once; a capped collection
        # gives bounded, FIFO-style storage for request logs
        if 'logging' not in self.con.database_names():
            db = self.con['logging']
            options = {'capped': True, 'size': capped_size}
            db.create_collection('db', options)
            self.warning('Created logging.db, size=%s' % capped_size)
        self.col = self.con['logging']['db']
        sleep = cdict.get('sleep', 2)
        verbose = cdict.get('verbose', None)
        iconfig = {
            'sleep': sleep,
            'verbose': verbose,
            'logger': self.dascore.logger
        }
        self.cachemgr = DASCacheMgr(iconfig)
        # background worker thread drains the cache-manager queue
        thread.start_new_thread(self.cachemgr.worker, (worker, ))
        msg = 'DASCacheMode::init, host=%s, port=%s, capped_size=%s' \
                % (dbhost, dbport, capped_size)
        self.dascore.logger.debug(msg)
        print(msg)

    def logdb(self, query):
        """
        Make entry in Logging DB: record the query hash together with
        request metadata (headers, method, path, args, client address).
        """
        qhash = genkey(query)
        doc = dict(qhash=qhash,
                   timestamp=time.time(),
                   headers=cherrypy.request.headers,
                   method=cherrypy.request.method,
                   path=cherrypy.request.path_info,
                   args=cherrypy.request.params,
                   ip=cherrypy.request.remote.ip,
                   hostname=cherrypy.request.remote.name,
                   port=cherrypy.request.remote.port)
        self.col.insert(doc)

    @checkargs
    def records(self, *args, **kwargs):
        """
        HTTP GET request.
        Retrieve records from provided collection.
        """
        data = {'server_method': 'request'}
        if 'query' not in kwargs:
            data['status'] = 'fail'
            data['reason'] = 'no query is provided'
            return data
        # input query in JSON format, we should decode it using json.
        query = json.loads(kwargs.get('query'))
        coll = kwargs.get('collection', 'merge')
        idx = getarg(kwargs, 'idx', 0)
        limit = getarg(kwargs, 'limit', 10)  # getarg performs type conversion
        count = kwargs.get('count', 0)
        data.update({
            'status': 'requested',
            'query': kwargs['query'],
            'collection': coll,
            'count': count
        })
        if '_id' in query['spec']:
            recid = query['spec']['_id']
            ids = []
            # json.loads yields str in Python 3; the former `is bytes`
            # check could never match a JSON-decoded record id
            if isinstance(recid, str):
                ids = [ObjectId(recid)]
            elif isinstance(recid, list):
                ids = [ObjectId(r) for r in recid]
            spec = {'spec': {'_id': {'$in': ids}}}
        else:  # look-up all records
            spec = {}
        self.logdb(query)
        try:
            gen = self.dascore.rawcache.get_from_cache\
                (spec, idx=idx, limit=limit, collection=coll, adjust=False)
            data['status'] = 'success'
            data['data'] = [r for r in gen]
        except:
            self.debug(traceback.format_exc())
            data['status'] = 'fail'
            data['reason'] = sys.exc_info()[0]
        return data

    @checkargs
    def status(self, *args, **kwargs):
        """
        HTTP GET request. Check status of the input query in DAS.
        """
        data = {'server_method': 'status'}
        if 'query' in kwargs:
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            status = self.dascore.get_status(query)
            if not status:
                status = 'no data'
            data.update({'status': status})
        else:
            data.update({
                'status': 'fail',
                'reason': 'Unsupported keys %s' % kwargs.keys()
            })
        return data

    @checkargs
    def nresults(self, *args, **kwargs):
        """
        HTTP GET request. Ask DAS for total number of records
        for provided query.
        """
        data = {'server_method': 'nresults'}
        if 'query' in kwargs:
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            res = self.dascore.in_raw_cache_nresults(query)
            data.update({'status': 'success', 'nresults': res})
        else:
            data.update({
                'status': 'fail',
                'reason': 'Unsupported keys %s' % kwargs.keys()
            })
        return data

    @checkargs
    def request(self, *args, **kwargs):
        """
        HTTP GET request.
        Retrieve results from DAS cache.
        """
        data = {'server_method': 'request'}
        if 'query' in kwargs:
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            idx = getarg(kwargs, 'idx', 0)
            limit = getarg(kwargs, 'limit', 0)
            skey = getarg(kwargs, 'skey', '')
            order = getarg(kwargs, 'order', 'asc')
            data.update({
                'status': 'requested',
                'idx': idx,
                'limit': limit,
                'query': query,
                'skey': skey,
                'order': order
            })
            res = self.dascore.result(query, idx, limit)
            if type(res) is types.GeneratorType:
                # de-duplicate generator output while preserving order
                result = []
                for item in res:
                    if item not in result:
                        result.append(item)
                data['data'] = result
                tot = len(data['data'])
            else:
                data['data'] = res
                tot = 1
            data['status'] = 'success'
            data['nresults'] = tot
        else:
            data.update({
                'status': 'fail',
                'reason': 'Unsupported keys %s' % kwargs.keys()
            })
        return data

    @checkargs
    def create(self, *args, **kwargs):
        """
        HTTP POST request. 
        Requests the server to create a new resource
        using the data enclosed in the request body.
        Creates new entry in DAS cache for provided query.
        """
        data = {'server_method': 'create'}
        if 'query' in kwargs:
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            expire = getarg(kwargs, 'expire', 600)
            try:
                status = self.cachemgr.add(query, expire)
                data.update({
                    'status': status,
                    'query': query,
                    'expire': expire
                })
            except:
                data.update({
                    'exception': traceback.format_exc(),
                    'status': 'fail'
                })
        else:
            data.update({
                'status': 'fail',
                'reason': 'Unsupported keys %s' % kwargs.keys()
            })
        return data

    @checkargs
    def replace(self, *args, **kwargs):
        """
        HTTP PUT request.
        Requests the server to replace an existing
        resource with the one enclosed in the request body.
        Replace existing query in DAS cache.
        """
        data = {'server_method': 'replace'}
        if 'query' in kwargs:
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            # first drop the existing cache entry, then re-queue the query
            try:
                self.dascore.remove_from_cache(query)
            except:
                msg = traceback.format_exc()
                data.update({
                    'status': 'fail',
                    'query': query,
                    'exception': msg
                })
                return data
            expire = getarg(kwargs, 'expire', 600)
            try:
                status = self.cachemgr.add(query, expire)
                data.update({
                    'status': status,
                    'query': query,
                    'expire': expire
                })
            except:
                data.update({
                    'status': 'fail',
                    'query': query,
                    'exception': traceback.format_exc()
                })
        else:
            data.update({
                'status': 'fail',
                'reason': 'Unsupported keys %s' % kwargs.keys()
            })
        return data

    @checkargs
    def delete(self, *args, **kwargs):
        """
        HTTP DELETE request.
        Delete input query in DAS cache
        """
        data = {'server_method': 'delete'}
        if 'query' in kwargs:
            query = kwargs['query']
            self.logdb(query)
            query = self.dascore.mongoparser.parse(query)
            data.update({'status': 'requested', 'query': query})
            try:
                self.dascore.remove_from_cache(query)
                data.update({'status': 'success'})
            except:
                msg = traceback.format_exc()
                data.update({'status': 'fail', 'exception': msg})
        else:
            data.update({
                'status': 'fail',
                'reason': 'Unsupported keys %s' % kwargs.keys()
            })
        return data

    @exposejson
    def rest(self, *args, **kwargs):
        """
        RESTful interface. We use args tuple as access method(s), e.g.
        args = ('method',) and kwargs to represent input parameters.
        """
        request = cherrypy.request.method
        if request not in self.methods.keys():
            # fixed: this branch previously referenced the undefined name
            # 'requset' and raised NameError instead of reporting the error
            msg = "Unsupported request '%s'" % request
            return {'error': msg}
        method = args[0]
        if method not in self.methods[request].keys():
            msg = "Unsupported method '%s'" % method
            return {'error': msg}
        if request == 'POST':
            # POST parameters arrive JSON-encoded in the request body
            if cherrypy.request.body:
                body = cherrypy.request.body.read()
                try:
                    kwargs = json.loads(body)
                except:
                    msg = "Unable to load body request"
                    return {'error': msg}
        return getattr(self, method)(kwargs)
コード例 #36
0
ファイル: DASSearch.py プロジェクト: dmwm/DAS
class DASSearch(DASWebManager):
    """
    DAS web interface.
    """
    def __init__(self, config=None):
        # bug fix: mutable default argument `config={}` is shared across
        # calls; use None sentinel and create a fresh dict per instance
        if config is None:
            config = {}
        DASWebManager.__init__(self, config)
        try:
            # try what is supplied from WebTools framework
            cdict         = self.config.dictionary_()
            self.cachesrv = cdict.get('cache_server_url',
                                'http://localhost:8211')
            self.base     = '/dascontrollers'
        except Exception:
            # stand-alone version: fall back to the plain config dict
            self.cachesrv = config.get('cache_server_url',
                                'http://localhost:8211')
            self.base     = '/das'
        self.dasmgr     = DASCore()            # DAS core engine
        self.daskeys    = self.dasmgr.das_keys()
        self.daskeys.sort()
        self.dasmapping = self.dasmgr.mapping  # DAS key/API mapping
        self.daslogger  = self.dasmgr.logger
        # page views served by index(); each maps to a <view>view method
        self.pageviews  = ['xml', 'list', 'json', 'yuijson']
        msg = "DASSearch::init is started with base=%s" % self.base
        self.daslogger.debug(msg)
        print(msg)

    def top(self):
        """Render the shared masthead placed at the top of every DAS page."""
        masthead = self.templatepage('das_top', base=self.base)
        return masthead

    def bottom(self, response_div=True):
        """Render the shared footer placed at the bottom of every DAS page."""
        footer = self.templatepage('das_bottom', div=response_div)
        return footer

    def page(self, content, ctime=None, response_div=True):
        """
        Assemble a complete DAS web page: masthead, the given content and
        a footer carrying the registered service names, a GMT timestamp
        and the elapsed call time.
        """
        page  = self.top()
        page += content
        timestamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
        services = self.dasmgr.keys()
        # join service names idiomatically instead of appending "%s, " per
        # key and slicing off the trailing comma
        srv = ", ".join(services.keys())
        page += self.templatepage('das_bottom', ctime=ctime, services=srv,
                                  timestamp=timestamp, div=response_div)
        return page

    @expose
    def faq(self, *args, **kwargs):
        """Render the DAS FAQ page."""
        operators   = ', '.join(das_operators())
        aggregators = ', '.join(das_aggregators())
        content = self.templatepage('das_faq',
                operators=operators, aggregators=aggregators)
        return self.page(content, response_div=False)

    @expose
    def help(self, *args, **kwargs):
        """Render the DAS help page."""
        content = self.templatepage('das_help')
        return self.page(content, response_div=False)

    @expose
    def cli(self, *args, **kwargs):
        """Serve the DAS command-line client script as a plain-text download."""
        dasroot = os.environ['DAS_ROOT']
        clifile = os.path.join(dasroot,
                'src/python/DAS/tools/das_cache_client.py')
        return serve_file(clifile, content_type='text/plain')

    @expose
    def services(self, *args, **kwargs):
        """Render the DAS services page: systems, their keys and APIs."""
        dasdict = {}
        daskeys = []
        for system, keys in self.dasmgr.mapping.daskeys().items():
            # map each das key of this system to its lookup keys
            keymap = {key: self.dasmgr.mapping.lookup_keys(system, key)
                      for key in keys}
            for key in keys:
                if key not in daskeys:
                    daskeys.append(key)
            dasdict[system] = dict(keys=dict(keymap),
                apis=self.dasmgr.mapping.list_apis(system))
        mapreduce = list(self.dasmgr.rawcache.get_map_reduce())
        content = self.templatepage('das_services', dasdict=dasdict,
                        daskeys=daskeys, mapreduce=mapreduce)
        return self.page(content, response_div=False)

    @expose
    def api(self, name, **kwargs):
        """
        Render the DAS mapping record for the given API name, shown as
        JSON (default), pretty-printed code, or YAML.
        """
        record = self.dasmgr.mapping.api_info(name)
        show   = kwargs.get('show', 'json')
        page   = "<b>DAS mapping record</b>"
        if show == 'json':
            page += self.templatepage('das_json',
                        jsoncode=json2html(record, ""))
        elif show == 'code':
            page += self.templatepage('das_code',
                        code=pformat(record, indent=1, width=100))
        else:
            page += self.templatepage('das_code',
                        code=yaml.dump(record, width=100, indent=4,
                                       default_flow_style=False))
        return self.page(page, response_div=False)

    @expose
    def default(self, *args, **kwargs):
        """
        Default method: delegate any unmatched URL to index().

        Bug fix: the original called self.index(args, kwargs), passing the
        args tuple and kwargs dict as two positional arguments; index()
        then received bogus positional args and never saw the kwargs.
        """
        return self.index(*args, **kwargs)

    def check_input(self, uinput):
        """
        Check provided input for valid DAS keys.

        Return the das_ambiguous error page when the input is empty or
        contains a word that matches no known DAS key; return None when
        the input looks valid.
        """
        error = self.templatepage('das_ambiguous',
                    input=uinput, entities=', '.join(self.daskeys))
        if not uinput:
            return error
        # check provided input. If at least one word is not part of the
        # known das keys, return the ambiguous template.
        mongo_query = self.dasmgr.mongoparser.parse(uinput)
        fields = mongo_query.get('fields', []) or []
        # bug fix: on Python 3 dict.keys() is a view and `list + view`
        # raises TypeError; materialize the keys first
        words = fields + list(mongo_query.get('spec', {}).keys())
        if not words:
            return error
        for word in words:
            # bug fix: original looped over undefined global `das_keys`;
            # the key list lives on the instance as self.daskeys
            if not any(word.find(key) != -1 for key in self.daskeys):
                return error
        return None

    @expose
    def index(self, *args, **kwargs):
        """
        Represent the DAS web interface.

        Uses the das_searchform template for the input form and yui_table
        for the output table widget. With no arguments it renders the bare
        search form; otherwise it validates the user input and dispatches
        to the requested page view via the '<view>view' method.
        """
        try:
            if  not args and not kwargs:
#                msg  = self.templatepage('das_help', 
#                        services    = ', '.join(self.dasmgr.keys()),
#                        keywords    = ', '.join(self.dasmgr.das_keys()),
#                        operators   = ', '.join(das_operators()),
#                        aggregators = ', '.join(das_aggregators()),
#                        filters     = ', '.join(das_filters()) 
#                        )
                page = self.form()
                return self.page(page)
            uinput  = getarg(kwargs, 'input', '')
            # check_input returns an error page for bad input, None otherwise
            results = self.check_input(uinput)
            if  results:
                return self.page(self.form() + results)
            view = getarg(kwargs, 'view', 'list')
            if  args:
                # NOTE(review): args[0][0] dispatches on the first element of
                # the first path component (a single character when args[0]
                # is a string) — looks suspicious, confirm intended behavior
                return getattr(self, args[0][0])(args[1])
            if  view not in self.pageviews:
                raise Exception("Page view '%s' is not supported" % view)
            # dispatch to xmlview/listview/jsonview/...; NOTE(review): for
            # view='yuijson' this builds 'yuijsonview', but the handler
            # method below is named 'yuijson' — verify
            return getattr(self, '%sview' % view)(kwargs)
        except:
            # boundary handler: any failure becomes a standard error page
            return self.error(self.gen_error_msg(kwargs))

    @expose
    def form(self, uinput=None, msg=None):
        """Render the DAS search input form."""
        return self.templatepage('das_searchform', input=uinput, msg=msg,
                                 base=self.base)

    def gen_error_msg(self, kwargs):
        """
        Build the standard error message for failed DAS requests and log
        the current traceback. Angle brackets are stripped so the text can
        be embedded in HTML safely.
        """
        self.daslogger.error(traceback.format_exc())
        parts  = ["My request to DAS is failed\n\n", "Input parameters:\n"]
        parts += ['%s: %s\n' % (key, val) for key, val in kwargs.items()]
        parts.append("Exception type: %s\nException value: %s\nTime: %s" \
                    % (sys.exc_info()[0], sys.exc_info()[1], web_time()))
        error = "".join(parts)
        return error.replace("<", "").replace(">", "")

    @expose
    def error(self, msg):
        """Render a full DAS error page for the given message."""
        content = self.templatepage('das_error', msg=msg)
        return self.page(self.form() + content)

    @exposedasjson
    def wrap2dasjson(self, data):
        """Pass data through the DAS JSON response decorator unchanged."""
        result = data
        return result

    @exposedasplist
    def wrap2dasxml(self, data):
        """Pass data through the DAS XML response decorator unchanged."""
        result = data
        return result

    @expose
    def records(self, *args, **kwargs):
        """
        Retrieve DAS record id's.

        With a record id in args (optionally followed by an output format,
        'xml' or 'json') fetch that record; with '_id' in kwargs fetch the
        matching record; otherwise list all record ids with pagination.
        """
        try:
            recordid = None
            fmt = ''  # renamed from `format` to avoid shadowing the builtin
            if args:
                recordid = args[0]
                query = dict(fields=None, spec={'_id': recordid})
                if len(args) == 2:
                    fmt = args[1]
            elif kwargs and '_id' in kwargs:
                query = dict(fields=None, spec={'_id': kwargs['_id']})
            else:  # return all ids
                query = dict(fields=None, spec={})

            nresults = self.nresults(query)
            time0    = time.time()
            url      = self.cachesrv
            idx      = getarg(kwargs, 'idx', 0)
            limit    = getarg(kwargs, 'limit', 10)
            show     = getarg(kwargs, 'show', 'json')
            coll     = getarg(kwargs, 'collection', 'merge')
            params   = {'query': json.dumps(query), 'idx': idx,
                        'limit': limit, 'collection': coll}
            path     = '/rest/records'
            headers  = {"Accept": "application/json"}
            try:
                data = urllib2_request('GET', url+path, params, headers=headers)
                result = json.loads(data)
            except Exception:
                self.daslogger.error(traceback.format_exc())
                result = {'status':'fail', 'reason':traceback.format_exc()}
            res = ""
            if result['status'] == 'success':
                if recordid:  # we got an id: render each row per `show` mode
                    for row in result['data']:
                        if show == 'json':
                            jsoncode = {'jsoncode': json2html(row, "")}
                            res += self.templatepage('das_json', **jsoncode)
                        elif show == 'code':
                            code = pformat(row, indent=1, width=100)
                            res += self.templatepage('das_code', code=code)
                        else:
                            code = yaml.dump(row, width=100, indent=4,
                                        default_flow_style=False)
                            res += self.templatepage('das_code', code=code)
                else:
                    for row in result['data']:
                        rid = row['_id']
                        del row['_id']
                        record = dict(id=rid, daskeys=', '.join(row))
                        res += self.templatepage('das_record', **record)
            else:
                # bug fix: original did `res = result['status']` then checked
                # 'reason' *in the status string* and indexed the string;
                # inspect the result dict instead
                if 'reason' in result:
                    return self.error(result['reason'])
                # bug fix: original message had no %s placeholder (and a typo),
                # so kwargs were silently dropped from the error text
                msg = 'Unknown error, kwargs=%s' % kwargs
                return self.error(msg)
            if recordid:
                if fmt:
                    if fmt == 'xml':
                        return self.wrap2dasxml(result['data'])
                    elif fmt == 'json':
                        return self.wrap2dasjson(result['data'])
                    else:
                        return self.error('Unsupported data format %s' % fmt)
                page = res
            else:
                url   = '/das/records?'
                idict = dict(nrows=nresults, idx=idx,
                            limit=limit, results=res, url=url)
                page  = self.templatepage('das_pagination', **idict)

            form  = self.form(uinput="")
            ctime = (time.time()-time0)
            return self.page(form + page, ctime=ctime)
        except Exception:
            return self.error(self.gen_error_msg(kwargs))

    def nresults(self, kwargs):
        """
        Ask the cache server for the number of results of the given query;
        return that count, or -1 when the request fails or the server does
        not report success.
        """
        uinput  = getarg(kwargs, 'input', '')
        headers = {"Accept": "application/json"}
        try:
            data = urllib2_request('GET', self.cachesrv + '/rest/nresults',
                                   {'query': uinput}, headers=headers)
            record = json.loads(data)
        except:
            self.daslogger.error(traceback.format_exc())
            record = {'status':'fail', 'reason':traceback.format_exc()}
        if record['status'] == 'success':
            return record['nresults']
        msg = "nresults returns status: %s" % str(record)
        self.daslogger.info(msg)
        return -1

    def send_request(self, method, kwargs):
        """
        Send a GET or POST request to the cache server with query
        parameters extracted from kwargs; return the decoded JSON reply,
        or a fail record when the request/decoding errors out.
        """
        if method == 'POST':
            path = '/rest/create'
        elif method == 'GET':
            path = '/rest/request'
        else:
            raise Exception('Unsupported method %s' % method)
        fmt    = getarg(kwargs, 'format', '')  # extracted but unused upstream
        params = {'query': getarg(kwargs, 'input', ''),
                  'idx':   getarg(kwargs, 'idx', 0),
                  'limit': getarg(kwargs, 'limit', 10),
                  'skey':  getarg(kwargs, 'sort', ''),
                  'order': getarg(kwargs, 'dir', 'asc')}
        headers = {'Accept': 'application/json',
                   'Content-type': 'application/json'}
        try:
            data = urllib2_request(method, self.cachesrv + path,
                                   params, headers=headers)
            result = json.loads(data)
        except:
            self.daslogger.error(traceback.format_exc())
            result = {'status':'fail', 'reason':traceback.format_exc()}
        return result

    def result(self, kwargs):
        """
        Invoke the DAS search call via the cache server and return the
        list of result rows; empty list unless the reply reports success.
        """
        reply = self.send_request('GET', kwargs)
        data  = json.loads(reply) if type(reply) is bytes else reply
        if data['status'] == 'success':
            return data['data']
        return []
        
    @exposedasplist
    def xmlview(self, kwargs):
        """Return query results rendered through the DAS XML decorator."""
        return self.result(kwargs)

    @exposedasjson
    def jsonview(self, kwargs):
        """Return query results rendered through the DAS JSON decorator."""
        return self.result(kwargs)

    def convert2ui(self, idict):
        """
        Generator converting an input row (dict) into (ui-key, value)
        pairs for presentation; DAS bookkeeping keys are skipped. When a
        presentation mapping cannot be applied, the raw key/value is
        yielded as a fallback.
        """
        bookkeeping = ('das', '_id', 'das_id')
        for key in idict.keys():
            if key in bookkeeping:
                continue
            for item in self.dasmapping.presentation(key):
                try:
                    daskey, uikey = item['das'], item['ui']
                    for value in access(idict, daskey):
                        yield uikey, value
                except:
                    yield key, idict[key]

    @expose
    def listview(self, kwargs):
        """
        Provide DAS list view.

        Checks query status against the cache server; when no data is
        cached yet it places a POST request and shows the status page.
        Otherwise each row is rendered with its UI keys plus a
        json/code/yaml dump inside a das_row template.
        """
        # force to load the page all the time
        cherrypy.response.headers['Cache-Control'] = 'no-cache'
        cherrypy.response.headers['Pragma'] = 'no-cache'

        time0   = time.time()
        ajaxreq = getarg(kwargs, 'ajax', 0)
        uinput  = getarg(kwargs, 'input', '')
        limit   = getarg(kwargs, 'limit', 10)
        show    = getarg(kwargs, 'show', 'json')
        form    = self.form(uinput=uinput)
        # self.status sends request to Cache Server
        # Cache Server uses das_core to retrieve status
        status  = self.status(input=uinput, ajax=0)
        if status == 'no data':
            # no data in raw cache, send POST request
            self.send_request('POST', kwargs)
            ctime = (time.time()-time0)
            page  = self.status(input=uinput)
            return self.page(form + page, ctime=ctime)
        elif status == 'fail':
            kwargs['reason'] = 'Unable to get status from data-service'
            return self.error(self.gen_error_msg(kwargs))

        total = self.nresults(kwargs)
        rows  = self.result(kwargs)
        page  = self.templatepage('das_nrecords', nrows=total, limit=limit)
        style = "white"
        for row in rows:
            rid   = row['_id']  # renamed from `id` (shadowed the builtin)
            page += '<div class="%s"><hr class="line" />' % style
            gen   = self.convert2ui(row)
            for uikey, value in [k for k, g in groupby(gen)]:
                page += "<b>%s</b>: %s<br />" % (uikey, value)
            if show == 'json':
                jsonhtml = self.templatepage('das_json',
                                jsoncode=json2html(row, ""))
                page += self.templatepage('das_row',
                                data=jsonhtml, id=rid, rec_id=rid)
            elif show == 'code':
                code = pformat(row, indent=1, width=100)
                data = self.templatepage('das_code', code=code)
                # bug fix: original built {'data':..., 'id':..., rec_id: id}
                # where bare `rec_id` is an undefined name (NameError on
                # every 'code'/'yaml' rendering); key must be 'rec_id'
                page += self.templatepage('das_row',
                                data=data, id=rid, rec_id=rid)
            else:
                code = yaml.dump(row, width=100, indent=4,
                                default_flow_style=False)
                data = self.templatepage('das_code', code=code)
                page += self.templatepage('das_row',
                                data=data, id=rid, rec_id=rid)
            page += '</div>'
        ctime = (time.time()-time0)
        return self.page(form + page, ctime=ctime)

    @exposetext
    def plainview(self, kwargs):
        """
        Provide DAS plain-text view: one result row per line, with list
        brackets and single quotes stripped.
        """
        # bug fix: self.result() returns a single list of rows; the
        # original unpacked it into (rows, total, form) which raises
        # ValueError at runtime (siblings all use single assignment)
        rows = self.result(kwargs)
        page = ""
        for item in rows:
            item  = str(item).replace('[','').replace(']','')
            page += "%s\n" % item.replace("'","")
        return page

    @exposejson
    def yuijson(self, **kwargs):
        """
        Provide JSON in YUI compatible format to be used in DynamicData
        table widget, see
        http://developer.yahoo.com/yui/examples/datatable/dt_dynamicdata.html

        Flattens each result row's 'das' provenance entries into a dict of
        ui-key -> value and returns the YUI pagination envelope.
        """
        rows = self.result(kwargs)
        rowlist = []
        # NOTE(review): `id` shadows the builtin; serves as the row counter
        id = 0
        for row in rows:
            das = row['das']
            # normalize: a single das record becomes a one-element list
            if  type(das) is dict:
                das = [das]
            resdict = {}
            for jdx in range(0, len(das)):
                item = das[jdx]
                resdict[id] = id
                for idx in range(0, len(item['system'])):
                    api    = item['api'][idx]
                    system = item['system'][idx]
                    key    = item['selection_keys'][idx]
                    data   = row[key]
                    # peel nested lists: first by das-record index, then by
                    # system index — assumes parallel list layout, TODO confirm
                    if  type(data) is list:
                        data = data[jdx]
                    if  type(data) is list:
                        data = data[idx]
                    # I need to extract from DAS object the values for UI keys
                    # NOTE(review): this loop rebinds `item` (the das record
                    # above) — works only because item is not reused after
                    for item in self.dasmapping.presentation(key):
                        daskey = item['das']
                        uiname = item['ui']
                        if  uiname not in resdict:
                            resdict[uiname] = ""
                        # look at key attributes, which may be compound as well
                        # e.g. block.replica.se
                        if  type(data) is dict:
                            result = dict(data)
                        elif type(data) is list:
                            result = list(data)
                        else:
                            result = data
                        res = ""
                        try:
                            # walk the compound das key path below its root
                            for elem in daskey.split('.')[1:]:
                                if  elem in result:
                                    res  = result[elem]
                                    resdict[uiname] = res
                        except:
                            pass
#                    pad = ""
#                    jsoncode = {'jsoncode': json2html(data, pad)}
#                    jsonhtml = self.templatepage('das_json', **jsoncode)
#                    jsondict = {'id':id, 'system':system, 'api':api, key:jsonhtml}
            # de-duplicate identical flattened rows
            if  resdict not in rowlist:
                rowlist.append(resdict)
            id += 1
        idx      = getarg(kwargs, 'idx', 0)
        limit    = getarg(kwargs, 'limit', 10)
        total    = len(rowlist) 
        jsondict = {'recordsReturned': len(rowlist),
                   'totalRecords': total, 'startIndex':idx,
                   'sort':'true', 'dir':'asc',
                   'pageSize': limit,
                   'records': rowlist}
        return jsondict

    @expose
    def tableview(self, kwargs):
        """
        Provide DAS table view (YUI DataTable) for the current query;
        shows a 'not ready' page while the cache has no results yet.
        """
        kwargs['format'] = 'html'
        uinput  = getarg(kwargs, 'input', '')
        ajaxreq = getarg(kwargs, 'ajax', 0)
        form    = self.form(uinput=uinput)
        time0   = time.time()
        total   = self.nresults(kwargs)
        if not total:
            ctime = (time.time()-time0)
            form  = self.form(uinput)
            body  = self.templatepage('not_ready')
            return self.page(form + body, ctime=ctime)

        # find out which selection keys were used
        selkeys = uinput.replace('find ', '').split(' where ')[0].split(',')
        uikeys  = []
        for key in selkeys:
            uikeys += [item['ui'] for item in self.dasmapping.presentation(key)]
        titles  = ["id"] + uikeys
        # YUI column definitions, one per title, comma-joined
        columns = ['{key:"%s",label:"%s",sortable:true,resizeable:true}'
                       % (title, title) for title in titles]
        coldefs = "[%s]" % ",".join(columns)
        coldefs = coldefs.replace("},{", "},\n{")
        limit   = getarg(kwargs, 'limit', 10)
        # NOTE(review): urllib.urlencode is the Python 2 API; on Python 3
        # this lives at urllib.parse.urlencode — confirm target runtime
        names   = {'titlelist':titles,
                   'coldefs':coldefs, 'rowsperpage':limit,
                   'total':total, 'tag':'mytag', 'ajax':ajaxreq,
                   'input':urllib.urlencode(dict(input=uinput))}
        body    = self.templatepage('das_table', **names)
        ctime   = (time.time()-time0)
        return self.page(form + body, ctime=ctime)

    @expose
    def status(self, **kwargs):
        """
        Place a request to the cache server for the status of a query.
        In ajax mode the reply is a script fragment that either reloads
        the page or keeps polling; otherwise the raw status is returned.
        """
        img  = '<img src="%s/images/loading.gif" alt="loading"/>' % self.base
        req  = """
        <script type="application/javascript">
        setTimeout('ajaxStatus()',3000)
        </script>"""

        def set_header():
            "Set HTTP header parameters"
            tstamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
            cherrypy.response.headers['Expire'] = tstamp
            cherrypy.response.headers['Cache-control'] = 'no-cache'

        # NOTE(review): urllib.unquote_plus is the Python 2 API; on
        # Python 3 this is urllib.parse.unquote_plus — confirm runtime
        uinput = urllib.unquote_plus(kwargs.get('input', ''))
        ajax   = kwargs.get('ajax', 1)
        view   = kwargs.get('view', 'list')  # read but unused; kept for parity
        try:
            res  = urllib2_request('GET', self.cachesrv + '/rest/status',
                                   {'query': uinput},
                                   headers={'Accept': 'application/json'})
            data = json.loads(res)
        except:
            self.daslogger.error(traceback.format_exc())
            data = {'status':'fail'}
        if not ajax:
            # non-ajax callers just want the bare status string
            try:
                return data['status']
            except:
                return traceback.format_exc()
        cherrypy.response.headers['Content-Type'] = 'text/xml'
        if data['status'] == 'ok':
            page = '<script type="application/javascript">reload()</script>'
        elif data['status'] == 'fail':
            page  = '<script type="application/javascript">reload()</script>'
            page += self.error(self.gen_error_msg(kwargs))
        else:
            # still processing: show spinner and schedule the next poll
            page  = img + ' ' + str(data['status']) + ', please wait...'
            page += ', <a href="/das/">stop</a> request' 
            page += req
            set_header()
        return ajax_response(page)
Code example #37
0
File: DASCacheModel.py  Project: perrozzi/DAS
    def __init__(self, config):
        """
        Initialize the DAS cache REST model.

        Registers the REST method table (GET/POST/PUT/DELETE handlers),
        hooks into the WMCore RESTModel when available, creates the DAS
        core engine, a capped MongoDB 'logging.db' collection, and starts
        the cache-manager worker in a background thread.
        """
        self.config = config
        DASWebManager.__init__(self, config)
        self.version = __version__
        # method table: HTTP verb -> {method name: {args, call, version}}
        self.methods = {}
        self.methods['GET'] = {
            'request': {
                'args': ['idx', 'limit', 'query', 'skey', 'order'],
                'call': self.request,
                'version': __version__
            },
            'nresults': {
                'args': ['query'],
                'call': self.nresults,
                'version': __version__
            },
            'records': {
                'args': ['query', 'count', 'collection'],
                'call': self.records,
                'version': __version__
            },
            'status': {
                'args': ['query'],
                'call': self.status,
                'version': __version__
            },
        }
        self.methods['POST'] = {
            'create': {
                'args': ['query', 'expire'],
                'call': self.create,
                'version': __version__
            }
        }
        self.methods['PUT'] = {
            'replace': {
                'args': ['query', 'expire'],
                'call': self.replace,
                'version': __version__
            }
        }
        self.methods['DELETE'] = {
            'delete': {
                'args': ['query'],
                'call': self.delete,
                'version': __version__
            }
        }

        try:
            # WMCore/WebTools: wire our method table into its RESTModel
            rest = RESTModel(config)
            rest.methods = self.methods  # set RESTModel methods
            self.model = self  # re-reference model to my class
            self.model.handler = rest.handler  # reference handler to RESTModel
            cdict = self.config.dictionary_()
            self.base = '/rest'
        except:
            # stand-alone mode: no WebTools framework available
            cdict = {}
            self.base = ''

        self.dascore = DASCore()
        dbhost = self.dascore.dasconfig['mongocache_dbhost']
        dbport = self.dascore.dasconfig['mongocache_dbport']
        capped_size = self.dascore.dasconfig['mongocache_capped_size']
        self.con = Connection(dbhost, dbport)
        # create the capped logging collection once, on first start-up
        if 'logging' not in self.con.database_names():
            db = self.con['logging']
            options = {'capped': True, 'size': capped_size}
            db.create_collection('db', options)
            self.warning('Created logging.db, size=%s' % capped_size)
        self.col = self.con['logging']['db']
        sleep = cdict.get('sleep', 2)
        verbose = cdict.get('verbose', None)
        iconfig = {
            'sleep': sleep,
            'verbose': verbose,
            'logger': self.dascore.logger
        }
        self.cachemgr = DASCacheMgr(iconfig)
        # NOTE(review): `thread.start_new_thread` is the Python 2 module;
        # Python 3 renames it `_thread` — confirm target runtime
        thread.start_new_thread(self.cachemgr.worker, (worker, ))
        msg = 'DASCacheMode::init, host=%s, port=%s, capped_size=%s' \
                % (dbhost, dbport, capped_size)
        self.dascore.logger.debug(msg)
        print(msg)