Code example #1
File: das_core.py  Project: dmwm/DAS
def update_das_query(self, dasquery, status, reason=None):
    "Update DAS query record with given status and reason"
    self.rawcache.update_query_record(dasquery, status, reason=reason)
    self.rawcache.add_to_record(dasquery, {"das.timer": get_das_timer()}, system="das")
    # make sure the das record is updated; we use 7 iterations whose
    # quadratic sleeps add up to about 90 seconds, enough to cover the
    # default syncdelay value of the MongoDB server (in the future it
    # would be better to find this syncdelay value programmatically,
    # but it seems the pymongo driver does not provide any API for it)
    for idx in range(0, 7):
        spec = {"qhash": dasquery.qhash, "das.system": ["das"]}
        res = self.rawcache.col.find_one(spec)
        if res:
            dbstatus = res.get("das", {}).get("status", None)
            if dbstatus == status:
                break
            msg = "qhash %s, das.status=%s, status=%s, wait for update" \
                % (dasquery.qhash, dbstatus, status)
            print(dastimestamp("DAS WARNING"), msg)
        self.rawcache.update_query_record(dasquery, status, reason=reason)
        time.sleep(idx * idx)
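
The polling loop above sleeps idx * idx seconds per iteration, so a full pass waits 0 + 1 + 4 + ... + 36 = 91 seconds before giving up. A minimal standalone sketch of the same quadratic back-off pattern, with a hypothetical check_ready callable standing in for the cache look-up:

import time

def wait_for(check_ready, retries=7):
    "Poll check_ready() with quadratic back-off; return True on success"
    for idx in range(retries):
        if check_ready():
            return True
        time.sleep(idx * idx)  # sleeps 0, 1, 4, 9, 16, 25, 36 seconds
    return False

# illustrative usage, e.g.:
# wait_for(lambda: rawcache.col.find_one(spec) is not None)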
Code example #2
File: das_cli.py  Project: ktf/DAS
def main():
    "Main function"
    optmgr = DASOptionParser()
    opts, _ = optmgr.getOpt()

    t0 = time.time()
    query = opts.query
    if  'instance' not in query:
        query += ' instance=cms_dbs_prod_global'
    debug = opts.verbose
    dascore = DASCore(debug=debug, nores=opts.noresults)
    if  opts.hash:
        dasquery = DASQuery(query)
        mongo_query = dasquery.mongo_query
        service_map = dasquery.service_apis_map()
        str_query   = dasquery.storage_query
        print "---------------"
        print "DAS-QL query  :", query
        print "DAS query     :", dasquery
        print "Mongo query   :", mongo_query
        print "Storage query :", str_query
        print "Services      :\n"
        for srv, val in service_map.items():
            print "%s : %s\n" % (srv, ', '.join(val))
        sys.exit(0)
    sdict = dascore.keys()
    if  opts.services:
        msg = "DAS services:"
        print msg
        print "-"*len(msg)
        keys = sdict.keys()
        keys.sort()
        for key in keys:
            print key
    elif  opts.service:
        msg = "DAS service %s:" % opts.service
        print msg
        print "-"*len(msg)
        keys = sdict[opts.service]
        keys.sort()
        for key in keys:
            print key
    elif query:

        idx    = opts.idx
        limit  = opts.limit
        output = opts.nooutput
        plain  = opts.plain

        if  opts.profile:
            import cProfile # python profiler
            import pstats   # profiler statistics
            cmd  = 'run(dascore,query,idx,limit,output,plain)'
            cProfile.runctx(cmd, globals(), locals(), 'profile.dat')
            info = pstats.Stats('profile.dat')
            info.sort_stats('cumulative')
            info.print_stats()
        else:
            run(dascore, query, idx, limit, output, plain)
    elif opts.dasconfig:
        print pformat(dascore.dasconfig)
    else:
        print
        print "DAS CLI interface, no actions found,"
        print "please use --help for more options."
    timestamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    timer = get_das_timer()
    print "\nDAS execution time:\n"
    if  debug:
        timelist = []
        for _, timerdict in timer.items():
            counter = timerdict['counter']
            tag = timerdict['tag']
            exetime = timerdict['time']
            timelist.append((counter, tag, exetime))
        timelist.sort()
        for _, tag, exetime in timelist:
            print "%s %s sec" % (tag, round(exetime, 2))
    print "Total %s sec, %s" % (round(time.time()-t0, 2), timestamp)
Code example #3
File: das_core.py  Project: perrozzi/DAS
def update_das_query(self, dasquery, status, reason=None):
    "Update DAS query record with given status and reason"
    self.rawcache.update_query_record(dasquery, status, reason=reason)
    self.rawcache.add_to_record(
        dasquery, {'das.timer': get_das_timer()}, system='das')
Code example #4
File: das_cli.py  Project: perrozzi/DAS
def main():
    "Main function"
    optmgr  = DASOptionParser()
    opts = optmgr.parser.parse_args()

    t0 = time.time()
    query = opts.query
    if  'instance' not in query:
        query = ' instance=prod/global ' + query
    debug = opts.verbose
    dascore = DASCore(debug=debug, nores=opts.noresults)
    if  opts.hash:
        dasquery = DASQuery(query)
        mongo_query = dasquery.mongo_query
        service_map = dasquery.service_apis_map()
        str_query   = dasquery.storage_query
        print("---------------")
        print("DAS-QL query  :", query)
        print("DAS query     :", dasquery)
        print("Mongo query   :", mongo_query)
        print("Storage query :", str_query)
        print("Services      :\n")
        for srv, val in service_map.items():
            print("%s : %s\n" % (srv, ', '.join(val)))
        sys.exit(0)
    sdict = dascore.keys()
    if  opts.services:
        msg = "DAS services:"
        print(msg)
        print("-"*len(msg))
        keys = list(sdict.keys())
        keys.sort()
        for key in keys:
            print(key)
    elif  opts.service:
        msg = "DAS service %s:" % opts.service
        print(msg)
        print("-"*len(msg))
        keys = sdict[opts.service]
        keys.sort()
        for key in keys:
            print(key)
    elif opts.jsfile:
        kws_js(dascore, query, opts.idx, opts.limit, opts.jsfile, debug)
        sys.exit(0)
    elif opts.kfile:
        keylearning_js(dascore, query, opts.kfile, debug)
        sys.exit(0)
    elif query:

        idx    = opts.idx
        limit  = opts.limit
        output = opts.nooutput
        plain  = opts.plain
        qcache = opts.qcache

        if  opts.profile:
            import cProfile # python profiler
            import pstats   # profiler statistics
            cmd  = 'run(dascore,query,idx,limit,output,plain)'
            cProfile.runctx(cmd, globals(), locals(), 'profile.dat')
            info = pstats.Stats('profile.dat')
            info.sort_stats('cumulative')
            info.print_stats()
        else:
            run(dascore, query, idx, limit, output, plain)
    elif opts.dasconfig:
        print(pformat(dascore.dasconfig))
    else:
        print()
        print("DAS CLI interface, no actions found,")
        print("please use --help for more options.")
    timestamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
    timer = get_das_timer()
    print("\nDAS execution time:\n")
    if  debug:
        timelist = []
        for _, timerdict in timer.items():
            counter = timerdict['counter']
            tag = timerdict['tag']
            exetime = timerdict['time']
            timelist.append((counter, tag, exetime))
        timelist.sort()
        for _, tag, exetime in timelist:
            print("%s %s sec" % (tag, round(exetime, 2)))
    print("Total %s sec, %s" % (round(time.time()-t0, 2), timestamp))
Code example #5
File: das_core.py  Project: zdenekmaxa/DAS
    def call(self, query, add_to_analytics=True, **kwds):
        """
        Top level DAS api which execute a given query using underlying
        data-services. It follows the following steps:

            - parse input query
            - identify data-sercices based on selection keys
              and where clause conditions
            - construct DAS workflow and execute data-service 
              API calls. At this step individual 
              data-services store results into DAS cache.

        Return status 0/1 depending on success of the calls, can be
        used by workers on cache server.

        kwds is provided for compatibility with web layer, e.g. it
        may invoke this method with additional pid parameter.
        """
        self.logger.info('input query=%s' % query)
        das_timer('DASCore::call', self.verbose)
        services = []
        if  isinstance(query, DASQuery):
            dasquery = query
        else:
            dasquery = DASQuery(query, mongoparser=self.mongoparser)
        if  add_to_analytics:
            dasquery.add_to_analytics()
        query = dasquery.mongo_query
        if  'system' in dasquery.mongo_query:
            system = query['system']
            if  isinstance(system, (str, unicode)):
                services = [system]
            elif isinstance(system, list):
                services = system
            else:
                msg = 'Unsupported system=%s type=%s in DAS query' \
                        % (system, type(system))
                raise Exception(msg)
        spec   = query.get('spec')
        fields = query.get('fields')
        if  fields == ['records']:
            msg = 'look-up all records in cache'
            self.logger.info(msg)
            return 'in cache'
        if  spec == dict(records='*'):
            self.logger.info("look-up everything in cache")
            return 'in cache'
        for record in self.rawcache.find_specs(dasquery):
            status = record['das']['status']
            msg = 'found query %s in cache, status=%s\n' \
                        % (record['query'], status)
            self.logger.info(msg)
            return status
        similar_dasquery = self.rawcache.similar_queries(dasquery)
        if  similar_dasquery:
            for record in self.rawcache.find_specs(similar_dasquery):
                if  record:
                    try:
                        status = record['das']['status']
                    except Exception:
                        status = 'N/A'
                        msg = 'Fail to look-up das.status, record=%s' % record
                        self.logger.info(msg)
                    msg  = 'found SIMILAR query in cache, '
                    msg += 'query=%s, status=%s\n' % (record['query'], status)
                    self.logger.info(msg)
                    return status

        self.logger.info(dasquery)
        params = dasquery.params()
        if  not services:
            services = params['services']
        self.logger.info('services = %s' % services)
        das_timer('das_record', self.verbose)
        # initial expire tstamp 1 day (long enough to be overwritten by data-srv)
        expire = expire_timestamp(time.time()+1*24*60*60)
        header = dasheader("das", dasquery, expire)
        header['lookup_keys'] = []
        self.rawcache.insert_query_record(dasquery, header)
        das_timer('das_record', self.verbose)
        try:
            if  self.multitask:
                jobs = []
                for srv in services:
                    jobs.append(self.taskmgr.spawn(self.worker, srv, dasquery))
                self.taskmgr.joinall(jobs)
            else:
                for srv in services:
                    self.worker(srv, dasquery)
        except Exception as exc:
            print_exc(exc)
            return 'fail'
        self.logger.info('\n##### merging ######\n')
        self.rawcache.update_query_record(dasquery, 'merging')
        das_timer('merge', self.verbose)
        self.rawcache.merge_records(dasquery)
        das_timer('merge', self.verbose)
        self.rawcache.update_query_record(dasquery, 'ok')
        self.rawcache.add_to_record(\
                dasquery, {'das.timer': get_das_timer()}, system='das')
        das_timer('DASCore::call', self.verbose)
        return 'ok'
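
call() returns 'ok' after a successful merge, 'fail' on a worker exception, 'in cache' for pure cache look-ups, or the status of a matching cached record. A minimal usage sketch, assuming a configured DAS installation with MongoDB running, that DASCore can be constructed with defaults, and a hypothetical example query:

core = DASCore()  # assumes the default DAS configuration is available
status = core.call('dataset=/ZMM*/*/* instance=cms_dbs_prod_global')
if status == 'ok':
    print('results were merged into the DAS cache')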
Code example #6
File: das_core.py  Project: dmwm/DAS
def update_das_query(self, dasquery, status, reason=None):
    "Update DAS query record with given status and reason"
    self.rawcache.update_query_record(dasquery, status, reason=reason)
    self.rawcache.add_to_record(
        dasquery, {'das.timer': get_das_timer()}, system='das')