コード例 #1
0
 def __init__(self, host, port):
     """Open a MongoDB connection and keep handles used by this object.

     :param host: MongoDB host name
     :param port: MongoDB port number
     """
     self.host   = host
     self.port   = port
     self.conn   = MongoClient(host, port)
     self.cache  = self.conn['das']['cache']  # das.cache collection handle
     self.line   = "-"*80  # visual separator used for console output
     self.config = das_readconfig()  # full DAS configuration dict
コード例 #2
0
ファイル: das_parser.py プロジェクト: zdenekmaxa/DAS
    def __init__(self, config=None):
        """Initialize the DAS QL manager.

        :param config: DAS configuration dict; read from disk when None.
            Missing 'dasmapping'/'dasanalytics' entries are created here.
        :raises Exception: when the MappingDB contains no DAS maps
        """
        if  not config:
            config = das_readconfig()
        # dict.has_key() is Python 2 only (removed in Python 3);
        # the 'in' operator is equivalent and works on both
        if  'dasmapping' not in config:
            config['dasmapping'] = DASMapping(config)
        if  'dasanalytics' not in config:
            config['dasanalytics'] = DASAnalytics(config)
        if  not config['dasmapping'].check_maps():
            msg = "No DAS maps found in MappingDB"
            raise Exception(msg)
        self.map         = config['dasmapping']
        self.analytics   = config['dasanalytics']
        self.dasservices = config['services']
        self.daskeysmap  = self.map.daskeys()
        self.operators   = list(das_operators())
        self.daskeys     = list(das_special_keys())
        self.verbose     = config['verbose']
        self.logger      = PrintManager('QLManger', self.verbose)
        # flatten per-service DAS keys into the overall key list
        for val in self.daskeysmap.values():
            for item in val:
                self.daskeys.append(item)
        parserdir   = config['das']['parserdir']
        self.dasply = DASPLY(parserdir, self.daskeys, self.dasservices,
                verbose=self.verbose)

        self.enabledb = config['parserdb']['enable']
        if  self.enabledb:
            self.parserdb = DASParserDB(config)
コード例 #3
0
ファイル: das_kwd_search.py プロジェクト: ktf/DAS
 def __init__(self, dascore):
     """Initialize the keyword-search helper.

     :param dascore: DASCore instance (required)
     :raises Exception: when no dascore is supplied
     """
     if not dascore:
         raise Exception("dascore needed")
     self.kws = KeywordSearch(dascore)
     config = das_readconfig()
     # whether to render the relevance score bar in color (off by default)
     self.colored_scorebar = \
         config['keyword_search'].get('colored_scorebar', False)
コード例 #4
0
ファイル: dataset_populator.py プロジェクト: ktf/DAS
 def __init__(self, config):
     """Create a dataset populator: DAS core plus a worker pool.

     :param config: dict with optional 'nworkers' and 'name' entries
     """
     nworkers     = int(config.get('nworkers', 10))
     name         = config.get('name', 'dataset_populator')
     dasconfig    = das_readconfig()
     debug        = False
     # nores=True requests DASCore's no-results (write) mode
     self.dascore = DASCore(config=dasconfig, nores=True, debug=debug)
     self.taskmgr = TaskManager(nworkers=nworkers, name=name)
コード例 #5
0
ファイル: das_mapping_t.py プロジェクト: ktf/DAS
    def setUp(self):
        """
        set up DAS core module

        Builds a throw-away 'test_mapping' MongoDB database holding a
        presentation map and its verification token, then instantiates
        the DASMapping manager under test against it.
        """
        debug = 0
        self.db = "test_mapping.db"
        # deepcopy so test mutations do not leak into the shared config
        config = deepcopy(das_readconfig())
        dburi = config["mongodb"]["dburi"]
        logger = PrintManager("TestDASMapping", verbose=debug)
        config["logger"] = logger
        config["verbose"] = debug
        dbname = "test_mapping"
        collname = "db"
        config["mappingdb"] = dict(dburi=dburi, dbname=dbname, collname=collname)
        # add some maps to mapping db
        conn = MongoClient(dburi)
        conn.drop_database(dbname)  # start from a clean database
        self.coll = conn[dbname][collname]
        self.pmap = {
            "presentation": {
                "block": [{"ui": "Block name", "das": "block.name"}, {"ui": "Block size", "das": "block.size"}]
            },
            "type": "presentation",
        }
        # NOTE(review): Collection.insert and find(exhaust=True) are
        # pymongo 2.x APIs (removed in 3.x) -- confirm the pinned pymongo
        self.coll.insert(self.pmap)
        ver_token = verification_token(self.coll.find(exhaust=True))
        rec = {"verification_token": ver_token, "type": "verification_token"}
        self.coll.insert(rec)

        self.mgr = DASMapping(config)
コード例 #6
0
ファイル: dataset_populator.py プロジェクト: ktf/DAS
def datasets(inst='cms_dbs_prod_global'):
    """Yield dataset names stored in the 'dbs' database for a DBS instance."""
    cfg = das_readconfig()
    mongo = db_connection(cfg['mongodb']['dburi'])
    records = mongo['dbs'][inst].find()
    for record in records:
        yield record['dataset']
コード例 #7
0
ファイル: dbs_daemon.py プロジェクト: ktf/DAS
def list_dbs_instances():
    """Return the list of all DBS instances known to the DAS mapping DB."""
    from DAS.core.das_mapping_db import DASMapping

    mapping = DASMapping(das_readconfig())
    return mapping.dbs_instances()
コード例 #8
0
 def __init__(self, host, port):
     """Connect to MongoDB and cache the handles this object works with.

     :param host: MongoDB host
     :param port: MongoDB port
     """
     self.host   = host
     self.port   = port
     self.conn   = MongoClient(host, port)
     self.cache  = self.conn['das']['cache']  # DAS cache collection
     self.line   = "-"*80  # separator line for printouts
     self.config = das_readconfig()  # DAS configuration dict
コード例 #9
0
ファイル: das_kwd_search.py プロジェクト: perrozzi/DAS
 def __init__(self, dascore):
     """Set up keyword search on top of a required DASCore instance.

     :param dascore: DASCore instance
     :raises Exception: when dascore is missing/falsy
     """
     if not dascore:
         raise Exception("dascore needed")
     self.kws = KeywordSearch(dascore)
     config = das_readconfig()
     # optional UI flag: colored relevance score bar (defaults to off)
     self.colored_scorebar = \
         config['keyword_search'].get('colored_scorebar', False)
コード例 #10
0
ファイル: tools.py プロジェクト: perrozzi/DAS
def enable_cross_origin(func):
    """
    Decorator: enables Cross Origin Requests (from a predefined list of DAS
    origins) to be run on each given back-end server (keyword search,
    autocompletion).

    :param func: exposed handler method to wrap
    :returns: wrapper that adds CORS headers to every response
    """
    from functools import wraps
    from DAS.utils.das_config import das_readconfig
    dasconfig = das_readconfig()

    # load list of hosts from where keyword search could be initialized
    valid_origins = dasconfig['load_balance'].get('valid_origins', [])

    # fixed typo in the helper name (was "enable_cross_orign_requests")
    def enable_cross_origin_requests():
        """
        on each request, add additional headers that will allow browser
        to use the KWS  result (loaded from other origin/domain)
        """

        # echo the request's origin back only when it is in the allow-list
        origin = cherrypy.request.headers.get('Origin', '')
        if origin in valid_origins:
            cherrypy.response.headers['Access-Control-Allow-Origin'] = origin

        cherrypy.response.headers['Access-Control-Allow-Headers'] = 'X-JSON'
        cherrypy.response.headers['Access-Control-Expose-Headers'] = 'X-JSON'

    # functools.wraps preserves the wrapped handler's name, docstring and
    # attribute dict (important for introspection of exposed handlers)
    @wraps(func)
    def wrapper(self, *args, **kwds):
        data = func(self, *args, **kwds)
        enable_cross_origin_requests()
        return data

    return wrapper
コード例 #11
0
def get_global_dbs_inst():
    """
    gets the name of global dbs instance
    """
    from DAS.core.das_mapping_db import DASMapping
    mappings = DASMapping(das_readconfig())
    return mappings.dbs_global_instance()
コード例 #12
0
ファイル: das_server.py プロジェクト: dmwm/DAS
def main():
    """
    Start-up web server.

    Reads the cherrypy configuration file passed via -c/--config (or the
    default DAS configuration) and starts the Root web server.
    """
    parser  = OptionParser()
    parser.add_option("-c", "--config", dest="config", default=False,\
        help="provide cherrypy configuration file")
    opts, _ = parser.parse_args()

    if  opts.config: # read provided configuration
        config = das_readconfig(opts.config, debug=True)
    else:
        config = das_readconfig(debug=True)

    # Start DAS server
    root = Root(config)
    root.start()
コード例 #13
0
ファイル: request_manager_t.py プロジェクト: zdenekmaxa/DAS
 def setUp(self):
     """
     set up stuff

     Reads mongodb dburi from a private copy of the DAS configuration
     and creates the RequestManager under test.
     """
     self.debug  = 0
     dasconfig   = deepcopy(das_readconfig())
     self.dburi  = dasconfig['mongodb']['dburi']
     self.reqmgr = RequestManager(self.dburi)
コード例 #14
0
def main():
    """
    Start-up web server.
    """
    parser = OptionParser()
    parser.add_option("-c", "--config", dest="config", default=False,
                      help="provide cherrypy configuration file")
    opts, _ = parser.parse_args()

    # read either the user-supplied or the default DAS configuration
    cfg_file = opts.config
    if cfg_file:
        config = das_readconfig(cfg_file, debug=True)
    else:
        config = das_readconfig(debug=True)

    # Start DAS server
    Root(config).start()
コード例 #15
0
ファイル: das_services_t.py プロジェクト: zdenekmaxa/DAS
    def setUp(self):
        """
        set up DAS core module

        Builds a complete fake DAS stack: test collections, fake mapping
        records for several data-services, a DASCore instance and a
        running DASTestDataService web server.
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
#        config        = deepcopy(das_readconfig())
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://localhost:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        # point every DAS database at dedicated test names/collections
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser', 
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs', 'phedex', 'sitedb', 'google_maps', 'ip']

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # create fresh DB
        self.clear_collections()
        self.mgr.delete_db_collection()
        self.mgr.create_db()

        # Add fake mapping records
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs', 'dbs.yml')
        self.add_service('phedex', 'phedex.yml')
        self.add_service('sitedb', 'sitedb.yml')

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
コード例 #16
0
ファイル: das_cache_t.py プロジェクト: perrozzi/DAS
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     # private copy of the shared configuration; tests may mutate it
     config = deepcopy(das_readconfig())
     logger = PrintManager('TestDASCache', verbose=debug)
     config['logger'] = logger
     config['verbose'] = debug
コード例 #17
0
ファイル: dbs_daemon.py プロジェクト: ktf/DAS
def get_global_dbs_inst():
    """Return the name of the global DBS instance."""
    from DAS.core.das_mapping_db import DASMapping

    cfg = das_readconfig()
    return DASMapping(cfg).dbs_global_instance()
コード例 #18
0
ファイル: das_db_t.py プロジェクト: ktf/DAS
 def setUp(self):
     """
     set up DAS core module

     Keeps the configured dburi plus hard-coded host/port defaults used
     by the DB tests.
     """
     self.debug  = 0
     dasconfig   = deepcopy(das_readconfig())
     self.dburi  = dasconfig['mongodb']['dburi']
     self.dbhost = 'localhost'
     self.dbport = 27017  # default MongoDB port
コード例 #19
0
ファイル: das_cache_t.py プロジェクト: ktf/DAS
 def setUp(self):
     """
     set up DAS core module
     """
     debug    = 0
     # private copy of the shared configuration; tests may mutate it
     config   = deepcopy(das_readconfig())
     logger   = PrintManager('TestDASCache', verbose=debug)
     config['logger']  = logger
     config['verbose'] = debug
コード例 #20
0
ファイル: das_db_t.py プロジェクト: perrozzi/DAS
 def setUp(self):
     """
     set up DAS core module

     Stores the configured dburi and hard-coded host/port defaults.
     """
     self.debug = 0
     dasconfig = deepcopy(das_readconfig())
     self.dburi = dasconfig['mongodb']['dburi']
     self.dbhost = 'localhost'
     self.dbport = 27017  # default MongoDB port
コード例 #21
0
    def setUp(self):
        """
        set up DAS core module

        Builds the fake DAS stack against local service URLs: test
        collections, fake mapping records, a DASCore instance and a
        running DASTestDataService web server.
        """
        debug = 0

        # read DAS config and make fake Mapping DB entry
        collname      = 'test_collection'
        self.dasmerge = 'test_merge'
        self.dascache = 'test_cache'
        self.dasmr    = 'test_mapreduce'
        self.collname = collname
        config        = das_readconfig()
        dburi         = config['mongodb']['dburi']
        self.dburi    = dburi
        logger        = PrintManager('TestCMSFakeDataServices', verbose=debug)
        self.base     = 'http://127.0.0.1:8080' # URL of DASTestDataService
        self.expire   = 100
        config['logger']    = logger
        config['loglevel']  = debug
        config['verbose']   = debug
        # point every DAS database at dedicated test names/collections
        config['mappingdb'] = dict(dburi=dburi, dbname='mapping', collname=collname)
        config['analyticsdb'] = dict(dbname='analytics', collname=collname, history=100)
        config['dasdb'] = {'dbname': 'das',
                           'cachecollection': self.dascache,
                           'mrcollection': self.dasmr,
                           'mergecollection': self.dasmerge}
        config['keylearningdb'] = {'collname': collname, 'dbname': 'keylearning'}
        config['parserdb'] = {'collname': collname, 'dbname': 'parser', 
                                'enable': True, 'sizecap': 10000}
        config['services'] = ['dbs3', 'phedex', 'google_maps', 'ip']
        # Do not perform DAS map test, since we overwrite system and urls.
        # This is done to use standard DAS maps, but use local URLs, which
        # cause DAS hash map to be be wrong during a test
        config['map_test'] = False

        # Add fake mapping records
        self.clear_collections()
        self.add_service('ip', 'ip.yml')
        self.add_service('google_maps', 'google_maps.yml')
        self.add_service('dbs3', 'dbs3.yml')
        self.add_service('phedex', 'phedex.yml')

        # setup DAS mapper
        self.mgr = DASMapping(config)

        # mongo parser
        self.mongoparser = ql_manager(config)
        config['mongoparser'] = self.mongoparser

        # create DAS handler
        self.das = DASCore(config)

        # start TestDataService
        self.server = Root(config)
        self.server.start()
コード例 #22
0
ファイル: dbs_daemon.py プロジェクト: dmwm/DAS
def test(dbs_url):
    "Test function"
    uri = das_readconfig()["mongodb"]["dburi"][0]
    mgr = DBSDaemon(dbs_url, uri, {"preserve_on_restart": True})
    mgr.update()
    # print the first ten matches of a sample pattern
    for row in mgr.find("zee*summer", 0, 10):
        print(row)
コード例 #23
0
ファイル: das_memcache_t.py プロジェクト: perrozzi/DAS
 def setUp(self):
     """
     set up DAS core module
     """
     debug    = 0
     config   = das_readconfig()
     logger   = DASLogger(verbose=debug, stdout=debug)
     config['logger']  = logger
     config['verbose'] = debug
     # memcache-backed DAS cache under test
     self.memcache = DASMemcache(config)
コード例 #24
0
ファイル: das_config_t.py プロジェクト: ktf/DAS
 def testConfig(self):
     """test read/write of configuration file

     Points DAS_CONFIG at a temporary file, writes the default config
     there and verifies it reads back as a plain dict.
     """
     # os.environ.has_key() and types.DictType are Python 2 only; use the
     # 'in' operator and the builtin dict type (this matches the already
     # ported sibling version of this test)
     if  'DAS_CONFIG' in os.environ:
         del os.environ['DAS_CONFIG']
     fds = NamedTemporaryFile()
     os.environ['DAS_CONFIG'] = fds.name
     dasconfig = das_configfile()
     write_configparser(dasconfig, True)
     readdict = deepcopy(das_readconfig())
     self.assertEqual(dict, type(readdict))
コード例 #25
0
 def testConfig(self):
     """test read/write of configuration file

     Points DAS_CONFIG at a temporary file, writes the default config
     there and verifies it reads back as a plain dict.
     """
     if  'DAS_CONFIG' in os.environ:
         del os.environ['DAS_CONFIG']
     fds = NamedTemporaryFile()
     os.environ['DAS_CONFIG'] = fds.name
     dasconfig = das_configfile()
     write_configparser(dasconfig, True)
     readdict = deepcopy(das_readconfig())
     self.assertEqual(dict, type(readdict))
コード例 #26
0
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     self.config = das_readconfig()
     logger = DASLogger(verbose=debug, stdout=debug)
     self.config['logger'] = logger
     self.config['verbose'] = debug
     # couch-backed DAS cache under test
     self.couchcache = DASCouchcache(self.config)
コード例 #27
0
ファイル: das_couchcache_t.py プロジェクト: dmwm/DAS
 def setUp(self):
     """
     set up DAS core module
     """
     debug       = 0
     self.config = das_readconfig()
     logger      = DASLogger(verbose=debug, stdout=debug)
     self.config['logger']  = logger
     self.config['verbose'] = debug
     # couch-backed DAS cache under test
     self.couchcache = DASCouchcache(self.config)
コード例 #28
0
ファイル: das_memcache_t.py プロジェクト: zdenekmaxa/DAS
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     config = das_readconfig()
     logger = DASLogger(verbose=debug, stdout=debug)
     config["logger"] = logger
     config["verbose"] = debug
     # memcache-backed DAS cache under test
     self.memcache = DASMemcache(config)
コード例 #29
0
ファイル: das_core_t.py プロジェクト: zdenekmaxa/DAS
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     self.das = DASCore(debug=debug)
     config = deepcopy(das_readconfig())
     dburi = config['mongodb']['dburi']
     connection = Connection(dburi)
     # wipe the 'das' database so every test starts from a clean state
     connection.drop_database('das') 
コード例 #30
0
ファイル: das_core_t.py プロジェクト: perrozzi/DAS
 def setUp(self):
     """
     set up DAS core module
     """
     debug = 0
     # multitask=False keeps the core single-threaded inside unit tests
     self.das = DASCore(debug=debug, multitask=False)
     config = deepcopy(das_readconfig())
     dburi = config['mongodb']['dburi']
     connection = MongoClient(dburi)
     # wipe the 'das' database so every test starts from a clean state
     connection.drop_database('das') 
コード例 #31
0
def test(dbs_url):
    "Test function"
    dburi = das_readconfig()['mongodb']['dburi'][0]
    daemon = DBSDaemon(dbs_url, dburi, {'preserve_on_restart': True})
    daemon.update()
    idx, limit = 0, 10
    # print the first page of matches for a sample pattern
    for rec in daemon.find('zee*summer', idx, limit):
        print(rec)
コード例 #32
0
ファイル: dataset_populator.py プロジェクト: ktf/DAS
 def __init__(self, config):
     """Create the dataset-keeper daemon.

     :param config: dict with optional 'sleep', 'query_pattern',
         'nworkers' and 'name' entries
     """
     self.sleep   = config.get('sleep', 5)  # polling sleep interval
     pattern      = {'das.system':'dbs', 'das.primary_key': 'dataset.name'}
     self.pattern = config.get('query_pattern', pattern)
     nworkers     = int(config.get('nworkers', 10))
     name         = config.get('name', 'dataset_keeper')
     dasconfig    = das_readconfig()
     debug        = False
     # nores=True requests DASCore's no-results (write) mode
     self.dascore = DASCore(config=dasconfig, nores=True, debug=debug)
     self.taskmgr = TaskManager(nworkers=nworkers, name=name)
     self.conn    = db_connection(dasconfig['mongodb']['dburi'])
コード例 #33
0
ファイル: das_waitfordb.py プロジェクト: ktf/DAS
def waitfordb(max_time, callback=on_db_available):
    """
    waits until DB is ready as well as until DAS mappings are created/updated.

    :param max_time: maximum time to wait (same units as the 5-unit sleep)
    :param callback: invoked by db_monitor once the DB becomes available
    """
    config = das_readconfig()
    dburi = config['mongodb']['dburi']
    sleep_time = 5
    db_monitor(dburi, callback,
               sleep=sleep_time, max_retries=max_time // sleep_time)

    # reached only when the monitor gave up; the Python 2 print statement
    # is replaced with the py2/py3-compatible function call
    print('DB is not available and the timeout has passed')
    sys.exit(-1)
コード例 #34
0
ファイル: lumi_service.py プロジェクト: perrozzi/DAS
 def __init__(self, config=None):
     """Initialize the combined lumi service.

     :param config: optional dict with 'name' and 'api' overrides
     """
     super(LumiService, self).__init__()
     if  not config:
         config   = {}
     self.dasconfig = das_readconfig()
     self.service_name = config.get('name', 'combined')
     self.service_api  = config.get('api', 'combined_lumi4dataset')
     self.uri       = self.dasconfig['mongodb']['dburi']
     self.urls      = None # defined at run-time via self.init()
     self.expire    = None # defined at run-time via self.init()
     self.ckey, self.cert = get_key_cert()  # user key/certificate pair
     self.init()
コード例 #35
0
    def __init__(self, cfg):
        """Track input values for one DAS input field.

        :param cfg: tracker provider dict; must contain 'input'
        """
        # tracker settings come from the optional 'inputvals' config section
        config = das_readconfig().get('inputvals', {})

        self.dburi = get_db_uri()
        self.dbcoll = get_collection_name(cfg['input'])
        # accept either lower- or upper-case dbname spelling in the config
        self.dbname = config.get('dbname', config.get('DBNAME', 'inputvals'))

        self.cfg = cfg
        self.cache_size = config.get('cache_size', 1000)
        self.expire = config.get('expire', 3600)  # presumably seconds; confirm
        self.write_hash = config.get('write_hash', False)

        self.init()
コード例 #36
0
ファイル: dbs_phedex.py プロジェクト: dmwm/DAS
 def __init__(self, config=None):
     """Initialize the combined DBS+PhEDEx service.

     :param config: optional service configuration dict
     """
     if  not config:
         config = {}
     super(DBSPhedexService, self).__init__()
     self.config     = config
     self.dbname     = 'dbs_phedex'
     self.collname   = 'datasets'
     self.dasconfig  = das_readconfig()
     self.uri        = self.dasconfig['mongodb']['dburi']
     self.urls       = None # defined at run-time via self.init()
     self.expire     = 60   # defined at run-time via self.init()
     self.wthr       = None # defined at run-time via self.init()
     self.init()
コード例 #37
0
ファイル: input_values_tracker.py プロジェクト: perrozzi/DAS
    def __init__(self, cfg):
        """Track input values for one DAS input field.

        :param cfg: tracker provider dict; must contain 'input'
        """
        # tracker settings come from the optional 'inputvals' config section
        config = das_readconfig().get('inputvals', {})

        self.dburi = get_db_uri()
        self.dbcoll = get_collection_name(cfg['input'])
        # accept either lower- or upper-case dbname spelling in the config
        self.dbname = config.get('dbname', config.get('DBNAME', 'inputvals'))

        self.cfg = cfg
        self.cache_size = config.get('cache_size', 1000)
        self.expire = config.get('expire', 3600)  # presumably seconds; confirm
        self.write_hash = config.get('write_hash', False)

        self.init()
コード例 #38
0
ファイル: das_kwdsearch_t.py プロジェクト: perrozzi/DAS
    def __init__(cls, name, bases, d):
        """Metaclass initializer: performs one-time, class-level test setup
        (DBS manager, keyword search, timeout) shared by all test methods."""
        type.__init__(cls, name, bases, d)

        # set up only once
        if hasattr(cls, 'global_dbs_inst') and cls.global_dbs_inst:
            return

        print('setUp in metaclass: getting dbs manager ' \
              '(and fetching datasets if needed)')
        cls.global_dbs_mngr = initialize_global_dbs_mngr(update_required=False)
        cls.global_dbs_inst = get_global_dbs_inst()
        cls.kws = KeywordSearch(dascore=DASCore(multitask=False))
        dasconfig = das_readconfig()
        cls.timeout = dasconfig['keyword_search']['timeout']
コード例 #39
0
ファイル: das_waitfordb.py プロジェクト: ktf/DAS
def check_mappings_readiness():
    """
    return whether DASMaps are initialized

    :returns: True when the mapping DB has maps and DASCore constructs
        cleanly; False otherwise (including on any exception)
    """
    print('db alive. checking it\'s state...')
    try:
        dasmapping = DASMapping(das_readconfig())
        if dasmapping.check_maps():
            DASCore(multitask=False)
            return True
    except Exception as exc:
        # Python 2 print statements replaced with py2/py3-compatible calls
        print(exc)
    print('no DAS mappings present...')
    return False
コード例 #40
0
ファイル: das_kwdsearch_t.py プロジェクト: ktf/DAS
    def __init__(cls, name, bases, d):
        """Metaclass initializer: performs one-time, class-level test setup
        (DBS manager, keyword search, timeout) shared by all test methods."""
        type.__init__(cls, name, bases, d)

        # set up only once
        if hasattr(cls, 'global_dbs_inst') and cls.global_dbs_inst:
            return

        # Python 2 print statement replaced with the py2/py3-compatible
        # function call (consistent with the sibling version of this class)
        print('setUp in metaclass: getting dbs manager ' \
              '(and fetching datasets if needed)')
        cls.global_dbs_mngr = initialize_global_dbs_mngr(update_required=False)
        cls.global_dbs_inst = get_global_dbs_inst()
        cls.kws = KeywordSearch(dascore=DASCore(multitask=False))
        dasconfig = das_readconfig()
        cls.timeout = dasconfig['keyword_search']['timeout']
コード例 #41
0
ファイル: das_analytics_t.py プロジェクト: zdenekmaxa/DAS
 def setUp(self):
     """
     set up DAS core module

     Points analyticsdb at a dedicated 'test_analytics' database and
     creates the DASAnalytics manager under test.
     """
     debug    = 0
     self.db  = 'test_analytics.db'
     config   = deepcopy(das_readconfig())
     dburi    = config['mongodb']['dburi']
     logger   = PrintManager('TestDASAnalytics', verbose=debug)
     config['logger']  = logger
     config['verbose'] = debug
     # history window 5184000 (presumably seconds, i.e. 60 days) -- confirm
     config['analyticsdb'] = dict(dburi=dburi, history=5184000,
             dbname='test_analytics', collname='db')
     self.mgr = DASAnalytics(config)
コード例 #42
0
ファイル: das_ql.py プロジェクト: dmwm/DAS
def das_mapreduces():
    """
    Return list of DAS mapreduce functions

    Scans the dasdb map/reduce collection and returns the 'name' of every
    record whose keys are exactly {'map', 'reduce', 'name', '_id'}.
    """
    config  = das_readconfig()
    dburi   = config['mongodb']['dburi']
    dbname  = config['dasdb']['dbname']
    colname = config['dasdb']['mrcollection']
    conn    = db_connection(dburi)
    coll    = conn[dbname][colname]
    # a well-formed map/reduce record carries exactly these four keys;
    # set literal + comprehension replace the py2-era set([...]) append loop
    mrkeys  = {'map', 'reduce', 'name', '_id'}
    return [row['name'] for row in coll.find({}) if set(row) == mrkeys]
コード例 #43
0
ファイル: das_ql.py プロジェクト: perrozzi/DAS
def das_mapreduces():
    """
    Return list of DAS mapreduce functions

    Scans the dasdb map/reduce collection and returns the 'name' of every
    record whose keys are exactly {'map', 'reduce', 'name', '_id'}.
    """
    mlist = []
    config = das_readconfig()
    dburi = config['mongodb']['dburi']
    dbname = config['dasdb']['dbname']
    colname = config['dasdb']['mrcollection']
    conn = db_connection(dburi)
    coll = conn[dbname][colname]
    for row in coll.find({}):
        # only well-formed map/reduce records qualify
        if set(row.keys()) == set(['map', 'reduce', 'name', '_id']):
            mlist.append(row['name'])
    return mlist
コード例 #44
0
ファイル: das_mongocache_t.py プロジェクト: perrozzi/DAS
    def setUp(self):
        """
        set up DAS core module

        Drops the 'das' database, then builds a DASMapping and the
        DASMongocache instance under test.
        """
        debug = 0
        config = deepcopy(das_readconfig())
        logger = PrintManager('TestDASMongocache', verbose=debug)
        config['logger'] = logger
        config['verbose'] = debug
        dburi = config['mongodb']['dburi']

        connection = MongoClient(dburi)
        connection.drop_database('das')  # start from a clean database
        dasmapping = DASMapping(config)
        config['dasmapping'] = dasmapping
        self.dasmongocache = DASMongocache(config)
コード例 #45
0
ファイル: das_filecache_t.py プロジェクト: perrozzi/DAS
 def setUp(self):
     """
     set up DAS core module

     Creates a scratch 'testfilecache' directory (wiping any previous
     one) and configures a sqlite-backed DASFilecache inside it.
     """
     debug = 0
     self.dir = os.path.join(os.getcwd(), 'testfilecache')
     if os.path.isdir(self.dir):
         os.system('rm -rf %s' % self.dir)  # remove leftovers of prior runs
     config = das_readconfig()
     logger = DASLogger(verbose=debug, stdout=debug)
     config['logger'] = logger
     config['verbose'] = debug
     config['filecache_dir'] = self.dir
     config['filecache_db_engine'] = 'sqlite:///%s/test_file_cache.db' \
             % self.dir
     self.dasfilecache = DASFilecache(config)
コード例 #46
0
 def __init__(self, config=None):
     """Set up QL manager state from the given (or default) configuration.

     :param config: DAS configuration dict; read from disk when None
     :raises Exception: when the MappingDB contains no DAS maps
     """
     config = config or das_readconfig()
     self.dasmapping = DASMapping(config)
     if not self.dasmapping.check_maps():
         raise Exception("No DAS maps found in MappingDB")
     self.dasservices = config['services']
     self.daskeysmap = self.dasmapping.daskeys()
     self.operators = list(das_operators())
     self.daskeys = list(das_special_keys())
     self.verbose = config['verbose']
     self.logger = PrintManager('QLManger', self.verbose)
     # flatten per-service DAS keys into the overall key list
     for keys in self.daskeysmap.values():
         self.daskeys.extend(keys)
コード例 #47
0
def main():
    """Main function

    Parses options, reads the DAS configuration and runs dbs_update,
    optionally under cProfile when --profile is given.
    """
    optmgr = OptionParser()
    opts = optmgr.parser.parse_args()
    dasconfig = das_readconfig(opts.config)
    verbose = opts.verbose
    if opts.profile:
        import cProfile  # python profiler
        import pstats  # profiler statistics
        cmd = 'dbs_update(dasconfig,verbose)'
        cProfile.runctx(cmd, globals(), locals(), 'profile.dat')
        info = pstats.Stats('profile.dat')
        info.sort_stats('cumulative')
        info.print_stats()
    else:
        dbs_update(dasconfig, verbose)
コード例 #48
0
ファイル: das_viewmanager_t.py プロジェクト: perrozzi/DAS
 def setUp(self):
     """
     set up DAS core module

     Configures a sqlite-backed DASViewManager in the current directory.
     """
     self.db = 'test_views.db'
     debug = 0
     config = das_readconfig()
     logger = DASLogger(verbose=debug, stdout=debug)
     config['logger'] = logger
     config['verbose'] = debug
     config['views_dir'] = os.getcwd()
     config['views_engine'] = 'sqlite:///%s' % self.db
     # drop the optional key if present; dict.pop with a default replaces
     # the bare try/except that silently swallowed *all* exceptions
     config.pop('sum_views', None)
     self.view = DASViewManager(config)
コード例 #49
0
ファイル: qlparser_t.py プロジェクト: perrozzi/DAS
    def setUp(self):
        """
        set up data used in the tests.
        setUp is called before each test function execution.
        """
        # sample DAS-QL queries exercised by the tests
        self.i1 = "find dataset, run, bfield where site = T2 and admin=VK and storage=castor"
        self.i2 = "  find dataset, run where (run=1 or run=2) and storage=castor or site = T2"

        debug = 0
        config = das_readconfig()
        logger = DASLogger(verbose=debug, stdout=debug)
        config['logger'] = logger
        config['verbose'] = debug
        # point the mapping DB at a local MongoDB instance
        config['mapping_dbhost'] = 'localhost'
        config['mapping_dbport'] = 27017
        config['mapping_dbname'] = 'mapping'
        config['dasmapping'] = DASMapping(config)
        config['dasanalytics'] = DASAnalytics(config)
        self.parser = MongoParser(config)
        # DAS_OPERATORS entries may carry surrounding whitespace; strip it
        self.operators = [o.strip() for o in DAS_OPERATORS]
コード例 #50
0
def main():
    """Main function

    Drives the DAS Robot daemon via --start/--stop/--restart/--status.
    """
    optmgr = DASOptionParser()
    opts, _ = optmgr.getOpt()

    dasconfig = das_readconfig()
    robot = Robot(config=dasconfig, query=opts.query, sleep=opts.sleep)
    if opts.start:
        robot.start()
    elif opts.stop:
        robot.stop()
    elif opts.restart:
        robot.restart()
    elif opts.status:
        robot.status()
        sys.exit(0)
    else:
        print(
            "Unknown operation, please use --start|stop|restart|status options"
        )
コード例 #51
0
def initialize_global_dbs_mngr(update_required=False):
    """
    Gets a DBSDaemon for global DBS and fetches the data if needed.
    *Used for testing purposes only*.

    :param update_required: force a dataset fetch even when data exist
    :returns: configured DBSDaemon instance
    """
    from DAS.core.das_mapping_db import DASMapping

    dasconfig = das_readconfig()
    dasmapping = DASMapping(dasconfig)

    dburi = dasconfig['mongodb']['dburi']
    dbsexpire = dasconfig.get('dbs_daemon_expire', 3600)
    main_dbs_url = dasmapping.dbs_url()
    dbsmgr = DBSDaemon(main_dbs_url, dburi, {'expire': dbsexpire,
                                             'preserve_on_restart': True})

    # if we have no datasets (fresh DB, fetch them)
    if update_required or not next(dbsmgr.find('*Zmm*'), False):
        print('fetching datasets from global DBS...')
        dbsmgr.update()
    return dbsmgr
コード例 #52
0
 def __init__(self, config):
     """Web page handler: keep URL base and the full DAS configuration.

     :param config: web server configuration dict
     """
     DASWebManager.__init__(self, config)
     self.base = config['web_server'].get('url_base', 'das')
     self.dasconfig = das_readconfig()
コード例 #53
0
ファイル: urlfetch_pycurl.py プロジェクト: perrozzi/DAS
#    pass

# StringIO moved across Python versions: prefer the fast C implementation
# on Python 2, fall back to io on Python 3, then to pure-python StringIO
try:
    import cStringIO as StringIO
except ImportError:  # python3
    import io
except:
    import StringIO

# DAS modules
from DAS.utils.das_config import das_readconfig

# matches http/https URLs built from a restricted character set
PAT = re.compile(\
        "(https|http)://[-A-Za-z0-9_+&@#/%?=~_|!:,.;]*[-A-Za-z0-9+&@#/%=~_|]")

# pycurl settings read once from the DAS configuration at import time
PYCURL = das_readconfig()['pycurl']


def validate_url(url):
    """Validate URL against the module http(s) pattern.

    :param url: URL string to check
    :returns: True when *url* matches PAT, False otherwise
    """
    # PAT.match returns a Match object or None; coerce directly to bool
    # instead of the explicit True/False branches
    return bool(PAT.match(url))


def getdata(urls, ckey, cert, headers=None, num_conn=100):
    """
    Get data for given list of urls, using provided number of connections
    and user credentials
    """
コード例 #54
0
ファイル: das_core.py プロジェクト: perrozzi/DAS
    def __init__(self,
                 config=None,
                 debug=0,
                 nores=False,
                 logger=None,
                 engine=None,
                 multitask=True):
        """
        Initialize the DAS core.

        Resolves configuration and verbosity, optionally creates a
        threaded TaskManager, sets up the Mapping, KeyLearning and raw
        Mongo cache layers, then dynamically loads every data-service
        plugin listed by the mapping DB.

        :param config: DAS configuration dict; read from disk when None
        :param debug: integer debug level; a truthy value forces verbose,
            single-task operation
        :param nores: when True run in no-results mode (sets write_cache)
        :param logger: logger to use; a PrintManager is created when None
        :param engine: stored in dasconfig['engine']; the plugin task
            manager that consumed it is currently commented out
        :param multitask: allow the multi-threaded task manager when True
        :raises Exception: when a data-service plugin cannot be loaded
        """
        if config:
            dasconfig = config
        else:
            dasconfig = das_readconfig()
        verbose = dasconfig['verbose']
        self.stdout = debug
        if isinstance(debug, int) and debug:
            self.verbose = debug
            dasconfig['verbose'] = debug
        else:
            self.verbose = verbose
        das_timer('DASCore::init', self.verbose)
        self.operators = das_operators()
        self.collect_wait_time = dasconfig['das'].get('collect_wait_time', 120)

        # set noresults option
        self.noresults = False
        if nores:
            dasconfig['write_cache'] = True
            self.noresults = nores

        self.init_expire = dasconfig['das'].get('init_expire', 5 * 60)
        self.multitask = dasconfig['das'].get('multitask', True)
        if debug or self.verbose:
            self.multitask = False  # in verbose mode do not use multitask
            dasconfig['das']['multitask'] = False
        if not multitask:  # explicitly call DASCore ctor
            self.multitask = False
            dasconfig['das']['multitask'] = False
        dasconfig['engine'] = engine
        if self.multitask:
            nworkers = dasconfig['das'].get('core_workers', 5)
            #             if  engine:
            #                 thr_name = 'DASCore:PluginTaskManager'
            #                 self.taskmgr = PluginTaskManager(\
            #                         engine, nworkers=nworkers, name=thr_name)
            #                 self.taskmgr.subscribe()
            #             else:
            #                 thr_name = 'DASCore:TaskManager'
            #                 self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
            thr_name = 'DASCore:TaskManager'
            self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
        else:
            self.taskmgr = None

        if logger:
            self.logger = logger
        else:
            self.logger = PrintManager('DASCore', self.verbose)

        # define Mapping/Analytics/Parser in this order since Parser depends
        # on first two
        dasmapping = DASMapping(dasconfig)
        dasconfig['dasmapping'] = dasmapping
        self.mapping = dasmapping

        self.keylearning = DASKeyLearning(dasconfig)
        dasconfig['keylearning'] = self.keylearning

        # init DAS cache
        self.rawcache = DASMongocache(dasconfig)
        dasconfig['rawcache'] = self.rawcache

        # plug-in architecture: loop over registered data-services in
        # dasconfig; load appropriate module/class; register data
        # service with DASCore.
        self.systems = dasmapping.list_systems()
        # pointer to the DAS top level directory
        dasroot = '/'.join(__file__.split('/')[:-3])
        for name in self.systems:
            try:
                # the service class name is parsed out of its source file:
                # the class deriving from DASAbstractService
                klass  = 'DAS/services/%s/%s_service.py' \
                    % (name, name)
                srvfile = os.path.join(dasroot, klass)
                with open(srvfile) as srvclass:
                    for line in srvclass:
                        if line.find('(DASAbstractService)') != -1:
                            klass = line.split('(DASAbstractService)')[0]
                            klass = klass.split('class ')[-1]
                            break
                mname = 'DAS.services.%s.%s_service' % (name, name)
                module = __import__(mname, fromlist=[klass])
                obj = getattr(module, klass)(dasconfig)
                setattr(self, name, obj)
            except IOError as err:
                if debug > 1:
                    # we have virtual services, so IOError can be correct
                    print_exc(err)
                try:
                    # fall back to the generic data-service implementation
                    mname = 'DAS.services.generic_service'
                    module = __import__(mname, fromlist=['GenericService'])
                    obj = module.GenericService(name, dasconfig)
                    setattr(self, name, obj)
                except Exception as exc:
                    print_exc(exc)
                    msg = "Unable to load %s data-service plugin" % name
                    raise Exception(msg)
            except Exception as exc:
                print_exc(exc)
                msg = "Unable to load %s data-service plugin" % name
                raise Exception(msg)

        # loop over systems and get system keys, add mapping keys to final list
        self.service_keys = {}
        self.service_parameters = {}
        for name in self.systems:
            skeys = list(getattr(self, name).keys())
            self.service_keys[getattr(self, name).name] = skeys
            sparams = getattr(self, name).parameters()
            self.service_parameters[getattr(self, name).name] = sparams

        self.service_keys['special'] = das_special_keys()
        self.dasconfig = dasconfig
        das_timer('DASCore::init', self.verbose)
コード例 #55
0
def main():
    """Main function

    Parses options, reads the DAS configuration and runs cleanup.
    """
    optmgr = OptionParser()
    opts = optmgr.parser.parse_args()
    dasconfig = das_readconfig(opts.config)
    cleanup(dasconfig, opts.verbose)
コード例 #56
0
def get_db_uri():
    """Return the default MongoDB dburi from the DAS configuration."""
    cfg = das_readconfig()
    mongo_cfg = cfg['mongodb']
    return mongo_cfg['dburi']
コード例 #57
0
ファイル: input_values_tracker.py プロジェクト: perrozzi/DAS
def init_trackers():
    """ initialization

    Populates the global TRACKERS registry with an InputValuesTracker
    for every input-values URI known to the DAS mapping.
    """
    # get list of trackers
    mapping = DASMapping(config=das_readconfig())
    for provider in mapping.inputvalues_uris():
        TRACKERS[provider['input']] = InputValuesTracker(provider)