Пример #1
0
    def __init__(self, config):
        """Pick a cache backend from *config*.

        When 'cache_host' is configured a Redis connection is opened;
        otherwise a local lazydb file store is used.  The backend's
        setter/membership callables are exposed uniformly as
        ``set_value`` and ``has_value``.
        """
        self.config = config

        self._backend = None
        self._db_name = None
        self._port = None
        self._host = None

        if 'cache_host' not in self.config:
            # File-backed fallback store.
            from lazydb import Db
            self._backend = "lazydb"
            self._db_name = self.config.get('cache_db_name') or 'databridge_cache_db'
            self.db = Db(self._db_name)
            self.set_value = self.db.put
            self.has_value = self.db.has
        else:
            import redis
            self._backend = "redis"
            self._host = self.config.get('cache_host')
            self._port = self.config.get('cache_port') or 6379
            self._db_name = self.config.get('cache_db_name') or 0
            self.db = redis.StrictRedis(host=self._host, port=self._port,
                                        db=self._db_name)
            self.set_value = self.db.set
            self.has_value = self.db.exists
Пример #2
0
    def POST(self):
        """Organize/sort the comments according to votes, author,
        time, etc (heuristic)

        Appends a new comment to the paper at index ``pid`` and
        redirects anonymous users to the login page first.
        """
        # web.input supplies defaults for any missing form fields; note
        # the 'time' default is computed when POST runs, once per request.
        i = web.input(pid=None, time=datetime.utcnow().ctime(),
                      comment="", username=session()['uname'], votes=0,
                      enabled=True)
        if i.pid:
            i.pid = int(i.pid)
            i.cid = 0 #sets default cid val if first comment

            # Only logged-in users may comment; bounce to login with a
            # redirect back to this item.
            if not session().logged:
                raise web.seeother('/login?redir=/item=?pid=%s' % i.pid)
            try:
                db = Db('db/openjournal')
                papers = db.get('papers')                 
                paper = papers[i.pid] #XXX get by key 'pid' instead
                # Next cid is one past the last comment's cid.
                if paper['comments']:
                    i.cid = paper['comments'][-1]['cid'] + 1
                papers[i.pid]['comments'].append(dict(i))
                db.put('papers', papers)
                record_comment(i.username, i.pid, i.cid)
                return render().item(i.pid, paper)
            except IndexError:
                # pid was numeric but out of range for the papers list.
                return "No such item exists, id out of range"
        raise web.seeother('/')        
Пример #3
0
 def __init__(self, config):
     """Initialise a lazydb-backed cache; db name is read from the
     'storage_config' section of the config.

     NOTE(review): the fallback default is the literal string
     'cache_db_name' (the key's own name) — looks like a copy/paste
     slip; sibling implementations default to 'databridge_cache_db'.
     Confirm before changing.
     """
     super(DbLazy, self).__init__(config)
     self._backend = "lazydb"
     self._db_name = self.config['storage_config'].get(
         'cache_db_name') or 'cache_db_name'
     self.db = Db(self._db_name)
     # Uniform backend aliases: lazydb put/has.
     self.set_value = self.db.put
     self.has_value = self.db.has
    def __init__(self, config):
        """Set up the contracting data bridge: cache proxy, delays and
        endpoints read from the 'main' config section, API clients and
        the inter-greenlet work queues."""
        super(ContractingDataBridge, self).__init__()
        self.config = config

        # Cache proxy built from the 'main' config section (redis or
        # lazydb, chosen inside Db).
        self.cache_db = Db(self.config.get('main'))

        self._backend = "redis"
        self._host = self.config.get('cache_host')
        self._port = self.config.get('cache_port') or 6379
        self._db_name = self.config.get('cache_db_name') or 0

        logger.info(
            "Caching backend: '{}', db name: '{}', host: '{}', port: '{}'".
            format(self.cache_db._backend, self.cache_db._db_name,
                   self.cache_db._host, self.cache_db._port),
            extra=journal_context({"MESSAGE_ID": DATABRIDGE_INFO}, {}))

        # Polling/backoff delays (seconds) with hard-coded fallbacks.
        self.on_error_delay = self.config_get('on_error_sleep_delay') or 5
        self.jobs_watcher_delay = self.config_get('jobs_watcher_delay') or 15
        queue_size = self.config_get('buffers_size') or 500
        self.full_stack_sync_delay = self.config_get(
            'full_stack_sync_delay') or 15
        self.empty_stack_sync_delay = self.config_get(
            'empty_stack_sync_delay') or 101

        # API endpoints; read-only server falls back to the main one.
        self.api_server = self.config_get('tenders_api_server')
        self.api_version = self.config_get('tenders_api_version')
        self.ro_api_server = self.config_get(
            'public_tenders_api_server') or self.api_server

        self.contracting_api_server = self.config_get('contracting_api_server')
        self.contracting_api_version = self.config_get(
            'contracting_api_version')

        self.clients_initialize()

        # Shared state between sync workers (see initialize_sync).
        self.initial_sync_point = {}
        self.initialization_event = gevent.event.Event()
        self.tenders_queue = Queue(maxsize=queue_size)
        self.handicap_contracts_queue = Queue(maxsize=queue_size)
        self.contracts_put_queue = Queue(maxsize=queue_size)
        self.contracts_retry_put_queue = Queue(maxsize=queue_size)
        # contract id -> tender dateModified, consumed when caching.
        self.basket = {}
Пример #5
0
 def __init__(self, config):
     """Choose the auctions-mapping backend: Redis when 'host' is in
     the config, otherwise a local lazydb file store."""
     self.config = config
     if 'host' in self.config:
         # NB: this local dict shadows the parameter on purpose; it is
         # both the StrictRedis kwargs and the log-message context.
         config = {
             'host': self.config.get('host'),
             'port': self.config.get('port') or 6379,
             'db': self.config.get('name') or 0,
             'password': self.config.get('password') or None
         }
         self.db = StrictRedis(**config)
         LOGGER.info('Set redis store "{db}" at {host}:{port} '
                     'as auctions mapping'.format(**config))
         # Backend-specific callables used by put()/has().
         self._set_value = self.db.set
         self._has_value = self.db.exists
     else:
         db = self.config.get('name', 'auctions_mapping')
         self.db = LazyDB(db)
         LOGGER.info('Set lazydb "{}" as auctions mapping'.format(db))
         self._set_value = self.db.put
         self._has_value = self.db.has
Пример #6
0
 def GET(self):
     """Render a paper (and optionally one of its comments) addressed
     by positional index from the query string (?pid=&cid=)."""
     i = web.input(pid=None, cid=None)
     if i.pid:
         i.pid = int(i.pid)
         try:
             db = Db('db/openjournal')
             papers = db.get('papers')
             paper = papers[i.pid]
             if i.cid:
                 i.cid = int(i.cid)
                 # XXX Revise data storage scheme s.t. comment can
                 # be retrieved by cid pkey, not i.cid'th index (below)
                 # reason: cnnt can be deleted and lead to consistency
                 # errors (id would then reference wrong entity)
                 comment = paper['comments'][i.cid]
                 return render().comment(i.pid, i.cid, comment)
             return render().item(i.pid, paper)
         except IndexError:
             # pid/cid numeric but out of range.
             return "No such item exists, id out of range"
     raise web.seeother('/')
Пример #7
0
    def GET(self):
        """Research http://news.ycombinator.com/item?id=1781013 how
        hacker news voting works and emulate

        Upvotes the paper at index ``pid`` (if the session user may
        vote on it) and redirects back to the listing.

        XXX Restrict voting to session().logged users + element id
        must not already exist in user['votes'] set.

        XXX Requires accounting + record keeping

        XXX Preserve the web.ctx GET query params to preserve sorting
        / ordering

        Algo:
        1. Add karma to paper['submitter'] if vote
        2. Record vote in user['votes'] set by id
        - calc unique vote id via some linear combination of paper pid
          (and or comment id [cid], if it exists)
        """
        msg = None
        i = web.input(pid=None, sort="popular")
        
        # Anonymous users are sent to registration instead of voting.
        if not session().logged:
            raise web.seeother('/register')
        db = Db('db/openjournal')
        ps = db.get('papers')
        u = User.get(session()['uname'])
        if i.pid:
            i.pid = int(i.pid)
            # canvote presumably checks the user has not voted on this
            # pid already — confirm against its definition.
            if canvote(u, i.pid):
                try:
                    ps[i.pid]['votes'] += 1
                    db.put('papers', ps)
                    submitter_uname = ps[i.pid]['submitter']
                    record_vote(u['username'], submitter_uname, i.pid)
                except IndexError:
                    return "No such items exists to vote on"
        raise web.seeother('/?sort=%s' % i.sort)
Пример #8
0
class AuctionsMapping(object):
    """Mapping for processed auctions.

    Backed by Redis when the config carries a 'host' key, by a local
    lazydb store otherwise; both backends expose the same get/put/has/
    delete interface through this proxy.
    """

    def __init__(self, config):
        self.config = config
        if 'host' not in self.config:
            # Local file-backed fallback.
            name = self.config.get('name', 'auctions_mapping')
            self.db = LazyDB(name)
            LOGGER.info('Set lazydb "{}" as auctions mapping'.format(name))
            self._set_value = self.db.put
            self._has_value = self.db.has
        else:
            redis_opts = {
                'host': self.config.get('host'),
                'port': self.config.get('port') or 6379,
                'db': self.config.get('name') or 0,
                'password': self.config.get('password') or None
            }
            self.db = StrictRedis(**redis_opts)
            LOGGER.info('Set redis store "{db}" at {host}:{port} '
                        'as auctions mapping'.format(**redis_opts))
            self._set_value = self.db.set
            self._has_value = self.db.exists

    def get(self, key):
        """Return the stored value for *key*."""
        return self.db.get(key)

    def put(self, key, value, **kwargs):
        """Store *value* under *key*; extra kwargs go to the backend."""
        LOGGER.info('Save ID {} in cache'.format(key))
        self._set_value(key, value, **kwargs)

    def has(self, key):
        """Report whether *key* is present in the mapping."""
        return self._has_value(key)

    def delete(self, key):
        """Remove *key* from the backing store."""
        return self.db.delete(key)
Пример #9
0
class Db(object):
    """ Database proxy """

    def __init__(self, config):
        """Select the cache backend from *config*: Redis when
        'cache_host' is present, a lazydb file store otherwise."""
        self.config = config

        self._backend = None
        self._db_name = None
        self._port = None
        self._host = None

        if 'cache_host' not in self.config:
            # File-backed fallback store.
            from lazydb import Db
            self._backend = "lazydb"
            self._db_name = self.config.get('cache_db_name') or 'databridge_cache_db'
            self.db = Db(self._db_name)
            self.set_value = self.db.put
            self.has_value = self.db.has
        else:
            import redis
            self._backend = "redis"
            self._host = self.config.get('cache_host')
            self._port = self.config.get('cache_port') or 6379
            self._db_name = self.config.get('cache_db_name') or 0
            self.db = redis.StrictRedis(host=self._host, port=self._port,
                                        db=self._db_name)
            self.set_value = self.db.set
            self.has_value = self.db.exists

    def get(self, key):
        """Fetch the value stored for *key*."""
        return self.db.get(key)

    def put(self, key, value):
        """Store *value* under *key* via the backend-specific setter."""
        self.set_value(key, value)

    def has(self, key):
        """Report whether *key* exists in the backend."""
        return self.has_value(key)
 def tearDown(self):
     """Remove the per-test database from the server and destroy the
     auctions-mapping lazydb created for the test run."""
     del self.server[self.config['db']['name']]
     test_mapping_name = self.config.get('auctions_mapping',
                                         {}).get('name', 'auctions_mapping')
     # Db here is the lazydb class; destroy() removes its storage.
     Db(test_mapping_name).destroy(test_mapping_name)
Пример #11
0
class ContractingDataBridge(object):
    """ Contracting Data Bridge """

    def __init__(self, config):
        """Set up the bridge: cache proxy, config-derived delays and
        endpoints, API clients, and the inter-greenlet work queues."""
        super(ContractingDataBridge, self).__init__()
        self.config = config

        # Cache proxy built from the 'main' config section.
        self.cache_db = Db(self.config.get('main'))

        self._backend = "redis"
        self._host = self.config.get('cache_host')
        self._port = self.config.get('cache_port') or 6379
        self._db_name = self.config.get('cache_db_name') or 0

        logger.info("Caching backend: '{}', db name: '{}', host: '{}', port: '{}'".format(self.cache_db._backend,
                                                                                          self.cache_db._db_name,
                                                                                          self.cache_db._host,
                                                                                          self.cache_db._port),
                    extra=journal_context({"MESSAGE_ID": DATABRIDGE_INFO}, {}))


        # Polling/backoff delays (seconds) with hard-coded fallbacks.
        self.on_error_delay = self.config_get('on_error_sleep_delay') or 5
        self.jobs_watcher_delay = self.config_get('jobs_watcher_delay') or 15
        queue_size = self.config_get('buffers_size') or 500
        self.full_stack_sync_delay = self.config_get('full_stack_sync_delay') or 15
        self.empty_stack_sync_delay = self.config_get('empty_stack_sync_delay') or 101

        # API endpoints; read-only server falls back to the main one.
        self.api_server = self.config_get('tenders_api_server')
        self.api_version = self.config_get('tenders_api_version')
        self.ro_api_server = self.config_get('public_tenders_api_server') or self.api_server

        self.contracting_api_server = self.config_get('contracting_api_server')
        self.contracting_api_version = self.config_get('contracting_api_version')

        self.clients_initialize()

        # Shared state between sync workers (see initialize_sync).
        self.initial_sync_point = {}
        self.initialization_event = gevent.event.Event()
        self.tenders_queue = Queue(maxsize=queue_size)
        self.handicap_contracts_queue = Queue(maxsize=queue_size)
        self.contracts_put_queue = Queue(maxsize=queue_size)
        self.contracts_retry_put_queue = Queue(maxsize=queue_size)
        # contract id -> tender dateModified, consumed when caching.
        self.basket = {}

    def contracting_client_init(self):
        """Create the read-write contracting client and, when a separate
        public API host serves the same contracting endpoint, a
        read-only anonymous twin pointed at it."""
        logger.info(
            'Initialization contracting clients.',
            extra=journal_context({"MESSAGE_ID": DATABRIDGE_INFO}, {}))
        self.contracting_client = ContractingClient(
            self.config_get('api_token'),
            host_url=self.contracting_api_server,
            api_version=self.contracting_api_version)

        # By default the RO client is simply the read-write client.
        self.contracting_client_ro = self.contracting_client
        same_endpoint = (self.api_server == self.contracting_api_server
                         and self.api_version == self.contracting_api_version)
        if self.config_get('public_tenders_api_server') and same_endpoint:
            self.contracting_client_ro = ContractingClient(
                '',
                host_url=self.ro_api_server,
                api_version=self.api_version)

    def clients_initialize(self):
        """(Re)create every API client the bridge uses."""
        # Authenticated read-write client for the tenders API.
        self.client = TendersClient(
            self.config_get('api_token'),
            host_url=self.api_server, api_version=self.api_version,
        )

        self.contracting_client_init()

        # Anonymous sync client bound to the read-only endpoint.
        self.tenders_sync_client = TendersClientSync('',
            host_url=self.ro_api_server, api_version=self.api_version,
        )

    def config_get(self, name):
        """Look *name* up in the 'main' section of the bridge config."""
        main_section = self.config.get('main')
        return main_section.get(name)

    @retry(stop_max_attempt_number=5, wait_exponential_multiplier=1000)
    def get_tender_credentials(self, tender_id):
        """Fetch owner credentials for *tender_id* from the tenders API.

        Retried up to 5 times with exponential backoff by @retry.
        """
        # Fresh request id so API-side logs can correlate this call.
        self.client.headers.update({'X-Client-Request-ID': generate_req_id()})
        logger.info("Getting credentials for tender {}".format(tender_id), extra=journal_context({"MESSAGE_ID": DATABRIDGE_GET_CREDENTIALS},
                                                                                                 {"TENDER_ID": tender_id}))
        data = self.client.extract_credentials(tender_id)
        logger.info("Got tender {} credentials".format(tender_id), extra=journal_context({"MESSAGE_ID": DATABRIDGE_GOT_CREDENTIALS},
                                                                                         {"TENDER_ID": tender_id}))
        return data

    def initialize_sync(self, params=None, direction=None):
        """Start a sync worker and coordinate its initial offset.

        The backward (descending) worker makes the first request,
        records both offsets in ``initial_sync_point`` and wakes any
        forward worker blocked on ``initialization_event``; the forward
        worker waits for that point and resumes from its offset.
        """
        self.initialization_event.clear()
        if direction == "backward":
            assert params['descending']
            response = self.tenders_sync_client.sync_tenders(params, extra_headers={'X-Client-Request-ID': generate_req_id()})
            # set values in reverse order due to 'descending' option
            self.initial_sync_point = {'forward_offset': response.prev_page.offset,
                                       'backward_offset': response.next_page.offset}
            self.initialization_event.set()  # wake up forward worker
            logger.info("Initial sync point {}".format(self.initial_sync_point))
            return response
        else:
            assert 'descending' not in params
            # Block this greenlet until the backward worker publishes.
            gevent.wait([self.initialization_event])
            params['offset'] = self.initial_sync_point['forward_offset']
            logger.info("Starting forward sync from offset {}".format(params['offset']))
            return self.tenders_sync_client.sync_tenders(params, extra_headers={'X-Client-Request-ID': generate_req_id()})

    def get_tenders(self, params=None, direction=""):
        """Generator: poll the tenders feed in *direction* and yield
        tenders whose status suggests completed contracts may exist.

        Fix: the signature previously used a mutable default
        (``params={}``). This method mutates ``params['offset']`` on
        every page, so the shared default dict leaked offsets between
        calls. ``None`` now stands in for "no params" and a fresh dict
        is created per call; callers passing their own dict see no
        change.
        """
        if params is None:
            params = {}
        response = self.initialize_sync(params=params, direction=direction)

        # Stop (backward sync only) on an empty page whose next offset
        # equals the offset we already requested.
        while not (params.get('descending') and not len(response.data) and params.get('offset') == response.next_page.offset):
            tenders_list = response.data
            params['offset'] = response.next_page.offset

            # Sleep longer when the page was empty.
            delay = self.empty_stack_sync_delay
            if tenders_list:
                delay = self.full_stack_sync_delay
                logger.info("Client {} params: {}".format(direction, params))
            for tender in tenders_list:
                # Competitive-dialogue tenders are not bridged here.
                if tender.get('procurementMethodType') in ['competitiveDialogueUA', 'competitiveDialogueEU']:
                    logger.info('Skipping {} tender {}'.format(tender['procurementMethodType'], tender['id']),
                                extra=journal_context({"MESSAGE_ID": DATABRIDGE_INFO}, params={"TENDER_ID": tender['id']}))
                    continue
                if tender['status'] in ("active.qualification", "active",
                                        "active.awarded", "complete"):
                    if hasattr(tender, "lots"):
                        # Multilot: yield when at least one lot is complete.
                        if any([1 for lot in tender['lots'] if lot['status'] == "complete"]):
                            logger.info('{} sync: Found multilot tender {} in status {}'.format(direction.capitalize(), tender['id'], tender['status']),
                                        extra=journal_context({"MESSAGE_ID": DATABRIDGE_FOUND_MULTILOT_COMPLETE}, {"TENDER_ID": tender['id']}))
                            yield tender
                    elif tender['status'] == "complete":
                        logger.info('{} sync: Found tender in complete status {}'.format(direction.capitalize(), tender['id']),
                                    extra=journal_context({"MESSAGE_ID": DATABRIDGE_FOUND_NOLOT_COMPLETE}, {"TENDER_ID": tender['id']}))
                        yield tender
                else:
                    logger.debug('{} sync: Skipping tender {} in status {}'.format(direction.capitalize(), tender['id'], tender['status']),
                                 extra=journal_context(params={"TENDER_ID": tender['id']}))

            logger.info('Sleep {} sync...'.format(direction), extra=journal_context({"MESSAGE_ID": DATABRIDGE_SYNC_SLEEP}))
            gevent.sleep(delay)
            logger.info('Restore {} sync'.format(direction), extra=journal_context({"MESSAGE_ID": DATABRIDGE_SYNC_RESUME}))
            logger.debug('{} {}'.format(direction, params))
            response = self.tenders_sync_client.sync_tenders(params, extra_headers={'X-Client-Request-ID': generate_req_id()})

    def _put_tender_in_cache_by_contract(self, contract, tender_id):
        """Cache *tender_id* using the dateModified remembered for this
        contract in ``self.basket``, then drop the basket entry."""
        contract_id = contract['id']
        date_modified = self.basket.get(contract_id)
        if date_modified:
            # TODO: save tender in cache only if all active contracts are
            # handled successfully
            self.cache_db.put(tender_id, date_modified)
        self.basket.pop(contract_id, None)

    def _get_tender_contracts(self):
        try:
            tender_to_sync = self.tenders_queue.get()
            tender = self.tenders_sync_client.get_tender(tender_to_sync['id'],
                                                         extra_headers={'X-Client-Request-ID': generate_req_id()})['data']
        except Exception, e:
            logger.warn('Fail to get tender info {}'.format(tender_to_sync['id']), extra=journal_context({"MESSAGE_ID": DATABRIDGE_EXCEPTION}, params={"TENDER_ID": tender_to_sync['id']}))
            logger.exception(e)
            logger.info('Put tender {} back to tenders queue'.format(tender_to_sync['id']), extra=journal_context({"MESSAGE_ID": DATABRIDGE_EXCEPTION}, params={"TENDER_ID": tender_to_sync['id']}))
            self.tenders_queue.put(tender_to_sync)
            gevent.sleep(self.on_error_delay)
        else:
Пример #12
0
 def clear(pid):
     """Clear comments for an item.

     Empties the comment list of the paper at index *pid* and persists
     the updated collection, returning whatever Db.put returns.
     """
     store = Db('db/openjournal')
     papers = store.get('papers')
     papers[pid]['comments'] = []
     return store.put('papers', papers)
Пример #13
0
 def db(dbname=os.getcwd() + "/db/openjournal"):
     """Return a Db handle; the default path is <cwd>/db/openjournal.

     NOTE(review): the default is evaluated once at import time, so it
     pins whatever cwd the process had at import — confirm intended.
     """
     return Db(dbname)