def main(global_config, **settings):
    version = settings.get('api_version')
    route_prefix = '/api/{}'.format(version)
    config = Configurator(
        autocommit=True,
        settings=settings,
        route_prefix=route_prefix,
    )
    config.include('pyramid_exclog')
    config.include("cornice")
    config.add_request_method(request_params, 'params', reify=True)
    config.add_renderer('prettyjson', JSON(indent=4))
    config.add_renderer('jsonp', JSONP(param_name='opt_jsonp'))
    config.add_renderer('prettyjsonp', JSONP(indent=4, param_name='opt_jsonp'))
    config.add_subscriber(add_logging_context, NewRequest)
    config.add_subscriber(set_logging_context, ContextFound)
    config.add_subscriber(set_renderer, NewRequest)
    config.add_subscriber(beforerender, BeforeRender)
    config.scan("openprocurement.edge.views.spore")
    config.scan("openprocurement.edge.views.health")

    # 'resources' is a comma-separated list and is expected to be present
    resources = settings.get('resources') and settings['resources'].split(',')
    # assumes 'couchdb.url' ends with a trailing slash
    couch_url = settings.get('couchdb.url') + settings.get('couchdb.db_name')
    for resource in resources:
        config.scan("openprocurement.edge.views." + resource)
        prepare_couchdb_views(couch_url, resource, LOGGER)
        LOGGER.info('Pushed couch {} views successfully.'.format(resource))
        LOGGER.info('{} resource initialized successfully.'.format(
            resource.title()))

    # CouchDB connection
    server = Server(settings.get('couchdb.url'),
                    session=Session(retry_delays=range(10)))
    config.registry.couchdb_server = server
    config.registry.db = prepare_couchdb(settings.get('couchdb.url'),
                                         settings.get('couchdb.db_name'),
                                         LOGGER)
    config.registry.server_id = settings.get('id', '')
    config.registry.health_threshold = float(
        settings.get('health_threshold', 99))
    config.registry.api_version = version
    config.registry.update_after = asbool(settings.get('update_after', True))
    return config.make_wsgi_app()
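# Illustrative sketch (not part of the source): wiring `main` into a WSGI
# server by hand. The settings keys mirror the ones read above; the concrete
# values (host, port, URLs, resource names) are assumptions for demonstration,
# and a reachable CouchDB instance is required for prepare_couchdb to succeed.
if __name__ == '__main__':
    from wsgiref.simple_server import make_server

    example_settings = {
        'api_version': '2.3',
        'resources': 'tenders,plans',             # resource views to scan
        'couchdb.url': 'http://localhost:5984/',  # trailing slash assumed above
        'couchdb.db_name': 'edge_db',
        'id': 'edge-server-1',
        'health_threshold': '99',
        'update_after': 'true',
    }
    app = main({}, **example_settings)  # global_config is unused here
    make_server('0.0.0.0', 6543, app).serve_forever()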
def __init__(self, config):
    super(EdgeDataBridge, self).__init__()
    self.config = config
    self.workers_config = {}
    self.bridge_id = uuid.uuid4().hex
    self.api_host = self.config_get('resources_api_server')
    self.api_version = self.config_get('resources_api_version')
    self.retrievers_params = self.config_get('retrievers_params')

    # Check up_wait_sleep
    up_wait_sleep = self.retrievers_params.get('up_wait_sleep')
    if up_wait_sleep is not None and up_wait_sleep < 30:
        raise DataBridgeConfigError('Invalid \'up_wait_sleep\' in '
                                    '\'retrievers_params\'. Value must be '
                                    'at least 30.')

    # Workers settings
    for key in WORKER_CONFIG:
        self.workers_config[key] = (self.config_get(key) or
                                    WORKER_CONFIG[key])

    # Init config
    for key in DEFAULTS:
        setattr(self, key, self.config_get(key) or DEFAULTS[key])

    # Pools
    self.workers_pool = gevent.pool.Pool(self.workers_max)
    self.retry_workers_pool = gevent.pool.Pool(self.retry_workers_max)
    self.filter_workers_pool = gevent.pool.Pool(self.filter_workers_count)

    # Queues
    if self.input_queue_size == -1:
        self.input_queue = Queue()
    else:
        self.input_queue = Queue(self.input_queue_size)
    if self.resource_items_queue_size == -1:
        self.resource_items_queue = Queue()
    else:
        self.resource_items_queue = Queue(self.resource_items_queue_size)
    self.api_clients_queue = Queue()
    # self.retry_api_clients_queue = Queue()
    if self.retry_resource_items_queue_size == -1:
        self.retry_resource_items_queue = Queue()
    else:
        self.retry_resource_items_queue = Queue(
            self.retry_resource_items_queue_size)

    self.process = psutil.Process(os.getpid())

    # Validate the resources API server URL
    if self.api_host != '' and self.api_host is not None:
        api_host = urlparse(self.api_host)
        if api_host.scheme == '' and api_host.netloc == '':
            raise DataBridgeConfigError(
                'Invalid \'resources_api_server\' url.')
    else:
        raise DataBridgeConfigError('Empty or missing'
                                    ' \'resources_api_server\' in config'
                                    ' dictionary.')

    # CouchDB database, design documents and server handle
    self.db = prepare_couchdb(self.couch_url, self.db_name, logger)
    db_url = self.couch_url + '/' + self.db_name
    prepare_couchdb_views(db_url, self.workers_config['resource'], logger)
    self.server = Server(self.couch_url,
                         session=Session(retry_delays=range(10)))
    self.view_path = '_design/{}/_view/by_dateModified'.format(
        self.workers_config['resource'])

    # Feeder that streams resource items from the public API
    extra_params = {
        'mode': self.retrieve_mode,
        'limit': self.resource_items_limit
    }
    self.feeder = ResourceFeeder(
        host=self.api_host, version=self.api_version, key='',
        resource=self.workers_config['resource'],
        extra_params=extra_params,
        retrievers_params=self.retrievers_params, adaptive=True)
    self.api_clients_info = {}
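# Illustrative sketch (not part of the source): constructing the bridge from a
# config dictionary. The layout expected by `config_get` is not shown in this
# excerpt, so the 'main' section and the example values below are assumptions;
# keys omitted here fall back to WORKER_CONFIG and DEFAULTS.
if __name__ == '__main__':
    example_config = {
        'main': {
            'resources_api_server': 'https://api.example.org',  # hypothetical URL
            'resources_api_version': '2.3',
            'retrievers_params': {'up_wait_sleep': 30},
        }
    }
    bridge = EdgeDataBridge(example_config)
    bridge.run()  # assumes the bridge exposes a run() entry point, not shown here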
def __init__(self, config):
    self.config = config
    self.workers_config = {}
    self.log_dict = {}
    self.bridge_id = uuid.uuid4().hex
    self.api_host = self.config_get('resources_api_server')
    self.api_version = self.config_get('resources_api_version')

    # Workers settings
    for key in WORKER_CONFIG:
        self.workers_config[key] = (self.config_get(key) or
                                    WORKER_CONFIG[key])

    # Init config: cast configured values to the type of the default
    for key in DEFAULTS:
        value = self.config_get(key)
        setattr(self, key,
                type(DEFAULTS[key])(value) if value else DEFAULTS[key])

    # Pools
    self.workers_pool = Pool(self.workers_max)
    self.retry_workers_pool = Pool(self.retry_workers_max)
    self.filter_workers_pool = Pool()

    # Queues
    self.api_clients_queue = Queue()
    if self.resource_items_queue_size == -1:
        self.resource_items_queue = Queue()
    else:
        self.resource_items_queue = Queue(self.resource_items_queue_size)
    if self.retry_resource_items_queue_size == -1:
        self.retry_resource_items_queue = Queue()
    else:
        self.retry_resource_items_queue = Queue(
            self.retry_resource_items_queue_size)

    # Default values for statistic variables
    for key in ('droped', 'add_to_resource_items_queue', 'add_to_retry',
                'exceptions_count', 'not_found_count', 'archived',
                'moved_to_public_archive', 'dumped_to_secret_archive'):
        self.log_dict[key] = 0

    # Validate the resources API server URL
    if self.api_host != '' and self.api_host is not None:
        api_host = urlparse(self.api_host)
        if api_host.scheme == '' and api_host.netloc == '':
            raise ConfigError('Invalid \'resources_api_server\' url.')
    else:
        raise ConfigError('Empty or missing \'resources_api_server\''
                          ' in config dictionary.')

    # Main and archive CouchDB databases
    self.db = prepare_couchdb(self.couch_url, self.db_name, LOGGER)
    self.archive_db = prepare_couchdb(self.couch_url, self.db_archive_name)

    # Find storages for the secret db
    for entry_point in iter_entry_points(
            'openprocurement.archivarius.storages', self.secret_storage):
        storage = entry_point.load()
        storage(self)

    # Discover resource plugins and the views they are filtered by
    self.resources = {}
    for entry_point in iter_entry_points(
            'openprocurement.archivarius.resources'):
        self.resources[entry_point.name] = {
            'filter': entry_point.load(),
            'view_path': '_design/{}/_view/by_dateModified'.format(
                entry_point.name)
        }
    for resource in self.resources:
        prepare_couchdb_views(self.couch_url + '/' + self.db_name,
                              resource, LOGGER)
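# Illustrative sketch (not part of the source): how a storage or resource
# plugin could be exposed to the iter_entry_points() lookups above. The entry
# point group names come from the code; the package, module and callable names
# below are hypothetical.
from setuptools import setup

setup(
    name='example_archivarius_plugin',
    entry_points={
        'openprocurement.archivarius.storages': [
            # 'secret_storage' in the bridge config selects one of these by
            # name; the loaded callable is invoked with the bridge instance
            'example_s3 = example_plugin.storage:includeme',
        ],
        'openprocurement.archivarius.resources': [
            # entry_point.name becomes the resource key; load() returns the
            # filter callable stored under 'filter'
            'tenders = example_plugin.filters:tender_filter',
        ],
    },
)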