def configure_extensions(self):
    Empty.configure_extensions(self)

    self.celery = Celery(self.name,
                         broker=self.config['CELERY_BROKER_URL'],
                         beat=True)
    self.celery.conf.update(self.config)
    # Configure celery_once so duplicate tasks are deduplicated via Redis.
    self.celery.conf.ONCE = {
        'backend': 'celery_once.backends.Redis',
        'settings': {
            'url': self.config['CELERY_BROKER_URL'],
            'default_timeout': 60 * 60 * 24
        }
    }

    # Run every Celery task inside the Flask application context.
    class ContextTask(self.celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    self.celery.Task = ContextTask

    # Make QueueOnce app context aware.
    class ContextQueueOnce(QueueOnce):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return super(ContextQueueOnce, self).__call__(*args, **kwargs)

    # Attach to celery object for easy access (see the usage sketch after this method).
    self.celery.QueueOnce = ContextQueueOnce

    app = self

    if 'root_path' in self.config:
        self.root_path = self.config['root_path']

    if 'WHYIS_TEMPLATE_DIR' in self.config and app.config['WHYIS_TEMPLATE_DIR'] is not None:
        my_loader = jinja2.ChoiceLoader([
            jinja2.FileSystemLoader(p)
            for p in self.config['WHYIS_TEMPLATE_DIR']
        ] + [app.jinja_loader])
        app.jinja_loader = my_loader

    # Apply a named inferencer to the entire knowledge graph.
    @self.celery.task(base=QueueOnce, once={'graceful': True})
    def process_resource(service_name, taskid=None):
        service = self.config['inferencers'][service_name]
        service.process_graph(app.db)

    # Apply a named inferencer to a single, still-current nanopublication.
    @self.celery.task
    def process_nanopub(nanopub_uri, service_name, taskid=None):
        service = self.config['inferencers'][service_name]
        print(service, nanopub_uri)
        if app.nanopub_manager.is_current(nanopub_uri):
            nanopub = app.nanopub_manager.get(nanopub_uri)
            service.process_graph(nanopub)
        else:
            print("Skipping retired nanopub", nanopub_uri)

    # Wrap a configured inference task with Celery tasks that find matching
    # instances and process each one.
    def setup_periodic_task(task):
        @self.celery.task
        def find_instances():
            print("Triggered task", task['name'])
            for x, in task['service'].getInstances(app.db):
                task['do'](x)

        @self.celery.task
        def do_task(uri):
            print("Running task", task['name'], 'on', uri)
            resource = app.get_resource(uri)
            # result never used
            task['service'].process_graph(resource.graph)

        task['service'].app = app
        task['find_instances'] = find_instances
        task['do'] = do_task
        return task

    app.inference_tasks = []
    if 'inference_tasks' in self.config:
        app.inference_tasks = [
            setup_periodic_task(task)
            for task in self.config['inference_tasks']
        ]

    for name, task in list(self.config['inferencers'].items()):
        task.app = app

    # Scheduled tasks run on their crontab; unscheduled ones run immediately.
    for task in app.inference_tasks:
        if 'schedule' in task:
            #print "Scheduling task", task['name'], task['schedule']
            self.celery.add_periodic_task(crontab(**task['schedule']),
                                          task['find_instances'].s(),
                                          name=task['name'])
        else:
            task['find_instances'].delay()

    @self.celery.task()
    def update(nanopub_uri):
        '''Gets called whenever there is a change in the knowledge graph.
        Performs a breadth-first knowledge expansion of the current change.'''
        #print "Updating on", nanopub_uri
        #if not app.nanopub_manager.is_current(nanopub_uri):
        #    print("Skipping retired nanopub", nanopub_uri)
        #    return
        nanopub = app.nanopub_manager.get(nanopub_uri)
        nanopub_graph = ConjunctiveGraph(nanopub.store)
        if 'inferencers' in self.config:
            for name, service in list(self.config['inferencers'].items()):
                service.app = self
                if service.query_predicate == self.NS.whyis.updateChangeQuery:
                    if service.getInstances(nanopub_graph):
                        print("invoking", name, nanopub_uri)
                        process_nanopub.apply_async(kwargs={
                            'nanopub_uri': nanopub_uri,
                            'service_name': name
                        }, priority=1)
            for name, service in list(self.config['inferencers'].items()):
                service.app = self
                if service.query_predicate == self.NS.whyis.globalChangeQuery:
                    process_resource.apply_async(
                        kwargs={'service_name': name}, priority=5)

    def run_update(nanopub_uri):
        update.apply_async(args=[nanopub_uri], priority=9)

    # The nanopub manager created below calls this listener on changes.
    self.nanopub_update_listener = run_update

    app = self

    # Import (or refresh) a remote entity when the remote copy is newer than
    # the local one, deduplicated and retried with backoff.
    @self.celery.task(base=self.celery.QueueOnce,
                      once={'graceful': True},
                      retry_backoff=True,
                      retry_jitter=True,
                      autoretry_for=(Exception, ),
                      max_retries=4,
                      bind=True)
    def run_importer(self, entity_name):
        entity_name = URIRef(entity_name)
        print('importing', entity_name)
        importer = app.find_importer(entity_name)
        if importer is None:
            return
        importer.app = app
        modified = importer.last_modified(entity_name, app.db,
                                          app.nanopub_manager)
        updated = importer.modified(entity_name)
        if updated is None:
            updated = datetime.now(pytz.utc)
        print("Remote modified:", updated, type(updated),
              "Local modified:", modified, type(modified))
        if modified is None or (updated - modified).total_seconds() > importer.min_modified:
            importer.load(entity_name, app.db, app.nanopub_manager)

    self.run_importer = run_importer

    self.template_imports = {}
    if 'template_imports' in self.config:
        for name, imp in list(self.config['template_imports'].items()):
            try:
                m = importlib.import_module(imp)
                self.template_imports[name] = m
            except Exception:
                print("Error importing module %s into template variable %s."
                      % (imp, name))
                raise

    self.nanopub_manager = NanopublicationManager(
        self.db.store,
        Namespace('%s/pub/' % (self.config['lod_prefix'])),
        self,
        update_listener=self.nanopub_update_listener)

    if 'CACHE_TYPE' in self.config:
        from flask_caching import Cache
        self.cache = Cache(self)
    else:
        self.cache = None
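# Usage sketch (hypothetical, not part of the original source): with the
# context-aware task bases attached above, application code can define tasks
# that run inside the Flask app context and are deduplicated by celery_once,
# and can trigger knowledge expansion through the update listener, e.g.:
#
#     @app.celery.task(base=app.celery.QueueOnce, once={'graceful': True})
#     def reindex(resource_uri):
#         app.get_resource(resource_uri)      # executes inside app.app_context()
#
#     app.nanopub_update_listener(nanopub_uri)  # queues `update` at priority 9
#
# `reindex` and `resource_uri` are illustrative names only.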
def get_send_file_max_age(self, filename):
    # Skip client-side caching of static files while debugging.
    if self.debug:
        return 0
    else:
        return Empty.get_send_file_max_age(self, filename)
def configure_extensions(self):
    Empty.configure_extensions(self)

    self.celery = Celery(self.name,
                         broker=self.config['CELERY_BROKER_URL'],
                         beat=True)
    self.celery.conf.update(self.config)

    app = self
    # Reuse the Celery broker connection's Redis client for task bookkeeping.
    self.redis = self.celery.broker_connection().default_channel.client

    if 'root_path' in self.config:
        self.root_path = self.config['root_path']

    if 'WHYIS_TEMPLATE_DIR' in self.config and app.config['WHYIS_TEMPLATE_DIR'] is not None:
        my_loader = jinja2.ChoiceLoader([
            jinja2.FileSystemLoader(p)
            for p in self.config['WHYIS_TEMPLATE_DIR']
        ] + [app.jinja_loader])
        app.jinja_loader = my_loader

    # Apply a named inferencer to the entire knowledge graph, unless another
    # invocation is already queued (is_waiting is defined elsewhere in this
    # module; see the sketch after this method).
    @self.celery.task
    def process_resource(service_name, taskid=None):
        service = self.config['inferencers'][service_name]
        if is_waiting(service_name):
            print("Deferring to a later invocation.", service_name)
            return
        print(service_name)
        service.process_graph(app.db)

    # Apply a named inferencer to a single, still-current nanopublication.
    @self.celery.task
    def process_nanopub(nanopub_uri, service_name, taskid=None):
        service = self.config['inferencers'][service_name]
        print(service, nanopub_uri)
        if app.nanopub_manager.is_current(nanopub_uri):
            nanopub = app.nanopub_manager.get(nanopub_uri)
            service.process_graph(nanopub)
        else:
            print("Skipping retired nanopub", nanopub_uri)

    # Wrap a configured inference task with Celery tasks that find matching
    # instances and process each one.
    def setup_periodic_task(task):
        @self.celery.task
        def find_instances():
            print("Triggered task", task['name'])
            for x, in task['service'].getInstances(app.db):
                task['do'](x)

        @self.celery.task
        def do_task(uri):
            print("Running task", task['name'], 'on', uri)
            resource = app.get_resource(uri)
            # result never used
            task['service'].process_graph(resource.graph)

        task['service'].app = app
        task['find_instances'] = find_instances
        task['do'] = do_task
        return task

    app.inference_tasks = []
    if 'inference_tasks' in self.config:
        app.inference_tasks = [
            setup_periodic_task(task)
            for task in self.config['inference_tasks']
        ]

    for name, task in list(self.config['inferencers'].items()):
        task.app = app

    # Scheduled tasks run on their crontab; unscheduled ones run immediately.
    for task in app.inference_tasks:
        if 'schedule' in task:
            #print "Scheduling task", task['name'], task['schedule']
            self.celery.add_periodic_task(crontab(**task['schedule']),
                                          task['find_instances'].s(),
                                          name=task['name'])
        else:
            task['find_instances'].delay()

    @self.celery.task()
    def update(nanopub_uri):
        '''Gets called whenever there is a change in the knowledge graph.
        Performs a breadth-first knowledge expansion of the current change.'''
        #print "Updating on", nanopub_uri
        if not app.nanopub_manager.is_current(nanopub_uri):
            print("Skipping retired nanopub", nanopub_uri)
            return
        nanopub = app.nanopub_manager.get(nanopub_uri)
        nanopub_graph = ConjunctiveGraph(nanopub.store)
        if 'inferencers' in self.config:
            for name, service in list(self.config['inferencers'].items()):
                service.app = self
                if service.query_predicate == self.NS.whyis.updateChangeQuery:
                    #print "checking", name, nanopub_uri, service.get_query()
                    if service.getInstances(nanopub_graph):
                        print("invoking", name, nanopub_uri)
                        process_nanopub.apply_async(kwargs={
                            'nanopub_uri': nanopub_uri,
                            'service_name': name
                        }, priority=1)
            for name, service in list(self.config['inferencers'].items()):
                service.app = self
                if (service.query_predicate == self.NS.whyis.globalChangeQuery
                        and not is_running_waiting(name)):
                    #print "checking", name, service.get_query()
                    process_resource.apply_async(
                        kwargs={'service_name': name}, priority=5)

    def run_update(nanopub_uri):
        update.apply_async(args=[nanopub_uri], priority=9)

    # The nanopub manager created below calls this listener on changes.
    self.nanopub_update_listener = run_update

    app = self

    # Import (or refresh) a remote entity when the remote copy is newer than
    # the local one, retried with backoff on failure.
    @self.celery.task(retry_backoff=True,
                      retry_jitter=True,
                      autoretry_for=(Exception, ),
                      max_retries=4,
                      bind=True)
    def run_importer(self, entity_name):
        entity_name = URIRef(entity_name)
        # The Redis counter acts as a re-entrancy guard: only the first
        # concurrent invocation for an entity proceeds.
        counter = app.redis.incr("import__" + entity_name)
        if counter > 1:
            return
        print('importing', entity_name)
        importer = app.find_importer(entity_name)
        if importer is None:
            return
        importer.app = app
        modified = importer.last_modified(entity_name, app.db,
                                          app.nanopub_manager)
        updated = importer.modified(entity_name)
        if updated is None:
            updated = datetime.now(pytz.utc)
        print("Remote modified:", updated, type(updated),
              "Local modified:", modified, type(modified))
        if modified is None or (updated - modified).total_seconds() > importer.min_modified:
            importer.load(entity_name, app.db, app.nanopub_manager)
        # Reset the guard so later invocations can import this entity again.
        app.redis.set("import__" + entity_name, 0)

    self.run_importer = run_importer

    self.template_imports = {}
    if 'template_imports' in self.config:
        for name, imp in list(self.config['template_imports'].items()):
            try:
                m = importlib.import_module(imp)
                self.template_imports[name] = m
            except Exception:
                print("Error importing module %s into template variable %s."
                      % (imp, name))
                raise

    self.nanopub_manager = NanopublicationManager(
        self.db.store,
        Namespace('%s/pub/' % (self.config['lod_prefix'])),
        self,
        update_listener=self.nanopub_update_listener)
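# `is_waiting` and `is_running_waiting` are defined elsewhere in this module;
# here they only guard against re-queuing an inference service that already
# has a pending (or pending/running) invocation. A minimal sketch of that idea
# using Celery's inspection API (an assumption for illustration, not the
# actual Whyis implementation):
#
#     def is_waiting_sketch(service_name):
#         reserved = app.celery.control.inspect().reserved() or {}
#         return any(service_name in str(t.get('kwargs', ''))
#                    for tasks in reserved.values() for t in tasks)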