def handle(self, domain, case_type, data_source_ids, **options):
    """Asynchronously rebuild the given case data sources for a domain.

    Validates that every data source is asynchronous and case-backed,
    recreates each backing table, then queues an AsyncIndicator record per
    case id so the async pipeline repopulates the rows. Non-static configs
    are flagged as rebuilt asynchronously and saved.
    """
    configs = []
    for ds_id in data_source_ids:
        ds_config, _ = get_datasource_config(ds_id, domain)
        # this command only supports async, case-backed data sources
        assert ds_config.asynchronous
        assert ds_config.referenced_doc_type == CASE_DOC_TYPE
        configs.append(ds_config)

    for ds_config in configs:
        adapter = get_indicator_adapter(ds_config, can_handle_laboratory=True)
        adapter.build_table()
        # normally called after rebuilding finishes
        adapter.after_table_build()

    # stash state used by _get_case_ids_to_process()
    self.domain = domain
    self.case_type = case_type

    change_doc = {'doc_type': CASE_DOC_TYPE, 'domain': domain}
    config_ids = [ds_config._id for ds_config in configs]
    for case_id in self._get_case_ids_to_process():
        AsyncIndicator.update_from_kafka_change(
            FakeChange(case_id, change_doc), config_ids)

    for ds_config in configs:
        if ds_config.is_static:
            continue
        ds_config.meta.build.rebuilt_asynchronously = True
        ds_config.save()
def _save_ids(self, ids):
    """Queue async indicator updates for *ids* — one bulk insert when
    ``self.bulk`` is set, otherwise one fake change per id."""
    if self.bulk:
        AsyncIndicator.bulk_creation(ids, self.referenced_type, self.domain, self.config_ids)
        return
    for doc_id in ids:
        AsyncIndicator.update_from_kafka_change(
            FakeChange(doc_id, self.fake_change_doc), self.config_ids)
def _save_ids(self, ids):
    """Record AsyncIndicator rows for every id, via the bulk path when
    ``self.bulk`` is enabled."""
    if not self.bulk:
        for doc_id in ids:
            change = FakeChange(doc_id, self.fake_change_doc)
            AsyncIndicator.update_from_kafka_change(change, self.config_ids)
    else:
        AsyncIndicator.bulk_creation(ids, self.referenced_type, self.domain, self.config_ids)
def handle(self, *args, **options):
    """Re-queue async indicator updates for each hard-coded static data
    source in DATA_SOURCES, one FakeChange per case id."""
    change_doc = {'doc_type': CASE_DOC_TYPE, 'domain': DOMAIN}
    for ds_id in DATA_SOURCES:
        print("processing data source %s" % ds_id)
        data_source, is_static = get_datasource_config(ds_id, DOMAIN)
        # this command is only meant for static data sources
        assert is_static
        adapter = get_indicator_adapter(data_source)
        table = adapter.get_table()
        for case_id in self._get_case_ids_to_process(adapter, table, ds_id):
            AsyncIndicator.update_from_kafka_change(
                FakeChange(case_id, change_doc), [ds_id])
def process_change(self, change):
    """Apply one change-feed event to every UCR table registered for its domain.

    Synchronous tables are written inline; asynchronous ones are deferred by
    creating an AsyncIndicator record. Documents that no longer match a
    table's filter are deleted from that table. Per-domain processing time
    is accumulated in ``self.domain_timing_context``.
    """
    self.bootstrap_if_needed()
    domain = change.metadata.domain
    if not domain or domain not in self.table_adapters_by_domain:
        # if no domain we won't save to any UCR table
        return
    if change.deleted:
        # remove the doc's row from every table for this domain; note we
        # deliberately fall through (no return) — the doc-is-None guard
        # below ends processing for deletions
        adapters = list(self.table_adapters_by_domain[domain])
        for table in adapters:
            table.delete({'_id': change.metadata.document_id})

    async_tables = []
    doc = change.get_document()
    ensure_document_exists(change)
    ensure_matched_revisions(change, doc)
    if doc is None:
        return

    with TimingContext() as timer:
        eval_context = EvaluationContext(doc)
        # make copy to avoid modifying list during iteration
        adapters = list(self.table_adapters_by_domain[domain])
        doc_subtype = change.metadata.document_subtype
        for table in adapters:
            if table.config.filter(doc, eval_context):
                if table.run_asynchronous:
                    # defer: queue the config id for a single AsyncIndicator below
                    async_tables.append(table.config._id)
                else:
                    self._save_doc_to_table(domain, table, doc, eval_context)
                    # reset per-doc iteration state before the next table
                    eval_context.reset_iteration()
            elif (doc_subtype is None
                    or doc_subtype in table.config.get_case_type_or_xmlns_filter()):
                # doc failed the filter but is of a type this table tracks:
                # drop any stale row it may have left behind
                table.delete(doc)

        if async_tables:
            # one AsyncIndicator covering every async config this doc matched
            AsyncIndicator.update_from_kafka_change(change, async_tables)

    self.domain_timing_context.update(**{
        domain: timer.duration
    })
def process_change(self, change):
    """Apply one change-feed event to every UCR table registered for its domain.

    Variant that filters with ``table.config.filter(doc)`` (no evaluation
    context) and gates stale-row deletion on ``deleted_filter``/``doc_exists``.
    Synchronous tables are written inline; asynchronous ones are deferred via
    an AsyncIndicator record. Per-domain processing time is accumulated in
    ``self.domain_timing_context``.
    """
    self.bootstrap_if_needed()
    domain = change.metadata.domain
    if not domain or domain not in self.table_adapters_by_domain:
        # if no domain we won't save to any UCR table
        return
    if change.deleted:
        # remove the doc's row from every table for this domain; note we
        # deliberately fall through (no return) — the doc-is-None guard
        # below ends processing for deletions
        adapters = list(self.table_adapters_by_domain[domain])
        for table in adapters:
            table.delete({'_id': change.metadata.document_id})

    async_tables = []
    doc = change.get_document()
    ensure_document_exists(change)
    ensure_matched_revisions(change, doc)
    if doc is None:
        return

    with TimingContext() as timer:
        eval_context = EvaluationContext(doc)
        # make copy to avoid modifying list during iteration
        adapters = list(self.table_adapters_by_domain[domain])
        for table in adapters:
            if table.config.filter(doc):
                if table.run_asynchronous:
                    # defer: queue the config id for a single AsyncIndicator below
                    async_tables.append(table.config._id)
                else:
                    self._save_doc_to_table(domain, table, doc, eval_context)
                    # reset per-doc iteration state before the next table
                    eval_context.reset_iteration()
            elif table.config.deleted_filter(doc) or table.doc_exists(doc):
                # doc no longer matches the filter but either looks deleted
                # or already has a row: drop the stale row
                table.delete(doc)

        if async_tables:
            # one AsyncIndicator covering every async config this doc matched
            AsyncIndicator.update_from_kafka_change(change, async_tables)

    self.domain_timing_context.update(**{
        domain: timer.duration
    })