def handle(self, domain, case_type, data_source_ids, **options):
    """Queue asynchronous UCR indicator rebuilds for every case of a type.

    For each data source id: verifies the config is asynchronous and
    case-backed, rebuilds its table, then enqueues one ``AsyncIndicator``
    record per case id so the async pillow repopulates the rows.

    :param domain: domain whose cases are (re)processed
    :param case_type: only cases of this type are queued
    :param data_source_ids: ids of the UCR data source configs to rebuild
    """
    configs = []
    for data_source_id in data_source_ids:
        config, _ = get_datasource_config(data_source_id, domain)
        # Only async, case-backed configs may be rebuilt this way;
        # anything else would be silently mis-processed below.
        assert config.asynchronous
        assert config.referenced_doc_type == CASE_DOC_TYPE
        configs.append(config)

    fake_change_doc = {'doc_type': CASE_DOC_TYPE, 'domain': domain}

    doc_store = get_document_store(domain, CASE_DOC_TYPE)
    case_accessor = doc_store.case_accessors
    case_ids = case_accessor.get_case_ids_in_domain(type=case_type)
    num_case_ids = len(case_ids)
    print("inserting %d docs" % num_case_ids)

    for config in configs:
        adapter = get_indicator_adapter(config, can_handle_laboratory=True)
        adapter.build_table()
        # normally called after rebuilding finishes
        adapter.after_table_build()

    config_ids = [config._id for config in configs]
    # start=1 so the progress counter reflects docs actually inserted;
    # the previous `i % 1000` check printed "inserted 0 / N" before any
    # work and never reported the final total.
    for i, case_id in enumerate(case_ids, start=1):
        change = FakeChange(case_id, fake_change_doc)
        AsyncIndicator.update_indicators(change, config_ids)
        if i % 1000 == 0:
            print("inserted %d / %d docs" % (i, num_case_ids))
    print("inserted %d / %d docs" % (num_case_ids, num_case_ids))
def process_change(self, pillow_instance, change):
    """Route one change-feed entry into the matching UCR tables.

    Docs passing a config's filter are saved synchronously or queued as
    async indicators; docs passing the deleted-filter are removed from
    the table. Hard-deletions and domainless changes are ignored.
    """
    self.bootstrap_if_needed()

    # we don't currently support hard-deletions at all.
    # we may want to change this at some later date but seem ok for now.
    # see https://github.com/dimagi/commcare-hq/pull/6944 for rationale
    if change.deleted:
        return

    # if no domain we won't save to any UCR table
    domain = change.metadata.domain
    if not domain:
        return

    doc = change.get_document()
    ensure_document_exists(change)
    ensure_matched_revisions(change)
    if doc is None:
        return

    context = EvaluationContext(doc)
    queued_config_ids = []
    for adapter in self.table_adapters_by_domain[domain]:
        if adapter.config.filter(doc):
            if adapter.run_asynchronous:
                # deferred: the async pillow will process this config
                queued_config_ids.append(adapter.config._id)
            else:
                self._save_doc_to_table(adapter, doc, context)
                context.reset_iteration()
        elif adapter.config.deleted_filter(doc):
            adapter.delete(doc)

    if queued_config_ids:
        AsyncIndicator.update_indicators(change, queued_config_ids)
def handle(self, domain, case_type, data_source_ids, **options):
    """Rebuild asynchronous case data sources for a domain.

    Validates each data source, rebuilds its table, queues an async
    indicator per matching case, and finally records on each dynamic
    config that the rebuild happened asynchronously.
    """
    configs = []
    for ds_id in data_source_ids:
        ds_config, _ = get_datasource_config(ds_id, domain)
        # only async, case-backed configs can be rebuilt this way
        assert ds_config.asynchronous
        assert ds_config.referenced_doc_type == CASE_DOC_TYPE
        configs.append(ds_config)

    fake_change_doc = {'doc_type': CASE_DOC_TYPE, 'domain': domain}

    for ds_config in configs:
        adapter = get_indicator_adapter(ds_config, can_handle_laboratory=True)
        adapter.build_table()
        # normally called after rebuilding finishes
        adapter.after_table_build()

    self.domain = domain
    self.case_type = case_type

    config_ids = [ds_config._id for ds_config in configs]
    for case_id in self._get_case_ids_to_process():
        AsyncIndicator.update_indicators(
            FakeChange(case_id, fake_change_doc), config_ids)

    # static configs are code-defined and cannot be saved
    for ds_config in configs:
        if not ds_config.is_static:
            ds_config.meta.build.rebuilt_asynchronously = True
            ds_config.save()
def handle(self, *args, **options):
    """Queue async indicator rebuilds for each static data source in DOMAIN."""
    fake_change_doc = {'doc_type': CASE_DOC_TYPE, 'domain': DOMAIN}
    for ds_id in DATA_SOURCES:
        print("processing data source %s" % ds_id)
        data_source, is_static = get_datasource_config(ds_id, DOMAIN)
        # this command only handles the hard-coded static sources
        assert is_static
        adapter = get_indicator_adapter(data_source)
        table = adapter.get_table()
        for case_id in self._get_case_ids_to_process(adapter, table, ds_id):
            AsyncIndicator.update_indicators(
                FakeChange(case_id, fake_change_doc), [ds_id])
def process_change(self, pillow_instance, change):
    """Route one change-feed entry into the matching UCR tables.

    Deleted changes remove the doc's rows from every adapter for the
    domain. Otherwise, docs passing a config's filter are saved
    synchronously or queued as async indicators, docs matching the
    deleted-filter (or already present) are removed, and per-domain
    processing time is accumulated in ``self.domain_timing_context``.
    """
    self.bootstrap_if_needed()

    domain = change.metadata.domain
    if not domain or domain not in self.table_adapters_by_domain:
        # if no domain we won't save to any UCR table
        return

    if change.deleted:
        # make copy to avoid modifying list during iteration
        adapters = list(self.table_adapters_by_domain[domain])
        for table in adapters:
            table.delete({'_id': change.metadata.document_id})
        # Bug fix: a hard-deleted change has no document to process —
        # without this return we fell through to get_document() /
        # ensure_document_exists() / ensure_matched_revisions() below.
        return

    async_tables = []
    doc = change.get_document()
    ensure_document_exists(change)
    ensure_matched_revisions(change)
    if doc is None:
        return

    with TimingContext() as timer:
        eval_context = EvaluationContext(doc)
        # make copy to avoid modifying list during iteration
        adapters = list(self.table_adapters_by_domain[domain])
        for table in adapters:
            if table.config.filter(doc):
                if table.run_asynchronous:
                    async_tables.append(table.config._id)
                else:
                    self._save_doc_to_table(domain, table, doc, eval_context)
                    eval_context.reset_iteration()
            elif table.config.deleted_filter(doc) or table.doc_exists(doc):
                table.delete(doc)

        if async_tables:
            AsyncIndicator.update_indicators(change, async_tables)

    self.domain_timing_context.update(**{domain: timer.duration})