def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(StaticDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = StaticDataSourceConfiguration.by_id(indicator_config_id)
    else:
        config = DataSourceConfiguration.get(indicator_config_id)

    if not is_static:
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        # (Static configs are file-backed, so there is nothing to save.)
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)
    adapter.rebuild_table()

    couchdb = _get_db(config.referenced_doc_type)
    relevant_ids = get_doc_ids(config.domain, config.referenced_doc_type, database=couchdb)
    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
        except DataError as e:
            logging.exception("problem saving document {} to table. {}".format(doc["_id"], e))

    if not is_static:
        config.meta.build.finished = True
        config.save()

def handle(self, file_path, *args, **options):
    domain = 'icds-cas'
    data_source_id = StaticDataSourceConfiguration.get_doc_id(domain, PERSON_TABLE_ID)
    config = StaticDataSourceConfiguration.by_id(data_source_id)
    adapter = get_indicator_adapter(config)
    session_helper = connection_manager.get_session_helper(adapter.engine_id)
    person_table_name = get_table_name(domain, PERSON_TABLE_ID)
    awc_location_table_name = get_table_name(domain, AWC_LOCATION_TABLE_ID)
    session = session_helper.Session

    with open(
        os.path.join(os.path.dirname(__file__), 'sql_scripts', 'nos_of_deaths.sql'),
        encoding='utf-8'
    ) as f:
        sql_script = f.read()

    rows = session.execute(
        sql_script % {
            'person_table_name': person_table_name,
            'awc_location_table_name': awc_location_table_name
        }
    )

    with open(file_path, 'w', encoding='utf-8') as file_object:
        writer = csv.writer(file_object)
        writer.writerow([
            'State',
            'District',
            'AWC',
            'Month',
            'Deaths',
        ])
        writer.writerows(rows)

def _init_table(cls, data_source_id):
    datasource_id = StaticDataSourceConfiguration.get_doc_id(cls.domain, data_source_id)
    datasource = StaticDataSourceConfiguration.by_id(datasource_id)
    adapter = get_indicator_adapter(datasource)
    adapter.build_table()
    cls.adapters.append(adapter)
    return adapter

def handle(self, domain, file_path, *args, **options):
    data_source_id = StaticDataSourceConfiguration.get_doc_id(
        domain, PERSON_TABLE_ID)
    config = StaticDataSourceConfiguration.by_id(data_source_id)
    adapter = get_indicator_adapter(config)
    session_helper = connection_manager.get_session_helper(
        adapter.engine_id)
    person_table_name = get_table_name(domain, PERSON_TABLE_ID)
    awc_location_table_name = get_table_name(domain, AWC_LOCATION_TABLE_ID)
    session = session_helper.Session

    with open(os.path.join(os.path.dirname(__file__), 'sql_scripts', 'select_non_aadhar.sql'),
              encoding='utf-8') as f:
        sql_script = f.read()

    rows = session.execute(
        sql_script % {
            'person_table_name': person_table_name,
            'awc_location_table_name': awc_location_table_name
        })

    # csv.writer requires a text-mode file in Python 3
    with open(file_path, 'w', encoding='utf-8') as file_object:
        writer = csv.writer(file_object)
        writer.writerow([
            'Name of Beneficiary',
            'Date of Birth',
            'AWC',
            'Block',
            'District',
            'State'
        ])
        writer.writerows(rows)

def copy_ucr_datasources(self):
    datasource_map = {}
    datasources = get_datasources_for_domain(self.existing_domain)
    for datasource in datasources:
        datasource.meta.build.finished = False
        datasource.meta.build.initiated = None
        old_id, new_id = self.save_couch_copy(datasource, self.new_domain)
        datasource_map[old_id] = new_id
    for static_datasource in StaticDataSourceConfiguration.by_domain(self.existing_domain):
        table_id = static_datasource.get_id.replace(
            StaticDataSourceConfiguration._datasource_id_prefix + self.existing_domain + "-",
            ""
        )
        new_id = StaticDataSourceConfiguration.get_doc_id(self.new_domain, table_id)
        # check that new datasource is in new domain's list of static datasources
        StaticDataSourceConfiguration.by_id(new_id)
        datasource_map[static_datasource.get_id] = new_id
    return datasource_map

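# A minimal, self-contained sketch of the id surgery above. The
# "static-<domain>-<table_id>" format is inferred from the replace() call and
# get_doc_id(); the literal PREFIX below is an assumption standing in for
# StaticDataSourceConfiguration._datasource_id_prefix, and remap_static_id is
# an illustrative helper, not part of the real module.
PREFIX = "static-"

def remap_static_id(old_id, old_domain, new_domain):
    # strip "static-<old_domain>-" to recover the table_id, then rebuild
    table_id = old_id.replace(PREFIX + old_domain + "-", "")
    return PREFIX + new_domain + "-" + table_id

assert remap_static_id("static-src-person_cases", "src", "dst") == "static-dst-person_cases"
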
def test_doc_filter_mapping(self, doc_type, data_source_id):
    config_id = StaticDataSourceConfiguration.get_doc_id(
        DASHBOARD_DOMAIN, data_source_id)
    config = StaticDataSourceConfiguration.by_id(config_id)
    doc_filters = UCR_MAPPING[doc_type][data_source_id]
    self.assertEqual(doc_type, config.referenced_doc_type)
    self.assertEqual(set(doc_filters), set(config.get_case_type_or_xmlns_filter()))

def get_ucr_datasource_config_by_id(indicator_config_id, allow_deleted=False):
    from corehq.apps.userreports.models import (
        id_is_static,
        StaticDataSourceConfiguration,
        DataSourceConfiguration,
    )
    if id_is_static(indicator_config_id):
        return StaticDataSourceConfiguration.by_id(indicator_config_id)
    else:
        doc = DataSourceConfiguration.get_db().get(indicator_config_id)
        return _wrap_data_source_by_doc_type(doc, allow_deleted)

def rebuild_indicators(indicator_config_id):
    is_static = indicator_config_id.startswith(StaticDataSourceConfiguration._datasource_id_prefix)
    if is_static:
        config = StaticDataSourceConfiguration.by_id(indicator_config_id)
        rev = 'static'
    else:
        config = DataSourceConfiguration.get(indicator_config_id)
        rev = config._rev

    if not is_static:
        # Save the start time now in case anything goes wrong. This way we'll be
        # able to see if the rebuild started a long time ago without finishing.
        # (Static configs are file-backed, so there is nothing to save.)
        config.meta.build.initiated = datetime.datetime.utcnow()
        config.save()

    adapter = IndicatorSqlAdapter(config)
    couchdb = _get_db(config.referenced_doc_type)
    client = get_redis_client().client.get_client()
    redis_key = 'ucr_queue-{}:{}'.format(indicator_config_id, rev)

    # Resume from the checkpoint set if a previous run left ids behind;
    # otherwise start fresh and seed the set with every relevant doc id.
    relevant_ids = client.smembers(redis_key)
    if not relevant_ids:
        adapter.rebuild_table()
        relevant_ids = get_doc_ids_in_domain_by_type(config.domain, config.referenced_doc_type,
                                                     database=couchdb)
        if relevant_ids:
            client.sadd(redis_key, *relevant_ids)

    for doc in iter_docs(couchdb, relevant_ids, chunksize=500):
        try:
            # save is a noop if the filter doesn't match
            adapter.save(doc)
            client.srem(redis_key, doc.get('_id'))
        except DataError as e:
            logging.exception('problem saving document {} to table. {}'.format(doc['_id'], e))

    if not is_static:
        client.delete(redis_key)
        config.meta.build.finished = True
        config.save()

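# A minimal, self-contained sketch of the checkpointing idea above, with a
# plain Python set standing in for the redis set (PENDING, resumable_rebuild
# and process_doc are illustrative names, not part of the real module). A
# crashed run leaves its unprocessed ids in the set, so the next invocation
# resumes where it left off instead of starting over.
PENDING = set()

def resumable_rebuild(all_doc_ids, process_doc):
    if not PENDING:
        PENDING.update(all_doc_ids)  # fresh run: seed the checkpoint set
    for doc_id in sorted(PENDING):
        process_doc(doc_id)
        PENDING.discard(doc_id)  # checkpoint: never reprocess on resume
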
def compute_awws_in_vhnd_timeframe(domain):
    """
    Computes AWWs whose vhsnd_date_past_month falls within the last 37 days.

    The result is cached in local memory so that individual reminder tasks
    per AWW/LS don't hit the database each time.
    """
    table = get_table_name(domain, 'static-vhnd_form')
    query = """
        SELECT DISTINCT awc_id
        FROM "{table}"
        WHERE vhsnd_date_past_month > %(37_days_ago)s
    """.format(table=table)
    cutoff = datetime.now(tz=pytz.timezone('Asia/Kolkata')).date()
    query_params = {"37_days_ago": cutoff - timedelta(days=37)}
    datasource_id = StaticDataSourceConfiguration.get_doc_id(domain, 'static-vhnd_form')
    data_source = StaticDataSourceConfiguration.by_id(datasource_id)
    django_db = connection_manager.get_django_db_alias(data_source.engine_id)
    with connections[django_db].cursor() as cursor:
        cursor.execute(query, query_params)
        return {row[0] for row in cursor.fetchall()}

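# A minimal, self-contained illustration of the binding pattern above, using
# sqlite3 as a stand-in for the Django/Postgres connection: the table name is
# an identifier and cannot be a bound parameter, so it is interpolated into
# the SQL string, while the date cutoff is passed as a real query parameter.
# Table and column names here are illustrative only.
import sqlite3
from datetime import date, timedelta

conn = sqlite3.connect(":memory:")
conn.execute('CREATE TABLE "demo_vhnd_form" (awc_id TEXT, vhsnd_date_past_month TEXT)')
conn.execute('INSERT INTO "demo_vhnd_form" VALUES (?, ?)', ("awc1", date.today().isoformat()))

table = "demo_vhnd_form"  # identifier: interpolated, never bound
query = 'SELECT DISTINCT awc_id FROM "{table}" WHERE vhsnd_date_past_month > :cutoff'.format(table=table)
cutoff = (date.today() - timedelta(days=37)).isoformat()
print({row[0] for row in conn.execute(query, {"cutoff": cutoff})})  # -> {'awc1'}
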
def _get_config_by_id(indicator_config_id):
    if id_is_static(indicator_config_id):
        return StaticDataSourceConfiguration.by_id(indicator_config_id)
    else:
        return DataSourceConfiguration.get(indicator_config_id)

def get_asha_table(self, metadata):
    config = StaticDataSourceConfiguration.by_id(
        StaticDataSourceConfiguration.get_doc_id(DOMAIN, TABLE_ID)
    )
    return get_indicator_table(config, custom_metadata=metadata)

def _get_adapter(cls, data_source_id):
    datasource_id = StaticDataSourceConfiguration.get_doc_id(cls.domain, data_source_id)
    datasource = StaticDataSourceConfiguration.by_id(datasource_id)
    return get_indicator_adapter(datasource)

def setup_datasource(cls):
    datasource_id = StaticDataSourceConfiguration.get_doc_id(cls.domain, 'static-vhnd_form')
    data_source = StaticDataSourceConfiguration.by_id(datasource_id)
    cls.adapter = get_indicator_adapter(data_source)
    cls.adapter.rebuild_table()

def _get_config_by_id(indicator_config_id):
    if is_static(indicator_config_id):
        return StaticDataSourceConfiguration.by_id(indicator_config_id)
    else:
        return DataSourceConfiguration.get(indicator_config_id)

def get_asha_table_name(self):
    config = StaticDataSourceConfiguration.by_id(
        StaticDataSourceConfiguration.get_doc_id(DOMAIN, TABLE_ID)
    )
    return get_table_name(config.domain, config.table_id)

def __init__(self, domain):
    self.datasource = StaticDataSourceConfiguration.by_id(
        "static-{}-adherence".format(domain))
    self.adapter = get_indicator_adapter(self.datasource)
    self.es = self.adapter.get_query_object().es