def _rebuild_sql_tables(self, adapters):
    """Group adapters by database engine, diff each table against its
    expected schema, and rebuild or migrate tables as needed.

    Broken data-source specs are reported via soft assert and skipped.
    """
    tables_by_engine = defaultdict(dict)
    for adapter in adapters:
        try:
            tables_by_engine[adapter.engine_id][adapter.get_table().name] = adapter
        except BadSpecError:
            _soft_assert = soft_assert(to='{}@{}'.format('jemord', 'dimagi.com'))
            _soft_assert(False, "Broken data source {}".format(adapter.config.get_id))

    _assert = soft_assert(notify_admins=True)

    def _notify_rebuild(msg, obj):
        _assert(False, msg, obj)

    for engine_id, table_map in tables_by_engine.items():
        table_names = list(table_map)
        engine = connection_manager.get_engine(engine_id)

        # Temporary measure necessary during the process of renaming tables
        # - Configs point to new tables which may just be views and not real tables
        # - The global metadata contains references to the new table names
        legacy_tables = {}
        table_names_for_diff = []
        diff_metadata = sqlalchemy.MetaData()
        with engine.begin() as connection:
            for table_name in table_names:
                sql_adapter = table_map[table_name]
                legacy_table_name = get_legacy_table_name(
                    sql_adapter.config.domain, sql_adapter.config.table_id
                )
                if (not table_exists(connection, table_name)
                        and table_exists(connection, legacy_table_name)):
                    legacy_tables[legacy_table_name] = table_name
                    pillow_logging.debug("[rebuild] Using legacy table: %s", legacy_table_name)
                    # populate metadata with the table schema
                    get_indicator_table(
                        sql_adapter.config,
                        metadata=diff_metadata,
                        override_table_name=legacy_table_name,
                    )
                    table_names_for_diff.append(legacy_table_name)
                else:
                    # populate metadata with the table schema
                    get_indicator_table(sql_adapter.config, metadata=diff_metadata)
                    table_names_for_diff.append(table_name)

        diffs = get_table_diffs(engine, table_names_for_diff, diff_metadata)
        tables_to_act_on = get_tables_rebuild_migrate(diffs, table_names_for_diff)
        for real_table_name in tables_to_act_on.rebuild:
            # map a legacy name back to the config's table so we can look up its adapter
            table_name = legacy_tables.get(real_table_name, real_table_name)
            pillow_logging.debug("[rebuild] Rebuilding table: %s (%s)", real_table_name, table_name)
            sql_adapter = table_map[table_name]
            if not sql_adapter.config.is_static:
                try:
                    self.rebuild_table(sql_adapter)
                except TableRebuildError as e:
                    _notify_rebuild(six.text_type(e), sql_adapter.config.to_json())
            else:
                self.rebuild_table(sql_adapter)

        pillow_logging.debug("[rebuild] Application migrations to tables: %s", tables_to_act_on.migrate)
        migrate_tables(engine, diffs.raw, tables_to_act_on.migrate)
def get_choices(data_source, filter, search_term=None, limit=20):
    """Return up to *limit* distinct values for the filter's column,
    optionally restricted to values containing *search_term*.
    """
    indicator_table = get_indicator_table(data_source)
    column = indicator_table.c[filter.name]
    choice_query = Session.query(column)
    if search_term:
        choice_query = choice_query.filter(column.contains(search_term))
    return [row[0] for row in choice_query.distinct().limit(limit)]
def _get_all_tables():
    """Return the indicator tables, for every data source config, that
    actually exist in the database.
    """
    session = Session()
    try:
        connection = session.connection()
        candidates = [
            sql.get_indicator_table(config)
            for config in userreports_models.DataSourceConfiguration.all()
        ]
        return [table for table in candidates if table.exists(bind=connection)]
    except:
        # deliberately catch everything: roll back, then propagate
        session.rollback()
        raise
    finally:
        session.close()
def preview_data_source(request, domain, config_id):
    """Render a preview (first 20 rows) of a data source's indicator table."""
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    query = Session.query(get_indicator_table(config))
    context = _shared_context(domain)
    context.update({
        'data_source': config,
        'columns': query.column_descriptions,
        'data': query[:20],
    })
    return render(request, "userreports/preview_data.html", context)
def get_choices(data_source, filter, search_term=None, limit=20, page=0):
    """Return one page (*limit* items, page index *page*) of distinct,
    ordered values for the filter's column, optionally restricted to
    values containing *search_term*.  Returns [] on a database
    ProgrammingError (e.g. missing table/column).
    """
    column = get_indicator_table(data_source).c[filter.field]
    choice_query = Session.query(column)
    if search_term:
        choice_query = choice_query.filter(column.contains(search_term))
    page_offset = page * limit
    try:
        paged = (choice_query
                 .distinct()
                 .order_by(column)
                 .limit(limit)
                 .offset(page_offset))
        return [row[0] for row in paged]
    except ProgrammingError:
        return []
def preview_data_source(request, domain, config_id):
    """Render a preview (first 20 rows, plus total row count) of a
    data source's indicator table.
    """
    config, is_static = get_datasource_config_or_404(config_id, domain)
    query = Session.query(get_indicator_table(config))
    context = _shared_context(domain)
    context.update({
        'data_source': config,
        'columns': query.column_descriptions,
        'data': query[:20],
        'total_rows': query.count(),
    })
    return render(request, "userreports/preview_data.html", context)
def _get_all_tables():
    """Return the indicator tables, for every data source config, that
    exist in the database.
    """
    session = Session()
    try:
        connection = session.connection()
        all_tables = [
            sql.get_indicator_table(config)
            for config in userreports_models.DataSourceConfiguration.all()
        ]
        return [
            table
            for table in all_tables
            if table.exists(bind=connection)
        ]
    except:
        # deliberately catch everything: roll back, then propagate
        session.rollback()
        raise
    finally:
        session.close()
def get_choices(data_source, filter, search_term=None, limit=20, page=0):
    """Return one page of distinct, ordered values for a dynamic choice
    list filter's column; [] for any other filter type or on a database
    ProgrammingError.
    """
    # todo: we may want to log this as soon as mobile UCR stops hitting this
    # for misconfigured filters
    if not isinstance(filter, DynamicChoiceListFilter):
        return []

    column = get_indicator_table(data_source).c[filter.field]
    choice_query = Session.query(column)
    if search_term:
        choice_query = choice_query.filter(column.contains(search_term))
    page_offset = page * limit
    try:
        paged = choice_query.distinct().order_by(column).limit(limit).offset(page_offset)
        return [row[0] for row in paged]
    except ProgrammingError:
        return []
def get_choices(data_source, filter, search_term=None, limit=20, page=0):
    """Return one page of distinct, ordered values for a dynamic choice
    list filter's column; [] for any other filter type or on a database
    ProgrammingError.
    """
    # todo: we may want to log this as soon as mobile UCR stops hitting this
    # for misconfigured filters
    if not isinstance(filter, DynamicChoiceListFilter):
        return []

    column = get_indicator_table(data_source).c[filter.field]
    choice_query = Session.query(column)
    if search_term:
        choice_query = choice_query.filter(column.contains(search_term))
    try:
        paged = (choice_query
                 .distinct()
                 .order_by(column)
                 .limit(limit)
                 .offset(page * limit))
        return [row[0] for row in paged]
    except ProgrammingError:
        return []
def export_data_source(request, domain, config_id):
    """Export a data source's full table in the requested format,
    filtered by any extra GET parameters (each must be a column name).
    """
    format = request.GET.get('format', Format.UNZIPPED_CSV)
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    query = Session.query(get_indicator_table(config))
    column_headers = [col['name'] for col in query.column_descriptions]

    # apply filtering if any; every filter key must match a column
    filter_values = {
        key: value
        for key, value in request.GET.items()
        if key != 'format'
    }
    for key in filter_values:
        if key not in column_headers:
            return HttpResponse('Invalid filter parameter: {}'.format(key), status=400)
    query = query.filter_by(**filter_values)

    # build export: header row first, then the data rows
    def get_table(q):
        yield column_headers
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as temp:
        export_from_tables([[config.table_id, get_table(query)]], temp, format)
    return export_response(Temp(path), format, config.display_name)
def get_asha_table(self, metadata):
    """Build the ASHA indicator table from the static data source config,
    attaching it to the given SQLAlchemy *metadata*.
    """
    doc_id = StaticDataSourceConfiguration.get_doc_id(DOMAIN, TABLE_ID)
    config = StaticDataSourceConfiguration.by_id(doc_id)
    return get_indicator_table(config, custom_metadata=metadata)