def test_deactivate_does_not_handle_activation_by_default(self):
    """Deactivating a DataTable must not promote any other one to active."""
    previous = DataTable.new_data_table(self.table)
    previous.activate()

    current = DataTable.new_data_table(self.table)
    current.activate()
    current.deactivate()

    for entry in (previous, current):
        entry.refresh_from_db()

    # Without any flag, deactivate() leaves every DataTable inactive.
    assert previous.active is False
    assert current.active is False
def test_activate_data_table_updates_previous_active_as_inactive(self):
    """Activating a new DataTable flips the previously active one to inactive."""
    previous = DataTable.new_data_table(self.table)
    previous.activate()

    current = DataTable.new_data_table(self.table)
    current.activate()

    previous.refresh_from_db()
    current.refresh_from_db()

    assert previous.active is False
    assert current.active is True
    # No drop flag was passed, so the table model must never be fetched.
    assert self.table.get_model.called is False
def test_activate_data_table_updates_previous_active_as_inactive_and_delete_table_if_flagged(self):
    """With drop_inactive_table=True, activation also drops the old table."""
    previous = DataTable.new_data_table(self.table)
    previous.activate()

    current = DataTable.new_data_table(self.table)
    current.activate(drop_inactive_table=True)

    previous.refresh_from_db()
    current.refresh_from_db()

    assert previous.active is False
    assert current.active is True
    # The model for the now-inactive DataTable was fetched exactly once
    # during activation...
    self.table.get_model.assert_called_once_with(cache=False, data_table=previous)
    # ...and its physical table was dropped (same mock call returns the
    # same Model object, so we can inspect it after the assertion above).
    mocked_model = self.table.get_model(cache=False, data_table=previous)
    mocked_model.delete_table.assert_called_once_with()
def test_deactivate_activates_most_recent_if_flag(self):
    """deactivate(activate_most_recent=True) promotes the newest remaining DataTable."""
    first = DataTable.new_data_table(self.table)
    first.activate()
    second = DataTable.new_data_table(self.table)
    second.activate()
    third = DataTable.new_data_table(self.table)
    third.activate()

    third.deactivate(activate_most_recent=True)

    for entry in (first, second, third):
        entry.refresh_from_db()

    assert first.active is False
    assert third.active is False
    # "second" is the most recent DataTable left, so it becomes active again.
    assert second.active is True
def test_activate_data_table(self):
    """activate() persists the DataTable as active in the database."""
    fresh = DataTable.new_data_table(self.table)
    fresh.activate()
    fresh.refresh_from_db()
    assert fresh.active is True
def setUpTestData(cls):
    """Create dataset/version/table/field fixtures and the backing DB table."""
    cls.validate_config()
    Dataset.objects.filter(slug=cls.DATASET_SLUG).delete()

    cls.dataset = baker.make(Dataset, slug=cls.DATASET_SLUG, show=True)
    cls.version = baker.make(Version, dataset=cls.dataset)
    cls.table = baker.make(
        "core.Table",
        dataset=cls.dataset,
        name=cls.TABLE_NAME,
        version=cls.version,
    )
    cls.data_table = DataTable.new_data_table(cls.table)
    cls.data_table.activate()

    # Deep-copy so the class-level FIELDS_KWARGS dicts stay untouched;
    # "filtering"/"choices" keys are translated into model flags.
    for field_kwargs in deepcopy(cls.FIELDS_KWARGS):
        field_kwargs["frontend_filter"] = bool(field_kwargs.pop("filtering", None))
        field_kwargs["has_choices"] = "choices" in field_kwargs
        baker.make("core.Field", dataset=cls.dataset, table=cls.table, **field_kwargs)

    cls.TableModel = cls.table.get_model(cache=False)
    try:
        cls.TableModel.delete_table()
    except ProgrammingError:  # table does not exist yet
        pass
    finally:
        cls.TableModel.create_table(indexes=False)
def handle(self, *args, **kwargs):
    """Sync metadata models (Dataset/Link/Version/Table/Field) from the
    spreadsheet at settings.DATA_URL, then reconcile each Table's DataTable.

    With --truncate all metadata rows are deleted first; otherwise rows are
    only updated in place (fields removed upstream will linger locally).
    Raises RuntimeError if the spreadsheet cannot be downloaded.
    """
    truncate = kwargs.get("truncate", False)
    # (model, row-transform function) pairs, imported in this order.
    update_functions = [
        (Dataset, dataset_update_data),
        (Link, link_update_data),
        (Version, version_update_data),
        (Table, table_update_data),
        (Field, field_update_data),
    ]
    # Snapshot each table's current DataTable BEFORE any delete/update so it
    # can be re-linked after the Table rows are recreated below.
    data_tables_map = {}
    for table in Table.with_hidden.all():
        key = (table.dataset.slug, table.name)
        data_tables_map[key] = table.data_table
    if truncate:
        print("Deleting metadata to create new objects...")
        for Model, _ in update_functions:
            Model.objects.all().delete()
    else:
        print("WARNING: updating data only. If some field was removed "
              "this change will not be reflected on your database. "
              "Consider using --truncate")
    self.datasets, self.tables, self.versions = {}, {}, {}
    response_data = http_get(settings.DATA_URL, 5)
    if response_data is None:
        raise RuntimeError(f"Cannot download {settings.DATA_URL}")
    # One worksheet per model, named after the model class.
    # NOTE: `table` here is a rows.Table (shadows the ORM loop variable above).
    for Model, update_data_function in update_functions:
        table = rows.import_from_xlsx(io.BytesIO(response_data),
                                      sheet_name=Model.__name__,
                                      workbook_kwargs={"read_only": False})
        self._update_data(Model, table, update_data_function)
    print("Updating DataTable...", end="", flush=True)
    total_created, total_updated, total_skipped = 0, 0, 0
    for table in Table.with_hidden.select_related("dataset"):
        key = (table.dataset.slug, table.name)
        data_table = data_tables_map.get(key, None)
        if data_table is None:
            # create DataTable if new Table or if previous was None
            data_table = DataTable.new_data_table(table, suffix_size=0)
            data_table.activate()
            total_created += 1
        elif data_table.table != table:
            # Tables were truncated so previous DataTables get updated
            total_updated += 1
            data_table.table = table
            data_table.save()
        else:
            # Same table as before, so no need to update
            total_skipped += 1
        if table.filtering_fields:  # avoid None
            table.fields.filter(name__in=table.filtering_fields).update(
                frontend_filter=True)
        if table.search_fields:
            table.fields.filter(name__in=table.search_fields).update(
                searchable=True)
    print(" created: {}, updated: {}, skipped: {}.".format(
        total_created, total_updated, total_skipped))
def test_new_datatable_without_table_name_suffix(self):
    """suffix_size=0 yields a db_table_name with exactly three parts."""
    unsaved = DataTable.new_data_table(self.table, suffix_size=0)
    name_parts = unsaved.db_table_name.split("_")

    assert not unsaved.id  # creates an instance but doesn't save it in the DB
    assert unsaved.table == self.table
    assert unsaved.active is False

    # Name layout: "data" prefix + dataset slug + table name, no suffix.
    assert len(name_parts) == 3
    assert name_parts[0] == "data"
    assert name_parts[1] == "dsslug"
    assert name_parts[2] == "tablename"
def setUpTestData(cls):
    """Create the covid19 dataset fixtures and (re)create the physical cases table.

    Drops any table left over from a previous run, then recreates it
    without indexes (index creation is unnecessary for tests).
    """
    Dataset.objects.filter(slug=cls.DATASET_SLUG).delete()
    cls.covid19 = baker.make(Dataset, slug=cls.DATASET_SLUG)
    cls.cases_table = baker.make(Table, dataset=cls.covid19, name=cls.CASES_TABLE_NAME)
    cls.data_table = DataTable.new_data_table(cls.cases_table)
    cls.data_table.activate()
    for f_kwargs in cls.FIELDS_KWARGS:
        baker.make("core.Field", dataset=cls.covid19, table=cls.cases_table, **f_kwargs)
    cls.Covid19Cases = cls.cases_table.get_model(cache=False)
    try:
        cls.Covid19Cases.delete_table()
    except ProgrammingError:  # Does not exist
        pass
    finally:
        # FIX: the sibling setUpTestData helpers call create_table(indexes=False)
        # on models from the same get_model(cache=False) API; the previous
        # keyword here ("create_indexes") was inconsistent with them and would
        # raise TypeError if the signature only accepts "indexes".
        cls.Covid19Cases.create_table(indexes=False)
def setUpTestData(cls):
    """Build dataset, version, table and field fixtures, then (re)create the table."""
    cls.validate_config()
    Dataset.objects.filter(slug=cls.DATASET_SLUG).delete()

    cls.dataset = baker.make(Dataset, slug=cls.DATASET_SLUG)
    cls.version = baker.make(Version, dataset=cls.dataset)
    cls.table = baker.make(
        "core.Table",
        dataset=cls.dataset,
        name=cls.TABLE_NAME,
        version=cls.version,
    )
    cls.data_table = DataTable.new_data_table(cls.table)
    cls.data_table.activate()

    for field_kwargs in cls.FIELDS_KWARGS:
        baker.make("core.Field", dataset=cls.dataset, table=cls.table, **field_kwargs)

    cls.TableModel = cls.table.get_model(cache=False)
    try:
        cls.TableModel.delete_table()
    except ProgrammingError:  # table does not exist yet
        pass
    finally:
        cls.TableModel.create_table(indexes=False)
def execute(cls, dataset_slug, tablename, filename, **options):
    """Import `filename` into a brand-new data table and swap it in as active.

    Builds an in-memory DataTable, imports the file into its backing table,
    then atomically deactivates the current DataTable (optionally dropping
    its table) and activates the new one. On any error the freshly created
    physical table is dropped and the exception re-raised.
    """
    table = Table.with_hidden.for_dataset(dataset_slug).named(tablename)
    # Alternate-constructor style: build the importer instance here and
    # drive it from this classmethod.
    self = cls(table, **options)
    data_table = DataTable.new_data_table(
        self.table)  # in memory instance, not persisted in the DB
    Model = self.refresh_model_table(data_table)
    if self.flag_import_data:
        self.log(f"Importing data to new table {data_table.db_table_name}")
        self.import_data(filename, Model)
    # Vacuum and concurrent index creation cannot run inside a transaction block
    if self.flag_create_filter_indexes:
        self.create_filter_indexes(Model)
    if self.flag_vacuum:
        self.run_vacuum(Model)
    try:
        with transaction.atomic():
            if self.flag_fill_choices:
                self.fill_choices(Model, data_table)
            # Swap step: deactivate (and optionally drop) the currently
            # active DataTable before promoting the new one.
            if self.table.data_table is not None:
                self.table.data_table.deactivate(
                    drop_table=self.flag_delete_old_table)
            data_table.activate()
            self.table.refresh_from_db()  # To have data_table filled
    except Exception as e:
        self.log(
            f"Deleting import table {data_table.db_table_name} due to an error."
        )
        # The physical table was created outside the transaction, so it must
        # be dropped explicitly before re-raising.
        data_table.delete_data_table()
        raise e
    if self.flag_clear_view_cache:
        self.log("Clearing view and table caches...")
        self.table.invalidate_cache()
        cache.clear()