def test_format_is_passed_to_data_json(self, data_json, *_):
    """The catalog_format configured on the node must reach the data_json call."""
    read_datajson(self.task, whitelist=True)
    self.node.catalog_format = 'xlsx'
    index_catalog(self.node, self.mgmt_task)
    # data_json is a mock injected by the patch decorators; inspect the kwargs
    # of its last call.
    passed_format = data_json.call_args[1]['catalog_format']
    self.assertEqual(passed_format, self.node.catalog_format)
def test_first_time_distribution_indexable(self, *_):
    """A distribution indexed for the first time carries CHANGED == 'True'."""
    read_datajson(self.task, whitelist=True, read_local=True)
    index_catalog(self.node, self.mgmt_task, read_local=True)
    changed_flag = (Distribution.objects.get(identifier='212.1')
                    .enhanced_meta.get(key=meta_keys.CHANGED))
    self.assertEqual(changed_flag.value, 'True')
def test_error_distribution_logs(self, *_):
    """Indexing a catalog with a broken distribution leaves error logs on the task."""
    self.node.catalog_url = os.path.join(
        SAMPLES_DIR, 'distribution_missing_downloadurl.json')
    self.node.save()
    read_datajson(self.task, whitelist=True, read_local=True)
    index_catalog(self.node, self.mgmt_task, read_local=True)
    # Re-fetch the task so we read the logs written during indexing.
    task = ReadDataJsonTask.objects.get(id=self.task.id)
    self.assertGreater(len(task.logs), 10)
def read_datajson(task, read_local=False, force=False):
    """Root indexing task.

    Iterates over every indexable (federated) node and starts the
    indexing task on each of them.

    Args:
        task: the run-tracking task object; its ``status`` is set to
            ``task.RUNNING`` before iteration starts.
        read_local (bool): forwarded to ``index_catalog`` — presumably
            makes it read catalogs from local files instead of remote
            URLs; TODO confirm against index_catalog.
        force (bool): forwarded to ``index_catalog`` — presumably forces
            re-indexing of unchanged data; TODO confirm.
    """
    # NOTE(review): tests in this file call read_datajson(..., whitelist=True),
    # but this signature has no 'whitelist' parameter — confirm whether those
    # tests patch a different implementation or this signature is out of date.
    nodes = Node.objects.filter(indexable=True)
    # NOTE(review): the status change is not persisted here (no task.save());
    # confirm a caller or the task framework saves it.
    task.status = task.RUNNING
    for node in nodes:
        index_catalog(node, task, read_local, force)
def test_index_same_series_different_catalogs(self, *_):
    """Indexing the same catalog twice must not duplicate a series field."""
    for _run in range(2):
        read_datajson(self.task, whitelist=True, read_local=True)
        index_catalog(self.node, self.mgmt_task, read_local=True)
    matches = Field.objects.filter(identifier='212.1_PSCIOS_ERN_0_0_25')
    self.assertEqual(matches.count(), 1)
def test_dont_index_same_distribution_twice(self, *_):
    """Re-indexing unchanged data leaves the distribution marked as not changed."""
    for _run in range(2):
        read_datajson(self.task, whitelist=True, read_local=True)
        index_catalog(self.node, self.mgmt_task, read_local=True)
    distribution = Distribution.objects.get(identifier='212.1')
    # The distribution is marked as non-indexable until its data changes
    changed_flag = distribution.enhanced_meta.get(key=meta_keys.CHANGED)
    self.assertEqual(changed_flag.value, 'False')
def test_custom_validation_options(self, *_):
    """Forcing max_field_title_len to 0 makes validation fail for the distribution."""
    # Every validation will fail with a zero-length title limit.
    validator_config = DistributionValidatorConfig.get_solo()
    validator_config.max_field_title_len = 0
    validator_config.save()
    read_datajson(self.task, whitelist=True)
    index_catalog(self.node, self.mgmt_task)
    self.assertTrue(Distribution.objects.get(identifier='212.1').error)
def test_index_same_distribution_if_data_changed(self, *_):
    """Pointing the node at changed data makes the distribution indexable again."""
    read_datajson(self.task, whitelist=True, read_local=True)
    index_catalog(self.node, self.mgmt_task, read_local=True)
    self.node.catalog_url = os.path.join(
        SAMPLES_DIR, 'full_ts_data_changed.json')
    self.node.save()
    read_datajson(self.task, whitelist=True, read_local=True)
    index_catalog(self.node, self.mgmt_task, read_local=True)
    # The distribution was indexed again, so it is flagged as indexable/changed
    changed_flag = (Distribution.objects.get(identifier='212.1')
                    .enhanced_meta.get(key=meta_keys.CHANGED))
    self.assertEqual(changed_flag.value, 'True')
def test_index_YYYY_distribution(self, *_):
    """Indexing the yearly (YYYY) sample catalog marks its distribution as changed."""
    self.node.catalog_url = os.path.join(SAMPLES_DIR, 'single_data_yyyy.json')
    self.node.save()
    read_datajson(self.task, whitelist=True)
    index_catalog(self.node, self.mgmt_task)
    changed_flag = (Distribution.objects.get(identifier='102.1')
                    .enhanced_meta.get(key=meta_keys.CHANGED))
    self.assertEqual(changed_flag.value, 'True')
def test_significant_figures(self, *_):
    """The significant_figures value from the data.json ends up in field metadata."""
    Catalog.objects.all().delete()
    self.node.catalog_url = os.path.join(SAMPLES_DIR, 'ipc_data.json')
    self.node.save()
    read_datajson(self.task, whitelist=True)
    index_catalog(self.node, self.mgmt_task)
    # 'serie_inflacion' is taken from the sample data.json
    field = Field.objects.get(identifier='serie_inflacion')
    significant_figures = field.enhanced_meta.get(key='significant_figures')
    self.assertEqual(significant_figures.value, '4')