def validate() -> bool:
    """Check local files against schema.

    Collects context validation errors and local-state validation errors,
    groups them by severity, echoes any warnings and errors, and returns
    True only when no ERROR-severity problems were found.

    Returns:
        bool: True when validation succeeded, False when errors were found.
    """
    errors = []
    try:
        validate_context()
    except ValidationError as e:
        errors.append(e)
    errors.extend(validate_local_state())

    errors_by_severity = defaultdict(list)
    for error in errors:
        errors_by_severity[error.severity].append(error)

    # Warnings are reported but do not fail validation.
    if errors_by_severity[ValidationErrorSeverity.WARNING]:
        echo_warnings(errors_by_severity[ValidationErrorSeverity.WARNING])
        echo_info('')

    if errors_by_severity[ValidationErrorSeverity.ERROR]:
        echo_errors(errors_by_severity[ValidationErrorSeverity.ERROR])
        return False

    echo_info("Success: All files are valid.")
    return True
def test_validate_local_state_duplicate_dataset_scoped_field(tmp_path, monkeypatch):
    """Two dataset-scoped field files sharing the same slug yield one error.

    The previously declared ``invalid_field`` fixture was never used here
    (this test only writes two copies of VALID_FIELD_MINIMAL), so the
    dependency has been removed.
    """
    monkeypatch.chdir(tmp_path)
    dataset_dir = tmp_path / 'test_dataset'
    dataset_dir.mkdir()
    (dataset_dir / PresetFileName.DATASET_YAML.value).write_text(yaml.dump(VALID_DATASET))
    (dataset_dir / 'test_model.model.yaml').write_text(
        yaml.dump({
            **VALID_MODEL_MINIMAL,
            'fields': [{
                'field_map': ['field_slug'],
                'data_reference': '"FIELD_SLUG"'
            }]
        }))
    field_dir = Paths.fields_dir(dataset_dir)
    field_dir.mkdir()
    # Both files contain the same payload, so they declare the same slug.
    (field_dir / 'first_field.field.yaml').write_text(yaml.dump(VALID_FIELD_MINIMAL))
    (field_dir / 'duplicate.field.yaml').write_text(yaml.dump(VALID_FIELD_MINIMAL))

    errors = validate_local_state()

    assert len(errors) == 1
def test_validate_local_state_missing_field_file(tmp_path, monkeypatch):
    """A model mapping a slug with no matching field file yields a
    MissingFieldFileError for exactly that slug."""
    monkeypatch.chdir(tmp_path)
    dataset_dir = tmp_path / 'test_dataset'
    dataset_dir.mkdir()
    (dataset_dir / PresetFileName.DATASET_YAML.value).write_text(yaml.dump(VALID_DATASET))
    model_payload = {
        **VALID_MODEL_MINIMAL,
        'fields': [{
            'data_reference': '"COLUMN1"',
            'field_map': ['field_slug', 'field_slug_2']
        }],
    }
    (dataset_dir / 'model1.model.yaml').write_text(yaml.dump(model_payload))
    fields_dir = Paths.fields_dir(dataset_dir)
    fields_dir.mkdir()
    # Only 'field_slug' gets a file; 'field_slug_2' is left missing on purpose.
    (fields_dir / 'field_slug.field.yaml').write_text(yaml.dump(VALID_FIELD_MINIMAL))

    errors = validate_local_state()

    assert errors == [
        MissingFieldFileError(
            field_slug='field_slug_2',
            dataset_slug='test_dataset',
            data_reference='"COLUMN1"',
            identifier=False,
            model_name='model1',
        )
    ]
def test_validate_local_state_invalid_dataset(tmp_path, monkeypatch, dataset):
    """An invalid dataset file produces exactly one validation error."""
    monkeypatch.chdir(tmp_path)
    dataset_dir = tmp_path / 'test_dataset'
    dataset_dir.mkdir()
    (dataset_dir / PresetFileName.DATASET_YAML.value).write_text(yaml.dump(dataset))

    errors = validate_local_state()

    assert len(errors) == 1
def test_validate_local_state_invalid_company_scoped_field(
        tmp_path, monkeypatch, invalid_field):
    """An invalid company-scoped (global) field file yields exactly one error."""
    monkeypatch.chdir(tmp_path)
    # Derive the directory via Paths.fields_dir (as the sibling tests do)
    # instead of hard-coding the 'fields' directory name.
    global_field_dir = Paths.fields_dir(tmp_path)
    global_field_dir.mkdir()
    (global_field_dir / 'a_field.field.yaml').write_text(yaml.dump(invalid_field))

    errors = validate_local_state()

    assert len(errors) == 1
def test_validate_local_state_duplicate_model_names(tmp_path, monkeypatch):
    """Two model files declaring the same model name yield a single error."""
    monkeypatch.chdir(tmp_path)
    dataset_dir = tmp_path / 'test_dataset'
    dataset_dir.mkdir()
    (dataset_dir / PresetFileName.DATASET_YAML.value).write_text(yaml.dump(VALID_DATASET))
    # Identical model payload under two file names -> duplicate model name.
    for model_file in ('test_model-1.model.yaml', 'test_model-2.model.yaml'):
        (dataset_dir / model_file).write_text(yaml.dump(VALID_MODEL_MINIMAL))

    errors = validate_local_state()

    assert len(errors) == 1
def test_validate_local_state_invalid_dataset_scoped_field(
        tmp_path, monkeypatch, invalid_field):
    """An invalid dataset-scoped field file yields exactly one error."""
    monkeypatch.chdir(tmp_path)
    dataset_dir = tmp_path / 'test_dataset'
    dataset_dir.mkdir()
    (dataset_dir / PresetFileName.DATASET_YAML.value).write_text(yaml.dump(VALID_DATASET))
    fields_dir = Paths.fields_dir(dataset_dir)
    fields_dir.mkdir()
    (fields_dir / 'first_field.field.yaml').write_text(yaml.dump(invalid_field))

    errors = validate_local_state()

    assert len(errors) == 1
def test_validate_local_state_valid(tmp_path, monkeypatch):
    """A fully valid layout (dataset, two models, one company-scoped and one
    dataset-scoped field) validates cleanly and loads the expected state."""
    monkeypatch.chdir(tmp_path)
    global_fields_dir = Paths.fields_dir(tmp_path)
    global_fields_dir.mkdir()
    dataset_dir = tmp_path / 'test_dataset'
    dataset_dir.mkdir()
    dataset_fields_dir = Paths.fields_dir(dataset_dir)
    dataset_fields_dir.mkdir()

    (dataset_dir / PresetFileName.DATASET_YAML.value).write_text(yaml.dump(VALID_DATASET))

    model1 = {**VALID_MODEL_MINIMAL, 'model_name': 'sf.db.schema.table1'}
    model2 = {
        **VALID_MODEL_MINIMAL,
        'model_name': 'sf.db.schema.table2',
        'fields': [{
            'field_map': ['field_slug'],
            'data_reference': '"FIELD_SLUG"'
        }],
    }
    (dataset_dir / 'test_model-1.model.yaml').write_text(yaml.dump(model1))
    (dataset_dir / 'test_model-2.model.yaml').write_text(yaml.dump(model2))
    (global_fields_dir / 'company_field.field.yaml').write_text(yaml.dump(VALID_FIELD_FULL))
    (dataset_fields_dir / 'first_field.field.yaml').write_text(yaml.dump(VALID_FIELD_MINIMAL))

    errors = validate_local_state()
    assert not errors

    state = get_state()
    assert len(state.models) == 2
    assert len(state.data_sources) == 1
    assert len(state.fields) == 2
def test_validate_local_state_orphan_field_files(tmp_path, monkeypatch):
    """A field file whose slug is neither mapped by a model nor a
    calculated field is reported as an orphan."""
    monkeypatch.chdir(tmp_path)
    dataset_dir = tmp_path / 'test_dataset'
    dataset_dir.mkdir()
    (dataset_dir / PresetFileName.DATASET_YAML.value).write_text(yaml.dump(VALID_DATASET))
    model_payload = {
        **VALID_MODEL_FULL,
        'fields': [{
            'field_map': ['field_slug'],
            'data_reference': '"FIELD_SLUG"'
        }],
    }
    (dataset_dir / 'test_model.model.yaml').write_text(yaml.dump(model_payload))

    fields_dir = Paths.fields_dir(dataset_dir)
    fields_dir.mkdir()
    # Mapped by the model above -> not an orphan.
    (fields_dir / 'test_field.field.yaml').write_text(yaml.dump(VALID_FIELD_MINIMAL))
    # Has a calculation -> not an orphan even though unmapped.
    (fields_dir / 'calculated_field.field.yaml').write_text(
        yaml.dump({
            **VALID_FIELD_MINIMAL,
            'slug': 'calculated_slug',
            'calculation': '2+2'
        }))
    # Neither mapped nor calculated -> orphan.
    (fields_dir / 'orphan_field.field.yaml').write_text(
        yaml.dump({**VALID_FIELD_MINIMAL, 'slug': 'orphan_slug'}))

    errors = validate_local_state()

    assert errors == [
        OrphanFieldFileError(field_slug='orphan_slug', dataset_slug='test_dataset')
    ]
def invoke(self, ctx: Context):
    """Validate local state before invoking the wrapped command.

    Echoes any warnings, exits the process with status 1 when
    ERROR-severity problems exist, and otherwise preloads taxonomy data
    and TEL metadata before delegating to the parent implementation.

    Args:
        ctx: The click invocation context passed through to ``super().invoke``.
    """
    # Imported lazily (inside the method) to defer the heavy import cost
    # until a command actually runs.
    from panoramic.cli.validate import validate_local_state

    errors_by_severity: defaultdict = defaultdict(list)
    for error in validate_local_state():
        errors_by_severity[error.severity].append(error)

    # Warnings are shown but do not block command execution.
    if errors_by_severity[ValidationErrorSeverity.WARNING]:
        echo_warnings(errors_by_severity[ValidationErrorSeverity.WARNING])
        echo_info('')

    if errors_by_severity[ValidationErrorSeverity.ERROR]:
        echo_errors(errors_by_severity[ValidationErrorSeverity.ERROR])
        sys.exit(1)

    # Preload data for taxonomy and calculate TEL metadata.
    from panoramic.cli.husky.core.taxonomy.getters import Taxonomy

    Taxonomy.preload_taxons_from_state()
    Taxonomy.precalculate_tel_metadata()

    return super().invoke(ctx)