def should_report_invalid_json(validation_results_factory_mock: MagicMock) -> None:
    """Validating a truncated JSON document records a JSON_PARSE failure."""
    # Given a metadata URL whose body is not parseable JSON
    metadata_url = any_s3_url()
    reader = MockJSONURLReader({metadata_url: StringIO(initial_value="{")})
    validator = STACDatasetValidator(reader, validation_results_factory_mock)

    # When validation runs, the parse error propagates to the caller
    with raises(JSONDecodeError):
        validator.validate(metadata_url)

    # Then the failure was reported with the parser's exact message
    expected_calls = [
        call.save(
            metadata_url,
            Check.JSON_PARSE,
            ValidationResult.FAILED,
            details={
                "message": "Expecting property name enclosed in double quotes:"
                " line 1 column 2 (char 1)"
            },
        ),
    ]
    assert validation_results_factory_mock.mock_calls == expected_calls
def should_collect_assets_from_validated_collection_metadata_files(
    subtests: SubTests,
) -> None:
    """Collection validation should gather assets, resolving relative hrefs.

    Consistency fix: use the shared key constants (STAC_ASSETS_KEY,
    STAC_HREF_KEY, STAC_FILE_CHECKSUM_KEY, PROCESSING_ASSET_*_KEY) like the
    parallel item-metadata test, instead of duplicating the raw strings.
    """
    # Given one asset in another directory and one relative link
    base_url = any_s3_url()
    metadata_url = f"{base_url}/{any_safe_filename()}"
    stac_object = deepcopy(MINIMAL_VALID_STAC_COLLECTION_OBJECT)

    first_asset_url = f"{base_url}/{any_safe_filename()}/{any_safe_filename()}"
    first_asset_multihash = any_hex_multihash()
    second_asset_filename = any_safe_filename()
    # The relative href should be resolved against the metadata file's directory
    second_asset_url = f"{base_url}/{second_asset_filename}"
    second_asset_multihash = any_hex_multihash()

    stac_object[STAC_ASSETS_KEY] = {
        any_asset_name(): {
            STAC_HREF_KEY: first_asset_url,
            STAC_FILE_CHECKSUM_KEY: first_asset_multihash,
        },
        any_asset_name(): {
            STAC_HREF_KEY: second_asset_filename,
            STAC_FILE_CHECKSUM_KEY: second_asset_multihash,
        },
    }
    expected_assets = [
        {
            PROCESSING_ASSET_MULTIHASH_KEY: first_asset_multihash,
            PROCESSING_ASSET_URL_KEY: first_asset_url,
        },
        {
            PROCESSING_ASSET_MULTIHASH_KEY: second_asset_multihash,
            PROCESSING_ASSET_URL_KEY: second_asset_url,
        },
    ]
    expected_metadata = [{PROCESSING_ASSET_URL_KEY: metadata_url}]
    url_reader = MockJSONURLReader({metadata_url: stac_object})

    with patch("backend.check_stac_metadata.utils.processing_assets_model_with_meta"):
        validator = STACDatasetValidator(url_reader, MockValidationResultFactory())

        # When
        validator.validate(metadata_url)

    # Then
    with subtests.test():
        assert _sort_assets(validator.dataset_assets) == _sort_assets(expected_assets)
    with subtests.test():
        assert validator.dataset_metadata == expected_metadata
def should_collect_assets_from_validated_item_metadata_files(subtests: SubTests) -> None:
    """Item validation should gather assets, resolving relative hrefs."""
    # Given an item with one absolute-href asset and one relative-href asset
    base_url = any_s3_url()
    metadata_url = f"{base_url}/{any_safe_filename()}"
    item = deepcopy(MINIMAL_VALID_STAC_ITEM_OBJECT)

    absolute_asset_url = f"{base_url}/{any_safe_filename()}"
    absolute_asset_multihash = any_hex_multihash()
    relative_asset_filename = any_safe_filename()
    relative_asset_multihash = any_hex_multihash()

    item[STAC_ASSETS_KEY] = {
        any_asset_name(): {
            STAC_HREF_KEY: absolute_asset_url,
            STAC_FILE_CHECKSUM_KEY: absolute_asset_multihash,
        },
        any_asset_name(): {
            STAC_HREF_KEY: relative_asset_filename,
            STAC_FILE_CHECKSUM_KEY: relative_asset_multihash,
        },
    }
    # The relative href resolves against the metadata file's directory
    expected_assets = [
        {
            PROCESSING_ASSET_MULTIHASH_KEY: absolute_asset_multihash,
            PROCESSING_ASSET_URL_KEY: absolute_asset_url,
        },
        {
            PROCESSING_ASSET_MULTIHASH_KEY: relative_asset_multihash,
            PROCESSING_ASSET_URL_KEY: f"{base_url}/{relative_asset_filename}",
        },
    ]
    expected_metadata = [{PROCESSING_ASSET_URL_KEY: metadata_url}]

    reader = MockJSONURLReader({metadata_url: item})
    with patch("backend.check_stac_metadata.utils.processing_assets_model_with_meta"):
        validator = STACDatasetValidator(reader, MockValidationResultFactory())

        # When
        validator.validate(metadata_url)

    # Then both the assets and the metadata file itself were collected
    with subtests.test():
        assert _sort_assets(validator.dataset_assets) == _sort_assets(expected_assets)
    with subtests.test():
        assert validator.dataset_metadata == expected_metadata