Code example #1
File: test_metadata.py Project: sixy6e/eo-datasets
def test_ortho_find_file():
    path = write_files({'11': {'LO8BPF20141104220030_20141104224617.01': ''}})
    matched = level1._get_file(path, 'LO8BPF20141104220030_20141104224617.01')
    assert matched is not None
    assert matched == path.joinpath('11',
                                    'LO8BPF20141104220030_20141104224617.01')

    # Not found: mandatory
    with pytest.raises(RuntimeError) as e:
        level1._get_file(path, 'LO8BPF20141104220030_20141104224617.03')
    # Not found: not mandatory.
    matched = level1._get_file(path,
                               'LO8BPF20141104220030_20141104224617.03',
                               mandatory=False)
    assert matched is None

    # Real-world error: ancillary folder inside ancillary folder leads to duplicates.
    path = write_files({
        '11': {
            'LO8BPF20141104220030_20141104224617.01': '',
            '11': {
                'LO8BPF20141104220030_20141104224617.01': ''
            }
        }
    })
    matched = level1._get_file(path, 'LO8BPF20141104220030_20141104224617.01')
    assert matched is not None
    assert matched.name == 'LO8BPF20141104220030_20141104224617.01'
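Note: every example on this page relies on a write_files test helper that builds a temporary directory tree from a nested dict (a nested dict becomes a subdirectory, a string becomes file contents, and a list of strings is written out as lines). The project's own implementation isn't shown here; a minimal sketch of such a helper, assuming only tempfile and pathlib, might look like this:

import tempfile
from pathlib import Path


def write_files_sketch(structure):
    """Illustrative stand-in for write_files(): build a temp tree from a nested dict."""
    root = Path(tempfile.mkdtemp())
    _write_tree(root, structure)
    return root


def _write_tree(directory, structure):
    for name, content in structure.items():
        path = directory / name
        if isinstance(content, dict):
            # Nested dict -> subdirectory.
            path.mkdir()
            _write_tree(path, content)
        elif isinstance(content, list):
            # List of strings -> file written line by line.
            path.write_text(''.join(content))
        else:
            # Plain string -> file contents.
            path.write_text(content)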
Code example #2
    def test_parse_from_driver(self):
        d = write_files({
            'NPP.VIIRS.11361.ALICE': {
                'RNSCA-RVIRS_npp_d20140106_t0444094_e0451182_'
                'b11361_c20140106045941812000_nfts_drl.h5': ''
            }
        })

        metadata = drivers.RawDriver().fill_metadata(
            ptype.DatasetMetadata(),
            d.joinpath('NPP.VIIRS.11361.ALICE')
        )

        self.assertEqual(metadata.platform.code, 'NPP')
        self.assertEqual(metadata.instrument.name, 'VIIRS')
        self.assertEqual(metadata.ga_level, 'P00')
        self.assertEqual(metadata.format_.name, 'HDF5')

        # Groundstation should be found from surrounding adsfolder.
        self.assertEqual(
            metadata.acquisition.groundstation,
            ptype.GroundstationMetadata(code='ASA')
        )

        self.assertEqual(metadata.acquisition.aos,
                         datetime.datetime(2014, 1, 6, 4, 44, 9))
        self.assertEqual(metadata.acquisition.los,
                         datetime.datetime(2014, 1, 6, 4, 59, 41))

        self.assertEqual(metadata.acquisition.platform_orbit, 11361)
Code example #3
    def test_multi_copy_hardlink(self):
        # Copy two files.
        test_path = write_files({
            'source_dir': {
                'LC81010782014285LGN00_B6.img': 'test',
                'LC81010782014285LGN00_B4.tif': 'best'
            }
        })
        source_path = test_path.joinpath('source_dir')
        dest_path = test_path.joinpath('dest_dir')

        package.prepare_target_imagery(source_path,
                                       dest_path,
                                       compress_imagery=False,
                                       hard_link=True)

        # Ensure dest files were created.
        self.assertTrue(dest_path.is_dir())
        dest_file = dest_path.joinpath('LC81010782014285LGN00_B6.img')
        self.assertTrue(dest_file.is_file())
        self.assertEqual(dest_file.stat().st_size, 4)
        dest_file = dest_path.joinpath('LC81010782014285LGN00_B4.tif')
        self.assertTrue(dest_file.is_file())
        self.assertEqual(dest_file.stat().st_size, 4)

        # Source should be untouched.
        source_file = source_path.joinpath('LC81010782014285LGN00_B4.tif')
        self.assertTrue(source_file.is_file())
        self.assertEqual(source_file.stat().st_size, 4)

        # Ensure they were hard linked (share the same inode)
        self.assertEqual(source_file.stat().st_ino, dest_file.stat().st_ino)
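Hard links share a single inode, which is why the final assertion compares st_ino on the source and destination files. A minimal standard-library sketch of the same linking step (prepare_target_imagery above is the project's API; the helper below is only illustrative):

import os
from pathlib import Path


def hardlink_into(source_dir, dest_dir):
    """Illustrative: 'copy' every file in source_dir by hard-linking it into dest_dir."""
    dest_dir = Path(dest_dir)
    dest_dir.mkdir(parents=True, exist_ok=True)
    for source_file in Path(source_dir).iterdir():
        # A hard link points at the same inode, so no data is duplicated.
        os.link(source_file, dest_dir / source_file.name)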
Code example #4
    def test_parse_passinfo_ls7_file(self):
        d = write_files({'passinfo': [
            "STATION	ALICE\n",
            "STRING	S1A1C2D3R3\n",
            "PASSID	LANDSAT-7.30486\n",
            "SATELLITE	LANDSAT-7\n",
            "ORBIT	30486\n",
            "LINKID	L\n",
            "BITRATE	150000000\n",
            "SENSOR	ETM\n",
            "START	2005-01-07T02:00:28.000\n",
            "STOP	2005-01-07T02:07:19.000\n",
            "DURATION	528\n",
            "COMMENTS	Bit\n",
            "logfile	acs	acs.log\n",
            "logfile	ref	ref.log\n",
            "logfile	1070030486.eph	1070030486.eph.log\n",
            "logfile	demod	demod.log\n",
            "telemetry	telemetry.data\n",
        ]})
        md = extraction.extract_md(ptype.DatasetMetadata(), d)

        self.assertEqual(md.acquisition.groundstation.code, "ASA")
        self.assertEqual(md.acquisition.platform_orbit, 30486)
        self.assertEqual(md.platform.code, "LANDSAT_7")
        self.assertEqual(md.instrument.name, "ETM")
        self.assertEqual(md.acquisition.aos, datetime.datetime(2005, 1, 7, 2, 0, 28))
        self.assertEqual(md.acquisition.los, datetime.datetime(2005, 1, 7, 2, 7, 19))
Code example #5
def test_create_typical_browse_metadata():
    class TestDriver(drivers.DatasetDriver):
        def browse_image_bands(self, d):
            return '5', '1', '3'

    d = write_files({})
    dataset = browseimage.create_typical_browse_metadata(
        TestDriver(), ptype.DatasetMetadata(), d)

    expected = ptype.DatasetMetadata(
        browse={
            'full':
            ptype.BrowseMetadata(path=d.joinpath('browse.fr.jpg'),
                                 file_type='image/jpg',
                                 red_band='5',
                                 green_band='1',
                                 blue_band='3'),
            'medium':
            ptype.BrowseMetadata(
                path=d.joinpath('browse.jpg'),
                # Default medium size.
                shape=ptype.Point(1024, None),
                file_type='image/jpg',
                red_band='5',
                green_band='1',
                blue_band='3')
        })

    expected.id_, dataset.id_ = None, None
    assert_same(expected, dataset)
Code example #6
def test_extract_md(_run_pdsinfo_exe):
    input_dir = write_files({'P1540064AAAAAAAAAAAAAA14219032341001.PDS': ''})

    # def run_pdsinfo(file_):
    #     assert file_ == input_dir
    #
    #     return

    _run_pdsinfo_exe.return_value = b"""APID 64: count 610338 invalid 0 missing 6255
    first packet: 2014/08/07 03:16:28.750910
    last packet: 2014/08/07 03:21:28.604695
    missing seconds: 2
    day packets: 545223/64311
    night packets: 0/0
    engineering packets: 804/0
    """

    md = pds.extract_md(ptype.DatasetMetadata(), input_dir)

    expected = ptype.DatasetMetadata(
        platform=ptype.PlatformMetadata(code='AQUA'),
        instrument=ptype.InstrumentMetadata(name='MODIS'),
        format_=ptype.FormatMetadata(name='PDS'),
        acquisition=ptype.AcquisitionMetadata(
            aos=datetime.datetime(2014, 8, 7, 3, 16, 28, 750910),
            los=datetime.datetime(2014, 8, 7, 3, 21, 28, 604695)
        ),
        image=ptype.ImageMetadata(
            day_percentage_estimate=100.0
        )
    )

    md.id_, expected.id_ = None, None

    assert_same(expected, md)
Code example #7
    def test_prepare_metadata(self):
        f = write_files({'first.txt': 'test', 'second.txt': 'test2'})

        class FauxDriver(drivers.DatasetDriver):
            def to_band(self, dataset, path):
                numbers = {
                    'first': ptype.BandMetadata(path=path, number='1'),
                    'second': None
                }
                return numbers.get(path.stem)

            def get_ga_label(self, dataset):
                return 'DATASET_ID_1234'

            def get_id(self):
                return 'faux'

        d = ptype.DatasetMetadata()
        d = package.expand_driver_metadata(FauxDriver(), d, list(f.iterdir()))

        self.assert_same(
            d,
            ptype.DatasetMetadata(
                id_=d.id_,
                ga_label='DATASET_ID_1234',
                product_type='faux',
                size_bytes=9,
                image=ptype.ImageMetadata(
                    bands={
                        '1':
                        ptype.BandMetadata(path=f.joinpath('first.txt'),
                                           number='1')
                    })))
Code example #8
    def test_pqa_fill(self):
        input_folder = write_files({
            'pqa.tif': '',
            'pq_metadata.yaml': dedent(
                """
                    algorithm_information:
                        software_version: 1.0
                        pq_doi:
                    ancillary: {}
                    tests_run: {}
                """)
        })

        dataset = ptype.DatasetMetadata(
            id_=_EXPECTED_PQA.id_,
            lineage=ptype.LineageMetadata(
                source_datasets={
                    'nbar': _EXPECTED_NBAR,
                    'level1': test_ls8.EXPECTED_OUT
                }
            )
        )

        received_dataset = drivers.PqaDriver().fill_metadata(dataset, input_folder)

        self.assert_same(_EXPECTED_PQA, received_dataset)
Code example #9
    def test_files_with_usgs_id(self):
        # No MDF directory, only files. Can we still extract enough info?
        d = write_files({
            '446.000.2013254233714881.ASA': 'a',
            '447.000.2013254233711482.ASA': 'a',
            'LC80880750762013254ASA00_IDF.xml': 'a',
            'LC80880750762013254ASA00_MD5.txt': 'a',
        })

        def _test_mdf_output(metadata):
            self.assertEqual(metadata.usgs.interval_id,
                             'LC80880750762013254ASA00')
            self.assertEqual(metadata.platform.code, 'LANDSAT_8')
            self.assertEqual(metadata.instrument.name, 'OLI_TIRS')
            self.assertEqual(metadata.format_.name, 'MD')
            self.assertEqual(metadata.ga_level, 'P00')
            self.assertEqual(metadata.image.satellite_ref_point_start,
                             ptype.Point(88, 75))
            self.assertEqual(metadata.image.satellite_ref_point_end,
                             ptype.Point(88, 76))
            self.assertEqual(metadata.acquisition.groundstation.code, 'ASA')
            # No longer bother with vague center date.
            # self.assertEqual(metadata.extent.center_dt, datetime.date(2013, 9, 11))
            self.assertEqual(
                metadata.acquisition.aos,
                datetime.datetime(2013, 9, 11, 23, 36, 11, 482000))
            self.assertEqual(
                metadata.acquisition.los,
                datetime.datetime(2013, 9, 11, 23, 37, 14, 881000))

        metadata = mdf.extract_md(ptype.DatasetMetadata(), d)
        _test_mdf_output(metadata)
Code example #10
    def test_parse_passinfo_file(self):
        d = write_files({
            'subdirectory': {

            },
            'passinfo': [
                "STATION ALICE\n",
                "STRING  S1A1C1D1R1\n",
                "PASSID  LANDSAT-5.110912\n",
                "SATELLITE   LANDSAT-5\n",
                "ORBIT   110912\n",
                "LINKID  X\n",
                "BITRATE 84900000\n",
                "SENSOR  TM\n",
                "START   2005-01-06T23:32:14\n",
                "STOP    2005-01-06T23:39:12\n",
                "DURATION    423\n",
                "COMMENTS    Bit\n",
                "logfile acs acs.log\n",
                "logfile ref ref.log\n",
                "logfile demod   demod.log\n",
                "logfile 1050110912.eph  1050110912.eph.log\n",
                "telemetry   telemetry.data\n"
            ]})
        # It should find a passinfo file one directory up.
        md = extraction.extract_md(ptype.DatasetMetadata(), d.joinpath('subdirectory'))

        self.assertEqual(md.acquisition.groundstation.code, "ASA")
        self.assertEqual(md.acquisition.platform_orbit, 110912)
        self.assertEqual(md.platform.code, "LANDSAT_5")
        self.assertEqual(md.instrument.name, "TM")
        self.assertEqual(md.acquisition.aos, datetime.datetime(2005, 1, 6, 23, 32, 14))
        self.assertEqual(md.acquisition.los, datetime.datetime(2005, 1, 6, 23, 39, 12))
Code example #11
    def test_expand_metadata_without_bands(self):
        # We have imagery files but no bands (eg: RAW data)

        f = write_files({'first.txt': 'test', 'second.txt': 'test2'})

        class FauxDriver(drivers.DatasetDriver):
            def to_band(self, dataset, path):
                return None

            def get_ga_label(self, dataset):
                return 'DATASET_ID_1234'

            def get_id(self):
                return 'faux'

        d = ptype.DatasetMetadata()
        # Iterator is falsey, but returns files. This triggered a bug previously.
        # noinspection PyTypeChecker
        d = package.expand_driver_metadata(FauxDriver(), d, f.iterdir())

        self.assert_same(
            d,
            ptype.DatasetMetadata(id_=d.id_,
                                  ga_label='DATASET_ID_1234',
                                  product_type='faux',
                                  size_bytes=9))
Code example #12
def test_find_any_metatadata_suffix():
    files = write_files({
        'directory_dataset': {
            'file1.txt': '',
            'file2.txt': '',
            'agdc-metadata.json.gz': ''
        },
        'file_dataset.tif.ga-md.yaml': '',
        'dataset_metadata.YAML': '',
        'no_metadata.tif': '',
    })

    path = _find_any_metadata_suffix(files.joinpath('dataset_metadata'))
    assert path.absolute() == files.joinpath(
        'dataset_metadata.YAML').absolute()

    path = _find_any_metadata_suffix(
        files.joinpath('directory_dataset', 'agdc-metadata'))
    assert path.absolute() == files.joinpath(
        'directory_dataset', 'agdc-metadata.json.gz').absolute()

    path = _find_any_metadata_suffix(files.joinpath('file_dataset.tif.ga-md'))
    assert path.absolute() == files.joinpath(
        'file_dataset.tif.ga-md.yaml').absolute()

    # Returns none if none exist
    path = _find_any_metadata_suffix(files.joinpath('no_metadata'))
    assert path is None
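The helper under test resolves a path prefix to whichever metadata file actually exists next to it, regardless of suffix. A rough sketch of that lookup, assuming a hypothetical whitelist of metadata suffixes (the real _find_any_metadata_suffix may differ):

from pathlib import Path

# Hypothetical suffix whitelist, inferred only from the files used in this test.
_METADATA_SUFFIXES = ('.yaml', '.yaml.gz', '.json', '.json.gz')


def find_any_metadata_suffix_sketch(prefix):
    """Illustrative: return the sibling file named <prefix> plus a known metadata suffix."""
    prefix = Path(prefix)
    for candidate in prefix.parent.glob(prefix.name + '.*'):
        remainder = candidate.name[len(prefix.name):]
        if remainder.lower() in _METADATA_SUFFIXES:
            return candidate
    # Nothing matched: mirror the behaviour asserted at the end of the test.
    return None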
Code example #13
    def test_unusual_filenames(self):
        # Some passinfo filenames have orbit numbers appended

        d = write_files({'passinfo.24775': [
            "STATION TERSS\n",
            "STRING  S1A1C1D1R1\n",
            "PASSID  LANDSAT-5.110912\n",
            "SATELLITE   LANDSAT-5\n",
            "ORBIT   110912\n",
            "LINKID  X\n",
            "BITRATE 84900000\n",
            "SENSOR  TM\n",
            "START   2005-01-06T23:32:14\n",
            "STOP    2005-01-06T23:39:12\n",
            "DURATION    423\n",
            "COMMENTS    Bit\n",
            "logfile acs acs.log\n",
            "logfile ref ref.log\n",
            "logfile demod   demod.log\n",
            "logfile 1050110912.eph  1050110912.eph.log\n",
            "telemetry   telemetry.data\n"
        ]})
        md = extraction.extract_md(ptype.DatasetMetadata(), d)

        # Station "TERSS" is hobart.
        self.assertEqual(md.acquisition.groundstation.code, "HOA")
        self.assertEqual(md.acquisition.platform_orbit, 110912)
        self.assertEqual(md.platform.code, "LANDSAT_5")
        self.assertEqual(md.instrument.name, "TM")
        self.assertEqual(md.acquisition.aos, datetime.datetime(2005, 1, 6, 23, 32, 14))
        self.assertEqual(md.acquisition.los, datetime.datetime(2005, 1, 6, 23, 39, 12))
Code example #14
def test_find_metadata_path():
    files = write_files({
        'directory_dataset': {
            'file1.txt': '',
            'file2.txt': '',
            'ga-metadata.yaml.gz': ''
        },
        'file_dataset.tif': '',
        'file_dataset.tif.agdc-md.yaml': '',
        'dataset_metadata.yaml': '',
        'no_metadata.tif': '',
    })

    # A metadata file can be specified directly.
    path = find_metadata_path(files.joinpath('dataset_metadata.yaml'))
    assert path.absolute() == files.joinpath(
        'dataset_metadata.yaml').absolute()

    # A dataset directory will have an internal 'agdc-metadata' file.
    path = find_metadata_path(files.joinpath('directory_dataset'))
    assert path.absolute() == files.joinpath('directory_dataset',
                                             'ga-metadata.yaml.gz').absolute()

    # Other files can have a sibling file ending in 'agdc-md.yaml'
    path = find_metadata_path(files.joinpath('file_dataset.tif'))
    assert path.absolute() == files.joinpath(
        'file_dataset.tif.agdc-md.yaml').absolute()

    # No metadata to find.
    assert find_metadata_path(files.joinpath('no_metadata.tif')) is None

    # Dataset itself doesn't exist.
    assert find_metadata_path(files.joinpath('missing-dataset.tif')) is None
Code example #15
 def test_pqa_translate_path(self):
     input_folder = write_files({
         'pqa.tif': '',
         'process.log': '',
         'passinfo': ''
     })
     self.assertEqual(
         input_folder.joinpath('LS8_OLITIRS_PQ_P55_GAPQ01-032_101_078_20141012.tif'),
         drivers.PqaDriver().translate_path(
             _EXPECTED_PQA,
             input_folder.joinpath('pqa.tif')
         )
     )
     # Other files unchanged.
     self.assertEqual(
         input_folder.joinpath('process.log'),
         drivers.PqaDriver().translate_path(
             _EXPECTED_PQA,
             input_folder.joinpath('process.log')
         )
     )
     self.assertEqual(
         input_folder.joinpath('passinfo'),
         drivers.PqaDriver().translate_path(
             _EXPECTED_PQA,
             input_folder.joinpath('passinfo')
         )
     )
Code example #16
def test_find_any_metatadata_suffix():
    files = write_files({
        "directory_dataset": {
            "file1.txt": "",
            "file2.txt": "",
            "agdc-metadata.json.gz": "",
        },
        "file_dataset.tif.ga-md.yaml": "",
        "dataset_metadata.YAML": "",
        "no_metadata.tif": "",
    })

    path = _find_any_metadata_suffix(files.joinpath("dataset_metadata"))
    assert path.absolute() == files.joinpath(
        "dataset_metadata.YAML").absolute()

    path = _find_any_metadata_suffix(
        files.joinpath("directory_dataset", "agdc-metadata"))
    assert (path.absolute() == files.joinpath(
        "directory_dataset", "agdc-metadata.json.gz").absolute())

    path = _find_any_metadata_suffix(files.joinpath("file_dataset.tif.ga-md"))
    assert path.absolute() == files.joinpath(
        "file_dataset.tif.ga-md.yaml").absolute()

    # Returns none if none exist
    path = _find_any_metadata_suffix(files.joinpath("no_metadata"))
    assert path is None
Code example #17
File: test_rccfile.py Project: sixy6e/eo-datasets
 def test_parse_rcc_with_subdir(self):
     d = write_files({
         'RCCDATA': {
             'L7EB2013259012832ASN213I00.data': 'nothing',
             'L7EB2013259012832ASN213Q00.data': 'nothing'
         }
     })
     self._check_rcc_parse(d)
Code example #18
def test_find_terra_pds_file():
    d = write_files({
        'P0420064AAAAAAAAAAAAAA14202013839000.PDS': '',
        'P0420064AAAAAAAAAAAAAA14202013839001.PDS': '',
    })

    found = pds.find_pds_file(d)
    # It should find the '0064' APID with '01' suffix.
    expected = d.joinpath('P0420064AAAAAAAAAAAAAA14202013839001.PDS')
    assert expected == found
Code example #19
def test_find_metadata_path():
    files = write_files({
        "directory_dataset": {
            "file1.txt": "",
            "file2.txt": "",
            "ga-metadata.yaml.gz": "",
        },
        "file_dataset.tif": "",
        "file_dataset.agdc-md.yaml": "",
        "dataset_metadata.yaml": "",
        "no_metadata.tif": "",
        # Newer eo3-style names.
        # Sibling
        "newer-dataset.tar": "",
        "newer-dataset.odc-metadata.yaml": "",
        # Directory
        "newer_directory_dataset": {
            "newer-dataset.txt": "",
            "newer-dataset-b2.txt": "",
            "newer-dataset.odc-metadata.yaml.gz": "",
        },
    })

    # A metadata file can be specified directly.
    path = find_metadata_path(files.joinpath("dataset_metadata.yaml"))
    assert path.absolute() == files.joinpath(
        "dataset_metadata.yaml").absolute()

    # An older dataset directory will have an internal 'agdc-metadata' file.
    path = find_metadata_path(files.joinpath("directory_dataset"))
    assert (path.absolute() == files.joinpath(
        "directory_dataset", "ga-metadata.yaml.gz").absolute())

    # Other older files can have a sibling file ending in 'agdc-md.yaml'
    path = find_metadata_path(files.joinpath("file_dataset.tif"))
    assert path.absolute() == files.joinpath(
        "file_dataset.agdc-md.yaml").absolute()

    # No metadata to find.
    assert find_metadata_path(files.joinpath("no_metadata.tif")) is None

    # Dataset itself doesn't exist.
    assert find_metadata_path(files.joinpath("missing-dataset.tif")) is None

    # EO3-style dataset metadata
    path = find_metadata_path(files.joinpath("newer-dataset.tar"))
    assert (path.absolute() == files.joinpath(
        "newer-dataset.odc-metadata.yaml").absolute())

    # EO3-style dataset in a directory
    path = find_metadata_path(files.joinpath("newer_directory_dataset"))
    assert (path.absolute() == files.joinpath(
        "newer_directory_dataset",
        "newer-dataset.odc-metadata.yaml.gz").absolute())
Code example #20
    def test_unchanged_without_id(self):
        # No MDF directory, only files. Don't try to extract information from the files.
        d = write_files({
            '446.000.2013254233714881.ASA': 'a',
            '447.000.2013254233711482.ASA': 'a',
        })

        id_ = uuid.uuid1()
        date = datetime.datetime.utcnow()
        metadata = mdf.extract_md(
            ptype.DatasetMetadata(id_=id_, creation_dt=date), d)
        # Should be unchanged: No USGS ID found.
        assert_same(metadata, ptype.DatasetMetadata(id_=id_, creation_dt=date))
Code example #21
File: test_verify.py Project: jeremyh/eo-datasets
    def test_checksum(self):
        d = write_files({"test1.txt": "test"})

        test_file = d.joinpath("test1.txt")

        sha1_hash = verify.calculate_file_hash(test_file)
        self.assertEqual(sha1_hash, "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3")

        md5_hash = verify.calculate_file_hash(test_file, hash_fn=hashlib.md5)
        self.assertEqual(md5_hash, "098f6bcd4621d373cade4e832627b4f6")

        crc32_checksum = verify.calculate_file_crc32(test_file)
        self.assertEqual(crc32_checksum, "d87f7e0c")
Code example #22
    def test_checksum(self):  # noqa: T003
        d = write_files({"test1.txt": "test"})

        test_file = d.joinpath("test1.txt")

        sha1_hash = verify.calculate_file_hash(test_file)
        assert sha1_hash == "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"

        md5_hash = verify.calculate_file_hash(test_file, hash_fn=hashlib.md5)
        assert md5_hash == "098f6bcd4621d373cade4e832627b4f6"

        crc32_checksum = verify.calculate_file_crc32(test_file)
        assert crc32_checksum == "d87f7e0c"
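Both versions of this checksum test (unittest-style in example #21 and pytest-style in example #22) expect the hashes of the four-byte file contents 'test'. A streaming sketch of how such helpers are commonly written with hashlib and zlib (illustrative only; the project's verify module may differ):

import hashlib
import zlib
from pathlib import Path


def calculate_file_hash_sketch(path, hash_fn=hashlib.sha1):
    """Hash a file in chunks so large imagery never has to fit in memory."""
    digest = hash_fn()
    with Path(path).open('rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            digest.update(chunk)
    return digest.hexdigest()


def calculate_file_crc32_sketch(path):
    """CRC32 of a file, formatted as the lowercase hex string the tests expect."""
    crc = 0
    with Path(path).open('rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            crc = zlib.crc32(chunk, crc)
    return '%08x' % (crc & 0xFFFFFFFF)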
Code example #23
 def test_eods_fill_metadata(self):
     dataset_folder = "LS8_OLI_TIRS_NBAR_P54_GANBAR01-015_101_078_20141012"
     bandname = '10'
     bandfile = dataset_folder + '_B' + bandname + '.tif'
     input_folder = write_files({
         dataset_folder: {
             'metadata.xml': """<EODS_DATASET>
             <ACQUISITIONINFORMATION>
             <EVENT>
             <AOS>20141012T03:23:36</AOS>
             <LOS>20141012T03:29:10</LOS>
             </EVENT>
             </ACQUISITIONINFORMATION>
             <EXEXTENT>
             <TEMPORALEXTENTFROM>20141012 00:55:54</TEMPORALEXTENTFROM>
             <TEMPORALEXTENTTO>20141012 00:56:18</TEMPORALEXTENTTO>
             </EXEXTENT>
             </EODS_DATASET>""",
             'scene01': {
                 bandfile: ''
             }
         }
     })
     expected = ptype.DatasetMetadata(
         id_=_EXPECTED_NBAR.id_,
         ga_label=dataset_folder,
         ga_level='P54',
         product_type='EODS_NBAR',
         platform=ptype.PlatformMetadata(code='LANDSAT_8'),
         instrument=ptype.InstrumentMetadata(name='OLI_TIRS'),
         format_=ptype.FormatMetadata(name='GeoTiff'),
         acquisition=ptype.AcquisitionMetadata(aos=datetime.datetime(2014, 10, 12, 3, 23, 36),
                                               los=datetime.datetime(2014, 10, 12, 3, 29, 10),
                                               groundstation=ptype.GroundstationMetadata(code='LGS')),
         extent=ptype.ExtentMetadata(
             center_dt=datetime.datetime(2014, 10, 12, 0, 56, 6),
             from_dt=datetime.datetime(2014, 10, 12, 0, 55, 54),
             to_dt=datetime.datetime(2014, 10, 12, 0, 56, 18)
         ),
         image=ptype.ImageMetadata(satellite_ref_point_start=ptype.Point(x=101, y=78),
                                   satellite_ref_point_end=ptype.Point(x=101, y=78),
                                   bands={bandname: ptype.BandMetadata(number=bandname,
                                                                       path=Path(input_folder, dataset_folder,
                                                                                 'scene01', bandfile))})
     )
     dataset = ptype.DatasetMetadata(
         id_=_EXPECTED_NBAR.id_
     )
     received = drivers.EODSDriver().fill_metadata(dataset, input_folder.joinpath(dataset_folder))
     self.assert_same(expected, received)
Code example #24
    def test_extract_groundstation(self):
        d = write_files({'LANDSAT-7.ALICE': {'dataset': {}}})

        d = d.joinpath('LANDSAT-7.ALICE')
        md = adsfolder.extract_md(ptype.DatasetMetadata(), d)
        self.assertIsNotNone(md.acquisition)
        self.assertEqual(md.acquisition.groundstation,
                         ptype.GroundstationMetadata(code='ASA'))

        d = d.joinpath('dataset')
        md = adsfolder.extract_md(ptype.DatasetMetadata(), d)
        self.assertIsNotNone(md.acquisition)
        self.assertEqual(md.acquisition.groundstation,
                         ptype.GroundstationMetadata(code='ASA'))
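The groundstation here is inferred purely from the surrounding folder name ('LANDSAT-7.ALICE'), and it still works one directory deeper. A heavily hedged sketch of that upward search; the station-to-code mapping below lists only the pairs that appear in these tests and is otherwise hypothetical:

from pathlib import Path

# Hypothetical mapping; only the codes exercised on this page are included.
_STATION_CODES = {'ALICE': 'ASA', 'TERSS': 'HOA'}


def groundstation_from_ads_folder(path):
    """Illustrative: walk upward looking for a folder named '<platform>.<station>'."""
    path = Path(path)
    for directory in (path, *path.parents):
        station = directory.name.split('.')[-1]
        if station in _STATION_CODES:
            return _STATION_CODES[station]
    return None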
Code example #25
    def test_no_directory(self):
        d = write_files({
            'L7EB2013259012832ASN213I00.data': 'nothing',
            'L7EB2013259012832ASN213Q00.data': 'nothing'
        })

        self.assertEqual((None, set()), mdf.find_mdf_files(d))

        # Make sure that metadata is not modified when no MDF is found.
        starting_md = ptype.DatasetMetadata()
        id_ = starting_md.id_
        creation_dt = starting_md.creation_dt
        expected_dt = ptype.DatasetMetadata(id_=id_, creation_dt=creation_dt)

        output = mdf.extract_md(starting_md, d)
        self.assertEqual(expected_dt, output)
Code example #26
    def test_pqa_to_band(self):
        input_folder = write_files({
            'pqa.tif': '',
            'process.log': '',
            'passinfo': '',
        })

        # Creates a single band.
        self.assertEqual(
            ptype.BandMetadata(path=input_folder.joinpath('pqa.tif'), number='pqa'),
            drivers.PqaDriver().to_band(None, input_folder.joinpath('pqa.tif'))
        )

        # Other files should not be bands.
        self.assertIsNone(drivers.PqaDriver().to_band(None, input_folder.joinpath('process.log')))
        self.assertIsNone(drivers.PqaDriver().to_band(None, input_folder.joinpath('passinfo')))
Code example #27
File: test_metadata.py Project: sixy6e/eo-datasets
    def test_expand_band(self):
        # Create fake image file.
        image_file = write_files({'LC81010782014285LGN00_B6.TIF': 'test'})
        image_file = image_file.joinpath('LC81010782014285LGN00_B6.TIF')

        md = metadata._expand_band_information(
            'LANDSAT_8', 'OLI_TIRS', BandMetadata(path=image_file, number='6'))

        expected = BandMetadata(
            path=Path(image_file),
            type_=u'reflective',
            label=u'Short-wave Infrared 1',
            number='6',
            # MD5 of image contents ('test')
            cell_size=25.0)
        assert_same(md, expected)
Code example #28
File: test_rccfile.py Project: sixy6e/eo-datasets
    def test_parse_l5_rcc_filenames(self):
        d = write_files({'L5TB2003339014237ASA111I00.data': 'nothing'})
        md = rccfile.extract_md(ptype.DatasetMetadata(), d)

        self.assertEqual(md.platform.code, 'LANDSAT_5')
        self.assertEqual(md.instrument.name, 'TM')
        self.assertEqual(md.acquisition.groundstation.code, 'ASA')
        self.assertEqual(md.format_.name, 'RCC')
        self.assertEqual(md.usgs.interval_id, 'L5TB2003339014237ASA111')

        self.assertEqual(md.acquisition.aos,
                         datetime.datetime(2003, 12, 5, 1, 42, 37))

        # From the old onreceipt codebase,
        # Default L5 LOS is: AOS + (I.data fileSize) * 8.0 / 84900000.0
        self.assertEqual(md.acquisition.los,
                         datetime.datetime(2003, 12, 5, 1, 42, 37))
Code example #29
    def test_nbar_fill_metadata(self):
        input_folder = write_files({
            'reflectance_brdf_1.tif': '',
            'reflectance_brdf_2.tif': '',
            'reflectance_brdf_3.tif': '',
            'reflectance_brdf_4.tif': '',
            'reflectance_brdf_5.tif': '',
            'reflectance_brdf_6.tif': '',
            'reflectance_brdf_7.tif': '',
            'reflectance_terrain_1.tif': '',
            'reflectance_terrain_2.tif': '',
            'reflectance_terrain_3.tif': '',
            'reflectance_terrain_4.tif': '',
            'reflectance_terrain_5.tif': '',
            'reflectance_terrain_6.tif': '',
            'reflectance_terrain_7.tif': '',
            'reflectance_lambertian_1.tif': '',
            'reflectance_lambertian_2.tif': '',
            'reflectance_lambertian_3.tif': '',
            'reflectance_lambertian_4.tif': '',
            'reflectance_lambertian_5.tif': '',
            'reflectance_lambertian_6.tif': '',
            'reflectance_lambertian_7.tif': '',
            'nbar_metadata.yaml': dedent(
                """
                    algorithm_information:
                        software_version: 1.0
                        algorithm_version: 1.0
                        arg25_doi:
                        nbar_doi:
                        nbar_terrain_corrected_doi:
                    ancillary_data: {}
                """),
        })
        dataset = ptype.DatasetMetadata(
            id_=_EXPECTED_NBAR.id_,
            lineage=ptype.LineageMetadata(
                source_datasets={
                    'level1': test_ls8.EXPECTED_OUT
                }
            )
        )
        received_dataset = drivers.NbarDriver('terrain').fill_metadata(dataset, input_folder)

        self.assert_same(_EXPECTED_NBAR, received_dataset)
Code example #30
 def _get_raw_ls8(self):
     d = write_files({
         'LANDSAT-8.11308': {
             'LC81160740842015089ASA00': {
                 '480.000.2015089022657325.ASA': '',
                 '481.000.2015089022653346.ASA': '',
                 'LC81160740742015089ASA00_IDF.xml': '',
                 'LC81160740742015089ASA00_MD5.txt': '',
                 'file.list': '',
             }
         }
     })
     raw_driver = drivers.RawDriver()
     metadata = raw_driver.fill_metadata(
         ptype.DatasetMetadata(),
         d.joinpath('LANDSAT-8.11308', 'LC81160740842015089ASA00')
     )
     return metadata, raw_driver