Example #1
    def setUp(self):
        # create mapping
        Mapping(
            timerange=DateTimeTZRange(
                UTCDateTime(2002, 1, 1).datetime,
                UTCDateTime(2016, 1, 2).datetime),
            network="TA", station="A25A", location="", channel="BHE",
            new_network="XX", new_station="YY", new_location="00",
            new_channel="ZZZ").save()
        ContinuousTrace.update_all_mappings()
        # index waveform files
        [process_file(f) for f in FILES]
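A hedged follow-up sketch, not taken from the original suite: assuming FILES includes the TA.A25A.mseed file used in Example #5 (so the 2002-2016 mapping actually applies to its BHE trace), a test built on this setUp could verify the renamed SEED id directly in the index. The test name is illustrative.

    def test_mapping_applied_during_indexing(self):
        # Hypothetical check mirroring Example #5: the mapping created in
        # setUp renames TA.A25A..BHE to XX.YY.00.ZZZ while the files are
        # being indexed.
        ids = [tr.seed_id for tr in ContinuousTrace.objects.all()]
        self.assertIn("XX.YY.00.ZZZ", ids)
        self.assertNotIn("TA.A25A..BHE", ids)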
Example #2
    def setUp(self):
        # index waveform files
        [process_file(f) for f in FILES]
        User.objects.get_or_create(username='******',
                                   password=make_password('random'))

        credentials = base64.b64encode(b'random:random')
        self.valid_auth_headers = {
            'HTTP_AUTHORIZATION': 'Basic ' + credentials.decode("ISO-8859-1")
        }

        credentials = base64.b64encode(b'random:random2')
        self.invalid_auth_headers = {
            'HTTP_AUTHORIZATION': 'Basic ' + credentials.decode("ISO-8859-1")
        }
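A hedged usage sketch: Django's test client passes extra keyword arguments into the request environ, so the prepared HTTP_AUTHORIZATION entries can be unpacked straight into a call. The endpoint and query parameters are borrowed from Example #4 purely for illustration; the assertion only checks that valid credentials are not rejected.

    def test_query_with_basic_auth_header(self):
        # Illustrative only - the point is the **headers unpacking.
        params = {"network": "XX", "station": "YY", "location": "",
                  "channel": "EHZ", "start": "2010-01-01",
                  "end": "2010-01-02"}
        response = self.client.get('/fdsnws/dataselect/1/query', params,
                                   **self.valid_auth_headers)
        # Valid credentials must never trigger an authentication error.
        self.assertNotIn(response.status_code, (401, 403))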
Example #3
    def setUp(self):
        # index waveform files
        [process_file(f) for f in FILES]
Example #4
    def test_dataselect_query_for_slightly_messed_up_files(self):
        # Create a file with interleaved traces and some noise traces
        # in-between
        traces = [
            Trace(data=np.ones(10), header={"starttime": UTCDateTime(0)}),
            Trace(data=np.ones(10), header={"starttime": UTCDateTime(5)}),
            Trace(data=np.ones(10), header={"starttime": UTCDateTime(10)}),
            Trace(data=np.ones(10), header={"starttime": UTCDateTime(-5)}),
            Trace(data=np.ones(8), header={"starttime": UTCDateTime(2)}),
            Trace(data=np.ones(12), header={"starttime": UTCDateTime(0)}),
            Trace(data=np.ones(10), header={"starttime": UTCDateTime(1)})
        ]
        for tr in traces:
            tr.stats.network = "XX"
            tr.stats.station = "YY"
            tr.stats.channel = "EHZ"

        # Add a couple more random traces in between. These should just be
        # ignored for the query.
        traces.insert(0, Trace(
            data=np.ones(5),
            header={"starttime": UTCDateTime(2), "network": "DD",
                    "station": "BLA", "channel": "BHZ", "location": "10"}))
        traces.insert(4, Trace(
            data=np.ones(17),
            header={"starttime": UTCDateTime(-2), "network": "DD",
                    "station": "BLA", "channel": "BHZ", "location": "10"}))
        traces.append(Trace(
            data=np.ones(2),
            header={"starttime": UTCDateTime(5), "network": "AB",
                    "station": "CD", "channel": "XYZ", "location": ""}))

        # Write to a temporary file.
        fd, filename = tempfile.mkstemp()
        os.close(fd)  # close the fd from mkstemp(); only the path is needed
        try:
            Stream(traces=traces).write(filename, format="mseed")
            process_file(filename)

            params = {
                "network": "XX",
                "station": "YY",
                "location": "",
                "channel": "EHZ"}

            def _test_number_of_returned_traces(start, end, expected):
                params["start"] = str(UTCDateTime(start))
                params["end"] = str(UTCDateTime(end))
                response = self.client.get('/fdsnws/dataselect/1/query',
                                           params)
                if expected == 0:
                    self.assertEqual(response.status_code, 204)
                    return

                self.assertEqual(response.status_code, 200)
                self.assertTrue('OK' in response.reason_phrase)
                st = read(io.BytesIO(response.getvalue()))
                self.assertEqual(len(st), expected)

            _test_number_of_returned_traces(1, 10, 7)
            _test_number_of_returned_traces(1, 9, 6)
            _test_number_of_returned_traces(-10, 0, 3)
            _test_number_of_returned_traces(-10, -1, 1)
            _test_number_of_returned_traces(-10, -4, 1)
            _test_number_of_returned_traces(-10, -5, 1)
            _test_number_of_returned_traces(-10, -5.5, 0)
            _test_number_of_returned_traces(10, 15, 4)
            _test_number_of_returned_traces(4, 5, 6)
            _test_number_of_returned_traces(-6, -5, 1)
            _test_number_of_returned_traces(-6, -1, 1)
            _test_number_of_returned_traces(-6, 0, 3)
            _test_number_of_returned_traces(10, 15, 4)
            _test_number_of_returned_traces(11, 15, 3)
            _test_number_of_returned_traces(12, 15, 2)
            _test_number_of_returned_traces(14, 17, 2)
            _test_number_of_returned_traces(15, 17, 1)
            _test_number_of_returned_traces(18, 20, 1)
            _test_number_of_returned_traces(19, 20, 1)
            _test_number_of_returned_traces(19.5, 20, 0)

        finally:
            os.remove(filename)
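Where the expected counts come from, as a hedged aside: with ObsPy's default 1 Hz sampling rate an n-sample trace spans [starttime, starttime + n - 1] seconds, and the numbers above are reproduced exactly if one assumes the endpoint returns every indexed XX.YY..EHZ trace whose span touches the requested window, endpoints included. A small standalone helper makes that assumption explicit:

def _expected_count(traces, start, end):
    # Hedged sketch, not part of the original test: count the XX.YY..EHZ
    # traces whose [starttime, endtime] span overlaps the requested window,
    # endpoints included.
    start, end = UTCDateTime(start), UTCDateTime(end)
    return sum(1 for tr in traces
               if tr.stats.network == "XX"
               and tr.stats.starttime <= end
               and tr.stats.endtime >= start)

For example, _expected_count(traces, 1, 10) gives 7 and _expected_count(traces, 19.5, 20) gives 0, matching the first and last calls above.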
Example #5
    def test_waveform_mappings(self):
        def delete_indexed_waveforms():
            models.File.objects.all().delete()
            assert models.ContinuousTrace.objects.count() == 0

        # Let's use an example file from the fdsnws test suite.
        filename = os.path.join(os.path.dirname(os.path.dirname(self.path)),
                                "fdsnws", "tests", "data", "TA.A25A.mseed")
        assert os.path.exists(filename)

        expected_ids = [
            "TA.A25A..BHE",
            "TA.A25A..BHN",
            "TA.A25A..BHZ",
            "TA.A25A..LCE",
            "TA.A25A..LCQ",
            "TA.A25A..LHE",
            "TA.A25A..LHN",
            "TA.A25A..LHZ",
            "TA.A25A..UHE",
            "TA.A25A..UHN",
            "TA.A25A..UHZ",
            "TA.A25A..VCO",
            "TA.A25A..VEA",
            "TA.A25A..VEC",
            "TA.A25A..VEP",
            "TA.A25A..VHE",
            "TA.A25A..VHN",
            "TA.A25A..VHZ",
            "TA.A25A..VKI",
            "TA.A25A..VM1",
            "TA.A25A..VM2",
            "TA.A25A..VM3",
        ]

        # Process that file.
        process_file(filename)
        # Make sure it all got stored in the database.
        ids = sorted(
            [_i.seed_id for _i in models.ContinuousTrace.objects.all()])
        self.assertEqual(expected_ids, ids)
        delete_indexed_waveforms()

        # Now create a mapping that does not actually do anything, because
        # it is out of temporal range.
        models.Mapping(
            timerange=DateTimeTZRange(
                obspy.UTCDateTime(2000, 1, 1).datetime,
                obspy.UTCDateTime(2000, 1, 2).datetime),
            network="TA",
            station="A25A",
            location="",
            channel="BHE",
            new_network="XX",
            new_station="YY",
            new_location="00",
            new_channel="ZZZ",
        ).save()

        # Nothing should have changed.
        process_file(filename)
        ids = sorted(
            [_i.seed_id for _i in models.ContinuousTrace.objects.all()])
        self.assertEqual(expected_ids, ids)
        delete_indexed_waveforms()

        # Now create a mapping that does something.
        models.Mapping(
            timerange=DateTimeTZRange(
                obspy.UTCDateTime(2002, 1, 1).datetime,
                obspy.UTCDateTime(2016, 1, 2).datetime),
            network="TA",
            station="A25A",
            location="",
            channel="BHE",
            new_network="XX",
            new_station="YY",
            new_location="00",
            new_channel="ZZZ",
        ).save()

        # This time the mapping should have been applied during indexing.
        process_file(filename)
        ids = sorted(
            [_i.seed_id for _i in models.ContinuousTrace.objects.all()])
        self.assertEqual(len(ids), len(expected_ids))
        self.assertIn("XX.YY.00.ZZZ", ids)
        self.assertNotIn("TA.A25A..BHE", ids)

        # Now remove the mappings and test the reindexing!
        models.Mapping.objects.all().delete()

        # Without reindex, nothing changed.
        ids = sorted(
            [_i.seed_id for _i in models.ContinuousTrace.objects.all()])
        self.assertEqual(len(ids), len(expected_ids))
        self.assertIn("XX.YY.00.ZZZ", ids)
        self.assertNotIn("TA.A25A..BHE", ids)

        # Now reindex - it should have changed.
        models.ContinuousTrace.update_all_mappings()
        ids = sorted(
            [_i.seed_id for _i in models.ContinuousTrace.objects.all()])
        self.assertEqual(expected_ids, ids)
        delete_indexed_waveforms()

        # One last thing - we'll add a mapping that would otherwise match,
        # but whose path regex does not match the file - it will thus not
        # be applied.
        models.Mapping(
            timerange=DateTimeTZRange(
                obspy.UTCDateTime(2002, 1, 1).datetime,
                obspy.UTCDateTime(2016, 1, 2).datetime),
            network="TA",
            station="A25A",
            location="",
            channel="BHE",
            new_network="XX",
            new_station="YY",
            new_location="00",
            new_channel="ZZZ",
            # Does not match the path - thus the mapping is ignored.
            full_path_regex="^/very/random/path/.*gse2$",
        ).save()

        process_file(filename)
        ids = sorted(
            [_i.seed_id for _i in models.ContinuousTrace.objects.all()])
        self.assertEqual(expected_ids, ids)
        delete_indexed_waveforms()
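For contrast, a hedged variant of that last step, not present in the original test: a full_path_regex that does match the indexed file (here, any path ending in .mseed) should behave like the unrestricted mapping used earlier in the test, so the renamed id shows up again. The regex is illustrative.

        models.Mapping(
            timerange=DateTimeTZRange(
                obspy.UTCDateTime(2002, 1, 1).datetime,
                obspy.UTCDateTime(2016, 1, 2).datetime),
            network="TA", station="A25A", location="", channel="BHE",
            new_network="XX", new_station="YY", new_location="00",
            new_channel="ZZZ",
            # Matches the file's path, so this mapping should be applied.
            full_path_regex=r".*\.mseed$").save()

        process_file(filename)
        ids = sorted(
            [_i.seed_id for _i in models.ContinuousTrace.objects.all()])
        self.assertIn("XX.YY.00.ZZZ", ids)
        self.assertNotIn("TA.A25A..BHE", ids)
        delete_indexed_waveforms()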