    def test_get_surrounding_mis(self):
        request = PlanTripRequestType()
        calculator = PlanTripCalculator(self._planner, request, Queue.Queue())
        date = date_type(year=2010, month=4, day=8)

        position = LocationStructure(Longitude=1, Latitude=1)
        self.assertEqual(calculator._get_surrounding_mis(position, date), {1})

        position = LocationStructure(Longitude=3, Latitude=3)
        # MIS 3 excluded (not geographically compliant)
        self.assertEqual(calculator._get_surrounding_mis(position, date), set())

        position = LocationStructure(Longitude=7, Latitude=33)
        self.assertEqual(calculator._get_surrounding_mis(position, date), set())

        position = LocationStructure(Longitude=0, Latitude=0)
        # MIS 3 excluded (not geographically compliant)
        self.assertEqual(calculator._get_surrounding_mis(position, date), {1, 2})

        position = LocationStructure(Longitude=1.4, Latitude=1.4)
        self.assertEqual(calculator._get_surrounding_mis(position, date), {2})

        position = LocationStructure(Longitude=2, Latitude=2)
        # Outside of any MIS shape
        self.assertEqual(calculator._get_surrounding_mis(position, date), set())
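The assertions above suggest that _get_surrounding_mis returns the ids of every MIS whose coverage shape contains the requested position and which is usable on the given date, with MIS 3 filtered out for not being geographically compliant. A rough stand-in sketch of that logic using shapely, with attribute names that are assumptions (the real calculator works on its own data structures and may not use shapely at all):

from shapely.geometry import Point

def get_surrounding_mis(position, date, mis_list):
    # mis_list items are assumed to expose id, shape (a shapely geometry),
    # start_date, end_date and geographic_compliance attributes.
    point = Point(position.Longitude, position.Latitude)
    result = set()
    for mis in mis_list:
        if not mis.geographic_compliance:
            continue
        if mis.start_date <= date <= mis.end_date and point.within(mis.shape):
            result.add(mis.id)
    return result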
Example No. 2
def _date_arg(s):
    match = date_pattern.match(s)
    if match is None:
        raise RuntimeError(s)
    return date_type(
        year=int(match.group('year')),
        month=int(match.group('month')),
        day=int(match.group('day')), )
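_date_arg depends on a module-level date_pattern regular expression that is not shown here. A self-contained sketch of what that pattern and a call might look like; the exact pattern used by the source project is an assumption:

import re
from datetime import date as date_type

# Hypothetical pattern; the source module defines its own date_pattern.
date_pattern = re.compile(r'(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})$')

def _date_arg(s):
    match = date_pattern.match(s)
    if match is None:
        raise RuntimeError(s)
    return date_type(year=int(match.group('year')),
                     month=int(match.group('month')),
                     day=int(match.group('day')))

print(_date_arg('2021-03-15'))  # datetime.date(2021, 3, 15)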
Example No. 3
def search_events(date: date_type, timezone: int = 0) -> [Event]:
    start_time = get_timescale().utc(date.year, date.month, date.day, -timezone)
    end_time = get_timescale().utc(date.year, date.month, date.day + 1, -timezone)

    try:
        return sorted(flatten_list([
            _search_oppositions(start_time, end_time, timezone),
            _search_conjunction(start_time, end_time, timezone),
            _search_maximal_elongations(start_time, end_time, timezone)
        ]), key=lambda event: event.start_time)
    except EphemerisRangeError as error:
        start_date = translate_to_timezone(error.start_time.utc_datetime(), timezone)
        end_date = translate_to_timezone(error.end_time.utc_datetime(), timezone)

        start_date = date_type(start_date.year, start_date.month, start_date.day)
        end_date = date_type(end_date.year, end_date.month, end_date.day)

        raise OutOfRangeDateError(start_date, end_date)
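A caller of search_events would typically catch OutOfRangeDateError to report the supported ephemeris range. A minimal usage sketch, assuming the function and exception are importable from the module above and that the exception keeps its two dates as start_date and end_date attributes:

from datetime import date as date_type

try:
    events = search_events(date_type(2021, 3, 15), timezone=2)
    for event in events:
        print(event.start_time, event)
except OutOfRangeDateError as error:
    # The attribute names below are assumptions based on the constructor call above.
    print('Date outside of ephemeris range:', error.start_date, '-', error.end_date)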
Example No. 4
def _date_arg(s):
    match = date_pattern.match(s)
    if match is None:
        raise RuntimeError(s)
    return date_type(
        year=int(match.group('year')),
        month=int(match.group('month')),
        day=int(match.group('day')),
    )
Example No. 5
def gdal_release_date(date=False): 
    """
    Returns the release date in a string format, e.g, "2007/06/27".
    If the date keyword argument is set to True, a Python datetime object
    will be returned instead.
    """
    from datetime import date as date_type
    rel = _version_info('RELEASE_DATE')
    yy, mm, dd = map(int, (rel[0:4], rel[4:6], rel[6:8]))
    d = date_type(yy, mm, dd)
    if date: return d
    else: return d.strftime('%Y/%m/%d')
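The date keyword only switches the return type. A usage sketch, assuming GDAL and its Python bindings are installed (the printed values are illustrative):

print(gdal_release_date())           # e.g. '2007/06/27'
print(gdal_release_date(date=True))  # a datetime.date, printed as e.g. 2007-06-27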
Example No. 7
def parse(date,
          receiver_name,
          constellations,
          input_folder='',
          output_folder=''):
    # Convert the date string (YYYYMMDD) into a datetime.date object.
    date = date_type(int(date[:4]), int(date[4:6]), int(date[6:]))

    # Define PRNs.
    PRNs_to_parse = []
    if 'G' in constellations:
        PRNs_to_parse.extend(['G' + str(i) for i in range(1, 33)])
    if 'R' in constellations:
        PRNs_to_parse.extend(['R' + str(i) for i in range(1, 25)])
    if 'E' in constellations:
        PRNs_to_parse.extend(['E' + str(i) for i in range(1, 31)])

    # Define parsing parameters.
    parameters = ParseSettings()
    parameters.binary_dir = input_folder + filesep + receiver_name
    parameters.CSV_dir = output_folder + filesep + receiver_name + filesep + 'CSV_FILES'
    parameters.receiver_name = receiver_name
    parameters.date_range = False
    parameters.start_date = [date.year, date.month, date.day]
    parameters.end_date = [date.year, date.month, date.day]
    parameters.reduced = True
    parameters.raw = False
    parameters.PRNs_to_parse = PRNs_to_parse
    parameters.set_time_range = False

    # Binary dir and file.
    binary_file = str(date2gpswd(date)[0]) + '_' + str(
        date2gpswd(date)[1]) + '_00_' + receiver_name + '.GPS'
    binary_dir = parameters.binary_dir + filesep + str(
        date2gpswd(date)[0]) + filesep + binary_file

    # If the binary file exists, parse.
    if os.path.exists(binary_dir):
        # Print status to command window.
        print(
            "\n---------------------------------------------------------------------"
        )
        print(
            "PART 1: EISA PARSING. Receiver: {}. Date: ({}, {}, {})\n".format(
                receiver_name, date.year, date.month, date.day))

        # Parse.
        run_parsing(parameters, cwd + filesep + "Parsing")
    else:
        print("The following binary file does not exist: {}. Receiver: {}.".
              format(binary_dir, receiver_name))
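date2gpswd is not shown above; it evidently converts a calendar date into a (GPS week number, day-of-week) pair counted from the GPS epoch of 1980-01-06, which is what the binary file names are built from. A simplified stand-in sketch (the project's real implementation may differ):

from datetime import date as date_type

GPS_EPOCH = date_type(1980, 1, 6)  # start of GPS week 0 (a Sunday)

def date2gpswd(d):
    # Simplified stand-in: whole GPS weeks and day-of-week since the GPS epoch.
    days = (d - GPS_EPOCH).days
    return days // 7, days % 7

print(date2gpswd(date_type(2021, 3, 15)))  # (2149, 1)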
Example No. 8
def ML_event_detection(date,
                       receiver_name,
                       constellations,
                       threshold,
                       location,
                       input_folder='',
                       output_folder=''):
    # Determine the date, CSV dir, and GRAPHS dir.
    year, month, day = date[:4], date[4:6], date[6:]

    binary_dir = input_folder + filesep + receiver_name
    CSV_dir = output_folder + filesep + receiver_name + filesep + 'CSV_FILES'
    graphs_dir = output_folder + filesep + receiver_name + filesep + 'GRAPHS'

    # Define PRNs.
    PRNs_to_process = []
    if 'G' in constellations:
        PRNs_to_process.extend(['G' + str(i) for i in range(1, 33)])
    if 'R' in constellations:
        PRNs_to_process.extend(['R' + str(i) for i in range(1, 25)])
    if 'E' in constellations:
        PRNs_to_process.extend(['E' + str(i) for i in range(1, 31)])

    # Continue only if the path containing the csv files of the corresponding date exists.
    if os.path.exists(CSV_dir + filesep + date):

        # Print status to command window.
        print(
            "\n---------------------------------------------------------------------"
        )
        print("PART 3: EISA ML MODULE. Receiver: {}. Date: ({}, {}, {})\n".
              format(receiver_name, year, month, day))

        # Create S4 Neural Network, and load the weights.
        S4_model = NNModel('S4')
        S4_model.load_weights('ML' + filesep + 's4_scintillation.h5')

        # Create sigma Neural Network, and load the weights.
        sigma_model = NNModel('sigma')
        sigma_model.load_weights('ML' + filesep + 'sigma_scintillation.h5')

        # Identify binary file name.
        day = date_type(int(year), int(month), int(day))
        week_number, week_day_number = int(date2gpswd(day)[0]), int(
            date2gpswd(day)[1])
        binary_file_name = "{}_{}_00_{}.GPS".format(str(week_number),
                                                    str(week_day_number),
                                                    receiver_name)
        binary_file = binary_dir + filesep + str(
            week_number) + filesep + binary_file_name

        # Ionospheric scintillation detection.
        for prn in PRNs_to_process:
            # Files.
            input_file = CSV_dir + filesep + date + filesep + 'REDOBS_{}_{}.csv'.format(
                prn, date)
            output_file = CSV_dir + filesep + date + filesep + 'ML_Detection' + filesep + 'REDOBS_{}_{}_ML'.format(
                prn, date)

            # Convert date to list format (which is the input format for the run_ML function).
            date_list = [date[:4], date[4:6], date[6:]]

            # Directory to the new (ML) plots.
            graphs_output_dir = graphs_dir + filesep + date + filesep + 'ML'

            # Check that the input file exists.
            if not os.path.isfile(input_file):
                continue

            # ML detection. Use try/except so that the loop keeps running even if an error is raised.
            try:

                # ML Detection: S4 scintillation.
                print(
                    '\n ----- Ionospheric Scintillation Event Detection (ML Module). Date: {}. PRN: {}. -----'
                    .format(date, prn))
                output_files_s4 = run_ML(input_file,
                                         output_file,
                                         S4_model,
                                         prn,
                                         date_list,
                                         scintillation_type='S4',
                                         save_plot=True,
                                         save_plot_dir=graphs_output_dir +
                                         filesep + 'Amplitude',
                                         threshold=threshold,
                                         location=location)

                # ML Detection: sigma scintillation.
                output_files_sigma = run_ML(input_file,
                                            output_file,
                                            sigma_model,
                                            prn,
                                            date_list,
                                            scintillation_type='sigma',
                                            save_plot=True,
                                            save_plot_dir=graphs_output_dir +
                                            filesep + 'Phase',
                                            threshold=threshold,
                                            location=location)

                # Raw data period processing.
                output_files = output_files_s4 + output_files_sigma
                if output_files:
                    success, msg = parse_file(binary_file,
                                              CSV_dir,
                                              os.getcwd() + filesep +
                                              'parsing', [prn],
                                              week_number,
                                              week_day_number,
                                              reduced_or_raw='raw',
                                              print_header=False)
                    if not success:
                        print(msg)
                # If no output files were generated, print a message.
                else:
                    print(
                        'No scintillation event was found for prn {}.'.format(
                            prn))

            # Print exception if an error is raised.
            except Exception as e:
                print(
                    'Could not process prn {}. The following exception occurred: {}.'
                    .format(prn, e))
Example No. 9
    def test_mis_dates_overlap(self):
        mis1_id = self.add_mis(name="mis1")
        mis2_id = self.add_mis(name="mis2")
        mis1 = self.db_session.query(metabase.Mis).get(mis1_id)
        mis2 = self.db_session.query(metabase.Mis).get(mis2_id)

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2009, month=6, day=4)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2012, month=6, day=5)
        mis2.end_date = date_type(year=2013, month=2, day=3)
        self.db_session.commit()
        self.assertFalse(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2013, month=4, day=1)
        mis2.end_date = date_type(year=2016, month=2, day=3)
        self.db_session.commit()
        self.assertFalse(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2008, month=1, day=2)
        mis1.end_date = date_type(year=2010, month=6, day=4)
        mis2.start_date = date_type(year=2009, month=6, day=4)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2008, month=1, day=2)
        mis1.end_date = date_type(year=2010, month=6, day=4)
        mis2.start_date = date_type(year=2010, month=6, day=4)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2008, month=1, day=2)
        mis1.end_date = date_type(year=2010, month=6, day=4)
        mis2.start_date = date_type(year=2010, month=4, day=3)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2013, month=4, day=1)
        mis2.end_date = None
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))
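These expectations are consistent with a standard interval-overlap test, plus the extra rule that a missing end date makes the pair count as overlapping (see the final assertion). A minimal sketch of a predicate that satisfies every assertion above (the real mis_dates_overlap works against the database and may be implemented differently):

from datetime import date as date_type

def dates_overlap(start1, end1, start2, end2):
    # An undefined end date makes the periods count as overlapping,
    # matching the final assertion of the test above.
    if end1 is None or end2 is None:
        return True
    return start1 <= end2 and start2 <= end1

print(dates_overlap(date_type(2010, 6, 4), date_type(2012, 6, 4),
                    date_type(2009, 6, 4), date_type(2012, 2, 3)))  # True
print(dates_overlap(date_type(2010, 6, 4), date_type(2012, 6, 4),
                    date_type(2012, 6, 5), date_type(2013, 2, 3)))  # False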
Example No. 10
    def test_compute_mis_connection(self):
        mis1_id = self.add_mis("mis1")
        mis2_id = self.add_mis("mis2")
        mis3_id = self.add_mis("mis3")
        mis4_id = self.add_mis("mis4")

        stop1_id = self.add_stop(code="code1", mis_id=mis1_id)
        stop2_id = self.add_stop(code="code2", mis_id=mis1_id)
        stop3_id = self.add_stop(mis_id=mis2_id)
        stop4_id = self.add_stop(mis_id=mis3_id)
        stop5_id = self.add_stop(code="code1", mis_id=mis4_id)
        stop6_id = self.add_stop(code="code2", mis_id=mis4_id)
        stop7_id = self.add_stop(code="code3", mis_id=mis4_id)

        transfers = [(stop1_id, stop3_id), (stop1_id, stop4_id),
                     (stop1_id, stop5_id), (stop2_id, stop6_id),
                     (stop2_id, stop5_id), (stop3_id, stop4_id),
                     (stop3_id, stop7_id)]

        mis_connections = []
        for t in transfers:
            mis_connections.append(tuple([self.db_session.query(metabase.Stop.mis_id).filter_by(id=t[0]).one()[0],
                                          self.db_session.query(metabase.Stop.mis_id).filter_by(id=t[1]).one()[0]]))
        mis_connections = set(mis_connections)  # Remove duplicates

        for s1, s2 in transfers:
            self.add_transfer(s1, s2)

        _compute_mis_connections(self.db_session)
        db_mis_connections = self.db_session.query(metabase.MisConnection.mis1_id,
                                                   metabase.MisConnection.mis2_id) \
            .all()

        # Check that there are no duplicates in the mis_connection table.
        self.assertEqual(len(db_mis_connections), len(set(db_mis_connections)),
                         "Found duplicates in the mis_connection table")
        self.assertEqual(set(db_mis_connections), mis_connections,
                         "MisConnection calculation results not as expected")

        self.db_session.query(metabase.Transfer).filter_by(stop1_id=stop2_id, stop2_id=stop5_id).delete()
        self.db_session.query(metabase.Transfer).filter_by(stop1_id=stop2_id, stop2_id=stop6_id).delete()
        self.db_session.flush()
        _compute_mis_connections(self.db_session)
        # mis_connection should still be here
        self.assertEqual(self.db_session.query(metabase.MisConnection)
                         .filter_by(mis1_id=mis1_id, mis2_id=mis4_id).count(),
                         1, "MisConnection not found")

        self.db_session.query(metabase.Transfer).filter_by(stop1_id=stop1_id, stop2_id=stop5_id).delete()
        self.db_session.flush()
        _compute_mis_connections(self.db_session)
        # mis_connection should not exist as all transfers between mis1 and mis4 have been deleted
        self.assertEqual(self.db_session.query(metabase.MisConnection)
                         .filter_by(mis1_id=mis1_id, mis2_id=mis4_id).count(),
                         0, "MisConnection should have been deleted")

        self.add_transfer(stop1_id, stop6_id)
        self.db_session.flush()
        _compute_mis_connections(self.db_session)
        # mis_connection should come back now that a transfer has been added
        self.assertEqual(self.db_session.query(metabase.MisConnection)
                         .filter_by(mis1_id=mis1_id, mis2_id=mis4_id).count(),
                         1, "MisConnection not found")

        mis1 = self.db_session.query(metabase.Mis).get(mis1_id)
        mis4 = self.db_session.query(metabase.Mis).get(mis4_id)
        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis4.start_date = date_type(year=2012, month=7, day=1)
        mis4.end_date = date_type(year=2013, month=4, day=1)
        self.db_session.commit()
        _compute_mis_connections(self.db_session)
        # Validity periods don't overlap, so mis_connection should not have been created.
        self.assertEqual(self.db_session.query(metabase.MisConnection)
                         .filter_by(mis1_id=mis1_id, mis2_id=mis4_id).count(),
                         0, "MisConnection should have been deleted")

        mis4.start_date = date_type(year=2011, month=7, day=1)
        self.db_session.commit()
        _compute_mis_connections(self.db_session)
        # Validity periods now overlap, so mis_connection should exist.
        self.assertEqual(self.db_session.query(metabase.MisConnection)
                         .filter_by(mis1_id=mis1_id, mis2_id=mis4_id).count(),
                         1, "MisConnection not found")
Example No. 11
    def test_mis_dates_overlap(self):
        mis1_id = self.add_mis(name="mis1")
        mis2_id = self.add_mis(name="mis2")
        mis1 = self.db_session.query(metabase.Mis).get(mis1_id)
        mis2 = self.db_session.query(metabase.Mis).get(mis2_id)

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2009, month=6, day=4)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2012, month=6, day=5)
        mis2.end_date = date_type(year=2013, month=2, day=3)
        self.db_session.commit()
        self.assertFalse(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2013, month=4, day=1)
        mis2.end_date = date_type(year=2016, month=2, day=3)
        self.db_session.commit()
        self.assertFalse(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2008, month=1, day=2)
        mis1.end_date = date_type(year=2010, month=6, day=4)
        mis2.start_date = date_type(year=2009, month=6, day=4)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2008, month=1, day=2)
        mis1.end_date = date_type(year=2010, month=6, day=4)
        mis2.start_date = date_type(year=2010, month=6, day=4)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2008, month=1, day=2)
        mis1.end_date = date_type(year=2010, month=6, day=4)
        mis2.start_date = date_type(year=2010, month=4, day=3)
        mis2.end_date = date_type(year=2012, month=2, day=3)
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))

        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis2.start_date = date_type(year=2013, month=4, day=1)
        mis2.end_date = None
        self.db_session.commit()
        self.assertTrue(mis_dates_overlap(self.db_session, mis1_id, mis2_id))
Example No. 12
    def test_compute_mis_connection(self):
        mis1_id = self.add_mis("mis1")
        mis2_id = self.add_mis("mis2")
        mis3_id = self.add_mis("mis3")
        mis4_id = self.add_mis("mis4")

        stop1_id = self.add_stop(code="code1", mis_id=mis1_id)
        stop2_id = self.add_stop(code="code2", mis_id=mis1_id)
        stop3_id = self.add_stop(mis_id=mis2_id)
        stop4_id = self.add_stop(mis_id=mis3_id)
        stop5_id = self.add_stop(code="code1", mis_id=mis4_id)
        stop6_id = self.add_stop(code="code2", mis_id=mis4_id)
        stop7_id = self.add_stop(code="code3", mis_id=mis4_id)

        transfers = [(stop1_id, stop3_id), (stop1_id, stop4_id),
                     (stop1_id, stop5_id), (stop2_id, stop6_id),
                     (stop2_id, stop5_id), (stop3_id, stop4_id),
                     (stop3_id, stop7_id)]

        mis_connections = []
        for t in transfers:
            mis_connections.append(
                tuple([
                    self.db_session.query(
                        metabase.Stop.mis_id).filter_by(id=t[0]).one()[0],
                    self.db_session.query(
                        metabase.Stop.mis_id).filter_by(id=t[1]).one()[0]
                ]))
        mis_connections = set(mis_connections)  # Remove duplicates

        for s1, s2 in transfers:
            self.add_transfer(s1, s2)

        _compute_mis_connections(self.db_session)
        db_mis_connections = self.db_session.query(metabase.MisConnection.mis1_id,
                                                   metabase.MisConnection.mis2_id) \
            .all()

        # Check that there are no duplicates in the mis_connection table.
        self.assertEqual(len(db_mis_connections), len(set(db_mis_connections)),
                         "Found duplicates in the mis_connection table")
        self.assertEqual(set(db_mis_connections), mis_connections,
                         "MisConnection calculation results not as expected")

        self.db_session.query(metabase.Transfer).filter_by(
            stop1_id=stop2_id, stop2_id=stop5_id).delete()
        self.db_session.query(metabase.Transfer).filter_by(
            stop1_id=stop2_id, stop2_id=stop6_id).delete()
        self.db_session.flush()
        _compute_mis_connections(self.db_session)
        # mis_connection should still be here
        self.assertEqual(
            self.db_session.query(metabase.MisConnection).filter_by(
                mis1_id=mis1_id, mis2_id=mis4_id).count(), 1,
            "MisConnection not found")

        self.db_session.query(metabase.Transfer).filter_by(
            stop1_id=stop1_id, stop2_id=stop5_id).delete()
        self.db_session.flush()
        _compute_mis_connections(self.db_session)
        # mis_connection should not exist as all transfers between mis1 and mis4 have been deleted
        self.assertEqual(
            self.db_session.query(metabase.MisConnection).filter_by(
                mis1_id=mis1_id, mis2_id=mis4_id).count(), 0,
            "MisConnection should have been deleted")

        self.add_transfer(stop1_id, stop6_id)
        self.db_session.flush()
        _compute_mis_connections(self.db_session)
        # mis_connection should come back now that a transfer has been added
        self.assertEqual(
            self.db_session.query(metabase.MisConnection).filter_by(
                mis1_id=mis1_id, mis2_id=mis4_id).count(), 1,
            "MisConnection not found")

        mis1 = self.db_session.query(metabase.Mis).get(mis1_id)
        mis4 = self.db_session.query(metabase.Mis).get(mis4_id)
        mis1.start_date = date_type(year=2010, month=6, day=4)
        mis1.end_date = date_type(year=2012, month=6, day=4)
        mis4.start_date = date_type(year=2012, month=7, day=1)
        mis4.end_date = date_type(year=2013, month=4, day=1)
        self.db_session.commit()
        _compute_mis_connections(self.db_session)
        # Validity periods don't overlap, so mis_connection should not have been created.
        self.assertEqual(
            self.db_session.query(metabase.MisConnection).filter_by(
                mis1_id=mis1_id, mis2_id=mis4_id).count(), 0,
            "MisConnection should have been deleted")

        mis4.start_date = date_type(year=2011, month=7, day=1)
        self.db_session.commit()
        _compute_mis_connections(self.db_session)
        # Validity periods now overlap, so mis_connection should exist.
        self.assertEqual(
            self.db_session.query(metabase.MisConnection).filter_by(
                mis1_id=mis1_id, mis2_id=mis4_id).count(), 1,
            "MisConnection not found")