def match_overnight_flights_on_day(
        date, max_time_difference=DEFAULT_MAXIMUM_TIME_DELTA):
    """
    Match flights for the given day with flights for the previous day.

    Parameters
    ----------
    date: string
        The date in ISO8601 format, e.g. 2017-08-16

    max_time_difference
        The maximum time difference permitted when matching flights,
        default: DEFAULT_MAXIMUM_TIME_DELTA.

    Returns
    -------
        0 if successful, an errno error code otherwise.

    """
    if is_valid_iso8601_date(date):

        # get the CPR data from the Google bucket
        log.info(f'Getting data for date: {date}')

        match_flights_files = create_match_overnight_flights_input_filenames(
            date)
        if not get_processed(REFINED_MERGED_DAILY_CPR_FR24,
                             match_flights_files):
            log.error('Flights file not found in daily_cpr_fr24 bucket')
            return errno.ENOENT

        error_code = match_overnight_flights(match_flights_files,
                                             max_time_difference)
        if error_code:
            return error_code
        gc.collect()

        prev_ids_filename = create_matching_ids_filename(PREV_DAY, date)
        if not put_processed(REFINED_MERGED_OVERNIGHT_CPR_FR24_IDS,
                             [prev_ids_filename]):
            log.error('Could not write ids to overnight_cpr_fr24/ids bucket')
            return errno.EACCES

        extract_data_input_files = create_extract_overnight_data_input_filenames(
            date)
        if not get_processed(REFINED_MERGED_DAILY_CPR_FR24,
                             extract_data_input_files[2:]):
            log.error(
                'Positions or events file not found in daily_cpr_fr24 bucket')
            return errno.ENOENT

        error_code = extract_overnight_data(extract_data_input_files)
        if error_code:
            return error_code

        extract_data_output_files = create_extract_overnight_data_output_filenames(
            date)
        if not put_processed(REFINED_MERGED_OVERNIGHT_CPR_FR24,
                             extract_data_output_files):
            log.error('Could not write to overnight_cpr_fr24 bucket')
            return errno.EACCES

    else:
        log.error(f'invalid date: {date}')
        return errno.EINVAL

    return 0
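
A minimal caller sketch for the errno-style protocol above, assuming the function is imported from its module; the date value and exit handling are illustrative only:

import errno
import sys

# Hypothetical driver: forward the errno-style result as the process exit code.
error_code = match_overnight_flights_on_day('2017-08-16')
if error_code:
    sys.exit(error_code)  # e.g. errno.EINVAL for a bad date, errno.ENOENT for missing files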
Example #2
def find_trajectory_user_airspace_intersections(
        trajectory_filename,
        source=CPR_FR24,
        logging_msg_count=DEFAULT_LOGGING_COUNT):
    """
    """
    if not path_exists(trajectory_filename):
        source_path = '/'.join([PRODUCTS, TRAJECTORIES, source])
        log.debug("Getting trajectory file: %s", trajectory_filename)
        if not get_processed(source_path, [trajectory_filename]):
            log.error('Trajectory file not found in %s bucket', source_path)
            return False

    log.info("find_user_airspace_intersections for: %s", trajectory_filename)
    if find_user_airspace_intersections(trajectory_filename,
                                        logging_msg_count):
        return False

    output_filename = trajectory_filename.replace(TRAJECTORIES,
                                                  USER_INTERSECTIONS)
    output_filename = output_filename.replace(JSON_FILE_EXTENSION,
                                              CSV_FILE_EXTENSION)

    intersections_path = '/'.join([PRODUCTS_INTERSECTIONS_USER, source])
    return put_processed(intersections_path, [output_filename])
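
The output filename above is derived purely by string substitution. A small sketch of that transformation with hypothetical constant values, since the real constants are defined elsewhere in the module:

# Hypothetical values for the module-level constants.
TRAJECTORIES = 'trajectories'
USER_INTERSECTIONS = 'user_intersections'
JSON_FILE_EXTENSION = '.json'
CSV_FILE_EXTENSION = '.csv'

name = 'trajectories/cpr_fr24/2017-08-16.json'
name = name.replace(TRAJECTORIES, USER_INTERSECTIONS)
name = name.replace(JSON_FILE_EXTENSION, CSV_FILE_EXTENSION)
assert name == 'user_intersections/cpr_fr24/2017-08-16.csv'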
Example #3
def match_previous_days_flights(date):
    """
    Match merged CPR and FR24 ADS-B data for the given date,
    with merged CPR and FR24 ADS-B data for the previous day.

    Parameters
    ----------
    date: string
        The date in ISO8601 format, e.g. 2017-08-16

    Returns
    -------
        True if successful, False otherwise.

    """
    # Get the previous day's files from the overnight bucket, if not available locally
    merge_files = create_merge_consecutive_day_input_filenames(date)
    if not path_exists(merge_files[1]) or \
            not path_exists(merge_files[3]) or \
            not path_exists(merge_files[5]):
        log.debug("Getting files %s, %s, %s", merge_files[1], merge_files[3],
                  merge_files[5])
        if not get_processed(REFINED_MERGED_OVERNIGHT_CPR_FR24,
                             merge_files[1::2]):
            log.error(
                "Previous day's files not found in overnight_cpr_fr24 bucket")
            return False

    # Get the current day's files from the daily bucket, if not available locally
    if not path_exists(merge_files[2]) or \
            not path_exists(merge_files[4]) or \
            not path_exists(merge_files[6]):
        log.debug("Getting files %s, %s, %s", merge_files[2], merge_files[4],
                  merge_files[6])
        if not get_processed(REFINED_MERGED_DAILY_CPR_FR24, merge_files[2::2]):
            log.error("Current day's files not found in daily_cpr_fr24 bucket")
            return False

    filenames = create_match_consecutive_day_input_filenames(date)
    if match_consecutive_day_trajectories(filenames):
        return False
    gc.collect()

    prev_ids_filename = create_matching_ids_filename(PREV_DAY, date)
    return put_processed(REFINED_MERGED_OVERNIGHT_CPR_FR24_IDS,
                         [prev_ids_filename])
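
The [1::2] and [2::2] slices above rely on the input list interleaving previous-day and current-day files after a leading entry that is not fetched here. A sketch with placeholder names showing how the slices separate the two days:

# Placeholder layout; the real list comes from
# create_merge_consecutive_day_input_filenames(date).
merge_files = ['ids.csv',                                   # index 0: not fetched here
               'prev_flights.csv', 'curr_flights.csv',      # indices 1, 2
               'prev_positions.csv', 'curr_positions.csv',  # indices 3, 4
               'prev_events.csv', 'curr_events.csv']        # indices 5, 6

assert merge_files[1::2] == ['prev_flights.csv', 'prev_positions.csv', 'prev_events.csv']
assert merge_files[2::2] == ['curr_flights.csv', 'curr_positions.csv', 'curr_events.csv']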
Example #4
def interpolate_trajectory_file(trajectory_filename,
                                source=CPR_FR24,
                                straight_interval=DEFAULT_STRAIGHT_INTERVAL,
                                turn_interval=DEFAULT_TURN_INTERVAL,
                                logging_msg_count=DEFAULT_LOGGING_COUNT):
    """
    Interpolate the trajectories in the given file.

    Parameters
    ----------
    trajectory_filename: string
        The trajectory filename.

    source: string
        The data to interpolate, default cpr_fr24

    straight_interval: float
        The time interval between points on straight legs.

    turn_interval: float
        The time interval between points around turns.

    Returns
    -------
        True if successful, False otherwise.

    """
    # Get the trajectory file from the trajectories bucket
    source_path = '/'.join([PRODUCTS, TRAJECTORIES, source])
    get_processed(source_path, [trajectory_filename])

    log.debug("Interpolating trajectories for file %s", trajectory_filename)
    if interpolate_trajectories(trajectory_filename, straight_interval,
                                turn_interval, logging_msg_count):
        return False
    gc.collect()

    os.remove(trajectory_filename)  # delete the trajectory file

    output_filename = trajectory_filename.replace(TRAJECTORIES,
                                                  SYNTH_POSITIONS)
    output_filename = output_filename.replace(JSON_FILE_EXTENSION,
                                              CSV_FILE_EXTENSION)

    output_path = '/'.join([PRODUCTS, SYNTH_POSITIONS, source])
    return put_processed(output_path, [output_filename])
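
A hedged usage sketch for the interpolator; the filename and interval values are illustrative, not the module defaults:

# Resample a trajectory: a point every 60 s on straight legs,
# every 10 s around turns (illustrative values).
if not interpolate_trajectory_file('trajectories/cpr_fr24/2017-08-16.json',
                                   straight_interval=60.0,
                                   turn_interval=10.0):
    log.error('interpolation failed')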
Example #5
def match_apds_trajectories_on_day(from_date, to_date, date):
    """
    Match refined APDS data with CPR and FR24 ADS-B data for the given date.

    Parameters
    ----------
    from_date: string
        The APDS start date in ISO8601 format, e.g. 2017-08-01

    to_date: string
        The APDS finish date in ISO8601 format, e.g. 2017-08-31

    date: string
        The date in ISO8601 format, e.g. 2017-08-16

    Returns
    -------
        True if successful, False otherwise.

    """
    apds_filenames = create_convert_apds_filenames(from_date, to_date)
    if not path_exists(apds_filenames[0]) or \
            not path_exists(apds_filenames[1]) or \
            not path_exists(apds_filenames[2]):
        log.debug("Getting apds files: %s", str(apds_filenames))
        if not get_processed(REFINED_APDS, apds_filenames):
            log.error('APDS files not found in sources/apds bucket')
            return False

    day_filenames = create_daily_filenames(date)
    log.debug("Getting days files: %s", str(day_filenames))
    if not get_processed(REFINED_MERGED_OVERNIGHT_CPR_FR24, day_filenames):
        log.error('Data files not found in overnight_cpr_fr24 bucket')
        return False

    filenames = create_match_apds_input_filenames(from_date, to_date, date)
    log.info("matching apds trajectories with files %s", str(filenames))
    if match_apds_trajectories(filenames):
        return False
    gc.collect()

    return put_processed(REFINED_MERGED_APDS_CPR_FR24_IDS,
                         [create_matching_ids_filename(APDS, date)])
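
A usage sketch matching one day against its enclosing APDS date range, following the date examples in the docstring:

# Match 2017-08-16 against the APDS data covering 2017-08-01..2017-08-31.
if not match_apds_trajectories_on_day('2017-08-01', '2017-08-31', '2017-08-16'):
    log.error('APDS matching failed for 2017-08-16')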
Example #6
def find_trajectory_airport_intersections(
        trajectory_filename,
        source=CPR_FR24,
        radius=DEFAULT_RADIUS,
        airports_filename=DEFAULT_MOVEMENTS_AIRPORTS_FILENAME,
        distance_tolerance=DEFAULT_DISTANCE_TOLERANCE):
    """
    """
    if not path_exists(trajectory_filename):
        source_path = '/'.join([PRODUCTS, TRAJECTORIES, source])
        log.debug("Getting trajectory file: %s", trajectory_filename)
        if not get_processed(source_path, [trajectory_filename]):
            log.error('Trajectory file not found in %s bucket', source_path)
            return False

    date = read_iso8601_date_string(trajectory_filename, is_json=True)
    flights_filename = create_flights_filename(source, date)
    if not path_exists(flights_filename):
        source_path = REFINED_MERGED_OVERNIGHT_CPR_FR24 \
            if (source == CPR_FR24) else '/'.join([REFINED, source])
        if not get_processed(source_path, [flights_filename]):
            log.error('Flights file not found in %s bucket', source_path)
            return False

    get_airports(airports_filename, '.')

    log.info("find_sector_intersections for: %s and %s", flights_filename,
             trajectory_filename)
    if find_airport_intersections(flights_filename, trajectory_filename,
                                  radius, airports_filename,
                                  distance_tolerance):
        return False

    output_filename = trajectory_filename.replace(TRAJECTORIES,
                                                  AIRPORT_INTERSECTIONS)
    output_filename = output_filename.replace(JSON_FILE_EXTENSION,
                                              CSV_FILE_EXTENSION)

    intersections_path = '/'.join([PRODUCTS_INTERSECTIONS_AIRPORT, source])
    return put_processed(intersections_path, [output_filename])
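
A hedged call sketch; the radius value is illustrative and the remaining parameters fall back to the module defaults:

# Look for airport intersections within a 40 NM (illustrative) radius.
if not find_trajectory_airport_intersections(
        'trajectories/cpr_fr24/2017-08-16.json', radius=40.0):
    log.error('airport intersection search failed')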
Example #7
def merge_overnight_data_on_day(date,
                                max_speed=DEFAULT_MAX_SPEED,
                                distance_accuracy=DEFAULT_DISTANCE_ACCURACY):
    """
    Merge overnight flight data for the given day and clean the merged position data.

    Parameters
    ----------
    date: string
        The date in ISO8601 format, e.g. 2017-08-16

    max_speed: float
        The maximum ground speed permitted between adjacent positions [Knots],
        default: 750 Knots.

    distance_accuracy: float
        The maximum distance between positions at the same time [Nautical Miles],
        default: 0.25 NM.

    Returns
    -------
        0 if successful, an errno error code otherwise.

    """
    if is_valid_iso8601_date(date):

        # get the CPR data from the Google bucket
        log.info(f'Getting data for date: {date}')

        merge_files = create_merge_overnight_flight_data_input_filenames(date)
        if not get_processed(REFINED_MERGED_OVERNIGHT_CPR_FR24, merge_files):
            log.error('Flights file not found in overnight_cpr_fr24 bucket')
            return errno.ENOENT

        error_code = merge_overnight_flight_data(merge_files)
        if error_code:
            return error_code

        output_files = create_merge_overnight_flight_data_output_filenames(
            date)
        if not put_processed(REFINED_MERGED_OVERNIGHT_CPR_FR24, output_files):
            log.error('Could not write merged files to overnight_cpr_fr24 bucket')
            return errno.EACCES

        raw_filename = output_files[1]
        error_code = clean_position_data(raw_filename, max_speed,
                                         distance_accuracy)
        if error_code:
            log.error(f'clean_position_data error file: {raw_filename}')
            return error_code

        filenames = create_clean_position_data_filenames(CPR_FR24, date)

        source_path = REFINED_MERGED_OVERNIGHT_CPR_FR24
        if not put_processed(source_path, filenames[:1]):
            log.error(f'Could not write file: {filenames[:1]} to bucket')
            return errno.EACCES

        errors_path = PRODUCTS_ERROR_METRICS_CPR_FR24_OVERNIGHT
        if not put_processed(errors_path, filenames[1:]):
            log.error(f'Could not write file: {filenames[1:]} to bucket')
            return errno.EACCES

    else:
        log.error(f'invalid date: {date}')
        return errno.EINVAL

    return 0
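
The filenames[:1]/filenames[1:] split above routes the cleaned positions file and the error-metrics output to different buckets. A sketch with placeholder names:

# Placeholder output of create_clean_position_data_filenames(CPR_FR24, date):
# cleaned positions first, error metrics after.
filenames = ['cpr_fr24_positions_2017-08-16.csv',
             'cpr_fr24_position_errors_2017-08-16.csv']

positions_files = filenames[:1]  # -> overnight_cpr_fr24 bucket
metrics_files = filenames[1:]    # -> error-metrics bucket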
Example #8
def analyse_positions_on_date(
        date,
        source=CPR_FR24,
        *,
        distance_tolerance=DEFAULT_ACROSS_TRACK_TOLERANCE,
        time_method=MOVING_AVERAGE_SPEED,
        N=DEFAULT_MOVING_MEDIAN_SAMPLES,
        M=DEFAULT_MOVING_AVERAGE_SAMPLES,
        max_duration=DEFAULT_SPEED_MAX_DURATION,
        logging_msg_count=DEFAULT_LOGGING_COUNT):
    """
    Analyse refined CPR and FR24 ADS-B data for the given date.

    Parameters
    ----------
    date: string
        The date in ISO8601 format, e.g. 2017-08-16

    source: string
        The data to analyse, default cpr_fr24

    distance_tolerance: float
        The maximum across-track distance [Nautical Miles], default: 0.25 NM.

    time_method: string
        The time analysis method, default: MOVING_AVERAGE_SPEED.

    N : integer
        The number of samples to consider for the speed moving median filter, default 5.

    M : integer
        The number of samples to consider for the speed moving average filter, default 5.

    max_duration: float
        The maximum time between points to smooth when calculating speed, default 120 [Seconds].

    logging_msg_count : integer
        The number of analysed flights between each log message.

    Returns
    -------
        True if successful, False otherwise.

    """
    positions_filename = create_positions_filename(source, date)

    # Get the positions file from the source-specific bucket
    source_path = REFINED_MERGED_OVERNIGHT_CPR_FR24 if (source == CPR_FR24) else \
        '/'.join([REFINED, source])
    get_processed(source_path, [positions_filename])

    log.info("Analysing position data for file %s", positions_filename)
    if analyse_position_data(positions_filename, distance_tolerance,
                             time_method):
        return False
    gc.collect()

    filenames = create_analyse_position_data_filenames(source, date,
                                                       distance_tolerance,
                                                       time_method)

    # Put the traj metrics file first so that it can be deleted
    metrics_path = '/'.join([PRODUCTS, TRAJ_METRICS, source])
    if not put_processed(metrics_path, filenames[1:]):
        return False

    os.remove(filenames[1])  # delete the traj metrics file

    output_path = '/'.join([PRODUCTS, TRAJECTORIES, source])
    return put_processed(output_path, filenames[:1])
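
Everything after the bare * in the signature above is keyword-only, so the tuning parameters must be named at the call site. A usage sketch with illustrative values:

# Positional arguments stop at source; filter settings must be keywords.
ok = analyse_positions_on_date('2017-08-16', 'cpr_fr24',
                               distance_tolerance=0.25, N=5, M=5)
# analyse_positions_on_date('2017-08-16', 'cpr_fr24', 0.25)  # TypeError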