Example #1
# Imports shared by all of these examples; the project-specific helpers
# (get_last_access_time, log_access_time, etc.) come from the surrounding
# GSS codebase and are not shown here.
import datetime
import logging
import os
from configparser import ConfigParser

import numpy as np


def main():

    # -------- Load configuration file --------
    config = ConfigParser()
    with open('GSS_settings.conf') as fp:
        config.read_file(fp)

    # -------- Configure logger ---------
    logfile = config['logging']['log_file']
    log_level = getattr(logging, config['logging']['log_level'])  # e.g. 'INFO' -> logging.INFO, without eval()
    logging.basicConfig(level=log_level,
                        filename=logfile,
                        format='[%(asctime)s]\t%(module)s.%(name)s\t%(levelname)s\t%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    logging.getLogger('matplotlib').setLevel(logging.WARNING)
    np.seterr(divide='ignore')

    in_root = os.path.join(config['db_locations']['survey_tree_root'],'xml')
    out_root = os.path.join(config['db_locations']['survey_tree_root'],'figures')
    line_plots = [x.strip() for x in config['survey_config']['line_plots'].split(',')]
    plot_length = int(config['survey_config']['plot_length'])
    packet_db_file = config['db_locations']['packet_db_file']
    access_log = config['logging']['access_log']

    last_timestamp = get_last_access_time(access_log, 'generate_survey_quicklooks')
    last_run_time = datetime.datetime.utcfromtimestamp(last_timestamp)

    logging.info(f'Last ran at {last_run_time}')
    generate_survey_quicklooks(in_root, out_root, line_plots=line_plots,
                               plot_length=plot_length, last_run_time=last_run_time)

    # Success!
    log_access_time(access_log,"generate_survey_quicklooks")
Example #2
def main():
    # -------- Load configuration file --------
    config = ConfigParser()
    with open('GSS_settings.conf') as fp:
        config.read_file(fp)

    # -------- Configure logger ---------
    logfile = config['logging']['log_file']
    logging.basicConfig(
        level=getattr(logging, config['logging']['log_level']),  # avoids eval()
        filename=logfile,
        format='[%(asctime)s]\t%(module)s.%(name)s\t%(levelname)s\t%(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    logging.getLogger('matplotlib').setLevel(logging.WARNING)
    np.seterr(divide='ignore')

    packet_db = config['db_locations']['packet_db_file']
    out_root = config['db_locations']['status_tree_root']
    access_log = config['logging']['access_log']

    last_timestamp = get_last_access_time(access_log, 'process_status_data')
    last_time = datetime.datetime.utcfromtimestamp(last_timestamp)
    logging.info(f'Last run time: {last_time} UTC')

    tsmin, tsmax = get_time_range_for_updated_packets(packet_db,
                                                      last_timestamp)

    if (not tsmin) or (not tsmax):
        logging.info('No new data to process')
    else:
        logging.info(
            f'Header timestamps range from {datetime.datetime.utcfromtimestamp(tsmin)} to {datetime.datetime.utcfromtimestamp(tsmax)}'
        )
        # Add in some extra margin, in case the new minimum is in the middle of a burst
        tmin = datetime.datetime.utcfromtimestamp(tsmin) - datetime.timedelta(
            hours=2)
        tmax = datetime.datetime.utcfromtimestamp(tsmax) + datetime.timedelta(
            hours=2)
        logging.info(
            f'Loading packets with header timestamps {tmin} to {tmax}')

        stats = []
        # ---------------- Load packets --------------------
        # packets = load_packets_from_tree(in_root)
        packets = get_packets_within_range(packet_db,
                                           dtype='I',
                                           t1=tmin,
                                           t2=tmax)

        logging.info(f'Loaded {len(packets)} packets')
        if packets:
            stats.extend(decode_status(packets))
            logging.info(f'Decoded {len(stats)} status messages')

        if stats:
            save_status_to_file_tree(stats, out_root)

    log_access_time(access_log, 'process_status_data')
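
Every example brackets its work with get_last_access_time() and log_access_time(). These helpers belong to the surrounding GSS codebase and aren't shown; here is a minimal sketch of one plausible implementation, assuming the access log is a plain text file of 'script_name timestamp' lines (the real format may differ):

import time

def log_access_time(access_log, script_name):
    # Append the current Unix time for this script.
    with open(access_log, 'a') as f:
        f.write(f'{script_name} {time.time()}\n')

def get_last_access_time(access_log, script_name):
    # Return the most recent Unix timestamp recorded for this script,
    # or 0 if it has never run (so a first run processes everything).
    last = 0.0
    try:
        with open(access_log) as f:
            for line in f:
                name, _, stamp = line.rpartition(' ')
                if name == script_name:
                    last = float(stamp)
    except FileNotFoundError:
        pass
    return last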
Example #3
def main():

    # -------- Load configuration file --------
    config = ConfigParser()
    with open('GSS_settings.conf') as fp:
        config.read_file(fp)

    # -------- Configure logger ---------
    logfile = config['logging']['log_file']
    logging.basicConfig(
        level=getattr(logging, config['logging']['log_level']),  # avoids eval()
        filename=logfile,
        format='[%(asctime)s]\t%(module)s.%(name)s\t%(levelname)s\t%(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    logging.getLogger('matplotlib').setLevel(logging.WARNING)
    np.seterr(divide='ignore')

    # -------- Load some settings -------
    packet_db = config['db_locations']['packet_db_file']
    out_root = config['db_locations']['burst_tree_root']
    file_types = config['burst_config']['file_types']
    file_types = [x.strip() for x in file_types.split(',')]

    do_plots = int(config['burst_config']['do_plots']) > 0
    do_maps = int(config['burst_config']['do_maps']) > 0
    fill_GPS = int(config['burst_config']['fill_missing_GPS']) > 0

    lookback_mins = int(config['burst_config']['lookback_time_minutes'])
    cal_file = config['burst_config']['calibration_file']
    dpi = int(config['burst_config']['dpi'])

    max_lookback_time = datetime.timedelta(minutes=lookback_mins)

    TLE_file = config['burst_config']['TLE_file'].strip()
    TX_file = config['burst_config']['TX_file'].strip()
    access_log = config['logging']['access_log'].strip()

    last_timestamp = get_last_access_time(access_log, 'process_burst_data')
    last_time = datetime.datetime.utcfromtimestamp(last_timestamp)

    logging.info('------- process_burst_data --------')
    logging.info(f'Last run time: {last_time} UTC')
    logging.info(f'Do_plots: {do_plots}')
    logging.info(f'Do_maps:  {do_maps}')
    logging.info(f'packet_db: {packet_db}')
    logging.info(f'file types: {file_types}')
    logging.info(f'Out root: {out_root}')
    # Get any sets of headers / footers to check:
    pairs = get_burst_pairs(packet_db,
                            date_added=last_time,
                            max_lookback_time=max_lookback_time)

    if not pairs:
        logging.info('No new burst data to decode')
    else:

        logging.info(f'Have {len(pairs)} sets to check')

        # Check each pair one by one, saving and plotting as we go
        for index, pair in enumerate(pairs):
            logging.info(f'Doing pair {index}:')

            # Process
            bursts = process_bursts_from_database(
                packet_db, [pair], max_lookback_time=max_lookback_time)

            # Replace any bad GPS positions with TLE-propagated data
            if fill_GPS:
                for B in bursts:
                    fill_missing_GPS_entries(B['G'])

            # Save output files
            save_burst_to_file_tree(bursts, out_root, file_types)

            # Plot
            if do_plots or do_maps:
                gen_burst_plots(bursts,
                                out_root,
                                do_plots=do_plots,
                                do_maps=do_maps,
                                dpi=dpi,
                                cal_file=cal_file,
                                TLE_file=TLE_file,
                                TX_file=TX_file)
        # Success!
        logging.info('saving access time')
        log_access_time(access_log, 'process_burst_data')
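
A side note on the flag parsing above: int(config[...]) > 0 works, but ConfigParser can read booleans directly. With 0/1 values in the settings file, getboolean() is an equivalent, slightly clearer drop-in (an alternative, not what the original code does):

    do_plots = config['burst_config'].getboolean('do_plots')
    do_maps = config['burst_config'].getboolean('do_maps')
    fill_GPS = config['burst_config'].getboolean('fill_missing_GPS')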
Example #4
def process_packets():

    # -------- Load configuration file --------
    config = ConfigParser()
    with open('GSS_settings.conf') as fp:
        config.read_file(fp)

    # -------- Configure logger ---------
    logfile = config['logging']['log_file']
    logging.basicConfig(
        level=getattr(logging, config['logging']['log_level']),  # avoids eval()
        filename=logfile,
        format='[%(asctime)s]\t%(module)s.%(name)s\t%(levelname)s\t%(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    logging.getLogger('matplotlib').setLevel(logging.WARNING)

    output_type = 'db'
    # output_type = 'files'
    # You need the setting below if you're writing raw packets to a file
    # tree (but the database is cooler); without it, the 'files' branches
    # further down will raise a NameError on out_root.
    # out_root = config['db_locations']['packet_tree_root']

    in_roots = config['db_locations']['telemetry_watch_directory'].split(',')
    db_name = config['db_locations']['packet_db_file']
    access_log = config['logging']['access_log']

    do_TLM = True
    do_CSV = True

    logging.info(f'input paths: {in_roots}')

    # Find any files to exclude from this run
    # (We could also do this by tracking the file modification date)
    if 'db' in output_type:
        logging.info(f'output database: {db_name}')
        files_to_skip = get_files_in_db(db_name, 'packets')
    else:
        logging.info(f'output path: {out_root}')
        files_to_skip = []

    # logging.info(f'Files to skip: {files_to_skip}')

    for in_root in in_roots:
        logging.info(f'doing {in_root}:')

        for root, dirs, files in os.walk(in_root):

            for fname in files:
                if fname in files_to_skip:
                    logging.debug(
                        f'File {fname} already in database; skipping')
                else:

                    packets = []
                    try:
                        if do_TLM and fname.endswith('.tlm'):
                            logging.info(f'loading TLM from {root} {fname}')
                            # Load packets from each TLM file, tag with the source filename, and decode
                            packets = decode_packets_TLM(root, fname)

                        if do_CSV and fname.endswith('.csv'):
                            logging.info(f'loading CSV from {root} {fname}')
                            packets = decode_packets_CSV(root, fname)

                        if packets:
                            if 'files' in output_type:
                                save_packets_to_file_tree(packets, out_root)
                            if 'db' in output_type:
                                conn = connect_packet_db(db_name)
                                line_id = write_to_db(conn,
                                                      packets,
                                                      db_field='packets')
                                logging.info(
                                    f'wrote to db: line ID = {line_id}')
                                conn.commit()
                                conn.close()

                    except Exception:
                        logging.warning(f'Problem loading {fname}', exc_info=True)

    if 'db' in output_type:
        log_access_time(access_log, 'process_packets')
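
The packet-database helpers (connect_packet_db, get_files_in_db, write_to_db) are also project-specific. Assuming an SQLite backing store, a sketch of the two simplest ones might look like the following; the table and column names are guesses. Returning a set from get_files_in_db also makes the "fname in files_to_skip" test O(1) rather than a list scan:

import sqlite3

def connect_packet_db(db_name):
    # Open (or create) the SQLite packet database.
    return sqlite3.connect(db_name)

def get_files_in_db(db_name, table):
    # Return the set of source filenames already ingested into the given
    # table, so previously processed telemetry files can be skipped.
    conn = sqlite3.connect(db_name)
    try:
        rows = conn.execute(f'SELECT DISTINCT source_file FROM {table}')  # column name is a guess
        return {r[0] for r in rows}
    finally:
        conn.close()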
Example #5
def main():
    # -------- Load configuration file --------
    config = ConfigParser()
    with open('GSS_settings.conf') as fp:
        config.read_file(fp)

    # -------- Configure logger ---------
    logfile = config['logging']['log_file']
    log_level = getattr(logging, config['logging']['log_level'])  # avoids eval()
    logging.basicConfig(level=log_level,
                        filename=logfile,
                        format='[%(asctime)s]\t%(module)s.%(name)s\t%(levelname)s\t%(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    logging.getLogger('matplotlib').setLevel(logging.WARNING)
    np.seterr(divide='ignore')

    packet_db = config['db_locations']['packet_db_file']
    out_root = config['db_locations']['survey_tree_root']
    access_log = config['logging']['access_log']
    
    file_types = config['survey_config']['file_types']
    file_types = [x.strip() for x in file_types.split(',')]
    S_data = []

    fill_GPS = int(config['survey_config']['fill_missing_GPS']) > 0

    # Get the last time we ran this script:
    last_timestamp = get_last_access_time(access_log, 'process_survey_data')
    last_time = datetime.datetime.utcfromtimestamp(last_timestamp)
    logging.info(f'Last run time: {last_time} UTC')

    # Get the range of header timestamps corresponding to
    # packets added after the last time we ran:
    
    tsmin, tsmax = get_time_range_for_updated_packets(packet_db, last_timestamp)

    if (not tsmin) or (not tsmax):
        logging.info('No new data to process')
    else:    
        logging.info(f'Header timestamps range from {datetime.datetime.utcfromtimestamp(tsmin)} to {datetime.datetime.utcfromtimestamp(tsmax)}')
        # Add in some extra margin, in case the new minimum is in the middle of a burst
        tmin = datetime.datetime.utcfromtimestamp(tsmin) - datetime.timedelta(hours=2)
        tmax = datetime.datetime.utcfromtimestamp(tsmax) + datetime.timedelta(hours=2)
        logging.info(f'Loading packets with header timestamps {tmin} to {tmax}')

        # ---------------- Load packets --------------------
        # this version from a file tree (.pklz files)
        # packets = load_packets_from_tree(in_root)

        # this version from the database!
        packets = get_packets_within_range(packet_db, dtype='S', t1=tmin, t2=tmax)

        # -------------------- Decode survey data from packets --------------------
        if packets:
            from_packets, unused = decode_survey_data(packets, separation_time=4.5)
            logging.info(f'Decoded {len(from_packets)} survey products; {len(unused)} unused packets remaining')
            S_data.extend(from_packets)

        if S_data:

            # Replace any missing GPS positions with TLE-propagated data
            if fill_GPS:
                fill_missing_GPS_entries([x['GPS'][0] for x in S_data])

            save_survey_to_file_tree(S_data, out_root, file_types=file_types)
    
    log_access_time(access_log, 'process_survey_data')
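
None of the examples show a call site for main() (or process_packets()); presumably each module ends with the standard entry-point guard:

if __name__ == '__main__':
    main()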