Example #1
def process_tm(path):
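    # Run the full processing chain for one TM file:
    # raw TM/TC packets -> level binary -> L0 -> L1 FITS products.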
    lb_files = process_tmtc_to_levelbinary([SOCPacketFile(path)])
    l0_proc = Level0(CONFIG.get('Paths', 'tm_archive'),
                     CONFIG.get('Paths', 'fits_archive'))
    l0_files = l0_proc.process_fits_files(files=lb_files)
    l1_proc = Level1(CONFIG.get('Paths', 'tm_archive'),
                     CONFIG.get('Paths', 'fits_archive'))
    l1_files = l1_proc.process_fits_files(files=l0_files)
    logger.debug(l1_files)
Example #2
def process_type(files, *, processor, spice_kernel_path, config):
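    # Convert each L0 FITS file into its registered L1 product and write it out.
    # SPICE and the config are re-initialised from the passed-in arguments.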
    all_files = list()
    Spice.instance = Spice(spice_kernel_path)
    CONFIG = config

    for file in files:
        l0 = Product(file)
        try:
            tmp = Product._check_registered_widget(
                level='L1',
                service_type=l0.service_type,
                service_subtype=l0.service_subtype,
                ssid=l0.ssid,
                data=None,
                control=None)
            l1 = tmp.from_level0(l0, parent=file.name)
            fits_files = processor.write_fits(l1)
            all_files.extend(fits_files)
        except NoMatchError:
            logger.debug('No match for product %s', l0)
        except Exception as e:
            logger.error('Error processing file %s', file, exc_info=True)
            logger.error('%s', e)
            if CONFIG.getboolean('Logging', 'stop_on_error', fallback=False):
                raise e
    return all_files
Example #3
    def __init__(self, archive_path):
        self.archive_path = archive_path
        soop_path = CONFIG.get('Paths', 'soop_files')
        if str(soop_path) == '':
            soop_path = Path(
                __file__).parent.parent.parent / 'data' / 'test' / 'soop'
        self.soop_manager = SOOPManager(soop_path)
Example #4
    def process_fits_files(self, files=None):
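        # Group the input files by TM type (encoded in the file name) and hand
        # each group to a separate worker process for conversion.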
        all_files = list()
        tm = defaultdict(list)
        if files is None:
            files = self.levelb_files
        # Create a list of files for each TM type
        for file in files:
            mission, level, identifier, *_ = file.name.split('_')
            tm_type = tuple(map(int, identifier.split('-')[1:]))
            tm[tm_type].append(file)

        # For each type
        with ProcessPoolExecutor() as executor:
            jobs = [
                executor.submit(
                    process_tm_type,
                    files,
                    tm_type,
                    self.processor,
                    # keep track of the used Spice kernel
                    spice_kernel_path=Spice.instance.meta_kernel_path,
                    config=CONFIG) for tm_type, files in tm.items()
            ]

        for job in jobs:
            try:
                created_files = job.result()
                all_files.extend(created_files)
            except Exception as e:
                logger.error('Problem processing files', exc_info=True)
                if CONFIG.getboolean('Logging',
                                     'stop_on_error',
                                     fallback=False):
                    raise e
        return list(set(all_files))
Example #5
def process_request(request, outputdir):
    """
    Process at LLDP request.
    Parameters
    ----------
    request : `pathlib.Path`
        Path to directory containing request
    outputdir :
        Path to directory to store outputs
    Raises
    ------
    RequestException
        There was an error processing the request
    """
    logger.info('Processing %s', request.name)

    tmtc_files = list(request.joinpath('telemetry').glob('*.xml'))
    if len(tmtc_files) != 1:
        raise RequestException(f'Expected one tmtc file, found {len(tmtc_files)}.')
    soc_file = SOCPacketFile(tmtc_files[0])
    lb = LevelB.from_tm(soc_file)
    prods = []
    for prod in lb:
        # Only process light curve (30) and flare flag and location (34)
        if prod.ssid in (30, 34):
            tmp = Product._check_registered_widget(
                level='L0',
                service_type=prod.service_type,
                service_subtype=prod.service_subtype,
                ssid=prod.ssid,
                data=None,
                control=None)
            try:
                l0 = tmp.from_levelb(prod, parent='')
                prods.append(l0)
            except Exception as e:
                logger.error('Error processing file %s for %s, %s, %s',
                             soc_file, lb.service_type, lb.service_subtype,
                             lb.ssid)
                logger.error('%s', e)
                if CONFIG.getboolean('Logging',
                                     'stop_on_error',
                                     fallback=False):
                    raise e

    processor = FitsLL01Processor()
    curtime = datetime.now()
    for prod in prods:
        processor.write_fits(prod, outputdir, curtime)
Example #6
def process_tmtc_to_levelbinary(files_to_process, archive_path=None):
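    # Convert raw TM/TC packet files into level binary FITS products, writing
    # each LevelB product in a separate worker process.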
    if archive_path is None:
        archive_path = Path(CONFIG.get('Paths', 'fits_archive'))
    fits_processor = FitsLBProcessor(archive_path)
    all_files = set()
    for tmtc_file in files_to_process:
        logger.info('Processing file: %s', tmtc_file)
        jobs = []
        with ProcessPoolExecutor() as executor:
            for prod in LevelB.from_tm(tmtc_file):
                if prod:
                    jobs.append(executor.submit(fits_processor.write_fits, prod))

        for job in jobs:
            try:
                new_files = job.result()
                all_files.update(new_files)
            except Exception as e:
                logger.error('Error processing', exc_info=True)
                if CONFIG.getboolean('Logging', 'stop_on_error', fallback=False):
                    raise e

    return all_files
Example #7
            fits_files = processor.write_fits(l1)
            all_files.extend(fits_files)
        except NoMatchError:
            logger.debug('No match for product %s', l0)
        except Exception as e:
            logger.error('Error processing file %s', file, exc_info=True)
            logger.error('%s', e)
            if CONFIG.getboolean('Logging', 'stop_on_error', fallback=False):
                raise e
    return all_files


if __name__ == '__main__':
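    # Reprocess an existing L0 FITS tree into L1 products and report the run time.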
    tstart = perf_counter()
    warnings.filterwarnings('ignore', module='astropy.io.fits.card')
    warnings.filterwarnings('ignore', module='stixcore.soop.manager')
    warnings.filterwarnings('ignore', module='astropy.utils.metadata')

    fits_path = Path('/home/shane/fits_test_latest/L0')
    bd = Path('/home/shane/fits_test_latest')

    # possibly set an alternative spice kernel if the latest one should not be used
    spm = SpiceKernelManager(Path(CONFIG.get("Paths", "spice_kernels")))
    Spice.instance = Spice(spm.get_latest_mk())

    l1processor = Level1(fits_path, bd)
    all_files = l1processor.process_fits_files()
    logger.info(len(all_files))
    tend = perf_counter()
    logger.info('Time taken %f', tend - tstart)
Example #8
def spicekernelmanager():
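    # Create a SpiceKernelManager for the kernel directory given in the config.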
    return SpiceKernelManager(CONFIG.get("Paths", "spice_kernels"))
Example #9
def process_tm(path):
    lb_files = process_tmtc_to_levelbinary([SOCPacketFile(path)])
    l0_proc = Level0(CONFIG.get('Paths', 'tm_archive'),
                     CONFIG.get('Paths', 'fits_archive'))
    l0_files = l0_proc.process_fits_files(files=lb_files)
    l1_proc = Level1(CONFIG.get('Paths', 'tm_archive'),
                     CONFIG.get('Paths', 'fits_archive'))
    l1_files = l1_proc.process_fits_files(files=l0_files)
    logger.debug(l1_files)


if __name__ == '__main__':
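    # Watch the incoming TM and SOOP directories and process new files as they arrive.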
    tstart = time.perf_counter()
    observer = Observer()
    path = Path('/home/shane/tm')
    soop_path = Path(CONFIG.get('Paths', 'soop_files'))
    logging_handler = LoggingEventHandler(logger=logger)
    tm_handler = GFTSFileHandler(process_tm, TM_REGEX)

    # TODO should be a dedicated path from the config
    soop_manager = SOOPManager(soop_path)
    soop_handler = GFTSFileHandler(soop_manager.add_soop_file_to_index,
                                   SOOPManager.SOOP_FILE_REGEX)

    observer.schedule(soop_handler, soop_manager.data_root, recursive=False)
    observer.schedule(logging_handler, path, recursive=True)
    observer.schedule(tm_handler, path, recursive=True)

    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
Example #10
def process_tm_type(files, tm_type, processor, spice_kernel_path, config):
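    # Convert all level binary files of one TM type into L0 FITS products.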
    all_files = []
    Spice.instance = Spice(spice_kernel_path)
    CONFIG = config

    # Stand alone packet data
    if (tm_type[0] == 21
            and tm_type[-1] not in {20, 21, 22, 23, 24}) or tm_type[0] != 21:
        for file in files:
            levelb = Product(file)
            tmp = Product._check_registered_widget(
                level='L0',
                service_type=levelb.service_type,
                service_subtype=levelb.service_subtype,
                ssid=levelb.ssid,
                data=None,
                control=None)
            try:
                level0 = tmp.from_levelb(levelb, parent=file.name)
                if level0:
                    fits_files = processor.write_fits(level0)
                    all_files.extend(fits_files)
            except Exception as e:
                logger.error('Error processing file %s for %s, %s, %s', file,
                             levelb.service_type, levelb.service_subtype,
                             levelb.ssid)
                logger.error('%s', e)
                if CONFIG.getboolean('Logging',
                                     'stop_on_error',
                                     fallback=False):
                    raise e

    else:
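        # Sequenced packet data: merge incomplete packet sequences across
        # consecutive files and convert the completed ones to L0.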
        last_incomplete = []
        # for each file
        for file in files:
            levelb = Product(file)
            complete, incomplete = levelb.extract_sequences()

            if incomplete and last_incomplete:
                combined_complete, combined_incomplete \
                    = (incomplete[0] + last_incomplete[0]).extract_sequences()
                complete.extend(combined_complete)
                last_incomplete = combined_incomplete

            if complete:
                for comp in complete:

                    # TODO need to carry better information for logging like index from
                    #  original files and file names
                    try:
                        tmp = Product._check_registered_widget(
                            level='L0',
                            service_type=comp.service_type,
                            service_subtype=comp.service_subtype,
                            ssid=comp.ssid,
                            data=None,
                            control=None)
                        level0 = tmp.from_levelb(comp, parent=file.name)
                        fits_files = processor.write_fits(level0)
                        all_files.extend(fits_files)
                    except Exception as e:
                        logger.error('Error processing file %s for %s, %s, %s',
                                     file,
                                     comp.service_type,
                                     comp.service_subtype,
                                     comp.ssid,
                                     exc_info=True)
                        logger.error('%s', e)
                        if CONFIG.getboolean('Logging',
                                             'stop_on_error',
                                             fallback=False):
                            raise e
            try:
                last_incomplete = last_incomplete[0] + incomplete[0]
            except IndexError:
                last_incomplete = []

        if last_incomplete:
            for inc in last_incomplete:
                tmp = Product._check_registered_widget(
                    level='L0',
                    service_type=inc.service_type,
                    service_subtype=inc.service_subtype,
                    ssid=inc.ssid,
                    data=None,
                    control=None)
                level0 = tmp.from_levelb(inc, parent=file.name)
                fits_files = processor.write_fits(level0)
                all_files.extend(fits_files)

    return all_files