def define_global_program_options(parser):
    """Initilize command line argument parser with common arguments.

    Parameters
    ----------
    parser : :obj:`ArgumentParser`
        An initialized ArgumentParser object
    """

    parser.add_argument(
        '--config',
        '-c',
        metavar='FILE',
        action=AbspathAction,
        help='Configuration file giving all these command line arguments')
    parser.add_argument(
        '--workdir',
        default=os.getcwd(),
        action=AbspathAction,
        help='The root working directory where data is located.')
    parser.add_argument('--logdir',
                        action=AbspathAction,
                        help='The logging directory.')
    parser.add_argument(
        '--loglevel',
        choices=['ERROR', 'WARNING', 'INFO', 'DEBUG'],
        # action=LogLevelAction,
        help='The logging level. One of ERROR, WARNING, INFO or DEBUG.')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='%(prog)s {} (pylibamazed {})'.format(
                            VERSION, get_version()))
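`AbspathAction` is referenced above but not defined in this snippet. A minimal sketch of what such an argparse action might look like, assuming it simply stores the argument as an absolute path (the class body here is hypothetical):

import argparse
import os


class AbspathAction(argparse.Action):
    """Hypothetical action that normalizes a path argument to an absolute path."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Expand ~ and resolve relative paths against the current directory.
        setattr(namespace, self.dest,
                os.path.abspath(os.path.expanduser(values)))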
    def header_to_fits(self, hdulist):
        quality_flag = 2  # no linemeas active
        header = [
            fits.Card('tract', self.spectrum_reader.pfs_object_id["tract"],
                      'Area of the sky'),
            fits.Card('patch', self.spectrum_reader.pfs_object_id["patch"],
                      'Region within tract'),
            fits.Card('catId', self.spectrum_reader.pfs_object_id["catId"],
                      'Source of the objId'),
            fits.Card('objId', self.spectrum_reader.pfs_object_id["objId"],
                      'Unique ID for object'),
            fits.Card('nvisit', self.spectrum_reader.pfs_object_id["nVisit"],
                      'Number of visits'),
            fits.Card('vHash',
                      self.spectrum_reader.pfs_object_id["pfsVisitHash"],
                      '63-bit SHA-1 list of visits'),
            fits.Card('CRPIX1', self.spectrum_reader.wl_infos["CRPIX1"],
                      'Pixel coordinate of reference point'),
            fits.Card('CRVAL1', self.spectrum_reader.wl_infos["CRVAL1"],
                      '[m] Coordinate value at reference point'),
            fits.Card('CDELT1', self.spectrum_reader.wl_infos["CDELT1"],
                      '[m] Coordinate increment at reference point'),
            fits.Card('D1D_VER',
                      get_version()[0:7], 'Version of the DRP_1D library'),
            fits.Card('D1DP_VER', VERSION,
                      'Version of the DRP_1DPIPE pipeline'),
            fits.Card('DAMD_VER', self.spectrum_reader.damd_version,
                      'Version of the data model'),
            fits.Card('U_PARAM', json.dumps(self.user_param),
                      "User Parameters content, json"),
            fits.Card('ZWARNING', quality_flag, 'Quality flag')
        ]

        hdr = fits.Header(header)
        primary = fits.PrimaryHDU(header=hdr)
        hdulist.append(primary)
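For reference, the astropy pieces used by `header_to_fits` compose like this. A minimal standalone sketch (the card keyword and value are made up):

from astropy.io import fits

# Cards are (keyword, value, comment) triples; a Header is built from a list
# of them, and a PrimaryHDU wraps the header.
cards = [fits.Card('EXAMPLE', 42, 'A made-up keyword')]
hdr = fits.Header(cards)
hdulist = fits.HDUList()
hdulist.append(fits.PrimaryHDU(header=hdr))
hdulist.writeto('example.fits', overwrite=True)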
Example #3
def test_version():
    assert get_version()
    print(get_version())
def amazed(config):
    """Run the full-featured amazed client

    Parameters
    ----------
    config : :obj:`Config`
        Configuration object
    """
    logFileHandler = CLogFileHandler(os.path.join(config.logdir, 'amazed.log'))
    logFileHandler.SetLevelMask(_map_loglevel[config.log_level])
    #
    # Set up param and linecatalog for redshift pass
    #
    context, user_parameters = _setup_pass(config)

    with open(normpath(config.workdir, config.spectra_listfile), 'r') as f:
        spectra_list = json.load(f)

    outdir = normpath(config.workdir, config.output_dir)
    os.makedirs(outdir, exist_ok=True)

    data_dir = os.path.join(outdir, 'data')
    os.makedirs(data_dir, exist_ok=True)

    products = []
    for i, spectrum_path in enumerate(spectra_list):
        spectrum = normpath(config.workdir, config.spectra_dir,
                            spectrum_path["fits"])
        reader = PfsObjectReader(spectrum, context.calibration_library)
        nb_valid_points = reader.get_nb_valid_points()
        if nb_valid_points < 3000:
            logger.log(logging.WARNING,
                       "Invalid spectrum: only %d valid points, not processed",
                       nb_valid_points)
            to_process = False
        else:
            to_process = True
        proc_id, ext = os.path.splitext(spectrum_path["fits"])
        spc_out_dir = os.path.join(outdir, proc_id)
        processed = False
        if to_process:
            # first step: compute redshift
            if os.path.exists(spc_out_dir):
                if config.continue_:
                    to_process = False
                else:
                    shutil.rmtree(spc_out_dir)
            if to_process:
                try:
                    _process_spectrum(data_dir, reader, context,
                                      user_parameters)
                    processed = True
                except Exception as e:
                    logger.log(logging.ERROR,
                               "Could not process spectrum: {}".format(e))

    with TemporaryFilesSet(
            keep_tempfiles=config.log_level <= logging.INFO) as tmpcontext:

        # save amazed version and parameters file to output dir
        version_file = _output_path(config, 'version.json')
        with open(version_file, 'w') as f:
            json.dump({'amazed-version': get_version()}, f)
        parameters_file = os.path.join(
            normpath(config.workdir, config.output_dir), 'parameters.json')
        with open(parameters_file, 'w') as f:
            json.dump(context.parameters, f)
        tmpcontext.add_files(parameters_file)

        # write list of created products
        with open(os.path.join(config.output_dir, "output.json"), 'w') as ff:
            json.dump(products, ff)
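The per-spectrum loop above relies on a resume-or-overwrite pattern around the output directory. A minimal sketch of that logic in isolation (the helper name and its arguments are illustrative, not part of the pipeline API):

import os
import shutil


def should_process(out_dir, resume):
    """Skip directories that already exist when resuming; otherwise redo them."""
    if os.path.exists(out_dir):
        if resume:
            return False          # keep previous results, skip this spectrum
        shutil.rmtree(out_dir)    # start over from scratch
    return True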
Example #5
def amazed(config):
    """Run the full-featured amazed client

    Parameters
    ----------
    config : :obj:`Config`
        Configuration object
    """

    zlog = CLog()
    logFileHandler = CLogFileHandler(zlog, os.path.join(config.logdir,
                                                        'amazed.log'))
    logFileHandler.SetLevelMask(_map_loglevel[config.log_level])

    #
    # Set up param and linecatalog for redshift pass
    #
    param, line_catalog = _setup_pass(normpath(config.calibration_dir),
                                      normpath(config.parameters_file),
                                      normpath(config.linecatalog))
    medianRemovalMethod = param.Get_String('templateCatalog.continuumRemoval.'
                                           'method', 'IrregularSamplingMedian')
    opt_medianKernelWidth = param.Get_Float64('templateCatalog.'
                                              'continuumRemoval.'
                                              'medianKernelWidth')
    opt_nscales = param.Get_Float64('templateCatalog.continuumRemoval.'
                                    'decompScales',
                                    8.0)
    dfBinPath = param.Get_String('templateCatalog.continuumRemoval.binPath',
                                 'absolute_path_to_df_binaries_here')

    #
    # Set up param and linecatalog for line measurement pass
    #
    linemeas_param, linemeas_line_catalog = _setup_pass(normpath(config.calibration_dir),
                                                        normpath(config.linemeas_parameters_file),
                                                        normpath(config.linemeas_linecatalog))

    classif = CClassifierStore()

    if config.zclassifier_dir:
        zclassifier_dir = normpath(config.zclassifier_dir)
        if not os.path.exists(zclassifier_dir):
            raise FileNotFoundError(f"zclassifier directory does not exist: "
                                    f"{zclassifier_dir}")
        classif.Load(zclassifier_dir)

    with open(normpath(config.workdir, config.spectra_listfile), 'r') as f:
        spectra_list = json.load(f)

    template_catalog = CTemplateCatalog(medianRemovalMethod,
                                        opt_medianKernelWidth,
                                        opt_nscales, dfBinPath)
    logger.log(logging.INFO, "Loading %s", config.template_dir)

    try:
        template_catalog.Load(normpath(config.template_dir))
    except Exception as e:
        logger.log(logging.CRITICAL, "Can't load template: {}".format(e))
        raise

    outdir = normpath(config.workdir, config.output_dir)
    os.makedirs(outdir, exist_ok=True)

    data_dir = os.path.join(outdir, 'data')
    os.makedirs(data_dir, exist_ok=True)

    outdir_linemeas = None
    if config.lineflux in ['only', 'on']:
        outdir_linemeas = '-'.join([outdir, 'lf'])
        os.makedirs(outdir_linemeas, exist_ok=True)

    products = []
    for i, spectrum_path in enumerate(spectra_list):
        spectrum = normpath(config.workdir, config.spectra_dir, spectrum_path)
        proc_id, ext = os.path.splitext(spectrum_path)
        spc_out_dir = os.path.join(outdir, proc_id)
        spc_out_lin_dir = None  # ensure defined when line fluxes are skipped

        if config.lineflux != 'only':
            # first step: compute redshift
            to_process = True
            if os.path.exists(spc_out_dir):
                if config.continue_:
                    to_process = False
                else:
                    shutil.rmtree(spc_out_dir)
            if to_process:
                _process_spectrum(outdir, i, spectrum, template_catalog,
                                  line_catalog, param, classif, 'all')

        if config.lineflux in ['only', 'on']:
            # second step: compute line fluxes
            to_process_lin = True
            spc_out_lin_dir = os.path.join(outdir_linemeas, proc_id)
            if os.path.exists(spc_out_lin_dir):
                if config.continue_:
                    to_process_lin = False
                else:
                    shutil.rmtree(spc_out_lin_dir)
            if to_process_lin:
                linemeas_param.Set_String('linemeascatalog',
                                          os.path.join(outdir, 'redshift.csv'))
                _process_spectrum(outdir_linemeas, i, spectrum,
                                  template_catalog,
                                  linemeas_line_catalog, linemeas_param,
                                  classif, 'linemeas')

        result = SpectrumResults(spectrum, spc_out_dir,
                                 output_lines_dir=spc_out_lin_dir,
                                 stellar=config.stellar)
        products.append(result.write(data_dir))

    with TemporaryFilesSet(keep_tempfiles=config.log_level <= logging.INFO) as tmpcontext:

        # save amazed version and parameters file to output dir
        version_file = _output_path(config, 'version.json')
        with open(version_file, 'w') as f:
            json.dump({'amazed-version': get_version()}, f)
        parameters_file = os.path.join(normpath(config.workdir, config.output_dir),
                                       'parameters.json')
        param.Save(parameters_file)
        tmpcontext.add_files(parameters_file)

        # create output products
        # results = AmazedResults(_output_path(config), normpath(config.workdir,
        #                                                      config.spectra_dir),
        #                         config.lineflux in ['only', 'on'],
        #                         tmpcontext=tmpcontext)
        # products = results.write()

        # write list of created products
        with open(os.path.join(config.output_dir, "output.json"), 'w') as ff:
            json.dump(products, ff)
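`TemporaryFilesSet` is not shown in these snippets. A minimal sketch of a context manager with the behavior the code above appears to rely on (registered files are removed on exit unless keep_tempfiles is set); the implementation details are assumptions:

import contextlib
import os


class TemporaryFilesSet:
    """Hypothetical context manager tracking files to clean up on exit."""

    def __init__(self, keep_tempfiles=False):
        self.keep_tempfiles = keep_tempfiles
        self._files = []

    def add_files(self, *paths):
        # Register files for cleanup when the context exits.
        self._files.extend(paths)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        if not self.keep_tempfiles:
            for path in self._files:
                with contextlib.suppress(OSError):
                    os.remove(path)
        return False  # do not swallow exceptions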