def check_pop(sample_list, user_choices):
    """
    Check the number of objects of each type in a given sample.

    input: sample_list, str
           name of the file listing the objects' raw data files.

           user_choices, dict
           output from read_user_input

    output: pop, dict
            keys are types and values are the number of objects in the sample
    """
    # read raw data file names
    flist = asciitable.read(sample_list)

    # count types
    pop = {}
    for name in flist:
        user_choices['path_to_lc'] = [name[0]]
        raw = read_snana_lc(user_choices)
        if raw['SIM_NON1a:'][0] not in pop.keys():
            pop[raw['SIM_NON1a:'][0]] = 1
        else:
            pop[raw['SIM_NON1a:'][0]] = pop[raw['SIM_NON1a:'][0]] + 1

    return pop
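# Hedged usage sketch (not part of the original code): 'user.input' and
# 'spec_sample.list' are hypothetical file names; the list file is assumed
# to hold one raw data file name per line.
from snclass.util import read_user_input

choices = read_user_input('user.input')
population = check_pop('spec_sample.list', choices)
for sn_type in population:
    print sn_type, population[sn_type]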
def check_fitted(name_dir, user_choices):
    """
    Check object types in a previously fitted sample.

    input: name_dir, str
           directory where fitted samples are stored.

           user_choices, dict
           output from snclass.util.read_user_input

    output: surv_names, dict
            keys are types and values are names of raw data files
    """
    # list classes surviving selection cuts
    surv = os.listdir(name_dir)

    surv_names = {}
    for name in surv:
        user_choices['path_to_lc'] = [translate_snid(name)[0]]

        try_lc = read_snana_lc(user_choices)
        stype = try_lc['SIM_NON1a:'][0]

        if stype not in surv_names.keys():
            surv_names[stype] = user_choices['path_to_lc']
        else:
            surv_names[stype].append(user_choices['path_to_lc'][0])

    return surv_names
def test_epoch(key, surv, user_choices):
    """
    Check if a previously calculated GP fit survives selection cuts.
    
    input: key, str
           object type

           surv, dict
           dictionary of objects surviving basic cuts
           keys are types, values are lists of raw data file names

           user_choices, dict
           output from snclass.util.read_user_input

    output: my_lc, LC object
            updated light curve object after epoch cuts are checked

            raw, dict
            raw light curve data updated with user choices
    """
    # sample a random obj in the training sample
    indx = np.random.randint(0, len(surv[key]))
    name = surv[key][indx]

    # determine fitting method
    fit_method = bool(int(user_choices['do_mcmc'][0]))

    # update path to raw data
    user_choices['path_to_lc'] = [name]

    # read light curve raw data
    raw = read_snana_lc(user_choices)

    # update raw data with user choices
    raw.update(user_choices)

    # set number of samples to 0 (we are only interested in the mean for now)
    raw['n_samples'] = ['0']

    # initiate light curve object
    my_lc = LC(raw, user_choices)

    screen('Fitting SN' + raw['SNID:'][0], user_choices)

    # load GP fit
    my_lc.load_fit_GP(user_choices['samples_dir'][0] + '/DES_SN' + raw['SNID:'][0] + '_mean.dat')

    # normalize
    my_lc.normalize()

    # shift to peak mjd
    my_lc.mjd_shift()

    # check epoch requirements
    my_lc.check_epoch()

    return my_lc, raw
def get_names(user_choices, params, type_number):
    """
    Separate object identification according to class.

    input: user_choices, dict
           output from snclass.util.read_user_input

           params, dict
           keywords: 'list_name', str
                      name of file with list of all objs in this sample

           type_number, dict
           dictionary to translate types between raw data and final
           classification
           keywords -> final classification elements
           values -> identifiers in raw data

    output: surv_spec_names, dict
            keywords -> final class identifications
            values -> list of object ids for this class
    """
    from snclass.util import read_snana_lc

    # store name of objs surviving selection cuts
    surv_spec_names = {}
    fsample = read_file(params['list_name'])
    for name in fsample:
        user_choices['path_to_lc'] = [name[0]]

        try_lc = read_snana_lc(user_choices)

        stype = try_lc[user_choices['type_flag'][0]][0]
        for type_name in type_number.keys():
            if stype in type_number[type_name]:
                if type_name not in surv_spec_names.keys():
                    surv_spec_names[type_name] = [name[0]]
                else:
                    surv_spec_names[type_name].append(name[0])

    return surv_spec_names
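# Hedged usage sketch (not part of the original code): the 'type_number'
# mapping is hypothetical and only illustrates the expected structure,
# final class labels mapped to the identifiers found in the raw data.
from snclass.util import read_user_input

user_choices = read_user_input('user.input')            # hypothetical file
params = {'list_name': 'photo_sample.list'}             # hypothetical list
type_number = {'Ia': ['0'], 'Ibc': ['1', '5'], 'II': ['2', '3', '4']}
surv_spec_names = get_names(user_choices, params, type_number)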
def sample_pop(user_choices, params, type_number):
    """
    Count number of each type in sample.

    input: user_choices, dict
           output from snclass.util.read_user_input

           params, dict
           keywords: 'list_name', str
                     name of file with list of all objs in this sample

           type_number, dict
           dictionary to translate types between raw data and final
           classification
           keywords -> final classification elements
           values -> identifiers in raw data

    output: sample_pop, dict
            keywords -> final types
            values -> number of objects of this type
            'tot' -> total number of objs in the sample
    """
    from snclass.util import read_snana_lc

    #get population for each SN type in spec sample
    fsample = read_file(params['list_name'])
    sample_pop = {}
    for name in fsample:
        user_choices['path_to_lc'] = [name[0]]
        raw = read_snana_lc(user_choices)
        for type_name in type_number.keys():
            if raw[user_choices['type_flag'][0]][0] in type_number[type_name]:
                if type_name not in sample_pop.keys():
                    sample_pop[type_name] = 1
                else:
                    sample_pop[type_name] = sample_pop[type_name] + 1

    sample_pop['tot'] = sum([val for val in sample_pop.values()])

    return sample_pop
def classify_1obj(din):
    """
    Perform classification of 1 supernova.

    input: din, dict - keywords, value type: 
                     user_input, dict -> output from read_user_input
                     name, str -> name of raw light curve file
                     type_number, dict -> translate between str and numerical
                                          classes identification
                     do_plot, bool -> if True produce plots, default is False

                     p1, dict ->  keywords, value type:
                         fname_photo_list, str: list of all photometric 
                                                sample objects
                         photo_dir, str: directory of GP fitted results
                                         for photo sample
                         range_pcs, list: [min_number_PCs, max_number_PCs]
                                          to be tested through cross-validation
                         SNR_dir, str: directory to store all results from 
                                       this SNR cut
                         out_dir, str: directory to store classification 
                                       results
                         plot_proj_dir, str: directory to store 
                                             projection plots
                         data_matrix, str: file holding spec data matrix

    output: class_results:
               list -> [snid, true_type, prob_Ia] 
    """
    from snclass.functions import screen, nneighbor
    from snclass.util import translate_snid, read_snana_lc
    from snclass.treat_lc import LC

    # update supernova name
    din['user_input']['path_to_lc'] = [translate_snid(din['name'])[0]]

    # read raw data
    raw = read_snana_lc(din['user_input'])

    # set true type
    for names in din['type_number'].keys():
        stype = raw[din['user_input']['type_flag'][0]][0]
        if stype in din['type_number'][names]:
            true_type = names

    # load GP fit and test epoch cuts
    new_lc = LC(raw, din['user_input'])
    new_lc.user_choices['samples_dir'] = [din['p1']['photo_dir']]
    new_lc.load_fit_GP(din['p1']['photo_dir'] + din['name'])

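    # flag filters that have a non-empty GP fit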
    l1 = [
        1 if len(new_lc.fitted['GP_fit'][fil]) > 0 else 0
        for fil in din['user_input']['filters']
    ]

    fil_choice = din['user_input']['ref_filter'][0]
    if fil_choice == 'None':
        fil_choice = None

    if sum(l1) == len(din['user_input']['filters']):
        new_lc.normalize(samples=True, ref_filter=fil_choice)
        new_lc.mjd_shift()
        new_lc.check_epoch()

        if new_lc.epoch_cuts:

            screen(new_lc.raw['SNID:'][0], din['user_input'])

            # build matrix lines
            new_lc.build_steps(samples=True)

            # transform samples
            small_matrix = new_lc.samples_for_matrix
            data_test = din['p1']['obj_kpca'].transform(small_matrix)

            #classify samples
            new_label = nneighbor(data_test, din['p1']['spec_matrix'],
                                  din['p1']['binary_types'], din['user_input'])

            # calculate final probability
            ntypes = [1 for item in new_label if item == '0']
            new_lc.prob_Ia = sum(ntypes) / \
                             float(din['user_input']['n_samples'][0])

            if din['do_plot']:
                plot_proj(din['p1']['spec_matrix'], data_test,
                          din['p1']['labels'], new_lc, din['p1']['plot_dir'],
                          [0, 1], true_type)

            # print result to screen
            screen('SN' + new_lc.raw['SNID:'][0] + \
                   ',   True type: ' + true_type + ', prob_Ia = ' + \
                    str(new_lc.prob_Ia), din['user_input'])

            class_results = [new_lc.raw['SNID:'][0], true_type, new_lc.prob_Ia]
            return class_results
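# Hedged usage sketch (not part of the original code): shows how the 'din'
# dictionary described in the docstring might be assembled.  All file and
# directory names are hypothetical, and the placeholders below stand for
# objects produced by a previously trained KernelPCA matrix
# (e.g. snclass.matrix.DataMatrix).
from snclass.util import read_user_input

trained_kpca = None     # placeholder: trained kernel PCA transformation
spec_proj = None        # placeholder: projected spectroscopic data matrix
spec_labels = None      # placeholder: labels of the spectroscopic matrix rows

user_input = read_user_input('user.input')
din = {'user_input': user_input,
       'name': 'DES_SN000001_mean.dat',
       'type_number': {'Ia': ['0'], 'nonIa': ['1', '2', '3']},
       'do_plot': False,
       'p1': {'photo_dir': 'photo_fits/',
              'plot_dir': 'plots/',
              'obj_kpca': trained_kpca,
              'spec_matrix': spec_proj,
              'binary_types': spec_labels,
              'labels': spec_labels}}
result = classify_1obj(din)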
def set_lclist(params):
    """
    Build a list of all objects satisfying selection cuts and plot them.

    input: params, dict
           keywords: plot_dir
                     path to store plots; if None, plots are not generated

                     fitted_data_dir
                     path to fitted data

                     list_dir
                     path to list directory 

                     sample
                     'spec' or 'photo'

                     user_choices, dict
                      output from snclass.util.read_user_input
    """
    import numpy as np
    import pylab as plt
    import os

    from snclass.treat_lc import LC
    from snclass.util import translate_snid, read_snana_lc
    from snclass.functions import screen
    import sys

    # create plot directory
    if params['plot_dir'] is not None and \
    not os.path.isdir(params['plot_dir']):
        os.makedirs(params['plot_dir'])

    flist = os.listdir(params['fitted_data_dir'])

    photo_list = []
    problem = []
    cont = 0

    rfil = params['user_choices']['ref_filter'][0]

    for obj in flist:

        if 'mean' in obj and '~' not in obj and 'Y' not in obj:

            screen(obj, params['user_choices'])

            rname = translate_snid(obj)[0]
            params['user_choices']['path_to_lc'] = [rname]
            params['user_choices']['n_samples'] = ['0']

            raw = read_snana_lc(params['user_choices'])
            new_lc = LC(raw, params['user_choices'])

            if (params['user_choices']['file_root'][0] + raw['SNID:'][0] + \
               '_samples.dat' in flist):
                new_lc.user_choices['n_samples'] = ['100']
                new_lc.user_choices['samples_dir'] = [
                    params['fitted_data_dir']
                ]

                try:
                    new_lc.load_fit_GP(params['fitted_data_dir'] + obj)
                    l1 = [
                        1 if len(new_lc.fitted['GP_fit'][fil]) > 0 else 0
                        for fil in params['user_choices']['filters']
                    ]

                    if sum(l1) == len(params['user_choices']['filters']):
                        if rfil == 'None':
                            new_lc.normalize()
                        else:
                            new_lc.normalize(ref_filter=rfil)
                        new_lc.mjd_shift()
                        new_lc.check_epoch()

                        if new_lc.epoch_cuts:
                            photo_list.append(rname)

                            # only plot if not already done
                            if params['plot_dir'] is not None and \
                            not os.path.isfile(params['plot_dir'] + 'SN' + \
                                               raw['SNID:'][0] + '.png'):
                                new_lc.plot_fitted(file_out=\
                                                   params['plot_dir'] + \
                                                   'SN' + raw['SNID:'][0] + \
                                                   '.png')
                        else:
                            screen('SN' + raw['SNID:'][0] + ' did not satisfy' + \
                                   ' epoch cuts!\n', params['user_choices'])
                            cont = cont + 1
                    else:
                        screen('SN' + raw['SNID:'][0] + ' does not exist in ' + \
                               'all filters!\n', params['user_choices'])
                        cont = cont + 1

                except ValueError:
                    problem.append(rname)
                    cont = cont + 1

            else:
                screen('Samples not found for SN' + raw['SNID:'][0],
                       params['user_choices'])

        else:
            cont = cont + 1

    screen('Missed ' + str(cont) + ' SN.', params['user_choices'])

    # store list of problematic fits
    if len(problem) > 0:
        op2 = open('problematic_fits.dat', 'w')
        for obj in problem:
            op2.write(obj + '\n')
        op2.close()
        sys.exit()

    # set parameter for file name
    if int(params['user_choices']['epoch_cut'][0]) < 0:
        epoch_min = str(abs(int(params['user_choices']['epoch_cut'][0])))
    else:
        epoch_min = 'p' + \
                    str(abs(int(params['user_choices']['epoch_cut'][0])))

    epoch_max = str(int(params['user_choices']['epoch_cut'][1]) - 1)

    filter_list = params['user_choices']['filters'][0]
    for item in params['user_choices']['filters'][1:]:
        filter_list = filter_list + item

    # save objs list
    if not os.path.isdir(params['list_dir']):
        os.makedirs(params['list_dir'])

    ref_filter = params['user_choices']['ref_filter'][0]
    if ref_filter == 'None':
        ref_fils = 'global'
    else:
        ref_fils = ref_filter

    op1 = open(params['list_dir'] + params['sample'] + '_' + filter_list + \
               '_' + epoch_min + '_' + epoch_max + '_ref_' + ref_fils + \
               '.list', 'w')
    for item in photo_list:
        op1.write(item + '\n')
    op1.close()
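# Hedged usage sketch (not part of the original code): directory names are
# hypothetical; 'user_choices' is assumed to come from
# snclass.util.read_user_input.
from snclass.util import read_user_input

set_lclist({'plot_dir': 'plots_photo/',        # or None to skip plotting
            'fitted_data_dir': 'fitted_photo/',
            'list_dir': 'lists/',
            'sample': 'photo',
            'user_choices': read_user_input('user.input')})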
def select_GP(params, user_choices):
    """
    Select original objs to build a synthetic spectroscopic sample.

    input: params, dict
           output from set_parameters

           user_choices, dict
           output from snclass.util.read_user_input
    """
    from snclass.util import translate_snid, read_snana_lc
    from snclass.functions import screen
    from snclass.treat_lc import LC
    from snclass.fit_lc_gptools import save_result

    import os
    import numpy as np
    import sys

    # set reference filter
    if user_choices['ref_filter'][0] == 'None':
        fil_choice = None
    else:
        fil_choice = user_choices['ref_filter'][0]

    # select extra GP realizations in order to construct
    # a representative spec sample
    for key in params['draw_spec_samples'].keys():
        cont = 0
        fail = 0

        # check if there are existing objs in this sample
        screen('... Check existing objs', user_choices)
        ready = []
        for obj in params['surv_spec_names'][key]:
            obj_id = translate_snid(obj)

            for j in xrange(params['draw_spec_samples'][key]):
                mean_file = params['synthetic_dir'] + '/' + \
                            user_choices['file_root'][0] + str(j) + \
                            'X' + obj_id + '_mean.dat'

                if os.path.isfile(mean_file) and mean_file not in ready:
                    cont = cont + 1
                    ready.append(mean_file)
                    screen('Found ready SN ' + str(cont) + 'X' + \
                           obj_id, user_choices)

        while cont < params['draw_spec_samples'][key]:

            # draw one of the objs in the spec sample
            indx = np.random.randint(0, params['spec_pop'][key])
            name = params['surv_spec_names'][key][indx]

            user_choices['path_to_lc'] = [name]

            # read light curve raw data
            raw = read_snana_lc(user_choices)

            if os.path.isfile(params['fitted_data_dir'] + user_choices['file_root'][0] + \
                              raw['SNID:'][0] + '_samples.dat'):

                # initiate light curve object
                my_lc = LC(raw, user_choices)

                screen('Loading SN' + raw['SNID:'][0], user_choices)

                # load GP fit
                my_lc.user_choices['n_samples'] = ['100']
                my_lc.user_choices['samples_dir'] = [params['fitted_data_dir']]
                my_lc.load_fit_GP(params['fitted_data_dir'] + user_choices['file_root'][0] + \
                                  raw['SNID:'][0] + '_mean.dat')

                l1 = [
                    1 if len(my_lc.fitted['GP_fit'][fil]) > 0 else 0
                    for fil in user_choices['filters']
                ]
                if sum(l1) == len(user_choices['filters']):

                    # normalize
                    my_lc.normalize(samples=True, ref_filter=fil_choice)

                    # shift to peak mjd
                    my_lc.mjd_shift()

                    # check epoch requirements
                    my_lc.check_epoch()

                    if my_lc.epoch_cuts:

                        screen('... Passed epoch cuts', user_choices)
                        screen('... ... This is SN type ' +  raw[user_choices['type_flag'][0]][0] + \
                               ' number ' + str(cont + 1) + ' of ' +
                               str(params['draw_spec_samples'][key]), user_choices)

                        # draw one realization
                        size = len(my_lc.fitted['realizations'][
                            user_choices['filters'][0]])
                        indx2 = np.random.randint(0, size)

                        for fil in user_choices['filters']:
                            print '... ... ... filter ' + fil

                            raw['GP_fit'][fil] = my_lc.fitted['realizations'][
                                fil][indx2]
                            raw['GP_std'][fil] = my_lc.fitted['GP_std'][fil]
                            raw['xarr'][fil] = my_lc.fitted['xarr'][fil]

                        # set new file root
                        raw['file_root'] = [user_choices['file_root'][0] + \
                                             str(cont) + 'X']
                        raw['samples_dir'] = [params['synthetic_dir'] + '/']
                        save_result(raw)

                        # check epoch for this realization
                        new_lc = LC(raw, user_choices)
                        new_lc.load_fit_GP(params['synthetic_dir'] + '/' + \
                                       user_choices['file_root'][0] + str(cont) + \
                                       'X' + raw['SNID:'][0] + '_mean.dat')
                        new_lc.normalize(ref_filter=fil_choice)
                        new_lc.mjd_shift()
                        new_lc.check_epoch()

                        if new_lc.epoch_cuts:
                            cont = cont + 1
                        else:
                            screen('Samples failed to pass epoch cuts!\n',
                                   user_choices)
                            os.remove(params['synthetic_dir'] + '/' +
                                      user_choices['file_root'][0] + str(cont) + \
                                  'X' + raw['SNID:'][0] + '_mean.dat')
                        print '\n'

                    else:
                        screen('Failed to pass epoch cuts!\n', user_choices)
                        fail = fail + 1

                    if fail > 10 * params['spec_pop'][key]:
                        cont = 100000
                        sys.exit()
def build_sample(params):
    """
    Build a directory holding all raw data passing selection cuts.

    input: params, dict
           keywords:  'raw_dir' -> new directory to be created
                      'photo_dir' -> photometric LC fitted with GP
                      'spec_dir' -> spectroscopic LC fitted with GP
                      'user_choices' -> output from 
                                        snclass.util.read_user_input
    """
    import shutil
    from snclass.util import read_user_input, read_snana_lc, translate_snid
    from snclass.treat_lc import LC
    from snclass.functions import screen

    # create data directory
    if not os.path.isdir(params['raw_dir']):
        os.makedirs(params['raw_dir'])

    # read fitted light curves
    photo_list = os.listdir(params['photo_dir'])
    spec_list = os.listdir(params['spec_dir'])

    # build filter list
    fil_list = params['user_choices']['filters'][0]
    for i in xrange(1, len(params['user_choices']['filters'])):
        fil_list = fil_list + params['user_choices']['filters'][i]

    for sn_set in [photo_list, spec_list]:
        for obj in sn_set:
            if 'samples' in obj and '~' not in obj and 'Y' not in obj:

                screen(obj, params['user_choices'])

                rname = translate_snid(obj)[0]
                params['user_choices']['path_to_lc'] = [rname]
                params['user_choices']['n_samples'] = ['0']

                # read raw data
                raw = read_snana_lc(params['user_choices'])
                new_lc = LC(raw, params['user_choices'])

                # load GP fit
                if sn_set == photo_list:
                    new_lc.load_fit_GP(params['photo_dir'] +
                                       params['user_choices']['file_root'][0] +
                                       raw['SNID:'][0] + '_mean.dat')
                else:
                    new_lc.load_fit_GP(params['spec_dir'] +
                                       params['user_choices']['file_root'][0] +
                                       raw['SNID:'][0] + '_mean.dat')

                l1 = [
                    1 if len(new_lc.fitted['GP_fit'][fil]) > 0 else 0
                    for fil in params['user_choices']['filters']
                ]

                if sum(l1) == len(params['user_choices']['filters']):
                    # treat light curve
                    new_lc.normalize(ref_filter= \
                                     params['user_choices']['ref_filter'][0])
                    new_lc.mjd_shift()
                    new_lc.check_basic()
                    new_lc.check_epoch()

                    # check epoch cuts
                    data_path = params['user_choices']['path_to_obs'][0]
                    if new_lc.epoch_cuts:
                        shutil.copy2(data_path + rname,
                                     params['raw_dir'] + rname)
                    else:
                        screen('... SN' + raw['SNID:'][0] + \
                               ' fail to pass epoch cuts!',
                               params['user_choices'])
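# Hedged usage sketch (not part of the original code): directory names are
# hypothetical.
from snclass.util import read_user_input

build_sample({'raw_dir': 'selected_raw/',
              'photo_dir': 'fitted_photo/',
              'spec_dir': 'fitted_spec/',
              'user_choices': read_user_input('user.input')})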
def classify_test(test_name,
                  matrix,
                  user_input,
                  test_dir='test_samples/',
                  csamples=True):
    """
    Classify one photometric supernova using a trained KernelPCA matrix.

    input: test_name, str
           name of mean GP fit file

           matrix, snclass.matrix.DataMatrix object
           trained KernelPCA matrix

           user_input, dict
           output from snclass.util.read_user_input

           test_dir, str, optional
           name of directory to store samples from test light curve
           Default is 'test_samples/'

           csamples, bool, optional
           If True, fit GP object and generate sample file as output
           otherwise reads samples from file
           Default is True

    return: new_lc, snclass.treat_lc.LC object
            updated with test projections and probability of being Ia
    """
    # update path to raw light curve
    user_input['path_to_lc'] = [translate_snid(test_name, 'FLUXCAL')[0]]

    # store number of samples for later tests
    nsamples = user_input['n_samples'][0]

    # reset the number of samples for preliminary tests
    user_input['n_samples'] = ['0']

    # read raw data
    raw = read_snana_lc(user_input)

    # load GP fit and test epoch cuts
    new_lc = LC(raw, user_input)
    new_lc.load_fit_GP(user_input['samples_dir'][0] + test_name)
    new_lc.normalize()
    new_lc.mjd_shift()
    new_lc.check_epoch()

    if new_lc.epoch_cuts:
        # update test sample directory
        user_input['samples_dir'] = [test_dir]

        # update user choices
        new_lc.user_choices = user_input

        # update number of samples
        new_lc.user_choices['n_samples'] = [nsamples]

        # fit GP or normalize/shift fitted mean
        test_matrix = test_samples(new_lc, calc_samples=bool(csamples))

        # project test
        new_lc.test_proj = matrix.transf_test.transform(test_matrix)

        # classify
        new_lc.new_label = nneighbor(new_lc.test_proj, matrix.low_dim_matrix,
                                     matrix.sntype, matrix.user_choices)

        if csamples:
            new_lc.prob_Ia = sum([1 for item in new_lc.new_label
                                  if item == '0']) / float(nsamples)

        return new_lc

    else:
        return None
    def check_file(self, filename, epoch=True, ref_filter=None):
        """
        Construct one line of the data matrix.

        input:   filename, str
                 file of raw data for 1 supernova

                 epoch, bool - optional
                 If true, check if SN satisfies epoch cuts
                 Default is True

                 ref_filter, str - optional
                 Reference filter for peak MJD calculation
                 Default is None
        """
        screen('Fitting ' + filename, self.user_choices)

        # translate identifier
        self.user_choices['path_to_lc'] = [
            translate_snid(filename, self.user_choices['photon_flag'][0])[0]
        ]

        # read light curve raw data
        raw = read_snana_lc(self.user_choices)

        # initiate light curve object
        lc_obj = LC(raw, self.user_choices)

        # load GP fit
        lc_obj.load_fit_GP(self.user_choices['samples_dir'][0] + filename)

        # normalize
        lc_obj.normalize(ref_filter=ref_filter)

        # shift to peak mjd
        lc_obj.mjd_shift()

        if epoch:
            # check epoch requirements
            lc_obj.check_epoch()
        else:
            lc_obj.epoch_cuts = True

        if lc_obj.epoch_cuts:
            # build data matrix lines
            lc_obj.build_steps()

            # store
            obj_line = []
            for fil in self.user_choices['filters']:
                for item in lc_obj.flux_for_matrix[fil]:
                    obj_line.append(item)

            rflag = self.user_choices['redshift_flag'][0]
            redshift = raw[rflag][0]

            obj_class = raw[self.user_choices['type_flag'][0]][0]

            self.snid.append(raw['SNID:'][0])

            return obj_line, redshift, obj_class

        else:
            screen('... Failed to pass epoch cuts!', self.user_choices)
            screen('\n', self.user_choices)
            return None
def main(args):
    """Read user input, fit and plot a GP and the raw data."""
    # read_user_input
    user_input = read_user_input(args.input)

    # read lc data
    lc_data = read_snana_lc(user_input)

    # add extra keys
    lc_data.update(user_input)

    # set screen output
    out = bool(int(user_input['screen'][0]))

    if user_input['measurement'][0] == 'flux':
        ylabel = 'flux'
        sign = 1.0
    else:
        ylabel = 'magnitude'
        sign = -1.0

    if bool(int(args.calculate)):

        screen('Fitting SN' + lc_data['SNID:'][0], user_input)

        if user_input['measurement'][0] == 'flux':
            p1 = [
                int(user_input['epoch_predict'][0]),
                int(user_input['epoch_predict'][1])
            ]
            sign2 = 1.0

        else:
            p1 = None
            sign2 = -1.0

        # fit lc
        lc_data = fit_lc(lc_data,
                         samples=bool(int(lc_data['n_samples'][0])),
                         save_samples=bool(int(user_input['save_samples'][0])),
                         screen=out,
                         do_mcmc=bool(int(user_input['do_mcmc'][0])),
                         predict=p1)
    else:
        sign2 = 1.0

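        # read previously stored GP realizations from the samples file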
        if bool(int(lc_data['n_samples'][0])):
            op1 = open(lc_data['samples_dir'][0] + lc_data['file_root'][0] + \
                       lc_data['SNID:'][0] + '_' + user_input['measurement'][0] + '_samples.dat', 'r')
            lin1 = op1.readlines()
            op1.close()

            d1 = [elem.split() for elem in lin1]

            for fil in lc_data['filters']:
                lc_data['xarr'][fil] = []

                if bool(int(lc_data['n_samples'][0])):
                    lc_data['realizations'][fil] = [[
                        float(d1[kk][jj]) for kk in xrange(len(d1))
                        if d1[kk][0] == fil
                    ] for jj in xrange(2,
                                       int(lc_data['n_samples'][0]) + 2)]

                # collect the MJD grid for this filter from the samples file
                for i1 in xrange(len(d1)):
                    if d1[i1][0] == fil:
                        lc_data['xarr'][fil].append(float(d1[i1][1]))

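        # read the GP mean fit and standard deviation from the mean file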
        op2 = open(lc_data['samples_dir'][0] + lc_data['file_root'][0] + \
                   lc_data['SNID:'][0] + '_' + user_input['measurement'][0] + '_mean.dat', 'r')
        lin2 = op2.readlines()
        op2.close()

        d2 = [elem.split() for elem in lin2]

        lc_data['GP_std'] = {}
        for fil in lc_data['filters']:
            lc_data['xarr'][fil] = []
            lc_data['GP_fit'][fil] = np.array([
                float(d2[j][2]) for j in xrange(1, len(d2)) if d2[j][0] == fil
            ])
            lc_data['GP_std'][fil] = np.array([
                float(d2[j][3]) for j in xrange(1, len(d2)) if d2[j][0] == fil
            ])
            lc_data['xarr'][fil] = np.array([
                float(d2[j][1]) for j in xrange(1, len(d2)) if d2[j][0] == fil
            ])

    #initiate figure
    f = plt.figure()
    for fil in user_input['filters']:

        # Plot the samples in data space.
        plt.subplot(2,
                    len(lc_data['filters']) / 2 + len(lc_data['filters']) % 2,
                    lc_data['filters'].index(fil) + 1)
        if bool(int(lc_data['n_samples'][0])):
            for s in lc_data['realizations'][fil]:
                plt.plot(lc_data['xarr'][fil],
                         sign2 * np.array(s),
                         color="gray",
                         alpha=0.3)
        plt.errorbar(lc_data[fil][:, 0],
                     sign * lc_data[fil][:, 1],
                     yerr=lc_data[fil][:, 2],
                     fmt="o",
                     color='blue',
                     label=fil)
        plt.plot(lc_data['xarr'][fil],
                 sign2 * lc_data['GP_fit'][fil],
                 color='red',
                 linewidth=2)
        plt.ylabel(ylabel)
        plt.xlabel("MJD")
        plt.legend()
        plt.xlim(
            min(lc_data['xarr'][fil]) - 1.0,
            max(lc_data['xarr'][fil]) + 1.0)
        if user_input['measurement'][0] == 'mag':
            plt.ylim(
                min(sign * lc_data[fil][:, 1]) - 1.5 * max(lc_data[fil][:, 2]),
                max(sign * lc_data[fil][:, 1]) + 1.5 * max(lc_data[fil][:, 2]))
            ax = plt.gca()
            ax.invert_yaxis()

    f.tight_layout()
    plt.savefig("gp-SN" + lc_data['SNID:'][0] + "_" +
                user_input['measurement'][0] + ".png",
                dpi=350)
    plt.close()
def count_pop(params, type_number):
    """
    Count original population from spec and photo samples.

    input: params, dict
           keywords, value type:
               user_input, str: path to user input file
               nbins, int: number of redshift bins
               dz, float: width of redshift bin

           type_number, dict
           dictionary to translate types between raw data and final
           classification
           keywords -> final classificaton elements
           values -> identifiers in raw data

    output: params, dict
            additional keywords, value type:
                orig_photo_Ia, int: number of SN Ia before cuts
                orig_Ia_bins, list: number of SN Ia per redshift bin
                                    before cuts
                orig_pop, dict: snid: [sample, type, redshift]
    """
    from snclass.util import read_user_input, read_snana_lc

    import os
    import numpy as np

    # count original population
    user_input = read_user_input(params['user_input'])

    # check reference filter
    if 'ref_filter' in params.keys():
        user_input['ref_filter'] = params['ref_filter']

    raw_list = os.listdir(user_input['path_to_obs'][0])

    orig_pop = {}
    for sn in raw_list:
        if '.DAT' in sn:
            user_input['path_to_lc'] = [sn]
            lc = read_snana_lc(user_input)

            z = float(lc['REDSHIFT_FINAL:'][0])
            if lc['SNTYPE:'][0] == '-9':
                samp = 'photo'
            else:
                samp = 'spec'
            for name in type_number.keys():
                if lc['SIM_NON1a:'][0] in type_number[name]:
                    label = name
            orig_pop[lc['SNID:'][0]] = [samp, label, z]

    params['orig_pop'] = orig_pop

    all_sn = np.array(orig_pop.values())
    orig_photo_Ia = sum([1 for key in orig_pop.keys()
                         if orig_pop[key][0] == 'photo' and \
                         orig_pop[key][1] == 'Ia'])

    params['orig_photo_Ia'] = orig_photo_Ia

    orig_Ia_bins = []
    for i in xrange(params['nbins']):
        cont = 0
        for key in orig_pop.keys():
            if orig_pop[key][0] == 'photo' and orig_pop[key][1] == 'Ia':
                if  (orig_pop[key][2] >= i * params['dz']) and  \
                (orig_pop[key][2] < (i + 1) * params['dz']):
                    cont = cont + 1

        orig_Ia_bins.append(cont)

    params['orig_Ia_bins'] = orig_Ia_bins

    return params
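# Hedged usage sketch (not part of the original code): the input file name,
# number of bins and bin width are hypothetical; 'type_number' maps final
# class labels to the identifiers used in the raw data.
params = {'user_input': 'user.input', 'nbins': 10, 'dz': 0.1}
type_number = {'Ia': ['0'], 'nonIa': ['1', '2', '3']}
params = count_pop(params, type_number)
print params['orig_photo_Ia'], params['orig_Ia_bins']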
photo_label = []

# whether to generate plots
plot = False

xaxis = np.arange(-120, 350, 0.5)

for fname in lc_list:

    # read user input
    user_input['path_to_lc'] = [fname]
    user_input['photon_flag'] = ['FLUXCAL']
    user_input['photonerr_flag'] = ['FLUXCALERR']

    # read raw data
    lc_flux = read_snana_lc(user_input)

    data = {}
    for f in user_input['filters']:
        line = []
        for i in range(len(lc_flux[f])):
            line.append(lc_flux[f][i])

        # we need at least 5 points to fit the light curve
        if len(line) >= 5:
            data[f] = np.array(line)

    if len(data.keys()) == 4:

        new_fit = []
def fit_objs(user_choices,
             plot=False,
             calc_mean=True,
             calc_samp=False,
             save_samp=False):
    """
    Perform a GP fit in a set of objects.

    input: user_choices
           output from read_user_input

           plot - bool, optional
           whether or not to generate an output png file
           default is False

           calc_mean - bool, optional
           whether or not to calculate the mean GP fit
           default is True

           calc_samp - bool, optional
           whether or not to calculate realizations of the final fit
           default is False

           save_samp - bool, optional
           whether or not to save realizations of the final fit
           default is False
    """
    if not os.path.exists(user_choices['samples_dir'][0]):
        os.makedirs(user_choices['samples_dir'][0])

    # read list of SN in sample
    f_open = open(user_choices['snlist'][0], 'r')
    lin = f_open.readlines()
    f_open.close()

    snlist = [elem.split()[0] for elem in lin]

    fit_method = bool(int(user_choices['do_mcmc'][0]))

    for supernova in snlist:

        # update object
        user_choices['path_to_lc'] = [supernova]

        # read light curve raw data
        raw = read_snana_lc(user_choices)

        if not os.path.isfile(user_choices['samples_dir'][0] + \
                              user_choices['file_root'][0] + \
                              raw['SNID:'][0] + '_' + user_choices['photon_flag'][0] +  '_mean.dat'):

            # initiate light curve object
            my_lc = LC(raw, user_choices)

            screen('Fitting SN' + raw['SNID:'][0], user_choices)

            # perform basic check
            my_lc.check_basic()

            # check if satisfy minimum cut
            if my_lc.basic_cuts:
                screen('... Passed basic cuts', user_choices)

                # fit
                my_lc.fit_GP(mean=calc_mean,
                             samples=calc_samp,
                             do_mcmc=fit_method,
                             save_samples=save_samp,
                             screen=bool(int(user_choices['screen'][0])))

                if plot:
                    my_lc.normalize()
                    my_lc.mjd_shift()
                    my_lc.plot_fitted(
                        file_out=user_choices['path_output_plot'][0] +
                        'gp-SN' + raw['SNID:'][0] + '_' +
                        user_choices['photon_flag'][0] + '.png')

                print '\n'

            else:
                screen('Failed to pass basic cuts!\n', user_choices)

        else:
            screen('Found fitted SN' + raw['SNID:'][0], user_choices)
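# Hedged usage sketch (not part of the original code): assumes 'user.input'
# is a valid snclass configuration whose 'snlist' keyword points to the list
# of raw light curve files to be fitted.
from snclass.util import read_user_input

choices = read_user_input('user.input')
fit_objs(choices, plot=False, calc_mean=True, calc_samp=False, save_samp=False)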