Example #1
    def handle(self, *args, **options):
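        # NOTE: this fragment assumes module-level imports of path (from os),
        # the phot_db and import_utils modules, and the TOM TargetExtra model.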

        verbose = False

        log = self.start_log()

        errors = []

        if not path.isfile(options['phot_db_path']):
            raise IOError('Cannot find photometry database ' +
                          options['phot_db_path'])

        conn = phot_db.get_connection(dsn=options['phot_db_path'])

        pri_refimg = import_utils.fetch_primary_reference_image_from_phot_db(
            conn)
        log.info('Identified primary reference dataset as ' + repr(pri_refimg))

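        # Retrieve the per-star colour photometry table from the phot_db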
        star_colours = import_utils.fetch_star_colours(conn)

        jincr = int(float(len(star_colours)) * 0.01)

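        # Attach the colour data for each star to the corresponding TOM Target,
        # if one already exists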
        for j, star_data in enumerate(star_colours):

            star_name = str(options['field_name']) + '-' + str(
                star_data['star_id'])

            log.info('Processing star ' + star_name)

            known_target = self.check_star_in_tom(star_name, log)

            if known_target:

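                # Store each colour quantity as a TargetExtra; get_or_create
                # avoids duplicating an identical (key, value) entry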
                for key, key_type in self.star_extra_params().items():

                    value = str(star_data[key])

                    TargetExtra.objects.get_or_create(target=known_target,
                                                      key=key,
                                                      value=value)

                    log.info('-> ' + str(key) + ': ' + value)
Example #2
    def handle(self, *args, **options):

        verbose = False

        self.check_arguments(options)

        errors = []

        if not path.isfile(options['phot_db_path']):
            raise IOError('Cannot find photometry database ' +
                          options['phot_db_path'])

        conn = phot_db.get_connection(dsn=options['phot_db_path'])

        pri_refimg = import_utils.fetch_primary_reference_image_from_phot_db(
            conn)

        stars_table = import_utils.fetch_starlist_from_phot_db(
            conn, pri_refimg)

        #pri_phot_table = import_utils.fetch_primary_reference_photometry(conn,pri_refimg)

        # Progress-report interval: ~1% of the star list (minimum 1 to avoid a
        # modulo-by-zero for short star lists)
        jincr = max(1, int(len(stars_table) * 0.01))

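        # NOTE: the hard-coded slice start (44191) appears to resume a previous,
        # partially completed ingest run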
        for j, star in enumerate(stars_table[44191:]):

            s = SkyCoord(star['ra'],
                         star['dec'],
                         frame='icrs',
                         unit=(u.deg, u.deg))

            star_name = str(options['field_name']) + '-' + str(
                star['star_index'])

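            # Core Target fields, with galactic coordinates derived from the
            # ICRS position via astropy SkyCoord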
            base_params = {
                'identifier': star_name,
                'name': star_name,
                'ra': star['ra'],
                'dec': star['dec'],
                'galactic_lng': s.galactic.l.deg,
                'galactic_lat': s.galactic.b.deg,
                'type': Target.SIDEREAL,
            }

            #print(base_params)
            extra_params = {
                'reference_image': pri_refimg['filename'][0],
                'target_type': 'star'
            }
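            # Cast each extra parameter to the type declared by star_extra_params()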
            for key, key_type in self.star_extra_params().items():
                if key_type == 'string':
                    extra_params[key] = str(star[key])
                else:
                    extra_params[key] = float(star[key])

            #print(extra_params)
            #import pdb;pdb.set_trace()

            known_target = self.check_star_in_tom(star_name)

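            # Create a new Target for unknown stars; otherwise update the existing
            # record and its extra parameters in place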
            if known_target is None:
                #try:
                target = Target.objects.create(**base_params)
                if verbose: print(' -> Created target for star ' + star_name)

                for key, key_type in self.star_extra_params().items():

                    value = extra_params[key]

                    if verbose:
                        print('Submitting key ', key, value, type(value))

                    self.create_target_extra_with_type(target, key, value,
                                                       key_type)

                if verbose: print(' -> Ingested extra parameters')
                #except OverflowError:
                #    print(base_params,extra_params)
                #    exit()
            else:

                #try:
                if verbose: print(' -> ' + star_name + ' already in database')

                for key, value in base_params.items():
                    setattr(known_target, key, value)
                known_target.save()
                if verbose: print(' -> Updated parameters for ' + star_name)

                qs = self.get_target_extra_params(known_target)

                if verbose:
                    print(' -> Found ' + str(len(qs)) +
                          ' extra parameters for this target')

                for key, key_type in self.star_extra_params().items():

                    value = extra_params[key]

                    # Initialise before scanning the existing extras so a match
                    # found early in the loop is not overwritten
                    got_key = False

                    for ts in qs:

                        if ts.key == key:
                            if verbose:
                                print('Submitting key ', key, value,
                                      type(value))

                            self.update_target_extra_with_type(
                                ts, value, key_type)

                            got_key = True

                    if not got_key:
                        if verbose:
                            print('Adding extra key ', key, value, type(value))
                        self.create_target_extra_with_type(
                            known_target, key, value, key_type)

                if verbose: print(' -> Updated extra parameters')
                #except OverflowError:
                #    print(base_params,extra_params)
                #    exit()

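            # Report progress roughly every 1% of the star list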
            if j % jincr == 0:
                percentage = round(
                    (float(j) / float(len(stars_table))) * 100.0, 0)
                print(' -> Ingest is ' + str(percentage) +
                      '% complete (' + str(j) + ' stars out of ' +
                      str(len(stars_table)) + ')')
Example #3
#query = 'SELECT filter, facility, hjd, calibrated_flux FROM phot WHERE star_id="'+str(star_id)+'" AND image ="' + str(image_id) + '" AND filter="'+str(filt_choice)+'" AND facility="'+str(tel_choice)+'"'
#select flux and calibrated_flux from phot

#FIELD-16
ra = '18:00:17.9956'
dec = '-28:32:15.2109'
db_dir = '/home/jclark/examples/ROME-FIELD-16_phot.db'

ZP = 25
BACKGROUND = 15000

filt_choice = '3'
tel_choice = 2
mag_err_cutoff = 500

conn = phot_db.get_connection(dsn=db_dir)
facilities = phot_db.fetch_facilities(conn)
filters = phot_db.fetch_filters(conn)
code_id = phot_db.get_stage_software_id(conn, 'stage6')

center = SkyCoord(ra, dec, frame='icrs', unit=(units.hourangle, units.deg))

radius = 5 / 60.0  # search radius of 5 arcmin, converted to degrees

if __name__ == '__main__':
    print(facilities)
    print(filters)
    meds, stds = extract_med_std(conn, ra, dec, radius, filt_choice,
                                 tel_choice)
    #mags,poissons = extract_med_poisson(conn,ra,dec,radius,filt_choice,tel_choice)
    import pdb

    def handle(self, *args, **options):

        self.check_arguments(options)

        log = self.start_log()

        conn = phot_db.get_connection(dsn=options['phot_db_path'])

        pri_refimg = import_utils.fetch_primary_reference_image_from_phot_db(
            conn)

        stars_table = import_utils.fetch_starlist_from_phot_db(
            conn, pri_refimg, log)

        group = self.fetch_or_create_data_product_group()

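        # NOTE: only the first 10 stars are processed here, presumably as a test run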
        for star in stars_table[0:10]:

            log.info('Starting ingest for star ' + str(star['star_id']))

            phot_table = import_utils.fetch_photometry_for_star(
                conn, star['star_id'], log)

            target = self.fetch_star_from_tom(options['field_name'],
                                              star['star_id'], log)

            dataset_lut = self.identify_datasets(conn, phot_table, star, log)

            # Check for existing lightcurves for this star from the
            # facilities listed in the phot_db.  If data are present, delete the
            # associated Datums to clear the way for the new ingest
            self.clear_old_data(dataset_lut, target, log)

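            # Ingest each photometric measurement, creating the per-facility/filter
            # DataProduct the first time it is needed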
            for entry in phot_table:

                key = str(entry['facility']) + '_' + str(entry['filter'])

                dataset = dataset_lut[key]

                if dataset['product'] is None:

                    log.info('Creating new DataProduct with ID=' +
                             dataset['product_id'])

                    data_file = path.basename(
                        options['phot_db_path']) + '.' + dataset['product_id']

                    data_product_params = {
                        "product_id": dataset['product_id'],
                        "target": target,
                        "observation_record": None,
                        "data": data_file,  # This is used for uploaded file paths
                        "extra_data": dataset['dataset_code'].split('_')[-1],
                        "tag": "photometry",
                        "featured": False,
                    }

                    product = DataProduct.objects.create(**data_product_params)
                    product.group.add(group)

                    dataset['product'] = product

                    dataset_lut[key] = dataset

                image = import_utils.get_image_entry(conn, entry['image'])
                date_obs = datetime.strptime(image['date_obs_utc'][0],
                                             "%Y-%m-%dT%H:%M:%S.%f")
                date_obs = date_obs.replace(tzinfo=pytz.UTC)

                value = {
                    "magnitude": entry['calibrated_mag'],
                    "magnitude_error": entry['calibrated_mag_err'],
                    "hjd": entry['hjd'],
                    "filter": dataset['dataset_code'].split('_')[-1]
                }

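                # Store the measurement as a ReducedDatum with the photometry
                # serialised to JSON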
                datum_params = {
                    "target": target,
                    "data_product": dataset['product'],
                    "data_type": "photometry",
                    "source_name": dataset['product_id'],
                    "source_location": key,
                    "timestamp": date_obs,
                    "value": json.dumps(value),
                }

                datum = ReducedDatum.objects.create(**datum_params)

            log.info('Completed ingest of ' + str(len(phot_table)) +
                     ' datapoints for star ' + str(star['star_id']))

        self.end_log(log)

    def handle(self, *args, **options):

        verbose = False

        log = self.start_log()

        if not path.isfile(options['phot_db_path']):
            raise IOError('Cannot find photometry database ' +
                          options['phot_db_path'])

        conn = phot_db.get_connection(dsn=options['phot_db_path'])

        pri_refimg = import_utils.fetch_primary_reference_image_from_phot_db(
            conn)
        log.info('Identified primary reference dataset as ' + repr(pri_refimg))

        ref_image = import_utils.fetch_reference_component_image(
            conn, pri_refimg['refimg_id'][0])
        log.info('Identified reference image as ' + repr(ref_image))

        date_obs = datetime.strptime(ref_image['date_obs_utc'][0],
                                     "%Y-%m-%dT%H:%M:%S.%f")
        date_obs = date_obs.replace(tzinfo=pytz.UTC)
        log.info('Reference image timestamp ' +
                 date_obs.strftime("%Y-%m-%dT%H:%M:%S.%f"))

        data_source = str(ref_image['facility'][0]) + '_' + str(
            ref_image['filter'][0])
        log.info('Data source code is ' + data_source)

        star_colours = import_utils.fetch_star_colours(conn)
        log.info('Extracted colour information for ' + str(len(star_colours)) +
                 ' stars')

        group = self.fetch_or_create_data_product_group()
        log.info('Data product group is ' + repr(group))

        field_target = self.check_field_in_tom(options['field_name'], log)
        log.info('Associating with field Target ' + repr(field_target))

        field_keys = self.field_extra_params()

        data_array = []
        ns = 0

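        # Collect the field_keys columns for every star with at least one positive
        # corrected calibrated magnitude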
        for j in range(len(star_colours)):

            if (star_colours['cal_mag_corr_g'][j] > 0.0
                    or star_colours['cal_mag_corr_r'][j] > 0.0
                    or star_colours['cal_mag_corr_i'][j] > 0.0):

                star_data = []

                for key in field_keys.keys():
                    star_data.append(star_colours[key][j])

                data_array.append(star_data)

        data_array = np.array(data_array)

        print('Built data array')

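        # NOTE: this assumes field_extra_params() lists each magnitude key immediately
        # followed by its *_err key, so that column i+1 of data_array holds the errors
        # for the magnitudes in column i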
        for i, (key, tag) in enumerate(field_keys.items()):

            if '_err' not in key:

                product_id = options['field_name'] + '_pri_ref_' + key
                print('Using product ID=' + product_id)

                data_file = path.basename(
                    options['phot_db_path']) + '.' + product_id

                data_product_params = {
                    "product_id": product_id,
                    "target": field_target,
                    "observation_record": None,
                    "data": data_file,  # This is used for uploaded file paths
                    "extra_data": tag,
                    "tag": "photometry",
                    "featured": False,
                }

                product = self.get_or_create_data_product(
                    data_product_params, group)

                value = {
                    "magnitude": data_array[:, i].tolist(),
                    "magnitude_error": data_array[:, i + 1].tolist(),
                    "filter": tag
                }

                #value = {}
                #for i,(key,tag) in enumerate(field_keys.items()):
                #    value[key] = data_array[:,i].tolist()
                #value['filter'] = 'gri'

                datum_params = {
                    "target": field_target,
                    "data_product": product,
                    "data_type": "photometry",
                    "source_name": product_id,
                    "source_location": data_source,
                    "timestamp": date_obs,
                    "value": json.dumps(value)
                }

                print('Composed datum parameters')

                datum = ReducedDatum.objects.create(**datum_params)

                print('Created datum')