Example #1
def create_ascii_table(observation_table, outfile):
    """Given a table of observations create an ascii log file for easy parsing.
    Store the result in outfile (could/should be a vospace dataNode)

    observation_table: astropy.votable.array object
    outfile: str (name of the vospace dataNode to store the result to)

    """

    logging.info("writing text log to %s" % outfile)

    stamp = "#\n# Last Updated: " + time.asctime() + "\n#\n"
    header = "| %20s | %20s | %20s | %20s | %20s | %20s | %20s |\n" % (
        "EXPNUM", "OBS-DATE", "FIELD", "EXPTIME(s)", "RA", "DEC", "RUNID")
    bar = "=" * (len(header) - 1) + "\n"

    if outfile[0:4] == "vos:":
        temp_file = tempfile.NamedTemporaryFile(suffix='.txt')
        fout = temp_file
    else:
        fout = open(outfile, 'w')

    t2 = None
    fout.write(bar + stamp + bar + header)

    populated = storage.list_dbimages()
    for i in range(len(observation_table) - 1, -1, -1):
        row = observation_table.data[i]
        if row['dataset_name'] not in populated:
            storage.populate(row['dataset_name'])
        str_date = str(
            ephem.date(row.StartDate + 2400000.5 -
                       ephem.julian_date(ephem.date(0))))[:20]
        t1 = time.strptime(str_date, "%Y/%m/%d %H:%M:%S")
        if t2 is None or math.fabs(time.mktime(t2) -
                                   time.mktime(t1)) > 3 * 3600.0:
            fout.write(bar)
        t2 = t1
        ra = str(ephem.hours(math.radians(row.RA)))
        dec = str(ephem.degrees(math.radians(row.DEC)))
        line = "| %20s | %20s | %20s | %20.1f | %20s | %20s | %20s |\n" % (
            str(row.dataset_name),
            str(
                ephem.date(row.StartDate + 2400000.5 -
                           ephem.julian_date(ephem.date(0))))[:20],
            row.TargetName[:20], row.ExposureTime, ra[:20], dec[:20],
            row.ProposalID[:20])
        fout.write(line)

    fout.write(bar)

    if outfile[0:4] == "vos:":
        fout.flush()
        storage.copy(fout.name, outfile)
    fout.close()

    return
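A minimal usage sketch (not part of the original source): the observation table is assumed to come from a VOTable containing the columns the function reads, and both output modes are shown; the file names and the vos: path are hypothetical.

from astropy.io.votable import parse

# Hypothetical VOTable holding the columns used above
# (dataset_name, StartDate, RA, DEC, TargetName, ExposureTime, ProposalID).
observation_table = parse("ossos_observations.xml").get_first_table().array

# Local path: written directly with open().
create_ascii_table(observation_table, "observations_log.txt")

# VOSpace dataNode: staged through a temporary file, then copied with storage.copy().
create_ascii_table(observation_table, "vos:OSSOS/observations_log.txt")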
Example #2
def create_ascii_table(observation_table, outfile):
    """Given a table of observations create an ascii log file for easy parsing.
    Store the result in outfile (could/should be a vospace dataNode)

    observation_table: astropy.votable.array object
    outfile: str (name of the vospace dataNode to store the result to)

    """

    logging.info("writing text log to %s" % outfile)

    stamp = "#\n# Last Updated: " + time.asctime() + "\n#\n"
    header = "| %20s | %20s | %20s | %20s | %20s | %20s | %20s |\n" % (
        "EXPNUM", "OBS-DATE", "FIELD", "EXPTIME(s)", "RA", "DEC", "RUNID")
    bar = "=" * (len(header) - 1) + "\n"

    if outfile[0:4] == "vos:":
        temp_file = tempfile.NamedTemporaryFile(suffix='.txt')
        fout = temp_file
    else:
        fout = open(outfile, 'w')

    t2 = None
    fout.write(bar + stamp + bar + header)

    populated = storage.list_dbimages()
    for i in range(len(observation_table) - 1, -1, -1):
        row = observation_table.data[i]
        if row['dataset_name'] not in populated:
            storage.populate(row['dataset_name'])
        str_date = str(ephem.date(row.StartDate +
                                  2400000.5 -
                                  ephem.julian_date(ephem.date(0))))[:20]
        t1 = time.strptime(str_date, "%Y/%m/%d %H:%M:%S")
        if t2 is None or math.fabs(time.mktime(t2) - time.mktime(t1)) > 3 * 3600.0:
            fout.write(bar)
        t2 = t1
        ra = str(ephem.hours(math.radians(row.RA)))
        dec = str(ephem.degrees(math.radians(row.DEC)))
        line = "| %20s | %20s | %20s | %20.1f | %20s | %20s | %20s |\n" % (
            str(row.dataset_name),
            str(ephem.date(row.StartDate + 2400000.5 -
                           ephem.julian_date(ephem.date(0))))[:20],
            row.TargetName[:20],
            row.ExposureTime, ra[:20], dec[:20], row.ProposalID[:20])
        fout.write(line)

    fout.write(bar)

    if outfile[0:4] == "vos:":
        fout.flush()
        storage.copy(fout.name, outfile)
    fout.close()

    return
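The one non-obvious rule above is the block separator: a fresh "=" bar is written whenever consecutive exposures are more than three hours apart. A standalone sketch of just that rule, using made-up timestamps in the same format the code parses:

import math
import time

timestamps = ["2013/04/05 08:10:00",   # made-up example dates
              "2013/04/05 09:00:00",
              "2013/04/06 01:30:00"]

previous = None
for stamp in timestamps:
    current = time.strptime(stamp, "%Y/%m/%d %H:%M:%S")
    # Start a new block when the gap to the previous exposure exceeds 3 hours.
    if previous is None or math.fabs(time.mktime(previous) - time.mktime(current)) > 3 * 3600.0:
        print("==== new observing block ====")
    previous = current
    print(stamp)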
Example #3
def mkpsf_failures():
    """A simple script to loop over the standard tags for the mkpsf and
    step1 processing steps. If exposure/ccd combo isn't marked as
    'success' then report the failure.
    
    This example uses the vos client directly.
    """

    for expnum in storage.list_dbimages():
        for ccd in range(36):
            if not storage.get_status(MKPSF, "", expnum, "p", ccd):
                # get_status returns False if the process didn't succeed;
                # with return_message=True it returns the error message instead.
                print expnum, ccd, storage.get_status(MKPSF, "", expnum, "p",
                                                      ccd, return_message=True)
Example #4
def mkpsf_failures():
    """A simple script to loop over the standard tags for the mkpsf and
    step1 processing steps. If exposure/ccd combo isn't marked as
    'success' then report the failure.
    
    This example uses the vos client directly.
    """

    for expnum in storage.list_dbimages():
        for ccd in range(36):
            if not storage.get_status(expnum, ccd, MKPSF):
                # get_status returns False if the process didn't succeed;
                # with return_message=True it returns the error message instead.
                print expnum, ccd, storage.get_status(expnum, ccd, MKPSF,
                                                      return_message=True)
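A small variant (not from the original source) that collects the failures instead of printing them as it goes, using the same get_status signature as this example; it assumes the same storage module and MKPSF tag are in scope.

def mkpsf_failure_summary():
    """Return a dict mapping expnum -> [(ccd, error_message), ...] for failed mkpsf runs."""
    failures = {}
    for expnum in storage.list_dbimages():
        for ccd in range(36):
            if not storage.get_status(expnum, ccd, MKPSF):
                message = storage.get_status(expnum, ccd, MKPSF, return_message=True)
                failures.setdefault(expnum, []).append((ccd, message))
    return failures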
Example #5
def main():
    """
    Update the ossuary Postgres db with images observed for OSSOS.
    iq: Go through and check all ossuary's images for new existence of IQs/zeropoints.
    comment: Go through all ossuary and
    Then updates ossuary with new images that are at any stage of processing.
    Constructs full image entries, including header and info in the vtags, and inserts to ossuary.

    TODO: a CLUSTER after data is inserted - maybe once a week, depending how much there is
    CLUSTER images;   need to sqlalchemy this one
    """
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "-iq",
        "--iq",
        action="store_true",
        help="Check existing images in ossuary that do not yet have "
        "IQ/zeropoint information; update where possible.")
    parser.add_argument(
        "-comment",
        action="store_true",
        help="Add comments on images provided by S. Gwyn to database.")
    parser.add_argument(
        "-snr",
        action="store_true",
        help="Update existing images in ossuary for SNR info where that "
             "exists in a vtag.")
    args = parser.parse_args()

    images = web.field_obs.queries.ImagesQuery()
    processed_images, iqs = retrieve_processed_images(images)  # straight list of primary keys
    commdict = parse_sgwn_comments()

    if args.iq:
        unmeasured_iqs = iq_unmeasured_images(images)
        sys.stdout.write(
            '%d images in ossuary; updating %d with new IQ/zeropoint information.\n'
            % (len(processed_images), len(unmeasured_iqs)))
        # it's in the db, so has already passed the other checks
        for n, image in enumerate(unmeasured_iqs):
            update_values(images, image)
            sys.stdout.write('%s %d/%d...ossuary updated.\n' %
                             (image, n + 1, len(unmeasured_iqs)))

    if args.snr:
        unmeasured = snr_unmeasured_images(images)
        sys.stdout.write(
            '%d images in ossuary; updating %d with new SNR information.\n' %
            (len(processed_images), len(unmeasured)))
        # it's in the db, so has already passed the other checks
        for n, image in enumerate(unmeasured):
            update_values(images, image, iq_zeropt=False, snr=True)
            sys.stdout.write('%s %d/%d...ossuary updated.\n' %
                             (image, n + 1, len(unmeasured)))

    if args.comment:
        sys.stdout.write(
            '%d images in ossuary; updating with new comment information.\n' %
            len(processed_images))
        for image in commdict.keys():
            if int(image) in processed_images:
                update_values(images,
                              image,
                              iq_zeropt=False,
                              comment=True,
                              commdict=commdict)
                sys.stdout.write('%s has comment...\n' % image)

    unprocessed_images = parse_unprocessed_images(storage.list_dbimages(),
                                                  processed_images)
    sys.stdout.write(
        '%d images in ossuary; updating with %d new in VOspace.\n' %
        (len(processed_images), len(unprocessed_images)))

    # FIXME: remove the [29:] slice when the PlutoHazard images are taken out by JJ
    for n, image in enumerate(unprocessed_images[29:]):
        sys.stdout.write('%s %d/%d ' % (image, n + 1, len(unprocessed_images)))
        try:
            subheader, fullheader = get_header(image)
            if subheader is not None:
                sys.stdout.write('Header obtained. ')
                verify_ossos_image(fullheader)
                header = get_iq_and_zeropoint(image, subheader)
                header = get_snr(image, header)
                if image in commdict.keys():
                    header['comment'] = commdict[image]
                put_image_in_database(header, images)
                sys.stdout.write('...added to ossuary...\n')
                # generate_MegaCam_previews(image)
                # sys.stdout.write(' .gif preview saved.\n')
            else:
                sys.stdout.write('Header is not available: skipping.\n')
        except Exception, e:
            sys.stdout.write('... %s\n' % e)
Example #6
    # (fragment: the tail of a helper that replaces or inserts a single image row)
    if len([s[0] for s in query]) > 0:
        # an entry for this image already exists, so delete it before re-inserting
        ss = ims.images.delete().where(ims.images.c.image_id == image['image_id'])
        ims.conn.execute(ss)

    ins = ims.images.insert(values=image)
    ims.conn.execute(ins)

    return



############################ BEGIN MAIN #######################################
data_web_service_url = storage.DATA_WEB_SERVICE + "CFHT"
images = ImagesQuery()
processed_images, iqs = retrieve_processed_images(images)  # straight list of primary keys
dbimages = storage.list_dbimages()
unprocessed_images = parse_unprocessed_images(dbimages)


# Need to add updating of existing records when fwhm etc. info is added
# unprocessed_images = []
# for i, im in enumerate(processed_images):
# 	if iqs[i] is None:
# 		unprocessed_images.append(im)



sys.stdout.write('%d images in ossuary; updating with %d new in VOspace.\n' %
                 (len(processed_images), len(unprocessed_images)))

# Construct a full image entry, including header and info in the vtags
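The first fragment above deletes any existing row with the same image_id and then inserts the new values. Here is a self-contained sketch of that delete-then-insert pattern with SQLAlchemy Core against an in-memory SQLite table (the table name, columns, and values are hypothetical, not ossuary's real schema).

import sqlalchemy as sa

engine = sa.create_engine("sqlite:///:memory:")
metadata = sa.MetaData()
images = sa.Table("images", metadata,
                  sa.Column("image_id", sa.Integer, primary_key=True),
                  sa.Column("iq", sa.Float),
                  sa.Column("zeropoint", sa.Float))
metadata.create_all(engine)

image = {"image_id": 1616681, "iq": 0.71, "zeropoint": 26.3}  # made-up values

with engine.begin() as conn:
    # Remove any existing row for this image_id, then insert the fresh values.
    conn.execute(images.delete().where(images.c.image_id == image["image_id"]))
    conn.execute(images.insert().values(**image))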
Example #7
def main():
    """
    Update the ossuary Postgres db with images observed for OSSOS.
    iq: Go through and check all ossuary's images for new existence of IQs/zeropoints.
    comment: Go through all ossuary and
    Then updates ossuary with new images that are at any stage of processing.
    Constructs full image entries, including header and info in the vtags, and inserts to ossuary.

    TODO: a CLUSTER after data is inserted - maybe once a week, depending how much there is
    CLUSTER images;   need to sqlalchemy this one
    """
    parser = argparse.ArgumentParser()

    parser.add_argument("-iq", "--iq", action="store_true",
                        help="Check existing images in ossuary that do not yet have "
                             "IQ/zeropoint information; update where possible.")
    parser.add_argument("-comment", action="store_true",
                        help="Add comments on images provided by S. Gwyn to database.")
    parser.add_argument("-snr", action="store_true",
                        help="Update existing images in ossuary for SNR info where that exists in a vtag.")
    args = parser.parse_args()

    images = web.field_obs.queries.ImagesQuery()
    processed_images, iqs = retrieve_processed_images(images)  # straight list of primary keys
    commdict = parse_sgwn_comments()

    if args.iq:
        unmeasured_iqs = iq_unmeasured_images(images)
        sys.stdout.write('%d images in ossuary; updating %d with new IQ/zeropoint information.\n' %
                         (len(processed_images), len(unmeasured_iqs)))
        for n, image in enumerate(unmeasured_iqs):  # it's in the db, so has already passed the other checks
            update_values(images, image)
            sys.stdout.write('%s %d/%d...ossuary updated.\n' % (image, n + 1, len(unmeasured_iqs)))

    if args.snr:
        unmeasured = snr_unmeasured_images(images)
        sys.stdout.write('%d images in ossuary; updating %d with new SNR information.\n' %
                         (len(processed_images), len(unmeasured)))
        for n, image in enumerate(unmeasured):  # it's in the db, so has already passed the other checks
            update_values(images, image, iq_zeropt=False, snr=True)
            sys.stdout.write('%s %d/%d...ossuary updated.\n' % (image, n + 1, len(unmeasured)))

    if args.comment:
        sys.stdout.write('%d images in ossuary; updating with new comment information.\n' %
                         len(processed_images))
        for image in commdict.keys():
            if int(image) in processed_images:
                update_values(images, image, iq_zeropt=False, comment=True, commdict=commdict)
                sys.stdout.write('%s has comment...\n' % image)

    unprocessed_images = parse_unprocessed_images(storage.list_dbimages(), processed_images)
    sys.stdout.write('%d images in ossuary; updating with %d new in VOspace.\n' %
                     (len(processed_images), len(unprocessed_images)))

    for n, image in enumerate(unprocessed_images):
        sys.stdout.write('%s %d/%d ' % (image, n + 1, len(unprocessed_images)))
        try:
            subheader, fullheader = get_header(image)
            if subheader is not None:
                sys.stdout.write('Header obtained. ')
                verify_ossos_image(fullheader)
                header = get_iq_and_zeropoint(image, subheader)
                header = get_snr(image, header)
                if image in commdict.keys():
                    header['comment'] = commdict[image]
                put_image_in_database(header, images)
                sys.stdout.write('...added to ossuary...\n')
                # generate_MegaCam_previews(image)
                # sys.stdout.write(' .gif preview saved.\n')
            else:
                sys.stdout.write('Header is not available: skipping.\n')
        except Exception, e:
            sys.stdout.write('... %s\n' % e)
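For reference, a minimal sketch (not part of the original) of how the three flags map onto the parsed namespace; parse_args is given an explicit list so it runs without a command line, and the script name in the comment is hypothetical.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-iq", "--iq", action="store_true")
parser.add_argument("-comment", action="store_true")
parser.add_argument("-snr", action="store_true")

# Equivalent to running, e.g.:  python update_ossuary.py -iq -snr
args = parser.parse_args(["-iq", "-snr"])
print("iq=%s comment=%s snr=%s" % (args.iq, args.comment, args.snr))  # iq=True comment=False snr=True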