Example #1
File: plant.py Project: sevenlin123/MOP
def plant(expnums, ccd, rmin, rmax, ang, width, version='s'):
    '''run the plant script on this combination of exposures'''

    ptf = open('proc-these-files','w')
    ptf.write("# Files to be planted and search\n")
    ptf.write("# image fwhm plant\n")

    for expnum in expnums:
        fwhm = storage.get_fwhm(expnum,ccd)
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        ptf.write("%s %3.1f YES\n" % ( filename[0:-5],
                                    fwhm ))
        for ext in ['apcor',
                    'obj.jmp',
                    'trans.jmp',
                    'psf.fits',
                    'mopheader',
                    'phot',
                    'zeropoint.used']:
            apcor = storage.get_image(expnum, ccd=ccd, version='s',
                                      ext=ext)

    ptf.close()

    cmd_args = ['plant.csh',os.curdir,
             str(rmin), str(rmax), str(ang), str(width)]

    util.exec_prog(cmd_args)
    
    if args.dryrun:
        # Don't push back to VOSpace
        return 

    uri = storage.get_uri('Object',ext='planted',version='',
                          subdir=str(
        expnums[0])+"/ccd%s" % (str(ccd).zfill(2)))
    storage.copy('Object.planted',uri)
    uri = os.path.join(os.path.dirname(uri), 'shifts')
    storage.copy('shifts', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum,
                              ccd=ccd,
                              version=version,
                              ext='fits', prefix='fk')
        filename =  os.path.basename(uri)
        storage.copy(filename, uri)

        for ext in ['mopheader',
                    'psf.fits',
                    'fwhm',
                    'apcor', 'zeropoint.used', 'trans.jmp']:
            storage.delete(expnum, ccd, 's', ext, prefix='fk')
            storage.vlink(expnum, ccd, 'p', ext,
                          expnum, ccd, 's', ext, l_prefix='fk')
                          

    
    return
Example #2
File: step1.py Project: R136a1-/MOP
def step1(expnum,
              ccd,
              prefix='',
              version='p',
              fwhm=4, 
              sex_thresh=1.3, 
              wave_thresh=2.7, 
              maxcount=30000):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image.  In pixels.
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level

    """

    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    mopheader = storage.get_image(expnum, ccd, version=version,
                                  ext='mopheader', prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]
    
    outfile = util.exec_prog(['step1jmp', 
                              '-f', basename,
                              '-t', str(wave_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])
    
    obj_uri = storage.get_uri(expnum,ccd,version=version,ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename+".obj.jmp"

    storage.copy(obj_filename,obj_uri)

    ## for step1matt we need the weight image
    hdulist = fits.open(filename)
    flat_name = hdulist[0].header.get('FLAT','06Bm02.flat.r.36.01.fits')
    flat_name = flat_name[0:-5]
    flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits',
                      subdir='calibrators', rescale=False)
    if not os.access('weight.fits',os.R_OK):
        os.symlink(flat_filename, 'weight.fits')
    outfile = util.exec_prog(['step1matt',
                              '-f', basename,
                              '-t', str(sex_thresh),
                              '-w', str(fwhm),
                              '-m', str(maxcount)])

    obj_uri = storage.get_uri(expnum,ccd,version=version,ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename+".obj.matt"

    storage.copy(obj_filename,obj_uri)

    return True
Example #3
File: stepI.py Project: ijiraq/MOP
def step3(expnums, ccd, version, rate_min,
          rate_max, angle, width, field=None, prefix=None, dry_run=False,
          maximum_flux_ratio=3, minimum_area=5, minimum_median_flux=1000.0):
    """run the actual step3  on the given exp/ccd combo"""

    jmp_args = ['step3jmp']
    matt_args = ['step3jjk']

    idx = 0
    cmd_args = []
    for expnum in expnums:
        idx += 1
        for ext in ['unid.jmp', 'unid.matt']:
            storage.get_file(expnum, ccd=ccd, version=version, ext=ext, prefix=prefix)
        image = os.path.splitext(os.path.basename(storage.get_uri(expnum, ccd, version=version, prefix=prefix)))[0]
        cmd_args.append('-f%d' % idx)
        cmd_args.append(image)

    cmd_args.extend(['-rn', str(rate_min),
                     '-rx', str(rate_max),
                     '-a', str(angle),
                     '-w', str(width)])
    jmp_args.extend(cmd_args)

    # Add some extra arguments for the ISO search.
    cmd_args.extend(['-fr', str(maximum_flux_ratio),
                     '-ma', str(minimum_area),
                     '-mf', str(minimum_median_flux)])
    matt_args.extend(cmd_args)

    logging.info(util.exec_prog(jmp_args))
    logging.info(util.exec_prog(matt_args))

    if dry_run:
        return

    if field is None:
        field = str(expnums[0])
    storage.mkdir(os.path.dirname(storage.get_uri(field,
                                                  ccd=ccd,
                                                  version=version,
                                                  prefix=prefix)))

    for ext in ['moving.jmp', 'moving.matt']:
        uri = storage.get_uri(field,
                              ccd=ccd,
                              version=version,
                              ext=ext,
                              prefix=prefix)
        filename = '%s%d%s%s.%s' % (prefix, expnums[0],
                                    version,
                                    str(ccd).zfill(2),
                                    ext)
        storage.copy(filename, uri)

    return
Example #4
File: step3.py Project: sevenlin123/MOP
def step3(expnums, ccd, version, rate_min,
              rate_max, angle, width, field=None, prefix=None):
    '''run the actual step3 on the given exp/ccd combo'''

    jmp_args = ['step3jmp']
    matt_args = ['step3matt']

    idx = 0
    cmd_args = []
    for expnum in expnums:
        idx += 1
        for ext in ['unid.jmp', 'unid.matt',
                    'trans.jmp' ]:
            filename = storage.get_image(expnum,
                                         ccd=ccd,
                                         version=version,
                                         ext=ext,
                                         prefix=prefix
                                         )
        image = os.path.splitext(os.path.splitext(os.path.basename(filename))[0])[0]
        cmd_args.append('-f%d' % ( idx))
        cmd_args.append(image)

    cmd_args.extend(['-rn', str(rate_min),
                     '-rx', str(rate_max),
                     '-a', str(angle),
                     '-w', str(width)])
    jmp_args.extend(cmd_args)
    matt_args.extend(cmd_args)
    util.exec_prog(jmp_args)
    util.exec_prog(matt_args)


    if field is None:
        field = str(expnums[0])
    storage.mkdir(os.path.dirname(
        storage.get_uri(field,
                        ccd=ccd,
                        version=version,
                        ext=ext,
                        prefix=prefix)))

    for ext in ['moving.jmp', 'moving.matt']:
        uri = storage.get_uri(field,
                              ccd=ccd,
                              version=version,
                              ext=ext,
                              prefix=prefix)
        filename = '%s%d%s%s.%s' % ( prefix, expnums[0],
                                   version,
                                   str(ccd).zfill(2),
                                   ext)
        storage.copy(filename, uri)


    return
Example #5
File: plant.py Project: OSSOS/MOP
def plant(expnums, ccd, rmin, rmax, ang, width, number=10, mmin=21.0, mmax=25.5, version='s', dry_run=False):
    """Plant artificial sources into the list of images provided.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to
    :param ccd: which ccd to work on.
    :param rmin: The minimum rate of motion to add sources at (''/hour)
    :param rmax: The maximum rate of motion to add sources at (''/hour)
    :param ang: The mean angle of motion to add sources
    :param width: The +/- range of angles of motion
    :param number: number of sources to plant.
    :param mmin: Minimum magnitude to plant sources at
    :param mmax: Maximum magnitude to plant sources at
    :param version: Add sources to the 'o', 'p' or 's' images
    :param dry_run: don't push results to VOSpace.
    """

    # Construct a list of artificial KBOs with positions in the image
    # and rates of motion within the bounds given by the caller.
    filename = storage.get_image(expnums[0],
                                 ccd=ccd,
                                 version=version)
    header = fits.open(filename)[0].header
    bounds = util.get_pixel_bounds_from_datasec_keyword(header.get('DATASEC', '[33:2080,1:4612]'))

    # generate a set of artificial KBOs to add to the image.
    kbos = KBOGenerator.get_kbos(n=number,
                                 rate=(rmin, rmax),
                                 angle=(ang - width, ang + width),
                                 mag=(mmin, mmax),
                                 x=(bounds[0][0], bounds[0][1]),
                                 y=(bounds[1][0], bounds[1][1]),
                                 filename='Object.planted')

    for expnum in expnums:
        filename = storage.get_image(expnum, ccd, version)
        psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
        plant_kbos(filename, psf, kbos, get_shifts(expnum, ccd, version), "fk")

    if dry_run:
        return

    uri = storage.get_uri('Object', ext='planted', version='',
                          subdir=str(
                              expnums[0]) + "/ccd%s" % (str(ccd).zfill(2)))

    storage.copy('Object.planted', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum,
                              ccd=ccd,
                              version=version,
                              ext='fits', prefix='fk')
        filename = os.path.basename(uri)
        storage.copy(filename, uri)

    return
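The docstring above spells out the planting parameters; below is a minimal invocation sketch, assuming the OSSOS storage/VOSpace environment that plant() relies on is configured. The exposure numbers, ccd and rate/angle bounds are hypothetical placeholders.

# Hypothetical call of the plant() defined above; expnums and ccd are placeholders.
plant(expnums=[1616681, 1616682, 1616683],   # one discovery triplet
      ccd=22,
      rmin=0.5, rmax=15.0,                   # rate-of-motion bounds, ''/hour
      ang=20.0, width=30.0,                  # mean angle of motion +/- width
      number=10,                             # artificial KBOs to plant
      mmin=21.0, mmax=25.5,                  # magnitude range of planted sources
      version='s',
      dry_run=True)                          # keep results local; push nothing to VOSpace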
Example #6
def scramble(expnums, ccd, version='p', dry_run=False):
    """run the plant script on this combination of exposures"""

    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Pull out values to replace in headers.. must pull them
        # as otherwise we get pointers...
        mjds.append(fobjs[-1][0].header['MJD-OBS'])

    order = [0, 2, 1]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" % (fobjs[idx][0].header['EXPNUM'],
                                            expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]],
                              ccd=ccd,
                              version='s',
                              ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        if dry_run:
            continue
        storage.copy(fname, uri)

    return
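A sketch of driving scramble() above on one triplet, assuming the same pipeline environment; the exposure numbers are placeholders. Note that order = [0, 2, 1] swaps the EXPNUM/MJD-OBS headers of the second and third frames, so the scrambled 's' set no longer advances monotonically in time.

# Hypothetical call of the scramble() defined above; the triplet is a placeholder.
scramble(expnums=[1616681, 1616682, 1616683],
         ccd=22,
         version='p',   # read the processed 'p' images
         dry_run=True)  # write the relabelled 's' frames locally only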
Example #7
def cutout(obj, obj_dir, radius, username, password):
    for obs in obj.mpc_observations:  # FIXME: TESTING ONLY
        if obs.null_observation:
            continue
        expnum = obs.comment.frame.split('p')[0]  # only want calibrated images
        # Using the WCS rather than the X/Y, as the X/Y can be unreliable on a long-term basis
        this_cutout = "CIRCLE ICRS {} {} {}".format(obs.coordinate.ra.degree,
                                               obs.coordinate.dec.degree,
                                               radius)
        print this_cutout
        # FIXME: should be able to use line below, but bug in VOSpace requires direct-access workaround, for now.
        # postage_stamp = storage.get_image(expnum, cutout=cutout)

        target = storage.vospace.fixURI(storage.get_uri(expnum))
        direction = "pullFromVoSpace"
        protocol = "ivo://ivoa.net/vospace/core#httpget"
        view = "cutout"
        params = {"TARGET": target,
                  "PROTOCOL": protocol,
                  "DIRECTION": direction,
                  "cutout": this_cutout,
                  "view": view}
        r = requests.get(BASEURL, params=params, auth=(username, password))
        r.raise_for_status()  # confirm the connection worked as hoped
        postage_stamp_filename = "{}_{:11.5f}_{:09.5f}_{:+09.5f}.fits".format(obj.provisional_name,
                                                                              obs.date.mjd,
                                                                              obs.coordinate.ra.degree,
                                                                              obs.coordinate.dec.degree)
        logging.info("{}".format(postage_stamp_filename))
        with open(postage_stamp_filename, 'w') as tmp_file:
            tmp_file.write(r.content)
            storage.copy(postage_stamp_filename, obj_dir + "/" + postage_stamp_filename)
        os.unlink(postage_stamp_filename)  # easier not to have them hanging around
Example #8
File: scramble.py Project: OSSOS/MOP
def scramble(expnums, ccd, version='p', dry_run=False):
    """run the plant script on this combination of exposures"""

    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Pull out values to replace in headers.. must pull them
        # as otherwise we get pointers...
        mjds.append(fobjs[-1][0].header['MJD-OBS'])

    order = [0, 2, 1]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" % (fobjs[idx][0].header['EXPNUM'],
                                            expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]],
                              ccd=ccd,
                              version='s',
                              ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        if dry_run:
            continue
        storage.copy(fname, uri)

    return
Example #9
def parse_ssois_return(ssois_return, object_name, imagetype, camera_filter='r.MP9601',
                       telescope_instrument='CFHT/MegaCam'):
    """
    Parse through objects in ssois query and filter out images of desired filter, type, exposure time, and instrument
    """

    assert camera_filter in ['r.MP9601', 'u.MP9301']

    ret_table = []
    good_table = 0

    table_reader = ascii.get_reader(Reader=ascii.Basic)
    table_reader.inconsistent_handler = _skip_missing_data
    table_reader.header.splitter.delimiter = '\t'
    table_reader.data.splitter.delimiter = '\t'
    table = table_reader.read(ssois_return)

    for row in table:
        # Excludes the OSSOS wallpaper.
        # note: 'Telescope_Insturment' is a typo in SSOIS's return format
        if not 'MegaCam' in row['Telescope_Insturment']:
            continue
        # Check if image of object exists in OSSOS observations
        if not storage.exists(storage.get_uri(row['Image'][:-1])):
            continue
        if not str(row['Image_target']).startswith('WP'):
            good_table += 1
            ret_table.append(row)

    if good_table > 0:
        print " %d images found" % good_table

    return ret_table
Example #10
def cutout(member, object_name, image, ra, dec, radius, ra_rate, dec_rate):

    global vos_dir

    # Planned to sort stamps on VOS into groups, but this didn't happen and so
    # currently everything will be saved to /hasfamily.
    if member == 'none':
        vos_dir = '{}/nofamily'.format(_VOS_PATH)
    else:
        vos_dir = '{}/hasfamily'.format(_VOS_PATH)

    radius = radius * units.deg

    expnum = image.strip('p')  # only use processed images
    target = storage.get_uri(expnum)
    coord = SkyCoord(unit="deg", ra=ra, dec=dec)

    hdulist = storage.ra_dec_cutout(target, coord, radius)

    hdulist[0].header['OBJNUM'] = (object_name,'object')    #Put image info into header for later use
    hdulist[0].header['RA'] = (ra,'degrees')
    hdulist[0].header['DEC'] = (dec,'degrees')
    hdulist[0].header['RARATE'] = (ra_rate,'arcsec/hr')
    hdulist[0].header['DECRATE'] = (dec_rate,'arcsec/hr')

    postage_stamp_filename = "{}_{}_{:8f}_{:8f}.fits".format(object_name, image, float(ra), float(dec))

    print postage_stamp_filename

    hdulist.writeto("{}/{}".format(_STAMPS_DIR, postage_stamp_filename), output_verify='warn', clobber=True)
    del hdulist

    try:
        storage.copy('{}/{}'.format(_STAMPS_DIR, postage_stamp_filename),
                     '{}/{}'.format(vos_dir, postage_stamp_filename))
    except Exception, e:
        print e
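A usage sketch for the cutout() above; every argument value is a hypothetical placeholder, and the call assumes _VOS_PATH and _STAMPS_DIR are set at module level as the function expects.

# Hypothetical call of the cutout() defined above; all values are placeholders.
cutout(member='none',
       object_name='o3e01',
       image='1616681p',            # processed exposure identifier
       ra=214.56, dec=-12.81,       # degrees
       radius=0.005,                # degrees; converted to a Quantity inside cutout()
       ra_rate=-2.5, dec_rate=0.8)  # arcsec/hr, recorded in the stamp header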
Example #11
File: snr.py Project: stephengwyn/MOP
def get_sky(expnum, ccd):
    uri = storage.get_uri(expnum, ccd, ext='phot')

    fobj = StringIO(storage.open_vos_or_local(uri).read())
    fobj.seek(0)
    phot_table = ascii.read(fobj)
    return phot_table['MSKY'].mean()
Example #12
File: snr.py Project: OSSOS/MOP
def get_sky(expnum, ccd):
    uri = storage.get_uri(expnum, ccd, ext='phot')

    fobj = StringIO(storage.open_vos_or_local(uri).read())
    fobj.seek(0)
    phot_table = ascii.read(fobj)
    return phot_table['MSKY'].mean()
Example #13
def parse_ssois_return(ssois_return, object_name, imagetype, camera_filter='r.MP9601',
                       telescope_instrument='CFHT/MegaCam'):
    """
    Parse through objects in ssois query and filter out images of desired filter, type, exposure time, and instrument
    """

    assert camera_filter in ['r.MP9601', 'u.MP9301']

    ret_table = []
    good_table = 0

    table_reader = ascii.get_reader(Reader=ascii.Basic)
    table_reader.inconsistent_handler = _skip_missing_data
    table_reader.header.splitter.delimiter = '\t'
    table_reader.data.splitter.delimiter = '\t'
    table = table_reader.read(ssois_return)

    for row in table:
        # Excludes the OSSOS wallpaper.
        # note: 'Telescope_Insturment' is a typo in SSOIS's return format
        if not 'MegaCam' in row['Telescope_Insturment']:
            continue
        # Check if image of object exists in OSSOS observations
        if not storage.exists(storage.get_uri(row['Image'][:-1])):
            continue
        if not str(row['Image_target']).startswith('WP'):
            good_table += 1
            ret_table.append(row)

    if good_table > 0:
        print((" %d images found" % good_table))

    return ret_table
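A usage sketch for parse_ssois_return() above, assuming the raw tab-separated SSOIS response has already been saved to a local file; the file and object names are placeholders.

# Hypothetical usage of the parse_ssois_return() defined above.
with open('ssois_response.tsv') as fh:
    rows = parse_ssois_return(fh.read(), object_name='o3e01', imagetype='p')
for row in rows:
    print(row['Image'], row['Image_target'])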
Example #14
File: plant.py Project: R136a1-/MOP
def plant(expnums, ccd, rmin, rmax, ang, width, version="s"):
    """run the plant script on this combination of exposures"""

    ptf = open("proc-these-files", "w")
    ptf.write("# Files to be planted and search\n")
    ptf.write("# image fwhm plant\n")

    for expnum in expnums:
        fwhm = storage.get_fwhm(expnum, ccd, version=version)
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        ptf.write("%s %3.1f YES\n" % (filename[0:-5], fwhm))
        for ext in ["apcor", "obj.jmp", "trans.jmp", "psf.fits", "mopheader", "phot", "zeropoint.used"]:
            apcor = storage.get_image(expnum, ccd=ccd, version=version, ext=ext)

    ptf.close()

    cmd_args = ["plant.csh", os.curdir, str(rmin), str(rmax), str(ang), str(width)]

    util.exec_prog(cmd_args)

    if args.dryrun:
        # Don't push back to VOSpace
        return

    uri = storage.get_uri("Object", ext="planted", version="", subdir=str(expnums[0]) + "/ccd%s" % (str(ccd).zfill(2)))
    storage.copy("Object.planted", uri)
    uri = os.path.join(os.path.dirname(uri), "plant.shifts")
    storage.copy("shifts", uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum, ccd=ccd, version=version, ext="fits", prefix="fk")
        filename = os.path.basename(uri)
        storage.copy(filename, uri)

        for ext in ["mopheader", "psf.fits", "fwhm", "apcor", "zeropoint.used", "trans.jmp"]:
            storage.delete(expnum, ccd, "s", ext, prefix="fk")
            storage.vlink(expnum, ccd, "s", ext, expnum, ccd, "s", ext, l_prefix="fk")

    return
Example #15
File: scramble.py Project: R136a1-/MOP
def scramble(expnums, ccd, version='p'):
    '''Scramble the MJD-OBS and EXPNUM header values amongst this set of exposures.'''

    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Pull out values to replace in headers.. must pull them
        # as otherwise we get pointers...
        mjds.append(fobjs[-1][0].header['MJD-OBS'])

    order = [1,0,2]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" % ( fobjs[idx][0].header['EXPNUM'],
                                             expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]],
                                ccd=ccd,
                                version='s',
                                ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        storage.copy(fname, uri)

        # now make a link between files that the plant system will need
        for ext in ['apcor', 'obj.jmp', 'mopheader', 'phot',
                    'psf.fits','trans.jmp', 'zeropoint.used', 'fwhm']:
            if storage.exists(storage.get_uri(expnums[order[idx]], ccd, 's', ext)):
                storage.delete(expnums[order[idx]], ccd, 's', ext)
                storage.vlink(expnums[idx], ccd, 'p', ext,
                         expnums[order[idx]], ccd, 's', ext)

    return
Example #16
def get_iq_and_zeropoint(image, header_extract):
	try:  # 22 is the standard chip: mid lower
		fwhm = storage.get_tag(image, 'fwhm_22')
		if fwhm is None:
			fwhm_file = storage.get_uri(image, ccd=22, ext='fwhm')# hfile+'/ccd22/'+image+'p22.fwhm'
			print fwhm_file
			fwhm = float(storage.vospace.open(fwhm_file,view='data').read())
		# now can we work with it?
		if fwhm is not None:
			iq = float(fwhm)*0.1850  # plate scale is 0.1850 arcsec/pixel
			header_extract['iq_ossos'] = iq
		else:
			header_extract['iq_ossos'] = fwhm  # HACKED FOR NOW, WILL BE ADJUSTED LATER

	except Exception, e:
		header_extract['iq_ossos'] = None # HACKED FOR NOW, FIX LATER
Example #17
def cutout(obj, obj_dir, radius):

    cutout_listing = storage.listdir(obj_dir, force=True)
    for obs in obj.mpc_observations:
        if obs.null_observation:
            logging.debug('skipping: {}'.format(obs))
            continue
        if obs.date > parameters.SURVEY_START:
            # can't make postage stamps of earlier linkages
            # can't parse for an obs.comment's exposure number if no
            # obs.comment exists
            try:
                parts = storage.frame2expnum(obs.comment.frame)
            except Exception as ex:
                logging.warning(f"Skipping: {obs}")
                logging.debug(f"Failed to map comment.frame to expnum: {ex}")
                continue
            uri = storage.get_uri(parts['expnum'], version=parts['version'])
            sky_coord = obs.coordinate
            # Using the WCS rather than the X/Y
            # (X/Y can be unreliable over the whole survey)
            postage_stamp_filename = f"{obj.provisional_name}_" \
                                     f"{obs.date.mjd:11.5f}_" \
                                     f"{obs.coordinate.ra.degree:09.5f}_" \
                                     f"{obs.coordinate.dec.degeee:09.5f}.fits"

            if postage_stamp_filename in cutout_listing:
                # skipping existing cutouts
                continue 

            # ast_header = storage._get_sghead(parts['expnum'])
            while True:
              try:
                hdulist = storage.ra_dec_cutout(uri, sky_coord, radius, update_wcs=True)

                with open(postage_stamp_filename, 'w') as tmp_file:
                    hdulist.writeto(tmp_file, overwrite=True, output_verify='fix+ignore')
                    storage.copy(postage_stamp_filename, obj_dir + "/" + postage_stamp_filename)
                os.unlink(postage_stamp_filename)  # easier not to have them hanging around
              except OSError as e:  # occasionally the node is not found: report and move on for later cleanup
                logging.error("OSError: -> " +str(e))
              except Exception as e:
                logging.error("Exception: -> " +str(e))
                continue
              break
Example #18
def cutout(obj, obj_dir, radius):

    cutout_listing = storage.listdir(obj_dir, force=True)
    for obs in obj.mpc_observations:
        if obs.null_observation:
            logging.debug('skipping: {}'.format(obs))
            continue
        if obs.date > parameters.SURVEY_START:  # can't make postage stamps of earlier linkages
            # can't parse for an obs.comment's exposure number if no obs.comment exists
            try:
                parts = storage.frame2expnum(obs.comment.frame)
            except Exception as ex:
                logging.error("Skipping: {}\nFailed to map comment.frame to expnum: {}".format(obs, ex))
                continue
            uri = storage.get_uri(parts['expnum'], version=parts['version'])
            sky_coord = obs.coordinate  # Using the WCS rather than the X/Y (X/Y can be unreliable over the whole survey)
            postage_stamp_filename = "{}_{:11.5f}_{:09.5f}_{:+09.5f}.fits".format(obj.provisional_name,
                                                                                  obs.date.mjd,
                                                                                  obs.coordinate.ra.degree,
                                                                                  obs.coordinate.dec.degree)

            if postage_stamp_filename in cutout_listing:
               # skipping existing cutouts
                continue 

            # ast_header = storage._get_sghead(parts['expnum'])
            while True:
              try:
                hdulist = storage.ra_dec_cutout(uri, sky_coord, radius, update_wcs=True)

                with open(postage_stamp_filename, 'w') as tmp_file:
                    hdulist.writeto(tmp_file, overwrite=True, output_verify='fix+ignore')
                    storage.copy(postage_stamp_filename, obj_dir + "/" + postage_stamp_filename)
                os.unlink(postage_stamp_filename)  # easier not to have them hanging around
              except OSError as e:  # occasionally the node is not found: report and move on for later cleanup
                logging.error("OSError: ->"+str(e))
              except Exception as e:
                logging.error("Exception: ->"+str(e))
                continue
              break
Example #19
def cutout(obj, obj_dir, radius):
    print(len([n for n in obj.mpc_observations if not n.null_observation]))
    for obs in obj.mpc_observations:
        print('starting analysis of {}'.format(str(obs)))
        if obs.null_observation:
            print('skipping')
            continue
        if obs.date > parameters.SURVEY_START:  # can't make postage stamps of earlier linkages
            # can't parse for an obs.comment's exposure number if no obs.comment exists
            try:
                expnum = obs.comment.frame.split('p')[0].strip(' ')  # only want calibrated images
            except AttributeError, e:
                print('No comment in this MPC line!')
                continue
            if not expnum.isdigit():
                print('expnum {} parsed from comment line invalid. Check comment parsing.\n{}'.format(
                    expnum, str(obs.comment))
                )
                continue
            uri = storage.get_uri(expnum)
            sky_coord = obs.coordinate  # Using the WCS rather than the X/Y (X/Y can be unreliable over the whole survey)
            print('Trying {} on {} on {}...'.format(obj.provisional_name, obs.date, expnum))
            try:
                hdulist = storage.ra_dec_cutout(uri, sky_coord, radius)
                # if not 'ASTLEVEL' in hdulist[1].header:   # FIXME: activate once all headers are retro-fitted with ASTLEVEL
                #     logging.info('Cutout invalid for use. Skipping inclusion.\n')
                #     continue
                postage_stamp_filename = "{}_{:11.5f}_{:09.5f}_{:+09.5f}.fits".format(obj.provisional_name,
                                                                                      obs.date.mjd,
                                                                                      obs.coordinate.ra.degree,
                                                                                      obs.coordinate.dec.degree)
                print("{}".format(postage_stamp_filename))

                with open(postage_stamp_filename, 'w') as tmp_file:
                    hdulist.writeto(tmp_file, clobber=True)
                    storage.copy(postage_stamp_filename, obj_dir + "/" + postage_stamp_filename)
                os.unlink(postage_stamp_filename)  # easier not to have them hanging around
            except OSError, e:  # occasionally the node is not found: report and move on for later cleanup
                print e
                continue
Example #20
def scramble(expnums, ccd, version='p'):
    '''Scramble the MJD-OBS and EXPNUM header values amongst this set of exposures.'''

    mjds = []
    fobjs = []
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        fobjs.append(fits.open(filename))
        # Pull out values to replace in headers.. must pull them
        # as otherwise we get pointers...
        mjds.append(fobjs[-1][0].header['MJD-OBS'])

    order = [1, 0, 2]
    for idx in range(len(fobjs)):
        logging.info("Flipping %d to %d" %
                     (fobjs[idx][0].header['EXPNUM'], expnums[order[idx]]))
        fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
        fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
        uri = storage.get_uri(expnums[order[idx]],
                              ccd=ccd,
                              version='s',
                              ext='fits')
        fname = os.path.basename(uri)
        if os.access(fname, os.F_OK):
            os.unlink(fname)
        fobjs[idx].writeto(fname)
        storage.copy(fname, uri)

        # now make a link between files that the plant system will need
        for ext in [
                'apcor', 'obj.jmp', 'mopheader', 'phot', 'psf.fits',
                'trans.jmp', 'zeropoint.used', 'fwhm'
        ]:
            storage.delete(expnums[order[idx]], ccd, 's', ext)
            storage.vlink(expnums[idx], ccd, 'p', ext, expnums[order[idx]],
                          ccd, 's', ext)

    return
Example #21
def get_star_data(asteroid_id, mag, expnum, header):
    """
    From the OSSOS PSF-fitted image, calculate the mean flux of each row of the rotated PSF.
    """

    # calculate mean psf
    uri = storage.get_uri(expnum.strip('p'), header[_CCD].split('d')[1])
    ossos_psf = '{}.psf.fits'.format(uri.strip('.fits'))
    local_psf = '{}{}.psf.fits'.format(expnum, header[_CCD].split('d')[1])
    local_file_path = '{}/{}'.format(_STAMPS_DIR, local_psf)
    storage.copy(ossos_psf, local_file_path)

    # pvwcs = wcs.WCS(header)
    # x, y = pvwcs.sky2xy(asteroid_id['ra'].values, asteroid_id['dec'].values)
    x = asteroid_id[_XMID_HEADER].values[0]
    y = asteroid_id[_YMID_HEADER].values[0]

    # run seepsf on the mean psf image
    iraf.set(uparm="./")
    iraf.digiphot(_doprint=0)
    iraf.apphot(_doprint=0)
    iraf.daophot(_doprint=0)
    iraf.seepsf(local_file_path, local_psf, xpsf=x, ypsf=y, magnitude=mag)

    with fits.open(local_psf) as hdulist:
        data = hdulist[0].data

    th = math.degrees(asteroid_id[_THETA_HEADER].values[0])
    data_rot = rotate(data, th)
    data_rot = np.ma.masked_where(data_rot == 0, data_rot)

    data_mean = np.ma.mean(data_rot, axis=1)

    os.unlink(local_psf)
    os.unlink(local_file_path)

    return data_mean[np.nonzero(np.ma.fix_invalid(data_mean, fill_value=0))[0]]
Example #22
File: step1.py Project: OSSOS/MOP
def run(expnum,
        ccd,
        prefix='',
        version='p',
        sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD,
        maxcount=_MAX_COUNT,
        dry_run=False,
        force=True, ignore=False):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image.  In pixels.
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level

    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd) and not ignore:
                raise IOError(35, "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                    task, dependency, prefix, expnum, version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")
            storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version, default=3.5)
            basename = os.path.splitext(filename)[0]

            _get_weight_map(filename, ccd)

            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            logging.info(util.exec_prog(['step1matt',
                                         '-f', basename,
                                         '-t', str(sex_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')

            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version, ext=ext,
                                              prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
Example #23
File: combine.py Project: drusk/MOP
def combine(expnum, ccd, prefix=None, type='p', field=None, measure3=MEASURE3 ):

    if field is None:
        field=str(expnum)

    if prefix is not None and len(prefix) > 0:
        field = "%s_%s" % ( prefix, field ) 
    field += "_%s" % ( str(ccd))

    for ext in ['moving.matt','moving.jmp']:
        fname = storage.get_image(expnum,
                                  ccd=ccd,
                                  prefix=prefix,
                                  version=type, ext=ext)
    if prefix is not None and len(prefix) > 0:
        planted = storage.get_image('Object',
                                    subdir=str(expnum)+"/ccd%s" % (
            str(ccd).zfill(2)),
                                    version='',
                                    ext='planted')
    else:
        prefix = ''

    base_image = os.path.basename( 
        storage.get_uri(expnum,
                        ccd=ccd,
                        prefix=prefix,
                        version=type,
                        ext=None))


    
    cmd_args = ['comb-list', prefix+str(expnum)+type+str(ccd).zfill(2)]
    util.exec_prog(cmd_args)
    ext_list = ['cands.comb']
    if prefix is not None and len(prefix) > 0 :
        ext_list.extend( [ 'jmp.missed', 'matt.missed',
                            'jmp.found', 'matt.found',
                            'comb.missed', 'comb.found' ] )
                         

    for ext in ext_list:
        uri = storage.get_uri(expnum,
                              ccd=ccd,
                              prefix=prefix,
                              version=type,
                              ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename,os.R_OK):
            logging.critical("No %s file" % (filename))
            continue
        vospace_name = "%s.%s" % ( field, ext )
        logging.info("%s -> %s" % ( filename, os.path.join(measure3, vospace_name)))
        storage.copy(filename, os.path.join(measure3, vospace_name))

    base_name = prefix+str(expnum)+type+str(ccd).zfill(2)
    cands_file = base_name+'.cands.comb'
    
    if not os.access(cands_file,os.R_OK):
        nocands_file = ( prefix+
                         str(expnum)+
                         type+
                         str(ccd).zfill(2)+
                         '.no_candidates' )
        open(nocands_file, 'w').close()
        vospace_name = "%s.no_candidates" % ( field ) 
        storage.copy(nocands_file,os.path.join(measure3, vospace_name))

        return storage.SUCCESS


    # get the images we need to compute x/y ra/dec transforms
    cands_file = mop_file.Parser().parse(cands_file)
    for file_id in cands_file.header.file_ids:
        rec_no=cands_file.header.file_ids.index(file_id)
        storage.get_image(expnum=cands_file.header.keywords['EXPNUM'][rec_no],
                          ccd=ccd,
                          version=type,
                          prefix=prefix,
                          ext='fits')

    cmd_args = ['measure3', prefix+str(expnum)+type+str(ccd).zfill(2)]
    logging.info("Running measure3")
    util.exec_prog(cmd_args)
    
    filename=base_name+".measure3.cands.astrom"
    vospace_filename = "%s.measure3.cands.astrom" % ( field)
    storage.copy(filename, os.path.join(measure3,vospace_filename))
    return storage.SUCCESS
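A sketch of a single combine() call as defined above; the exposure number and ccd are placeholders, and measure3 falls back to the module-level MEASURE3 VOSpace directory.

# Hypothetical call of the combine() defined above for one planted exposure/ccd.
combine(expnum=1616681, ccd=22, prefix='fk', type='s')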
Example #24
File: plant.py Project: ijiraq/MOP
def plant(expnums,
          ccd,
          rmin,
          rmax,
          ang,
          width,
          number=10,
          mmin=21.0,
          mmax=25.5,
          version='s',
          dry_run=False,
          force=True):
    """Plant artificial sources into the list of images provided.

    @param dry_run: don't push results to VOSpace.
    @param width: The +/- range of angles of motion
    @param ang: The mean angle of motion to add sources
    @param rmax: The maximum rate of motion to add sources at (''/hour)
    @param rmin: The minimum rate of motion to add sources at (''/hour)
    @param expnums: list of MegaPrime exposure numbers to add artificial KBOs to
    @param ccd: which ccd to work on.
    @param mmax: Maximum magnitude to plant sources at
    @param version: Add sources to the 'o', 'p' or 's' images
    @param mmin: Minimum magnitude to plant sources at
    @param number: number of sources to plant.
    @param force: Run, even if we already succeeded at making a fk image.
    """
    message = storage.SUCCESS

    if storage.get_status(task, "", expnums[0], version, ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, "", expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, "", expnums[0], ccd, version, dry_run):
        try:
            # Construct a list of artificial KBOs with positions in the image
            # and rates of motion within the bounds given by the caller.
            filename = storage.get_image(expnums[0], ccd=ccd, version=version)
            header = fits.open(filename)[0].header
            bounds = util.get_pixel_bounds_from_datasec_keyword(
                header.get('DATASEC', '[33:2080,1:4612]'))

            # generate a set of artificial KBOs to add to the image.
            kbos = KBOGenerator.get_kbos(n=number,
                                         rate=(rmin, rmax),
                                         angle=(ang - width, ang + width),
                                         mag=(mmin, mmax),
                                         x=(bounds[0][0], bounds[0][1]),
                                         y=(bounds[1][0], bounds[1][1]),
                                         filename='Object.planted')

            for expnum in expnums:
                filename = storage.get_image(expnum, ccd, version)
                psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
                plant_kbos(filename, psf, kbos,
                           get_shifts(expnum, ccd, version), "fk")

            if dry_run:
                return
            uri = storage.get_uri('Object',
                                  ext='planted',
                                  version='',
                                  subdir=f"{expnums[0]}/ccd{int(ccd):02d}")

            storage.copy('Object.planted', uri)
            for expnum in expnums:
                uri = storage.get_uri(expnum,
                                      ccd=ccd,
                                      version=version,
                                      ext='fits',
                                      prefix='fk')
                filename = os.path.basename(uri)
                storage.copy(filename, uri)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task, "", expnums[0], version, ccd, status=message)

    return
Example #25
def plant(expnums, ccd, rmin, rmax, ang, width, version='s'):
    '''run the plant script on this combination of exposures'''

    ptf = open('proc-these-files', 'w')
    ptf.write("# Files to be planted and search\n")
    ptf.write("# image fwhm plant\n")

    for expnum in expnums:
        fwhm = storage.get_fwhm(expnum, ccd)
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        ptf.write("%s %3.1f YES\n" % (filename[0:-5], fwhm))
        for ext in [
                'apcor', 'obj.jmp', 'trans.jmp', 'psf.fits', 'mopheader',
                'phot', 'zeropoint.used'
        ]:
            apcor = storage.get_image(expnum, ccd=ccd, version='s', ext=ext)

    ptf.close()

    cmd_args = [
        'plant.csh', os.curdir,
        str(rmin),
        str(rmax),
        str(ang),
        str(width)
    ]

    util.exec_prog(cmd_args)

    if args.dryrun:
        # Don't push back to VOSpace
        return

    uri = storage.get_uri('Object',
                          ext='planted',
                          version='',
                          subdir=str(expnums[0]) + "/ccd%s" %
                          (str(ccd).zfill(2)))
    storage.copy('Object.planted', uri)
    uri = os.path.join(os.path.dirname(uri), 'shifts')
    storage.copy('shifts', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum,
                              ccd=ccd,
                              version=version,
                              ext='fits',
                              prefix='fk')
        filename = os.path.basename(uri)
        storage.copy(filename, uri)

        for ext in [
                'mopheader', 'psf.fits', 'fwhm', 'apcor', 'zeropoint.used',
                'trans.jmp'
        ]:
            storage.delete(expnum, ccd, 's', ext, prefix='fk')
            storage.vlink(expnum,
                          ccd,
                          'p',
                          ext,
                          expnum,
                          ccd,
                          's',
                          ext,
                          l_prefix='fk')

    return
Example #26
File: step3.py Project: sevenlin123/MOP
def step3(expnums,
          ccd,
          version,
          rate_min,
          rate_max,
          angle,
          width,
          field=None,
          prefix=None):
    '''run the actual step3 on the given exp/ccd combo'''

    jmp_args = ['step3jmp']
    matt_args = ['step3matt']

    idx = 0
    cmd_args = []
    for expnum in expnums:
        idx += 1
        for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
            filename = storage.get_image(expnum,
                                         ccd=ccd,
                                         version=version,
                                         ext=ext,
                                         prefix=prefix)
        image = os.path.splitext(
            os.path.splitext(os.path.basename(filename))[0])[0]
        cmd_args.append('-f%d' % (idx))
        cmd_args.append(image)

    cmd_args.extend([
        '-rn',
        str(rate_min), '-rx',
        str(rate_max), '-a',
        str(angle), '-w',
        str(width)
    ])
    jmp_args.extend(cmd_args)
    matt_args.extend(cmd_args)
    util.exec_prog(jmp_args)
    util.exec_prog(matt_args)

    if field is None:
        field = str(expnums[0])
    storage.mkdir(
        os.path.dirname(
            storage.get_uri(field,
                            ccd=ccd,
                            version=version,
                            ext=ext,
                            prefix=prefix)))

    for ext in ['moving.jmp', 'moving.matt']:
        uri = storage.get_uri(field,
                              ccd=ccd,
                              version=version,
                              ext=ext,
                              prefix=prefix)
        filename = '%s%d%s%s.%s' % (prefix, expnums[0], version,
                                    str(ccd).zfill(2), ext)
        storage.copy(filename, uri)

    return
Example #27
def align(expnums, ccd, version='s', prefix='', dry_run=False, force=True):
    """Create a 'shifts' file that transforms the space/flux/time scale of all
    images to the first image.

    This function relies on the .fwhm, .trans.jmp, .phot and .zeropoint.used files for inputs.
    The scaling we are computing here is for use in planting sources into the image at the same sky/flux locations
    while accounting for motions of sources with time.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to,
                    the first frame in the list is the reference.
    :param ccd: which ccd to work on.
    :param prefix: put this string in front of expnum when looking for exposure, normally '' or 'fk'
    :param force: When true run task even if this task is recorded as having succeeded
    :param version: Add sources to the 'o', 'p' or 's' images
    :param dry_run: don't push results to VOSpace.
    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnums[0], version,
                          ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(
            task, prefix, expnums[0], version, ccd))
        return

    # Get the images and supporting files that we need from the VOSpace area
    # get_image and get_file check if the image/file is already on disk.
    # re-computed fluxes from the PSF stars and then recompute x/y/flux scaling.

    # some dictionaries to hold the various scale
    pos = {}
    apcor = {}
    mags = {}
    zmag = {}
    mjdates = {}

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version,
                                dry_run):
        try:
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd=ccd, version=version)
                zmag[expnum] = storage.get_zeropoint(expnum,
                                                     ccd,
                                                     prefix=None,
                                                     version=version)
                mjdates[expnum] = float(
                    fits.open(filename)[0].header.get('MJD-OBS'))
                apcor[expnum] = [
                    float(x) for x in open(
                        storage.get_file(
                            expnum,
                            ccd=ccd,
                            version=version,
                            ext=storage.APCOR_EXT)).read().split()
                ]
                keys = ['crval1', 'cd1_1', 'cd1_2', 'crval2', 'cd2_1', 'cd2_2']
                # load the .trans.jmp values into a 'wcs' like dictionary.
                # .trans.jmp maps current frame to reference frame in pixel coordinates.
                # the reference frame of all the frames supplied must be the same.
                shifts = dict(
                    list(
                        zip(keys, [
                            float(x) for x in open(
                                storage.get_file(
                                    expnum,
                                    ccd=ccd,
                                    version=version,
                                    ext='trans.jmp')).read().split()
                        ])))
                shifts['crpix1'] = 0.0
                shifts['crpix2'] = 0.0
                # now create a wcs object based on those transforms, this wcs links the current frame's
                # pixel coordinates to the reference frame's pixel coordinates.
                w = get_wcs(shifts)

                # get the PHOT file that was produced by the mkpsf routine
                logging.debug("Reading .phot file {}".format(expnum))
                phot = ascii.read(storage.get_file(expnum,
                                                   ccd=ccd,
                                                   version=version,
                                                   ext='phot'),
                                  format='daophot')

                # compute the small-aperture magnitudes of the stars used in the PSF
                logging.debug("Running phot on {}".format(filename))
                mags[expnum] = daophot.phot(filename,
                                            phot['XCENTER'],
                                            phot['YCENTER'],
                                            aperture=apcor[expnum][0],
                                            sky=apcor[expnum][1] + 1,
                                            swidth=apcor[expnum][0],
                                            zmag=zmag[expnum])

                # convert the x/y positions to positions in Frame 1 based on the trans.jmp values.
                logging.debug(
                    "Doing the XY translation to reference frame: {}".format(w))
                (x, y) = w.wcs_pix2world(mags[expnum]["XCENTER"],
                                         mags[expnum]["YCENTER"], 1)
                pos[expnum] = numpy.transpose([x, y])
                # match this exposure's PSF star positions against those in the first image of the set.
                logging.debug("Matching lists")
                idx1, idx2 = util.match_lists(pos[expnums[0]], pos[expnum])

                # compute the magnitude offset between the current frame and the reference.
                dmags = numpy.ma.array(
                    mags[expnums[0]]["MAG"] - apcor[expnums[0]][2] -
                    (mags[expnum]["MAG"][idx1] - apcor[expnum][2]),
                    mask=idx1.mask)
                dmags.sort()
                # logging.debug("Computed dmags between input and reference: {}".format(dmags))
                error_count = 0

                error_count += 1
                logging.debug("{}".format(error_count))

                # compute the median and determine if that shift is small compared to the scatter.
                try:
                    midx = int(
                        numpy.sum(numpy.any([~dmags.mask], axis=0)) / 2.0)
                    dmag = float(dmags[midx])
                    logging.debug("Computed a mag delta of: {}".format(dmag))
                except Exception as e:
                    logging.error(str(e))
                    logging.error(
                        "Failed to compute mag offset between plant and found using: {}"
                        .format(dmags))
                    dmag = 99.99

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    if math.fabs(dmag) > 3 * (dmags.std() + 0.01):
                        logging.warning(
                            "Magnitude shift {} between {} and {} is large: {}"
                            .format(dmag, expnums[0], expnum, shifts))
                except Exception as e:
                    logging.error(str(e))

                error_count += 1
                logging.debug("{}".format(error_count))

                shifts['dmag'] = dmag
                shifts['emag'] = dmags.std()
                shifts['nmag'] = len(dmags.mask) - dmags.mask.sum()
                shifts['dmjd'] = mjdates[expnums[0]] - mjdates[expnum]
                shift_file = os.path.basename(
                    storage.get_uri(expnum, ccd, version, '.shifts'))

                error_count += 1
                logging.debug("{}".format(error_count))

                try:
                    fh = open(shift_file, 'w')
                    fh.write(
                        json.dumps(shifts,
                                   sort_keys=True,
                                   indent=4,
                                   separators=(',', ': '),
                                   cls=NpEncoder))
                    fh.write('\n')
                    fh.close()
                except Exception as e:
                    logging.error(
                        "Creation of SHIFTS file failed while trying to write: {}"
                        .format(shifts))
                    raise e

                error_count += 1
                logging.debug("{}".format(error_count))

                if not dry_run:
                    storage.copy(
                        shift_file,
                        storage.get_uri(expnum, ccd, version, '.shifts'))
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task,
                               prefix,
                               expnum,
                               version,
                               ccd,
                               status=message)
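A minimal sketch of calling align() above on one triplet, assuming the module-level task name and the supporting .fwhm/.trans.jmp/.phot/.zeropoint.used files mentioned in the docstring are available; the exposure numbers and ccd are placeholders.

# Hypothetical call of the align() defined above; the first expnum is the reference frame.
align(expnums=[1616681, 1616682, 1616683],
      ccd=22,
      version='s',    # align the scrambled 's' images
      prefix='',      # '' for real data, 'fk' for planted frames
      dry_run=True,   # write the .shifts files locally, push nothing to VOSpace
      force=False)    # skip if this task already succeeded for this ccd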
Example #28
File: snr.py Project: stephengwyn/MOP
def uri(self):
    return storage.get_uri(self.expnum, self.ccd, ext='apcor')
Example #29
File: step1.py Project: OSSOS/MOP
def step1(expnum,
          ccd,
          prefix='',
          version='p',
          sex_thresh=_SEX_THRESHOLD,
          wave_thresh=_WAVE_THRESHOLD,
          maxcount=_MAX_COUNT,
          dry_run=False):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image.  In pixels.
    sex_thresh: the detection threshold to run sExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level

    """

    storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]

    logging.info(util.exec_prog(['step1jmp',
                                 '-f', basename,
                                 '-t', str(wave_thresh),
                                 '-w', str(fwhm),
                                 '-m', str(maxcount)]))

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename + ".obj.jmp"

    if not dry_run:
        storage.copy(obj_filename, obj_uri)

    ## for step1matt we need the weight image
    hdulist = fits.open(filename)
    flat_name = hdulist[0].header.get('FLAT', 'weight.fits')
    parts = os.path.splitext(flat_name)
    if parts[1] == '.fz':
        flat_name = os.path.splitext(parts[0])[0]
    else:
        flat_name = parts[0]
    try:
        flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='calibrators')
    except:
        flat_filename = storage.get_image(flat_name, ccd, version='', ext='fits', subdir='old_calibrators')

    if os.access('weight.fits', os.R_OK):
        os.unlink('weight.fits')

    if not os.access('weight.fits', os.R_OK):
        os.symlink(flat_filename, 'weight.fits')

    logging.info(util.exec_prog(['step1matt',
                                 '-f', basename,
                                 '-t', str(sex_thresh),
                                 '-w', str(fwhm),
                                 '-m', str(maxcount)]))

    if os.access('weight.fits', os.R_OK):
        os.unlink('weight.fits')

    obj_uri = storage.get_uri(expnum, ccd, version=version, ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename + ".obj.matt"

    if not dry_run:
        storage.copy(obj_filename, obj_uri)

    return True
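A hedged usage sketch for this step1 variant follows; the exposure number and CCD are placeholders, and dry_run=True keeps the object lists from being copied back to VOSpace.

# Hypothetical call with placeholder exposure/CCD values.
step1(1616681, 22, prefix='', version='p', dry_run=True)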
Example #30
0
with open('planning/triplet_and_processing_notes/{}_15B_discovery_expnums.txt'.
        format(field), 'r') as infile:
    # it = ims.images

    with open(outfile, 'w') as ofile:
        ofile.write(
            'Expnum RA DEC MJD_middle Exptime_sec plant_xmin_px plant_xmax_px plant_ymin_px plant_ymax_px '
            'search_xmin_px search_xmax_px search_ymin_px search_ymax_px\n'.
            format())

    for triplet in infile.readlines():
        with open(outfile, 'a') as ofile:  # blank line between triplets
            ofile.write('{}'.format(triplet.split()[3]))

        for expnum in triplet.split()[0:3]:
            # expnum, ra, dec, obs_end, mjd_middle, exptime (seconds)
            header = storage.get_header(storage.get_uri(expnum))
            # print header
            # JM wants MJD_middle only: calculate it from the midpoint between the start and end
            mjd_mid = header['MJDATE'] + (header['MJDEND'] -
                                          header['MJDATE']) / 2.
            retval = [
                expnum, header['CRVAL1'], header['CRVAL2'], mjd_mid,
                header['EXPTIME']
            ]

            # now the area in which we planted TNOs for characterisation (hardwired in plant.csh's calls to kbo_gen)
            # chosen to match that in header keyword DATASEC = [33:2080,1:4612] / Imaging area of the entire CCD in
            # raw frame
            if field in ('E', 'O'):
                xmin = 66  # ya, we don't know why it got hardwired this way initially. 1.6% underplanting.
            else:  # spotted this by the time H and L etc were done. This was fixed for H/L to use DATASEC directly.
Example #31
0
def combine(expnum,
            ccd,
            prefix=None,
            file_type='p',
            field=None,
            measure3=MEASURE3,
            dry_run=False):
    if field is None:
        field = str(expnum)

    if prefix is not None and len(prefix) > 0:
        field = "%s_%s" % (prefix, field)
    field += "_%s%s" % (str(file_type), str(ccd))

    logging.info("Doing combine on field {}".format(field))

    for ext in ['moving.matt', 'moving.jmp']:
        storage.get_file(expnum,
                         ccd=ccd,
                         version=file_type,
                         ext=ext,
                         prefix=prefix)

    if prefix is not None and len(prefix) > 0:
        storage.get_file('Object',
                         version='',
                         ext='planted',
                         subdir=str(expnum) + "/ccd%s" % (str(ccd).zfill(2)))
    else:
        prefix = ''

    cmd_args = [
        'comb-list', prefix + str(expnum) + file_type + str(ccd).zfill(2)
    ]
    logging.info(str(cmd_args))
    logging.info(util.exec_prog(cmd_args))
    ext_list = ['cands.comb']
    if prefix is not None and len(prefix) > 0:
        ext_list.extend([
            'jmp.missed', 'matt.missed', 'jmp.found', 'matt.found',
            'comb.missed', 'comb.found'
        ])

    for ext in ext_list:
        uri = storage.get_uri(expnum,
                              ccd=ccd,
                              prefix=prefix,
                              version=file_type,
                              ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename, os.R_OK):
            logging.critical("No %s file" % filename)
            continue
        vospace_name = "%s.%s" % (field, ext)
        if not dry_run:
            logging.info("%s -> %s" %
                         (filename, os.path.join(measure3, vospace_name)))
            storage.copy(filename, os.path.join(measure3, vospace_name))

    base_name = prefix + str(expnum) + file_type + str(ccd).zfill(2)
    cands_file = base_name + '.cands.comb'

    if not os.access(cands_file, os.R_OK):
        no_cands_file = (prefix + str(expnum) + file_type + str(ccd).zfill(2) +
                         '.no_candidates')
        open(no_cands_file, 'w').close()
        if not dry_run:
            vospace_name = "%s.no_candidates" % field
            storage.copy(no_cands_file, os.path.join(measure3, vospace_name))

        return storage.SUCCESS

    # get the images we need to compute x/y ra/dec transforms
    cands_file = mop_file.Parser().parse(cands_file)
    for file_id in cands_file.header.file_ids:
        rec_no = cands_file.header.file_ids.index(file_id)
        storage.get_image(expnum=cands_file.header.keywords['EXPNUM'][rec_no],
                          ccd=ccd,
                          version=file_type,
                          ext='fits',
                          prefix=prefix)

    cmd_args = [
        'measure3', prefix + str(expnum) + file_type + str(ccd).zfill(2)
    ]
    logging.info("Running measure3")
    logging.info(util.exec_prog(cmd_args))

    if not dry_run:
        filename = base_name + ".measure3.cands.astrom"
        vospace_filename = "%s.measure3.cands.astrom" % field
        storage.copy(filename, os.path.join(measure3, vospace_filename))

    return storage.SUCCESS
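A hedged usage sketch for the combine variant above; the exposure number and CCD are placeholders and the default MEASURE3 destination is assumed.

# Hypothetical call on a planted ('fk') exposure; values are placeholders.
combine(1616681, 22, prefix='fk', file_type='p', dry_run=True)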
Example #32
0
def plant(expnums,
          ccd,
          rmin,
          rmax,
          ang,
          width,
          number=10,
          version='s',
          dry_run=False):
    """Plant artificial sources into the list of images provided.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to
    :param ccd: which ccd to work on.
    :param rmin: The minimum rate of motion to add sources at (''/hour)
    :param rmax: The maximum rate of motion to add sources at (''/hour)
    :param ang: The mean angle of motion to add sources
    :param width: The +/- range of angles of motion
    :param version: Add sources to the 'o', 'p' or 's' images
    :param dry_run: don't push results to VOSpace.
    """

    # Construct a list of artificial KBOs with positions in the image
    # and rates of motion within the bounds given by the caller.
    filename = storage.get_image(expnums[0], ccd=ccd, version=version)
    header = fits.open(filename)[0].header
    bounds = util.get_pixel_bounds_from_datasec_keyword(
        header.get('DATASEC', '[33:2080,1:4612]'))

    # generate a set of artifical KBOs to add to the image.
    kbos = Table(names=('x', 'y', 'mag', 'sky_rate', 'angle', 'id'))
    for kbo in KBOGenerator(n=number,
                            x=Range(bounds[0][0], bounds[0][1]),
                            y=Range(bounds[1][0], bounds[1][1]),
                            rate=Range(rmin, rmax),
                            angle=Range(ang - width, ang + width),
                            mag=Range(21.0, 25.0)):
        kbos.add_row(kbo)

    fd = open('Object.planted', 'w')
    fd.write("# ")
    kbos.write(fd, format='ascii.fixed_width', delimiter=None)
    fd.close()
    for expnum in expnums:
        filename = storage.get_image(expnum, ccd, version)
        psf = storage.get_file(expnum, ccd, version, ext='psf.fits')
        plant_kbos(filename, psf, kbos, get_shifts(expnum, ccd, version), "fk")

    if dry_run:
        return

    uri = storage.get_uri('Object',
                          ext='planted',
                          version='',
                          subdir=str(expnums[0]) + "/ccd%s" %
                          (str(ccd).zfill(2)))

    storage.copy('Object.planted', uri)
    for expnum in expnums:
        uri = storage.get_uri(expnum,
                              ccd=ccd,
                              version=version,
                              ext='fits',
                              prefix='fk')
        filename = os.path.basename(uri)
        storage.copy(filename, uri)

    return
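A hedged usage sketch for plant(); the exposure triple and the rate/angle bounds below are placeholders, not values taken from this snippet.

# Hypothetical call: plant 10 artificial KBOs moving 0.5-15.0 "/hr at 20 +/- 30 degrees
# into the scrambled ('s') versions of a placeholder exposure triple.
plant([1616681, 1616682, 1616683], ccd=22,
      rmin=0.5, rmax=15.0, ang=20.0, width=30.0,
      number=10, version='s', dry_run=True)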
Example #33
0
def main():
    """Do the script."""
    parser = argparse.ArgumentParser(description='replace image header')
    parser.add_argument('--extname', help='name of extension to update in header')
    parser.add_argument('expnum', type=str, help='exposure to update')
    parser.add_argument('-r',
                        '--replace',
                        action='store_true',
                        help='store modified image back to VOSpace?')
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--force',
                        action='store_true',
                        help="Re-run even if previous success recorded")
    parser.add_argument('--dbimages',
                        help="VOSpace DATA storage area.",
                        default="vos:OSSOS/dbimages")

    args = parser.parse_args()
    task = util.task()
    dependency = 'preproc'
    prefix = ""

    storage.DBIMAGES = args.dbimages

    level = logging.CRITICAL
    message_format = "%(message)s"
    if args.verbose:
        level = logging.INFO
    if args.debug:
        level = logging.DEBUG
        message_format = "%(module)s %(funcName)s %(lineno)s %(message)s"
    logging.basicConfig(level=level, format=message_format)
    storage.set_logger(task, prefix, args.expnum, None, None, False)

    message = storage.SUCCESS
    expnum = args.expnum

    exit_status = 0
    try:
        # skip if already succeeded and not in force mode
        if storage.get_status(task, prefix, expnum, "p",
                              36) and not args.force:
            logging.info("Already updated, skipping")
            sys.exit(0)

        image_hdulist = storage.get_image(args.expnum, return_file=False)
        ast_hdulist = storage.get_astheader(expnum, ccd=None)

        run_update_header(image_hdulist, ast_hdulist)
        image_filename = os.path.basename(storage.get_uri(expnum))
        image_hdulist.writeto(image_filename)
        if args.replace:
            dest = storage.dbimages_uri(expnum)
            storage.copy(image_filename, dest)
            storage.set_status('update_header', "", expnum, 'p', 36, message)
    except Exception as e:
        message = str(e)
        if args.replace:
            storage.set_status(task, prefix, expnum, 'p', 36, message)
        exit_status = message
        logging.error(message)

    return exit_status
Example #34
0
def step1(expnum,
          ccd,
          prefix='',
          version='p',
          fwhm=4,
          sex_thresh=1.3,
          wave_thresh=2.7,
          maxcount=30000):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image.  In pixels.
    sex_thresh: the detection threshold to run SExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level

    """

    filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
    mopheader = storage.get_image(expnum,
                                  ccd,
                                  version=version,
                                  ext='mopheader',
                                  prefix=prefix)
    fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
    basename = os.path.splitext(filename)[0]

    outfile = util.exec_prog([
        'step1jmp', '-f', basename, '-t',
        str(wave_thresh), '-w',
        str(fwhm), '-m',
        str(maxcount)
    ])

    obj_uri = storage.get_uri(expnum,
                              ccd,
                              version=version,
                              ext='obj.jmp',
                              prefix=prefix)
    obj_filename = basename + ".obj.jmp"

    storage.copy(obj_filename, obj_uri)

    ## for step1matt we need the weight image
    flat_name = fits.open(filename)[0].header['FLAT']
    flat_name = flat_name[0:-5]
    flat_filename = storage.get_image(flat_name,
                                      ccd,
                                      version='',
                                      ext='fits',
                                      subdir='calibrators',
                                      rescale=False)
    if not os.access('weight.fits', os.R_OK):
        os.symlink(flat_filename, 'weight.fits')
    outfile = util.exec_prog([
        'step1matt', '-f', basename, '-t',
        str(sex_thresh), '-w',
        str(fwhm), '-m',
        str(maxcount)
    ])

    obj_uri = storage.get_uri(expnum,
                              ccd,
                              version=version,
                              ext='obj.matt',
                              prefix=prefix)
    obj_filename = basename + ".obj.matt"

    storage.copy(obj_filename, obj_uri)

    return True
Example #35
0
File: combine.py Project: OSSOS/MOP
def run(expnum, ccd, prefix=None, version='p', field=None,
        measure3_dir=storage.MEASURE3, dry_run=False, force=False):

    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info(
            "{} completed successfully for {} {} {} {}".format(task, prefix,
                                                               expnum, version,
                                                               ccd))
        return

    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:
            if not storage.get_status(dependency, prefix, expnum, version, ccd):
                raise IOError(35, "Cannot start {} as {} not yet completed "
                                  "for {}{}{}{:02d}".format(task, dependency,
                                                            prefix, expnum,
                                                            version, ccd))
            if field is None:
                field = str(expnum)

            if prefix is not None and len(prefix) > 0:
                field = "%s_%s" % (prefix, field)
            field += "_%s%s" % (str(version), str(ccd))

            logging.info("Doing combine on field {}".format(field))

            for ext in ['moving.matt', 'moving.jmp']:
                storage.get_file(expnum, ccd=ccd, version=version, ext=ext,
                                 prefix=prefix)

            # Get the list of objects planted into the field if prefix='fk'
            if prefix == 'fk':
                storage.get_file('Object',
                                 version='',
                                 ext='planted',
                                 subdir=str(expnum) + "/ccd%s" % (
                                     str(ccd).zfill(2)))
            else:
                prefix = ''

            cmd_args = ['comb-list',
                        prefix + str(expnum) + version + str(ccd).zfill(2)]
            logging.info(str(cmd_args))
            logging.info(util.exec_prog(cmd_args))

            # things to copy back to VOSpace, if this is an 'fk' image
            # then we have missed and found files too.
            ext_list = ['cands.comb']
            if prefix == 'fk':
                ext_list.extend(['jmp.missed', 'matt.missed',
                                 'jmp.found', 'matt.found',
                                 'comb.missed', 'comb.found'])

            for ext in ext_list:
                uri = storage.get_uri(expnum,
                                      ccd=ccd,
                                      prefix=prefix,
                                      version=version,
                                      ext=ext)
                filename = os.path.basename(uri)
                if not os.access(filename, os.R_OK):
                    logging.critical("No %s file" % filename)
                    continue
                vospace_name = "%s.%s" % (field, ext)
                if not dry_run:
                    logging.info("%s -> %s" % (
                        filename, os.path.join(measure3_dir, vospace_name)))
                    storage.copy(filename, os.path.join(measure3_dir, vospace_name))

            base_name = prefix + str(expnum) + version + str(ccd).zfill(2)
            cands_file = base_name + '.cands.comb'

            if not os.access(cands_file, os.R_OK):
                no_cands_file = (prefix +
                                 str(expnum) +
                                 version +
                                 str(ccd).zfill(2) +
                                 '.no_candidates')
                open(no_cands_file, 'w').close()
                if not dry_run:
                    vospace_name = "%s.no_candidates" % field
                    storage.copy(no_cands_file,
                                 os.path.join(measure3_dir, vospace_name))

            else:
                
                measure3.run(base_name, storage.DBIMAGES)

                if not dry_run:
                    filename = base_name + ".measure3.cands.astrom"
                    vospace_filename = "%s.measure3.cands.astrom" % field
                    storage.copy(filename, os.path.join(measure3_dir, vospace_filename))

        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd,
                               status=message)
Example #36
0
with open('planning/triplet_and_processing_notes/{}_15B_discovery_expnums.txt'.format(field), 'r') as infile:
    # it = ims.images

    with open(outfile, 'w') as ofile:
        ofile.write(
            'Expnum RA DEC MJD_middle Exptime_sec plant_xmin_px plant_xmax_px plant_ymin_px plant_ymax_px '
            'search_xmin_px search_xmax_px search_ymin_px search_ymax_px\n'.format())

    for triplet in infile.readlines():
        with open(outfile, 'a') as ofile:  # blank line between triplets
            ofile.write('{}'.format(triplet.split()[3]))

        for expnum in triplet.split()[0:3]:
            # expnum, ra, dec, obs_end, mjd_middle, exptime (seconds)
            header = storage.get_header(storage.get_uri(expnum))
            # print header
            # JM wants MJD_middle only: calculate it from the midpoint between the start and end
            mjd_mid = header['MJDATE'] + (header['MJDEND'] - header['MJDATE']) / 2.
            retval = [expnum, header['CRVAL1'], header['CRVAL2'], mjd_mid, header['EXPTIME']]

            # now the area in which we planted TNOs for characterisation (hardwired in plant.csh's calls to kbo_gen)
            # chosen to match that in header keyword DATASEC = [33:2080,1:4612] / Imaging area of the entire CCD in
            # raw frame
            if field in ('E', 'O'):
                xmin = 66  # ya, we don't know why it got hardwired this way initially. 1.6% underplanting.
            else:  # spotted this by the time H and L etc were done. This was fixed for H/L to use DATASEC directly.
                xmin = 33
            retval.append(xmin)
            # xmax, ymin and ymax were all correctly taken from DATASEC
            retval.append(2080)
Example #37
0
File: align.py Project: stephengwyn/MOP
def align(expnums, ccd, version='s', dry_run=False):
    """Create a 'shifts' file that transforms the space/flux/time scale of all images to the first image.

    This function relies on the .fwhm, .trans.jmp, .phot and .zeropoint.used files for inputs.
    The scaling we are computing here is for use in planting sources into the image at the same sky/flux locations
    while accounting for motions of sources with time.

    :param expnums: list of MegaPrime exposure numbers to add artificial KBOs to,
                    the first frame in the list is the reference.
    :param ccd: which ccd to work on.
    :param version: Add sources to the 'o', 'p' or 's' images
    :param dry_run: don't push results to VOSpace.
    """

    # Get the images and supporting files that we need from the VOSpace area
    # get_image and get_file check if the image/file is already on disk.
    # re-computed fluxes from the PSF stars and then recompute x/y/flux scaling.

    # some dictionaries to hold the various scale
    pos = {}
    apcor = {}
    mags = {}
    zmag = {}
    mjdates = {}

    for expnum in expnums:
        filename = storage.get_image(expnum, ccd=ccd, version=version)
        zmag[expnum] = storage.get_zeropoint(expnum,
                                             ccd,
                                             prefix=None,
                                             version=version)
        mjdates[expnum] = float(fits.open(filename)[0].header.get('MJD-OBS'))
        apcor[expnum] = [
            float(x) for x in open(
                storage.get_file(
                    expnum, ccd=ccd, version=version,
                    ext=storage.APCOR_EXT)).read().split()
        ]
        keys = ['crval1', 'cd1_1', 'cd1_2', 'crval2', 'cd2_1', 'cd2_2']
        # load the .trans.jmp values into a 'wcs' like dictionary.
        # .trans.jmp maps current frame to reference frame in pixel coordinates.
        # the reference frame of all the frames supplied must be the same.
        shifts = dict(
            zip(keys, [
                float(x) for x in open(
                    storage.get_file(
                        expnum, ccd=ccd, version=version,
                        ext='trans.jmp')).read().split()
            ]))
        shifts['crpix1'] = 0.0
        shifts['crpix2'] = 0.0
        # now create a wcs object based on those transforms, this wcs links the current frame's
        # pixel coordinates to the reference frame's pixel coordinates.
        w = get_wcs(shifts)

        # get the PHOT file that was produced by the mkpsf routine
        phot = ascii.read(storage.get_file(expnum,
                                           ccd=ccd,
                                           version=version,
                                           ext='phot'),
                          format='daophot')

        # compute the small-aperture magnitudes of the stars used in the PSF
        mags[expnum] = daophot.phot(filename,
                                    phot['XCENTER'],
                                    phot['YCENTER'],
                                    aperture=apcor[expnum][0],
                                    sky=apcor[expnum][1] + 1,
                                    swidth=apcor[expnum][0],
                                    zmag=zmag[expnum])

        # covert the x/y positions to positions in Frame 1 based on the trans.jmp values.
        (x, y) = w.wcs_pix2world(mags[expnum]["XCENTER"],
                                 mags[expnum]["YCENTER"], 1)
        pos[expnum] = numpy.transpose([x, y])
        # match this exposures PSF stars position against those in the first image of the set.
        idx1, idx2 = util.match_lists(pos[expnums[0]], pos[expnum])

        # compute the magnitdue offset between the current frame and the reference.
        dmags = numpy.ma.array(mags[expnums[0]]["MAG"] - apcor[expnums[0]][2] -
                               (mags[expnum]["MAG"][idx1] - apcor[expnum][2]),
                               mask=idx1.mask)
        dmags.sort()

        # compute the median and determine if that shift is small compared to the scatter.
        dmag = dmags[int(len(dmags) / 2.0)]
        if math.fabs(dmag) > 3 * (dmags.std() + 0.01):
            logging.warning(
                "Magnitude shift {} between {} and {} is large: {}".format(
                    dmag, expnums[0], expnum, shifts))
        shifts['dmag'] = dmag
        shifts['emag'] = dmags.std()
        shifts['nmag'] = len(dmags.mask) - dmags.mask.sum()
        shifts['dmjd'] = mjdates[expnums[0]] - mjdates[expnum]
        shift_file = os.path.basename(
            storage.get_uri(expnum, ccd, version, '.shifts'))
        fh = open(shift_file, 'w')
        fh.write(
            json.dumps(shifts,
                       sort_keys=True,
                       indent=4,
                       separators=(',', ': ')))
        fh.write('\n')
        fh.close()
        if not dry_run:
            storage.copy(
                shift_file,
                storage.get_uri(expnum, ccd, version, '.shifts'))
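align() above maps pixel positions between frames through a get_wcs() helper that is not shown. A minimal sketch of such a helper, assuming astropy and treating the .trans.jmp coefficients as a purely linear transform, is given below; it is an assumption about the helper's behaviour, not the project's actual code.

from astropy import wcs


def get_wcs(shifts):
    # Hypothetical sketch: wrap the trans.jmp linear terms in a WCS object so that
    # wcs_pix2world() maps this frame's pixel coordinates onto the reference frame's.
    w = wcs.WCS(naxis=2)
    w.wcs.crpix = [shifts['crpix1'], shifts['crpix2']]
    w.wcs.crval = [shifts['crval1'], shifts['crval2']]
    w.wcs.cd = [[shifts['cd1_1'], shifts['cd1_2']],
                [shifts['cd2_1'], shifts['cd2_2']]]
    return w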
Example #38
0
            print(fwhm_file)
            fwhm = float(storage.vospace.open(fwhm_file, view='data').read())
        # now can we work with it?
        if fwhm is not None:
            iq = float(fwhm) * 0.1850  # plate scale is 0.1850 arcsec/pixel
            header_extract['iq_ossos'] = iq
        else:
            header_extract['iq_ossos'] = fwhm  # HACKED FOR NOW, WILL BE ADJUSTED LATER

    except Exception as e:
        header_extract['iq_ossos'] = None  # HACKED FOR NOW, FIX LATER

    try:
        zeropt = storage.get_tag(image, 'zeropoint_22')
        if zeropt is None:
            zeropt_file = storage.get_uri(image, ccd=22, ext='zeropoint.used')  # hfile+'/ccd22/'+image+'p22.zeropoint.used'  # the standard chip
            zeropt = storage.vospace.open(zeropt_file, view='data').read()
        # now let's try
        if zeropt is not None:
            header_extract['zeropt'] = float(zeropt)

        else:
            header_extract['zeropt'] = zeropt  # HACKED FOR NOW, WILL BE ADJUSTED LATER

    except Exception as e:
        header_extract['zeropt'] = None  # HACKED: FIX LATER

    return header_extract


def retrieve_processed_images(ims):
Example #39
0
File: snr.py Project: OSSOS/MOP
 def uri(self):
     return storage.get_uri(self.expnum, self.ccd, ext='apcor')
Example #40
0
File: step1.py Project: OSSOS/MOP
def run(expnum,
        ccd,
        prefix='',
        version='p',
        sex_thresh=_SEX_THRESHOLD,
        wave_thresh=_WAVE_THRESHOLD,
        maxcount=_MAX_COUNT,
        dry_run=False, 
        force=True):
    """run the actual step1jmp/matt codes.

    expnum: the CFHT exposure to process
    ccd: which ccd in the mosaic to process
    fwhm: the image quality, FWHM, of the image.  In pixels.
    sex_thresh: the detection threshold to run SExtractor at
    wave_thresh: the detection threshold for wavelet
    maxcount: saturation level

    """
    message = storage.SUCCESS

    if storage.get_status(task, prefix, expnum, version, ccd) and not force:
        logging.info("{} completed successfully for {} {} {} {}".format(task, prefix, expnum, version, ccd))
        return
        
    with storage.LoggingManager(task, prefix, expnum, ccd, version, dry_run):
        try:        
            if not storage.get_status(dependency, prefix, expnum, version, ccd):
                raise IOError(35, "Cannot start {} as {} not yet completed for {}{}{}{:02d}".format(
                    task, dependency, prefix, expnum, version, ccd))
            logging.info("Retrieving imaging and input parameters from VOSpace")    
            storage.get_file(expnum, ccd, prefix=prefix, version=version, ext='mopheader')
            filename = storage.get_image(expnum, ccd, version=version, prefix=prefix)
            fwhm = storage.get_fwhm(expnum, ccd, prefix=prefix, version=version)
            basename = os.path.splitext(filename)[0]

            _get_weight_map(filename, ccd)
            
            logging.info("Launching step1jmp")
            logging.info(util.exec_prog(['step1jmp',
                                         '-f', basename,
                                         '-t', str(wave_thresh),
                                         '-w', str(fwhm),
                                         '-m', str(maxcount)]))

            logging.info(util.exec_prog(['step1matt',
                                 '-f', basename,
                                 '-t', str(sex_thresh),
                                 '-w', str(fwhm),
                                 '-m', str(maxcount)]))

            if os.access('weight.fits', os.R_OK):
                os.unlink('weight.fits')
            
            if not dry_run:
                for ext in ['obj.jmp', 'obj.matt']:
                    obj_uri = storage.get_uri(expnum, ccd, version=version, ext=ext,
                                              prefix=prefix)
                    obj_filename = basename + "." + ext
                    count = 0
                    with open(obj_filename, 'r'):
                        while True:
                            try:
                                count += 1
                                logging.info("Attempt {} to copy {} -> {}".format(count, obj_filename, obj_uri))
                                storage.copy(obj_filename, obj_uri)
                                break
                            except Exception as ex:
                                if count > 10:
                                    raise ex
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task, prefix, expnum, version, ccd, status=message)
Example #41
0
def run(expnums,
        ccd,
        version,
        rate_min,
        rate_max,
        angle,
        width,
        field=None,
        prefix=None,
        dry_run=False,
        force=False):
    """run the actual step2  on the given exp/ccd combo"""

    jmp_args = ['step3jmp']
    matt_args = ['step3matt']

    if storage.get_status(task, prefix, expnums[0], version=version,
                          ccd=ccd) and not force:
        logging.info("{} completed successfully for {}{}{}{:02d}".format(
            task, prefix, expnums[0], version, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version,
                                dry_run):
        try:
            if not storage.get_status(
                    dependency, prefix, expnums[0], version=version, ccd=ccd):
                raise IOError(
                    35, "Cannot start {} as {} not yet completed {}{}{}{:02d}".
                    format(task, dependency, prefix, expnums[0], version, ccd))
            # Default message is success, message gets overwritten with failure messages.
            message = storage.SUCCESS

            idx = 0
            cmd_args = []
            for expnum in expnums:
                idx += 1
                for ext in ['unid.jmp', 'unid.matt', 'trans.jmp']:
                    storage.get_file(expnum,
                                     ccd=ccd,
                                     version=version,
                                     ext=ext,
                                     prefix=prefix)
                image = os.path.splitext(
                    os.path.basename(
                        storage.get_uri(expnum,
                                        ccd,
                                        version=version,
                                        prefix=prefix)))[0]
                cmd_args.append('-f%d' % idx)
                cmd_args.append(image)

            cmd_args.extend([
                '-rn',
                str(rate_min), '-rx',
                str(rate_max), '-a',
                str(angle), '-w',
                str(width)
            ])
            jmp_args.extend(cmd_args)
            matt_args.extend(cmd_args)
            logging.info(util.exec_prog(jmp_args))
            logging.info(util.exec_prog(matt_args))

            if dry_run:
                return

            if field is None:
                field = str(expnums[0])

            # Make sure a dbimages destination exists for this file.
            storage.mkdir(
                os.path.dirname(
                    storage.get_uri(field,
                                    ccd=ccd,
                                    version=version,
                                    prefix=prefix)))

            for ext in ['moving.jmp', 'moving.matt']:
                uri = storage.get_uri(field,
                                      ccd=ccd,
                                      version=version,
                                      ext=ext,
                                      prefix=prefix)
                filename = '%s%d%s%s.%s' % (prefix, expnums[0], version,
                                            str(ccd).zfill(2), ext)
                storage.copy(filename, uri)

        except Exception as ex:
            message = str(ex)
            logging.error(message)

        storage.set_status(task,
                           prefix,
                           expnums[0],
                           version=version,
                           ccd=ccd,
                           status=message)

    return
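A hedged usage sketch for the step3-style run() above; the exposures and search bounds are placeholders.

# Hypothetical call on a placeholder triple of scrambled, planted images.
run([1616681, 1616682, 1616683], ccd=22, version='s',
    rate_min=0.5, rate_max=15.0, angle=20.0, width=30.0,
    prefix='fk', dry_run=True)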
Example #42
0
def main():
    """Do the script."""
    parser = argparse.ArgumentParser(
        description='replace image header')
    parser.add_argument('--extname',
                        help='name of extension to update in header')
    parser.add_argument('expnum', type=str,
                        help='exposure to update')
    parser.add_argument('-r', '--replace',
                        action='store_true',
                        help='store modified image back to VOSpace?')
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--force', action='store_true', help="Re-run even if previous success recorded")
    parser.add_argument('--dbimages', help="VOSpace DATA storage area.", default="vos:OSSOS/dbimages")

    args = parser.parse_args()
    task = util.task()
    dependency = 'preproc'
    prefix = ""

    storage.DBIMAGES = args.dbimages

    level = logging.CRITICAL
    message_format = "%(message)s"
    if args.verbose:
        level = logging.INFO
    if args.debug:
        level = logging.DEBUG
        message_format = "%(module)s %(funcName)s %(lineno)s %(message)s"
    logging.basicConfig(level=level, format=message_format)
    storage.set_logger(task, prefix, args.expnum, None, None, False)

    message = storage.SUCCESS
    expnum = args.expnum

    exit_status = 0
    try:
        # skip if already succeeded and not in force mode
        if storage.get_status(task, prefix, expnum, "p", 36) and not args.force:
            logging.info("Already updated, skipping")
            sys.exit(0)
    
        image_hdulist = storage.get_image(args.expnum, return_file=False)
        ast_hdulist = storage.get_astheader(expnum, ccd=None)

        run_update_header(image_hdulist, ast_hdulist)
        image_filename = os.path.basename(storage.get_uri(expnum))
        image_hdulist.writeto(image_filename)
        if args.replace:
            dest = storage.dbimages_uri(expnum)
            storage.copy(image_filename, dest)
            storage.set_status('update_header', "", expnum, 'p', 36, message)
    except Exception as e:
        message = str(e)
        if args.replace:
            storage.set_status(task, prefix, expnum, 'p', 36, message)
        exit_status = message
        logging.error(message)

    return exit_status
Example #43
0
def combine(expnum, ccd, prefix=None, type='p'):

    for ext in ['moving.matt', 'moving.jmp']:
        fname = storage.get_image(expnum,
                                  ccd=ccd,
                                  prefix=prefix,
                                  version=type,
                                  ext=ext)
    if prefix is not None and len(prefix) > 0:
        planted = storage.get_image('Object',
                                    subdir=str(expnum) + "/ccd%s" %
                                    (str(ccd).zfill(2)),
                                    version='',
                                    ext='planted')
    else:
        prefix = ''

    base_image = os.path.basename(
        storage.get_uri(expnum, ccd=ccd, prefix=prefix, version=type,
                        ext=None))

    cmd_args = ['comb-list', prefix + str(expnum) + type + str(ccd).zfill(2)]
    util.exec_prog(cmd_args)

    for ext in [
            'cands.comb', 'comb.found', 'comb.missed', 'jmp.found',
            'jmp.missed', 'matt.found', 'matt.missed'
    ]:
        uri = storage.get_uri(expnum,
                              ccd=ccd,
                              prefix=prefix,
                              version=type,
                              ext=ext)
        filename = os.path.basename(uri)
        if not os.access(filename, os.R_OK):
            logging.critical("No %s file" % (filename))
            continue
        storage.copy(filename, uri)

    base_name = prefix + str(expnum) + type + str(ccd).zfill(2)
    cands_file = base_name + '.cands.comb'

    if not os.access(cands_file, os.R_OK):
        nocands_file = (prefix + str(expnum) + type + str(ccd).zfill(2) +
                        '.no_candidates')
        open(nocands_file, 'w').close()
        storage.copy(nocands_file, 'vos:OSSOS/measure3/' + nocands_file)
        return storage.SUCCESS

    cands_file = mop_file.Parser().parse(cands_file)
    for file_id in cands_file.header.file_ids:
        rec_no = cands_file.header.file_ids.index(file_id)
        storage.get_image(expnum=cands_file.header.keywords['EXPNUM'][rec_no],
                          ccd=ccd,
                          version=type,
                          prefix=prefix,
                          ext='fits')

    cmd_args = ['measure3', prefix + str(expnum) + type + str(ccd).zfill(2)]
    util.exec_prog(cmd_args)

    filename = base_name + ".measure3.cands.astrom"

    storage.copy(filename, 'vos:OSSOS/measure3/' + filename)
    return storage.SUCCESS
Example #44
0
File: scramble.py Project: ijiraq/MOP
def scramble(expnums, ccd, version='p', dry_run=False, force=False, prefix=''):
    """
    run the plant script on this combination of exposures

    @param expnums: list of exposure numbers to scramble the time on
    @param ccd:  which CCD in (assumes this is a CFHT MegaCam MEF)
    @param version: should we scramble the 'p' or 'o' images?
    @param dry_run: if dry run then don't save back to VOSpace.
    @param force: if true then create scramble set, even if already exists.
    @param prefix: a string that will be pre-pended to the EXPNUM to get the filename, sometimes 'fk'.
    @return: None
    """

    # Get a list of the MJD values and then write a re-ordering of those into files with 's'
    # as their type instead of 'p' or 'o'
    mjds = []
    fobjs = []
    message = storage.SUCCESS
    if not (force or dry_run) and storage.get_status(
            task, prefix, expnums[0], version='s', ccd=ccd):
        logging.info("{} recorded as complete for {} ccd {}".format(
            task, expnums, ccd))
        return

    with storage.LoggingManager(task, prefix, expnums[0], ccd, version):
        try:
            for expnum in expnums:
                filename = storage.get_image(expnum, ccd=ccd, version=version)
                fobjs.append(fits.open(filename))
                # Pull out values to replace in headers.. must pull them
                # as otherwise we get pointers...
                mjds.append(fobjs[-1][0].header['MJD-OBS'])

            order = [0, 2, 1]
            for idx in range(len(fobjs)):
                logging.info(
                    "Flipping %d to %d" %
                    (fobjs[idx][0].header['EXPNUM'], expnums[order[idx]]))
                fobjs[idx][0].header['EXPNUM'] = expnums[order[idx]]
                fobjs[idx][0].header['MJD-OBS'] = mjds[order[idx]]
                uri = storage.get_uri(expnums[order[idx]],
                                      ccd=ccd,
                                      version='s',
                                      ext='fits')
                scramble_file_name = os.path.basename(uri)
                if os.access(scramble_file_name, os.F_OK):
                    os.unlink(scramble_file_name)
                fobjs[idx].writeto(scramble_file_name)
                if not dry_run:
                    storage.copy(scramble_file_name, uri)
            logging.info(message)
        except Exception as ex:
            message = str(ex)
            logging.error(message)

        if not dry_run:
            storage.set_status(task,
                               prefix,
                               expnum,
                               version,
                               ccd,
                               status=message)

    return
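A hedged usage sketch for scramble(); the exposure numbers are placeholders.

# Hypothetical call: build time-scrambled 's' images from a placeholder triple
# without copying the results back to VOSpace.
scramble([1616681, 1616682, 1616683], ccd=22, version='p', dry_run=True)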