Example #1
def catalogue_call(avgCoord, opt, cat_name):
    data = namedtuple(typename='data',
                      field_names=['ra', 'dec', 'mag', 'emag', 'cat_name'])

    TABLES = {
        'APASS': '******',
        'SDSS': 'V/147/sdss12',
        'PanSTARRS': 'II/349/ps1',
        'SkyMapper': 'II/358/smss'
    }

    tbname = TABLES.get(cat_name, None)
    kwargs = {'radius': '0.33 deg'}
    kwargs['catalog'] = cat_name

    try:
        v = Vizier(columns=[
            'all'
        ])  # Skymapper by default does not report the error columns
        v.ROW_LIMIT = -1
        query = v.query_region(avgCoord, **kwargs)
    except VOSError:
        raise AstrosourceException("Could not find RA {} Dec {} in {}".format(
            avgCoord.ra.value, avgCoord.dec.value, cat_name))

    if query.keys():
        resp = query[tbname]
    else:
        raise AstrosourceException("Could not find RA {} Dec {} in {}".format(
            avgCoord.ra.value, avgCoord.dec.value, cat_name))

    logger.debug(f'Looking for sources in {cat_name}')
    if cat_name in ['APASS', 'PanSTARRS']:
        radecname = {'ra': 'RAJ2000', 'dec': 'DEJ2000'}
    elif cat_name == 'SDSS':
        radecname = {'ra': 'RA_ICRS', 'dec': 'DE_ICRS'}
    elif cat_name == 'SkyMapper':
        radecname = {'ra': 'RAICRS', 'dec': 'DEICRS'}
    else:
        radecname = {'ra': 'raj2000', 'dec': 'dej2000'}

    # Filter out bad data from catalogues
    if cat_name == 'PanSTARRS':
        resp = resp[where((resp['Qual'] == 52) | (resp['Qual'] == 60)
                          | (resp['Qual'] == 61))]
    elif cat_name == 'SDSS':
        resp = resp[resp['Q'] == 3]
    elif cat_name == 'SkyMapper':
        resp = resp[resp['flags'] == 0]

    data.cat_name = cat_name
    data.ra = array(resp[radecname['ra']].data)
    data.dec = array(resp[radecname['dec']].data)

    # extract RA, Dec, Mag and error as arrays
    data.mag = array(resp[opt['filter']].data)
    data.emag = array(resp[opt['error']].data)
    return data
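A minimal usage sketch (illustrative only, not part of astrosource): it assumes astropy and astroquery are installed and the module-level imports of catalogue_call are in place, and it uses a made-up field centre and the APASS V-band column names.

# Hypothetical inputs -- the field centre and filter/error column map are assumptions for illustration
from astropy.coordinates import SkyCoord
import astropy.units as u

avgCoord = SkyCoord(ra=150.0 * u.deg, dec=2.2 * u.deg)
opt = {'filter': 'Vmag', 'error': 'e_Vmag'}
stars = catalogue_call(avgCoord, opt, 'APASS')
print(stars.cat_name, len(stars.ra), stars.mag[:5])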
Example #2
def gather_files(paths, filetype="fz"):
    # Get list of files

    filelist = paths['parent'].glob("*.{}".format(filetype))
    if filetype not in ['fits', 'fit', 'fz']:
        # Assume we are not dealing with image files but photometry files
        phot_list = list(filelist)
    else:
        phot_list = export_photometry_files(filelist, paths['parent'])
    if not phot_list:
        raise AstrosourceException("No files of type '.{}' found in {}".format(
            filetype, paths['parent']))

    filters = set([os.path.basename(f).split('_')[1] for f in phot_list])

    logger.debug("Filter Set: {}".format(filters))
    if len(filters) > 1:
        raise AstrosourceException(
            "Check your images, the script detected multiple filters in your file list. Astrosource currently only does one filter at a time."
        )
    return phot_list, list(filters)[0]
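The filter code is read from the second underscore-separated token of each file name, so inputs are assumed to follow a <target>_<filter>_... naming pattern. A small sketch of that parsing with hypothetical file names:

import os

# Hypothetical file names following the assumed <target>_<filter>_... convention
files = ['RXEri_B_2458371d1_20s.npy', 'RXEri_B_2458372d2_20s.npy']
filters = set(os.path.basename(f).split('_')[1] for f in files)
assert filters == {'B'}  # more than one filter here would trigger the multi-filter exception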
Example #3
def gather_files(paths, filelist=None, filetype="fz", bjd=False):
    # Get list of files
    sys.stdout.write('💾 Inspecting input files\n')
    if not filelist:
        filelist = paths['parent'].glob("*.{}".format(filetype))
    if filetype not in ['fits', 'fit', 'fz']:
        # Assume we are not dealing with image files but photometry files
        phot_list = convert_photometry_files(filelist)
    else:
        # export_photometry_files returns a dict; keep just its keys (the npy file names) as a list
        phot_list = list(export_photometry_files(filelist, paths['parent'], bjd).keys())

    if not phot_list:
        raise AstrosourceException("No files of type '.{}' found in {}".format(filetype, paths['parent']))
    filters = set([os.path.basename(f).split('_')[1] for f in phot_list])

    logger.debug("Filter Set: {}".format(filters))
    if len(filters) > 1:
        raise AstrosourceException("Check your images, the script detected multiple filters in your file list. Astrosource currently only does one filter at a time.")
    return phot_list, list(filters)[0]
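A hedged call sketch, assuming paths['parent'] is a pathlib.Path pointing at a directory of compressed FITS files (the 'inputs' directory is a placeholder):

from pathlib import Path

paths = {'parent': Path('inputs')}  # placeholder directory
phot_list, filter_code = gather_files(paths, filetype='fz', bjd=False)
print(f"{len(phot_list)} photometry files in filter {filter_code}")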
Example #4
def output_files(paths, photometrydata, mode='diff'):
    if mode == 'calib' and not (paths['parent'] /
                                'calibCompsUsed.csv').exists():
        raise AstrosourceException("No calibrated photometry available")

    for j, outputPhot in enumerate(photometrydata):
        r = j + 1
        logger.info("Outputting files Variable " + str(r))

        outputPeransoCalib = list(zip(outputPhot[:, 6], outputPhot[:, 10],
                                      outputPhot[:, 11]))

        savetxt(paths['outcatPath'] / f'V{r}_{mode}Peranso.txt',
                outputPeransoCalib,
                delimiter=" ",
                fmt='%0.8f')
        savetxt(paths['outcatPath'] / f'V{r}_{mode}Excel.csv',
                outputPeransoCalib,
                delimiter=",",
                fmt='%0.8f')

        # Output Differential astroImageJ file
        outputaijCalib = list(zip(outputPhot[:, 6] - 2450000.0,
                                  outputPhot[:, 10], outputPhot[:, 11]))

        savetxt(paths['outcatPath'] / f'V{r}_{mode}AIJ.txt',
                outputaijCalib,
                delimiter=" ",
                fmt='%0.8f')
        savetxt(paths['outcatPath'] / f'V{r}_{mode}AIJ.csv',
                outputaijCalib,
                delimiter=",",
                fmt='%0.8f')
    return
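For reference, a sketch of reading one of the emitted light-curve files back in; the (time, magnitude, error) column order follows the zipped columns above, and the path is a placeholder:

from numpy import loadtxt

# Placeholder path; V1_diffExcel.csv holds comma-separated (time, differential mag, error) rows
time, mag, err = loadtxt('outputcats/V1_diffExcel.csv', delimiter=',', unpack=True)
print(time[0], mag[0], err[0])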
Example #5
def find_stars(targets, paths, fileList, mincompstars=0.1, starreject=0.1, acceptDistance=1.0, lowcounts=2000, hicounts=3000000, imageFracReject=0.0, rejectStart=7):
    """
    Finds stars useful for photometry in each photometry/data file

    Parameters
    ----------
    targets : list
            List of target tuples in the format (ra, dec, 0, 0). ra and dec must be in decimal degrees
    paths : dict
            Dictionary of paths; 'parent' points to the directory holding the photometry files
    fileList : list
            List of photometry files to try
    mincompstars : float
            Minimum number of comparison stars required, expressed as a fraction of the stars in the reference frame
    starreject : float
            Reject an image if it would remove more than this fraction of the available stars (after the first rejectStart images)
    acceptDistance : float
            Furthest distance in arcseconds for matches
    lowcounts : int
            look for comparisons brighter than this
    hicounts : int
            look for comparisons dimmer than this
    imageFracReject: float
            Reject images whose star catalogue is smaller than this fraction of the reference catalogue
    rejectStart : int
            This many initial images are exempt from the starreject test (lots of stars are expected to be rejected in the early images)

    Returns
    -------
    usedImages : list
            List of images usable for photometry (also written to usedImages.txt)
    outputComps : ndarray
            Candidate comparison star coordinates (also written to screenedComps.csv)
    """
    sys.stdout.write("🌟 Identify comparison stars for photometry calculations\n")
    #Initialisation values

    # LOOK FOR REJECTING NON-WCS IMAGES
    # If the WCS matching has failed, this function will remove the image from the list

    fileSizer=0
    logger.info("Finding image with most stars detected and reject ones with bad WCS")
    referenceFrame = None

    for file in list(fileList): # iterate over a copy so in-loop removals do not skip files
        photFile = load(paths['parent'] / file)
        if (photFile.size < 50):
            logger.debug("REJECT")
            logger.debug(file)
            fileList.remove(file)
        elif (( asarray(photFile[:,0]) > 360).sum() > 0) :
            logger.debug("REJECT")
            logger.debug(file)
            fileList.remove(file)
        elif (( asarray(photFile[:,1]) > 90).sum() > 0) :
            logger.debug("REJECT")
            logger.debug(file)
            fileList.remove(file)
        else:
            # Sort through and find the largest file and use that as the reference file
            if photFile.size > fileSizer:
                if (( photFile[:,0] > 360).sum() == 0) and ( photFile[0][0] != 'null') and ( photFile[0][0] != 0.0) :
                    referenceFrame = photFile
                    fileSizer = photFile.size
                    logger.debug("{} - {}".format(photFile.size, file))

    if referenceFrame is None or not referenceFrame.size:
        raise AstrosourceException("No suitable reference files found")

    logger.debug("Setting up reference Frame")
    fileRaDec = SkyCoord(ra=referenceFrame[:,0]*u.degree, dec=referenceFrame[:,1]*u.degree)

    logger.debug("Removing stars with low or high counts")
    rejectStars=[]
    # Check star has adequate counts
    for j in range(referenceFrame.shape[0]):
        if ( referenceFrame[j][4] < lowcounts or referenceFrame[j][4] > hicounts ):
            rejectStars.append(int(j))
    logger.debug("Number of stars prior")
    logger.debug(referenceFrame.shape[0])

    referenceFrame=delete(referenceFrame, rejectStars, axis=0)

    logger.debug("Number of stars post")
    logger.debug(referenceFrame.shape[0])

    originalReferenceFrame=referenceFrame
    originalfileList=list(fileList) # keep a real copy so later removals from fileList do not alter the backup
    compchecker=0

    mincompstars=int(referenceFrame.shape[0]*mincompstars) # Transform mincompstars variable from fraction of stars into number of stars.
    if mincompstars < 1: # Always require at least one comparison candidate
        mincompstars=1
    ##### Looper function to automatically cycle through more restrictive values for imageFracReject and starreject
    while (compchecker < mincompstars): # Keep going until you get the minimum number of Comp Stars
        imgsize=imageFracReject * fileSizer # set threshold size
        rejStartCounter = 0
        usedImages=[] # Set up used images array
        imgReject = 0 # Number of images rejected due to high rejection rate
        loFileReject = 0 # Number of images rejected due to too few stars in the photometry file
        wcsFileReject=0
        for file in list(fileList): # iterate over a copy; rejected files are removed from fileList below
            if ( not referenceFrame.shape[0] < mincompstars):
                rejStartCounter = rejStartCounter +1
                photFile = load(paths['parent'] / file)
                logger.debug('Image Number: ' + str(rejStartCounter))
                logger.debug(file)
                logger.debug("Image threshold size: "+str(imgsize))
                logger.debug("Image catalogue size: "+str(photFile.size))
                if photFile.size > imgsize and photFile.size > 7 :
                    phottmparr = asarray(photFile)
                    if (( phottmparr[:,0] > 360).sum() == 0) and ( phottmparr[0][0] != 'null') and ( phottmparr[0][0] != 0.0) :

                        # Checking existence of stars in all photometry files
                        rejectStars=[] # A list to hold what stars are to be rejected

                        # Find whether star in reference list is in this phot file, if not, reject star.
                        photRAandDec = SkyCoord(ra = photFile[:,0]*u.degree, dec = photFile[:,1]*u.degree)
                        for j in range(referenceFrame.shape[0]):
                            testStar = SkyCoord(ra = referenceFrame[j][0]*u.degree, dec = referenceFrame[j][1]*u.degree)
                            # match_to_catalog_sky is the only call in the whole package which requires scipy
                            idx, d2d, d3d = testStar.match_to_catalog_sky(photRAandDec)
                            if (d2d.arcsecond > acceptDistance):
                                #"No Match! Nothing within range."
                                rejectStars.append(int(j))


                    # if the rejectstar list is not empty, remove the stars from the reference List
                    if rejectStars != []:

                        if not (((len(rejectStars) / referenceFrame.shape[0]) > starreject) and rejStartCounter > rejectStart):
                            referenceFrame = delete(referenceFrame, rejectStars, axis=0)
                            logger.debug('**********************')
                            logger.debug('Stars Removed  : ' +str(len(rejectStars)))
                            logger.debug('Remaining Stars: ' +str(referenceFrame.shape[0]))
                            logger.debug('**********************')
                            usedImages.append(file)
                        else:
                            logger.debug('**********************')
                            logger.debug('Image Rejected due to too high a fraction of rejected stars')
                            logger.debug(len(rejectStars) / referenceFrame.shape[0])
                            logger.debug('**********************')
                            imgReject=imgReject+1
                            fileList.remove(file)
                    else:
                        logger.debug('**********************')
                        logger.debug('All Stars Present')
                        logger.debug('**********************')
                        usedImages.append(file)

                    # If we have removed all stars, we have failed!
                    if (referenceFrame.shape[0]==0):
                        logger.error("Problem file - {}".format(file))
                        logger.error("Running Loop again")
                        #raise AstrosourceException("All Stars Removed. Try removing problematic files or raising --imgreject value")

                    # if (referenceFrame.shape[0]< mincompstars):
                    #     logger.error("Problem file - {}".format(file))
                    #     raise AstrosourceException("There are fewer than the requested number of Comp Stars. Try removing problematic files or raising --imgreject value")

                elif photFile.size < 7:
                    logger.error('**********************')
                    logger.error("WCS Coordinates broken")
                    logger.error('**********************')
                    wcsFileReject=wcsFileReject+1
                    fileList.remove(file)
                else:
                    logger.error('**********************')
                    logger.error("CONTAINS TOO FEW STARS")
                    logger.error('**********************')
                    loFileReject=loFileReject+1
                    fileList.remove(file)
                sys.stdout.write('.')
                sys.stdout.flush()

        # Tighten the image-rejection thresholds (lower starreject, raise imageFracReject) for the next attempt
        starreject=starreject-0.025
        imageFracReject=imageFracReject+0.05
        compchecker = referenceFrame.shape[0]
        if starreject < 0.15:
            starreject=0.15
        if imageFracReject > 0.8:
            imageFracReject = 0.8

        if starreject == 0.15 and imageFracReject == 0.8 and mincompstars ==1:
            logger.error("Number of Candidate Comparison Stars found this cycle: " + str(compchecker))
            logger.error("Failed to find any comparison candidates with the maximum restrictions. There is something terribly wrong!")
            raise AstrosourceException("Unable to find sufficient comparison stars with the most stringent conditions in this dataset. Try reducing the --mincompstars value")

        if starreject == 0.15 and imageFracReject == 0.8 and mincompstars !=1:
            logger.error("Maximum number of Candidate Comparison Stars found this cycle: " + str(compchecker))
            logger.error("Failed to find sufficient comparison candidates with the maximum restrictions, trying with a lower value for mincompstars")
            compchecker=0
            mincompstars=int(mincompstars*0.8)
            if mincompstars < 1:
                mincompstars =1
            starreject=0.3
            imageFracReject=0.05
            referenceFrame=originalReferenceFrame
            fileList=list(originalfileList)

        elif (compchecker < mincompstars):
            logger.error("Number of Candidate Comparison Stars found this cycle: " + str(compchecker))
            logger.error("Failed to find sufficient comparison candidates, adjusting starreject and imgreject and trying again.")
            logger.error("Now trying starreject " +str(starreject) + " and imgreject " +str(imageFracReject))
            referenceFrame=originalReferenceFrame



    # Construct the output file containing candidate comparison stars
    outputComps=[]
    for j in range (referenceFrame.shape[0]):
        outputComps.append([referenceFrame[j][0],referenceFrame[j][1]])

    logger.debug("These are the identified common stars of sufficient brightness that are in every image")
    logger.debug(outputComps)

    logger.info('Images Rejected due to high star rejection rate: {}'.format(imgReject))
    logger.info('Images Rejected due to low file size: {}'.format(loFileReject))
    logger.info('Images remaining after rejection: {}'.format(len(fileList)))

    logger.info("Number of candidate Comparison Stars Detected: " + str(len(outputComps)))
    logger.info('Output sent to screenedComps.csv ready for use in Comparison')

    screened_file = paths['parent'] / "screenedComps.csv"
    outputComps = asarray(outputComps)
    # outputComps.sort(axis=0)

    # Reject targetstars immediately

    # Remove targets from consideration
    if targets.shape == (4,):
        targets = [targets]

    while True:
        targetRejects=[]
        if outputComps.shape[0] ==2 and outputComps.size ==2:
            fileRaDec=SkyCoord(ra=outputComps[0]*u.degree,dec=outputComps[1]*u.degree)
        else:
            fileRaDec=SkyCoord(ra=outputComps[:,0]*u.degree,dec=outputComps[:,1]*u.degree)

        for target in targets:
            varCoord = SkyCoord(target[0],(target[1]), frame='icrs', unit=u.deg) # Need to remove target stars from consideration

            idx, d2d, _ = varCoord.match_to_catalog_sky(fileRaDec)
            if d2d.arcsecond < 5.0: # anything within 5 arcseconds of the target
                targetRejects.append(idx)

        if targetRejects==[]:
            break
        #Remove target and restore skycoord list
        outputComps=delete(outputComps, targetRejects, axis=0)
        logger.info(outputComps)
        if len(outputComps) == 0:
            logger.info("The only comparisons detected where also target stars. No adequate comparisons were found.")
            sys.exit()
        fileRaDec = SkyCoord(ra=outputComps[:,0]*u.degree, dec=outputComps[:,1]*u.degree)

    savetxt(screened_file, outputComps, delimiter=",", fmt='%0.8f')
    used_file = paths['parent'] / "usedImages.txt"
    with open(used_file, "w") as f:
        for s in usedImages:
            filename = Path(s).name
            f.write(str(filename) +"\n")

    sys.stdout.write('\n')

    return usedImages, outputComps
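The acceptance test at the heart of the loop above is astropy's match_to_catalog_sky; a stripped-down sketch with synthetic coordinates (scipy is required, as noted in the code):

from astropy.coordinates import SkyCoord
import astropy.units as u

# Synthetic example: one reference star tested against a three-star photometry catalogue
photRAandDec = SkyCoord(ra=[150.001, 150.010, 150.020] * u.degree,
                        dec=[2.200, 2.210, 2.220] * u.degree)
testStar = SkyCoord(ra=150.0012 * u.degree, dec=2.2001 * u.degree)
idx, d2d, _ = testStar.match_to_catalog_sky(photRAandDec)
acceptDistance = 1.0  # arcseconds, as in the find_stars signature
print(idx, d2d.arcsecond, d2d.arcsecond <= acceptDistance)  # expect a match within 1 arcsec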
Example #6
def find_comparisons_calibrated(targets, paths, filterCode, nopanstarrs=False, nosdss=False, closerejectd=5.0, max_magerr=0.05, stdMultiplier=2, variabilityMultiplier=2):
    sys.stdout.write("тнРя╕П Find comparison stars in catalogues for calibrated photometry\n")

    FILTERS = {
                'B' : {'APASS' : {'filter' : 'Bmag', 'error' : 'e_Bmag'}},
                'V' : {'APASS' : {'filter' : 'Vmag', 'error' : 'e_Vmag'}},
                'up' : {'SDSS' : {'filter' : 'umag', 'error' : 'e_umag'},
                        'SkyMapper' : {'filter' : 'uPSF', 'error' : 'e_uPSF'}},
                'gp' : {'SDSS' : {'filter' : 'gmag', 'error' : 'e_gmag'},
                        'SkyMapper' : {'filter' : 'gPSF', 'error' : 'e_gPSF'},
                        'PanSTARRS': {'filter' : 'gmag', 'error' : 'e_gmag'}},
                'rp' : {'SDSS' : {'filter' : 'rmag', 'error' : 'e_rmag'},
                        'SkyMapper' : {'filter' : 'rPSF', 'error' : 'e_rPSF'},
                        'PanSTARRS': {'filter' : 'rmag', 'error' : 'e_rmag'}},
                'ip' : {'SDSS' : {'filter' : 'imag', 'error' : 'e_imag'},
                        'SkyMapper' : {'filter' : 'iPSF', 'error' : 'e_iPSF'},
                        'PanSTARRS': {'filter' : 'imag', 'error' : 'e_imag'}},
                'zs' : {'PanSTARRS': {'filter' : 'zmag', 'error' : 'e_zmag'},
                        'SkyMapper' : {'filter' : 'zPSF', 'error' : 'e_zPSF'},
                        'SDSS' : {'filter' : 'zmag', 'error' : 'e_zmag'}},
                }


    parentPath = paths['parent']
    calibPath = parentPath / "calibcats"
    if not calibPath.exists():
        os.makedirs(calibPath)

    #Vizier.ROW_LIMIT = -1

    # Get List of Files Used
    fileList=[]
    for line in (parentPath / "usedImages.txt").read_text().strip().split('\n'):
        fileList.append(line.strip())

    logger.debug("Filter Set: " + filterCode)

    # Load compsused
    compFile = genfromtxt(parentPath / 'stdComps.csv', dtype=float, delimiter=',')
    logger.debug(compFile.shape[0])

    if compFile.shape[0] == 13 and compFile.size == 13:
        compCoords=SkyCoord(ra=compFile[0]*degree, dec=compFile[1]*degree)
    else:
        compCoords=SkyCoord(ra=compFile[:,0]*degree, dec=compFile[:,1]*degree)

    # Get Average RA and Dec from file
    if compFile.shape[0] == 13 and compFile.size == 13:
        logger.debug(compFile[0])
        logger.debug(compFile[1])
        avgCoord=SkyCoord(ra=(compFile[0])*degree, dec=(compFile[1]*degree))

    else:
        logger.debug(average(compFile[:,0]))
        logger.debug(average(compFile[:,1]))
        avgCoord=SkyCoord(ra=(average(compFile[:,0]))*degree, dec=(average(compFile[:,1]))*degree)

    try:
        catalogues = FILTERS[filterCode]
    except KeyError:
        raise AstrosourceException(f"{filterCode} is not accepted at present")

    # Look up in online catalogues

    coords=[]
    for cat_name, opt in catalogues.items():
        try:
            if coords ==[]: #SALERT - Do not search if a suitable catalogue has already been found
                logger.info("Searching " + str(cat_name))
                if cat_name == 'PanSTARRS' and nopanstarrs==True:
                    logger.info("Skipping PanSTARRS")
                elif cat_name == 'SDSS' and nosdss==True:
                    logger.info("Skipping SDSS")
                else:
                    coords = catalogue_call(avgCoord, opt, cat_name, targets=targets, closerejectd=closerejectd)
                    if coords.cat_name == 'PanSTARRS' or coords.cat_name == 'APASS':
                        max_sep=2.5 * arcsecond
                    else:
                        max_sep=1.5 * arcsecond
                    if coords !=[]:
                        cat_used=cat_name


        except AstrosourceException as e:
            logger.debug(e)

    if not coords:
        raise AstrosourceException(f"Could not find coordinate match in any catalogues for {filterCode}")

    #Setup standard catalogue coordinates
    catCoords=SkyCoord(ra=coords.ra*degree, dec=coords.dec*degree)

    #Get calib mags for least variable IDENTIFIED stars.... not the actual stars in compUsed!! Brighter, less variable stars may be too bright for calibration!
    #So the stars that will be used to calibrate the frames to get the OTHER stars.
    calibStands=[]

    if compFile.shape[0] ==13 and compFile.size ==13:
        lenloop=1
    else:
        lenloop=len(compFile[:,0])

    for q in range(lenloop):
        if compFile.shape[0] ==13 and compFile.size ==13:
            compCoord=SkyCoord(ra=compFile[0]*degree, dec=compFile[1]*degree)
        else:
            compCoord=SkyCoord(ra=compFile[q][0]*degree, dec=compFile[q][1]*degree)
        idxcomp,d2dcomp,d3dcomp=compCoord.match_to_catalog_sky(catCoords)
        if d2dcomp < max_sep:
            if not isnan(coords.mag[idxcomp]):
                if compFile.shape[0] ==13 and compFile.size ==13:
                    calibStands.append([compFile[0],compFile[1],compFile[2],coords.mag[idxcomp],coords.emag[idxcomp]])
                else:
                    calibStands.append([compFile[q][0],compFile[q][1],compFile[q][2],coords.mag[idxcomp],coords.emag[idxcomp]])
    logger.info('Calibration Stars Identified below')
    logger.info(calibStands)



    # Get the set of least variable stars to use as a comparison to calibrate the files (to eventually get the *ACTUAL* standards)
    #logger.debug(asarray(calibStands).shape[0])
    if asarray(calibStands).shape[0] == 0:
        logger.info("We could not find a suitable match between any of your stars and the calibration catalogue")
        logger.info("You might need to reduce the low value (usually 10000) to get some dimmer stars in script 1")
        logger.info("You might also try using one of --nosdss or --nopanstarrs option (not both!) to prevent comparisons to these catalogues")
        raise AstrosourceException("Stars are too dim to calibrate to.")

    varimin=(min(asarray(calibStands)[:,2])) * variabilityMultiplier


    loopbreaker=0
    while loopbreaker==0:
        calibStandsReject=[]
        for q in range(len(asarray(calibStands)[:,0])):
            if calibStands[q][2] > varimin:
                calibStandsReject.append(q)
            elif calibStands[q][4] == 0:
                calibStandsReject.append(q)
            elif np.isnan(calibStands[q][4]):
                calibStandsReject.append(q)
        if len(calibStands) > len(calibStandsReject):
            loopbreaker=1
        else:
            varimin=varimin+0.01
          

    calibStands=delete(calibStands, calibStandsReject, axis=0)

    calibStand=asarray(calibStands)

    savetxt(parentPath / "calibStands.csv", calibStands , delimiter=",", fmt='%0.8f')
    # Lets use this set to calibrate each datafile and pull out the calibrated compsused magnitudes
    compUsedFile = genfromtxt(parentPath / 'compsUsed.csv', dtype=float, delimiter=',')

    calibCompUsed=[]

    calibOverlord=[] # a huge array intended to create the calibration plot and data out of all the individual calibration files.

    logger.debug("CALIBRATING EACH FILE")
    for file in fileList:
        logger.debug(file)

        #Get the phot file into memory
        photFile = load(parentPath / file)
        photCoords=SkyCoord(ra=photFile[:,0]*degree, dec=photFile[:,1]*degree)

        #Convert the phot file into instrumental magnitudes
        for r in range(len(photFile[:,0])):
            photFile[r,5]=1.0857 * (photFile[r,5]/photFile[r,4])
            photFile[r,4]=-2.5*log10(photFile[r,4])

        #Pull out the CalibStands out of each file
        tempDiff=[]
        calibOut=[]
        for q in range(len(calibStands[:,0])):
            if calibStands.size == 13 and calibStands.shape[0]== 13:
                calibCoord=SkyCoord(ra=calibStand[0]*degree,dec=calibStand[1]*degree)
                idx,d2d,d3d=calibCoord.match_to_catalog_sky(photCoords)
                tempDiff.append(calibStand[3]-photFile[idx,4])
                calibOut.append([calibStand[3],calibStand[4],photFile[idx,4],photFile[idx,5],calibStand[3]-photFile[idx,4],0])
            else:
                calibCoord=SkyCoord(ra=calibStand[q][0]*degree,dec=calibStand[q][1]*degree)
                idx,d2d,d3d=calibCoord.match_to_catalog_sky(photCoords)
                tempDiff.append(calibStand[q,3]-photFile[idx,4])
                calibOut.append([calibStand[q,3],calibStand[q,4],photFile[idx,4],photFile[idx,5],calibStand[q,3]-photFile[idx,4],0])

        #logger.debug(tempDiff)
        tempZP= (median(tempDiff))
        #logger.debug(std(tempDiff))


        #Shift the magnitudes in the phot file by the zeropoint
        for r in range(len(photFile[:,0])):
            photFile[r,4]=photFile[r,4]+tempZP

        calibOut=asarray(calibOut)

        #Shift the magnitudes in the phot file by the zeropoint

        for r in range(len(calibOut[:,0])):
            calibOut[r,5]=calibOut[r,4]-tempZP        
            calibOverlord.append([calibOut[r,0],calibOut[r,1],calibOut[r,2],calibOut[r,3],calibOut[r,4],calibOut[r,5],float(file.split("_")[2].replace("d","."))])

        file = Path(file)
        #Save the calibrated photfiles to the calib directory
        #savetxt(calibPath / "{}.calibrated.{}".format(file.stem, file.suffix), photFile, delimiter=",", fmt='%0.8f')
        savetxt(calibPath / "{}.calibrated.{}".format(file.stem, 'csv'), photFile, delimiter=",", fmt='%0.8f')
        savetxt(calibPath / "{}.compared.{}".format(file.stem, 'csv'), calibOut, delimiter=",", fmt='%0.8f')

        


        #Look within photfile for ACTUAL usedcomps.csv and pull them out
        lineCompUsed=[]
        if compUsedFile.shape[0] ==3 and compUsedFile.size == 3:
            lenloop=1
        else:
            lenloop=len(compUsedFile[:,0])
        #logger.debug(compUsedFile.size)
        for r in range(lenloop):
            if compUsedFile.shape[0] ==3 and compUsedFile.size ==3:
                compUsedCoord=SkyCoord(ra=compUsedFile[0]*degree,dec=compUsedFile[1]*degree)
            else:

                compUsedCoord=SkyCoord(ra=compUsedFile[r][0]*degree,dec=compUsedFile[r][1]*degree)
            idx,d2d,d3d=compUsedCoord.match_to_catalog_sky(photCoords)
            lineCompUsed.append(photFile[idx,4])

        #logger.debug(lineCompUsed)
        calibCompUsed.append(lineCompUsed)
        sys.stdout.write('.')
        sys.stdout.flush()

    calibOverlord=asarray(calibOverlord)
    savetxt(parentPath / "CalibAll.csv", calibOverlord, delimiter=",", fmt='%0.8f')

    # Difference versus Magnitude calibration plot
    plt.cla()
    fig = plt.gcf()
    outplotx=calibOverlord[:,0]
    outploty=calibOverlord[:,5]
    sqsol = np.linalg.lstsq(np.vstack([calibOverlord[:,0],np.ones(len(calibOverlord[:,0]))]).T,calibOverlord[:,5], rcond=None)
    m, c = sqsol[0]
    x, residuals, rank, s = sqsol
    #logger.info(m)
    #logger.info(c)
    #logger.info(residuals[0])
    
    plt.xlabel(str(cat_used) + ' ' +str(filterCode) + ' Catalogue Magnitude')
    plt.ylabel('Calibrated - Catalogue Magnitude')
    plt.plot(outplotx,outploty,'bo')
    plt.plot(outplotx,m*outplotx+c,'r')
    #plt.plot(outplotxrepeat,outploty,'ro')
    #plt.plot(linex,liney)
    plt.ylim(min(outploty)-0.05,max(outploty)+0.05)
    plt.xlim(min(outplotx)-0.05,max(outplotx)+0.05)
    #plt.errorbar(outplotx, outploty, xerr=3*calibOverlord[:,1], fmt='-o', linestyle='None')
    #plt.errorbar(outplotxrepeat, outploty, yerr=3*calibFile[:,2], fmt='-o', linestyle='None')
    plt.grid(True)
    plt.subplots_adjust(left=0.15, right=0.98, top=0.98, bottom=0.17, wspace=0.3, hspace=0.4)
    fig.set_size_inches(6,3)
    plt.savefig(parentPath / str("CalibrationSanityPlot_" +str(filterCode)+"_Magnitude.png"))
    plt.savefig(parentPath / str("CalibrationSanityPlot_" +str(filterCode)+"_Magnitude.eps"))

    with open(parentPath / "CalibrationSanityPlotCoefficients.txt", "w") as f:
        f.write("Magnitude slope     : " + str(m)+"\n")
        f.write("Magnitude zeropoint : " + str(c) +"\n")
        if not residuals.size == 0:
            f.write("Magnitude residuals : " +str(residuals[0])+"\n")
        else:
            f.write("Magnitude residuals not calculated. \n")

    # Difference vs time calibration plot
    plt.cla()
    fig = plt.gcf()
    outplotx=calibOverlord[:,6]
    outploty=calibOverlord[:,5]
    sqsol = np.linalg.lstsq(np.vstack([calibOverlord[:,6],np.ones(len(calibOverlord[:,6]))]).T,calibOverlord[:,5], rcond=None)
    m, c = sqsol[0]
    x, residuals, rank, s = sqsol
    
    plt.xlabel('BJD')
    plt.ylabel('Calibrated - Catalogue Magnitude')
    plt.plot(outplotx,outploty,'bo')
    plt.plot(outplotx,m*outplotx+c,'r')
    #plt.plot(outplotxrepeat,outploty,'ro')
    #plt.plot(linex,liney)
    plt.ylim(min(outploty)-0.05,max(outploty)+0.05)
    plt.xlim(min(outplotx)-0.05,max(outplotx)+0.05)
    #plt.errorbar(outplotx, outploty, xerr=3*calibOverlord[:,1], fmt='-o', linestyle='None')
    #plt.errorbar(outplotxrepeat, outploty, yerr=3*calibFile[:,2], fmt='-o', linestyle='None')
    plt.grid(True)
    plt.subplots_adjust(left=0.15, right=0.98, top=0.98, bottom=0.17, wspace=0.3, hspace=0.4)
    fig.set_size_inches(6,3)
    plt.savefig(parentPath / str("CalibrationSanityPlot_" +str(filterCode)+"_Time.png"))
    plt.savefig(parentPath / str("CalibrationSanityPlot_" +str(filterCode)+"_Time.eps"))

    with open(parentPath / "CalibrationSanityPlotCoefficients.txt", "a") as f:
        f.write("Time slope     : " + str(m)+"\n")
        f.write("Time zeropoint : " + str(c) +"\n")        
        if not residuals.size == 0:
            f.write("Time residuals : " +str(residuals[0])+"\n")
        else:
            f.write("Time residuals not calculated. \n")
    # Finalise calibcompsusedfile
    #logger.debug(calibCompUsed)

    calibCompUsed=asarray(calibCompUsed)
    #logger.debug(calibCompUsed[0,:])

    finalCompUsedFile=[]
    sumStd=[]
    for r in range(len(calibCompUsed[0,:])):
        #Calculate magnitude and stdev
        sumStd.append(std(calibCompUsed[:,r]))

        if compUsedFile.shape[0] ==3  and compUsedFile.size ==3:
            finalCompUsedFile.append([compUsedFile[0],compUsedFile[1],compUsedFile[2],median(calibCompUsed[:,r]),asarray(calibStands[0])[4]])
        else:
            finalCompUsedFile.append([compUsedFile[r][0],compUsedFile[r][1],compUsedFile[r][2],median(calibCompUsed[:,r]),std(calibCompUsed[:,r])])

    #logger.debug(finalCompUsedFile)
    logger.debug(" ")
    sumStd=asarray(sumStd)

    errCalib = median(sumStd) / pow((len(calibCompUsed[0,:])), 0.5)

    logger.debug("Comparison Catalogue: " + str(cat_used))
    if len(calibCompUsed[0,:]) == 1:
        logger.debug("As you only have one comparison, the uncertainty in the calibration is unclear")
        logger.debug("But we can take the catalogue value, although we should say this is a lower uncertainty")
        logger.debug("Error/Uncertainty in Calibration: " +str(asarray(calibStands[0])[4]))
    else:
        logger.debug("Median Standard Deviation of any one star: " + str(median(sumStd)))
        logger.debug("Standard Error/Uncertainty in Calibration: " +str(errCalib))

    with open(parentPath / "calibrationErrors.txt", "w") as f:
        f.write("Comparison Catalogue: " + str(cat_used)+"\n")
        f.write("Median Standard Deviation of any one star: " + str(median(sumStd)) +"\n")
        f.write("Standard Error/Uncertainty in Calibration: " +str(errCalib))

    #logger.debug(finalCompUsedFile)
    compFile = asarray(finalCompUsedFile)
    savetxt(parentPath / "calibCompsUsed.csv", compFile, delimiter=",", fmt='%0.8f')
    sys.stdout.write('\n')
    return compFile
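The calibration itself reduces to an instrumental-magnitude conversion and a median zero-point shift; a small numeric sketch of that step with made-up counts and catalogue magnitudes:

import numpy as np

# Made-up data: raw counts and count errors for three calibration stars, plus catalogue magnitudes
counts = np.array([125000.0, 84000.0, 230000.0])
counts_err = np.array([400.0, 350.0, 600.0])
cat_mag = np.array([12.10, 12.53, 11.44])

inst_mag = -2.5 * np.log10(counts)           # instrumental magnitude, as in the per-file loop
inst_err = 1.0857 * (counts_err / counts)    # magnitude error from the fractional count error
zero_point = np.median(cat_mag - inst_mag)   # tempZP in the function above
calibrated = inst_mag + zero_point
print(zero_point, calibrated)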
Example #7
def catalogue_call(avgCoord, opt, cat_name, targets, closerejectd):
    data = namedtuple(typename='data',field_names=['ra','dec','mag','emag','cat_name'])

    TABLES = {'APASS':'******',
              'SDSS' :'V/147/sdss12',
              'PanSTARRS' : 'II/349/ps1',
              'SkyMapper' : 'II/358/smss'
              }

    tbname = TABLES.get(cat_name, None)
    kwargs = {'radius':'0.33 deg'}
    kwargs['catalog'] = cat_name

    try:
        v=Vizier(columns=['all']) # Skymapper by default does not report the error columns
        v.ROW_LIMIT=-1
        query = v.query_region(avgCoord, **kwargs)
    except VOSError:
        raise AstrosourceException("Could not find RA {} Dec {} in {}".format(avgCoord.ra.value,avgCoord.dec.value, cat_name))
    except ConnectionError:
        connected=False
        logger.info("Connection failed, waiting and trying again")
        while connected==False:
            try:
                v=Vizier(columns=['all']) # Skymapper by default does not report the error columns
                v.ROW_LIMIT=-1
                query = v.query_region(avgCoord, **kwargs)
                connected=True
            except ConnectionError:
                time.sleep(10)
                logger.info("Failed again.")
                connected=False

    if query.keys():
        resp = query[tbname]
    else:
        raise AstrosourceException("Could not find RA {} Dec {} in {}".format(avgCoord.ra.value,avgCoord.dec.value, cat_name))


    logger.debug(f'Looking for sources in {cat_name}')
    if cat_name in ['APASS','PanSTARRS']:
        radecname = {'ra' :'RAJ2000', 'dec': 'DEJ2000'}
    elif cat_name == 'SDSS':
        radecname = {'ra' :'RA_ICRS', 'dec': 'DE_ICRS'}
    elif cat_name == 'SkyMapper':
        radecname = {'ra' :'RAICRS', 'dec': 'DEICRS'}
    else:
        radecname = {'ra' :'raj2000', 'dec': 'dej2000'}

    # Filter out bad data from catalogues
    if cat_name == 'PanSTARRS':
        resp = resp[where((resp['Qual'] == 52) | (resp['Qual'] == 60) | (resp['Qual'] == 61))]
    elif cat_name == 'SDSS':
        resp = resp[resp['Q'] == 3]
    elif cat_name == 'SkyMapper':
        resp = resp[resp['flags'] == 0]

    logger.info("Original high quality sources in calibration catalogue: "+str(len(resp)))

    # Remove any objects close to targets from potential calibrators
    if targets.shape == (4,):
        targets = [targets]
    for tg in targets:
        # Keep only sources more than ~5 arcsec (0.0014 deg) from the target in RA or Dec
        resp = resp[where((np.abs(resp[radecname['ra']]-tg[0]) > 0.0014) | (np.abs(resp[radecname['dec']]-tg[1]) > 0.0014))]

    logger.info("Number of calibration sources after removal of sources near targets: "+str(len(resp)))

    # Remove any star from calibration catalogue that has another star in the catalogue within closerejectd arcseconds of it.
    while True:
        fileRaDec = SkyCoord(ra=resp[radecname['ra']].data*degree, dec=resp[radecname['dec']].data*degree)
        idx, d2d, _ = fileRaDec.match_to_catalog_sky(fileRaDec, nthneighbor=2) # Closest matches that isn't itself.
        catReject = []
        for q in range(len(d2d)):
            if d2d[q] < closerejectd*arcsecond:
                catReject.append(q)
        if catReject == []:
            break
        del resp[catReject]
        logger.info(f"Stars rejected that are too close (<5arcsec) in calibration catalogue: {len(catReject)}")

    logger.info(f"Number of calibration sources after removal of sources near other sources: {len(resp)}")


    data.cat_name = cat_name
    data.ra = array(resp[radecname['ra']].data)
    data.dec = array(resp[radecname['dec']].data)

    # extract RA, Dec, Mag and error as arrays
    data.mag = array(resp[opt['filter']].data)
    data.emag = array(resp[opt['error']].data)
    return data
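The crowding filter above relies on nthneighbor=2, i.e. the closest match that is not the star itself; a compact sketch with synthetic coordinates and the same closerejectd threshold:

from astropy.coordinates import SkyCoord
import astropy.units as u
import numpy as np

# Synthetic catalogue: the first two stars are ~3.6 arcsec apart, the third is isolated
ra = np.array([150.000, 150.001, 150.100])
dec = np.array([2.000, 2.000, 2.100])
cat = SkyCoord(ra=ra * u.degree, dec=dec * u.degree)

idx, d2d, _ = cat.match_to_catalog_sky(cat, nthneighbor=2)  # nearest neighbour excluding self
closerejectd = 5.0  # arcseconds, as in the function signature
print(d2d.arcsecond < closerejectd)  # [ True  True False ] -- the close pair would be rejected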
Example #8
def remove_stars_targets(parentPath, compFile, acceptDistance, targetFile, removeTargets):
    max_sep=acceptDistance * arcsecond
    logger.info("Removing Target Stars from potential Comparisons")

    if not (compFile.shape[0] == 2 and compFile.size ==2):
        fileRaDec = SkyCoord(ra=compFile[:,0]*degree, dec=compFile[:,1]*degree)
    else:
        fileRaDec = SkyCoord(ra=compFile[0]*degree, dec=compFile[1]*degree)

    # Remove any nan rows from targetFile
    targetRejecter=[]
    if not (targetFile.shape[0] == 4 and targetFile.size ==4):
        for z in range(targetFile.shape[0]):
          if isnan(targetFile[z][0]):
            targetRejecter.append(z)
        targetFile=delete(targetFile, targetRejecter, axis=0)

    # Get Average RA and Dec from file
    if compFile.shape[0] == 2 and compFile.size == 2:
        logger.debug(compFile[0])
        logger.debug(compFile[1])
        avgCoord=SkyCoord(ra=(compFile[0])*degree, dec=(compFile[1]*degree))

    else:
        logger.debug(average(compFile[:,0]))
        logger.debug(average(compFile[:,1]))
        avgCoord=SkyCoord(ra=(average(compFile[:,0]))*degree, dec=(average(compFile[:,1]))*degree)


    # Check VSX for any known variable stars and remove them from the list
    try:
        v=Vizier(columns=['all']) # Skymapper by default does not report the error columns
        v.ROW_LIMIT=-1
        variableResult=v.query_region(avgCoord, '0.33 deg', catalog='VSX')['B/vsx/vsx']
    except ConnectionError:
        connected=False
        logger.info("Connection failed, waiting and trying again")
        while connected==False:
            try:
                v=Vizier(columns=['all']) # Skymapper by default does not report the error columns
                v.ROW_LIMIT=-1
                variableResult=v.query_region(avgCoord, '0.33 deg', catalog='VSX')['B/vsx/vsx']
                connected=True
            except ConnectionError:
                time.sleep(10)
                logger.info("Failed again.")
                connected=False

    logger.debug(variableResult)

    logger.debug(variableResult.keys())

    raCat=array(variableResult['RAJ2000'].data)
    logger.debug(raCat)
    decCat=array(variableResult['DEJ2000'].data)
    logger.debug(decCat)
    varStarReject=[]
    for t in range(raCat.size):

        compCoord=SkyCoord(ra=raCat[t]*degree, dec=decCat[t]*degree)

        if not (compFile.shape[0] == 2 and compFile.size == 2):
            catCoords=SkyCoord(ra=compFile[:,0]*degree, dec=compFile[:,1]*degree)
            idxcomp,d2dcomp,d3dcomp=compCoord.match_to_catalog_sky(catCoords)
        elif not (raCat.shape[0] == 2 and raCat.size == 2): ### this is effectively the same as below
            catCoords=SkyCoord(ra=compFile[0]*degree, dec=compFile[1]*degree)
            idxcomp,d2dcomp,d3dcomp=compCoord.match_to_catalog_sky(catCoords)
        else:
            # Single comparison star: flag it for rejection only if it is close to this variable in both RA and Dec (~0.0014 deg)
            if abs(compFile[0]-raCat[t]) < 0.0014 and abs(compFile[1]-decCat[t]) < 0.0014:
                varStarReject.append(t)
            d2dcomp = 9999

        #logger.debug(d2dcomp)
        if d2dcomp != 9999:
            if d2dcomp.arcsecond[0] < max_sep.value:
                logger.debug("match!")
                varStarReject.append(t)
        #    else:
        #        logger.debug("no match!")

    logger.debug("Number of stars prior to VSX reject")
    logger.debug(compFile.shape[0])
    compFile=delete(compFile, varStarReject, axis=0)
    logger.debug("Number of stars post to VSX reject")
    logger.debug(compFile.shape[0])


    if (compFile.shape[0] ==1):
        compFile=[[compFile[0][0],compFile[0][1],0.01]]
        compFile=asarray(compFile)
        savetxt(parentPath / "compsUsed.csv", compFile, delimiter=",", fmt='%0.8f')
        sortStars=[[compFile[0][0],compFile[0][1],0.01,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]]
        sortStars=asarray(sortStars)
        savetxt("stdComps.csv", sortStars, delimiter=",", fmt='%0.8f')
        raise AstrosourceException("Looks like you have a single comparison star!")
    return compFile
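A hedged call sketch, assuming compFile and targetFile are (RA, Dec, ...) arrays in decimal degrees loaded from the CSV outputs of earlier steps (file and directory names here are placeholders):

from pathlib import Path
from numpy import genfromtxt

parentPath = Path('inputs')  # placeholder directory
compFile = genfromtxt(parentPath / 'screenedComps.csv', dtype=float, delimiter=',')
targetFile = genfromtxt(parentPath / 'targetstars.csv', dtype=float, delimiter=',')
compFile = remove_stars_targets(parentPath, compFile, 5.0, targetFile, removeTargets=True)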
Example #9
def find_comparisons_calibrated(filterCode,
                                paths=None,
                                max_magerr=0.05,
                                stdMultiplier=2,
                                variabilityMultiplier=2,
                                panStarrsInstead=False):
    sys.stdout.write(
        "тнРя╕П Find comparison stars in catalogues for calibrated photometry\n"
    )

    FILTERS = {
        'B': {
            'APASS': {
                'filter': 'Bmag',
                'error': 'e_Bmag'
            }
        },
        'V': {
            'APASS': {
                'filter': 'Vmag',
                'error': 'e_Vmag'
            }
        },
        'up': {
            'SDSS': {
                'filter': 'umag',
                'error': 'e_umag'
            },
            'SkyMapper': {
                'filter': 'uPSF',
                'error': 'e_uPSF'
            },
            'PanSTARRS': {
                'filter': 'umag',
                'error': 'e_umag'
            }
        },
        'gp': {
            'SDSS': {
                'filter': 'gmag',
                'error': 'e_gmag'
            },
            'SkyMapper': {
                'filter': 'gPSF',
                'error': 'e_gPSF'
            },
            'PanSTARRS': {
                'filter': 'gmag',
                'error': 'e_gmag'
            }
        },
        'rp': {
            'SDSS': {
                'filter': 'rmag',
                'error': 'e_rmag'
            },
            'SkyMapper': {
                'filter': 'rPSF',
                'error': 'e_rPSF'
            },
            'PanSTARRS': {
                'filter': 'rmag',
                'error': 'e_rmag'
            }
        },
        'ip': {
            'SDSS': {
                'filter': 'imag',
                'error': 'e_imag'
            },
            'SkyMapper': {
                'filter': 'iPSF',
                'error': 'e_iPSF'
            },
            'PanSTARRS': {
                'filter': 'imag',
                'error': 'e_imag'
            }
        },
        'zs': {
            'SDSS': {
                'filter': 'zmag',
                'error': 'e_zmag'
            },
            'SkyMapper': {
                'filter': 'zPSF',
                'error': 'e_zPSF'
            },
            'PanSTARRS': {
                'filter': 'zmag',
                'error': 'e_zmag'
            }
        },
    }

    parentPath = paths['parent']
    calibPath = parentPath / "calibcats"
    if not calibPath.exists():
        os.makedirs(calibPath)

    #Vizier.ROW_LIMIT = -1

    # Get List of Files Used
    fileList = []
    for line in (parentPath /
                 "usedImages.txt").read_text().strip().split('\n'):
        fileList.append(line.strip())

    logger.debug("Filter Set: " + filterCode)

    # Load compsused
    compFile = genfromtxt(parentPath / 'stdComps.csv',
                          dtype=float,
                          delimiter=',')
    logger.debug(compFile.shape[0])

    if compFile.shape[0] == 13 and compFile.size == 13:
        compCoords = SkyCoord(ra=compFile[0] * degree,
                              dec=compFile[1] * degree)
    else:
        compCoords = SkyCoord(ra=compFile[:, 0] * degree,
                              dec=compFile[:, 1] * degree)

    # Get Average RA and Dec from file
    if compFile.shape[0] == 13 and compFile.size == 13:
        logger.debug(compFile[0])
        logger.debug(compFile[1])
        avgCoord = SkyCoord(ra=(compFile[0]) * degree,
                            dec=(compFile[1] * degree))

    else:
        logger.debug(average(compFile[:, 0]))
        logger.debug(average(compFile[:, 1]))
        avgCoord = SkyCoord(ra=(average(compFile[:, 0])) * degree,
                            dec=(average(compFile[:, 1])) * degree)

    try:
        catalogues = FILTERS[filterCode]
    except KeyError:
        raise AstrosourceException(f"{filterCode} is not accepted at present")

    # Look up in online catalogues
    coords = []
    for cat_name, opt in catalogues.items():
        try:
            coords = catalogue_call(avgCoord, opt, cat_name)
            if coords.cat_name == 'PanSTARRS' or coords.cat_name == 'APASS':
                max_sep = 2.5 * arcsecond
            else:
                max_sep = 1.5 * arcsecond

        except AstrosourceException as e:
            logger.debug(e)

    if not coords:
        raise AstrosourceException(
            f"Could not find coordinate match in any catalogues for {filterCode}"
        )

    #Setup standard catalogue coordinates
    catCoords = SkyCoord(ra=coords.ra * degree, dec=coords.dec * degree)

    #Get calib mags for least variable IDENTIFIED stars.... not the actual stars in compUsed!! Brighter, less variable stars may be too bright for calibration!
    #So the stars that will be used to calibrate the frames to get the OTHER stars.
    calibStands = []

    if compFile.shape[0] == 13 and compFile.size == 13:
        lenloop = 1
    else:
        lenloop = len(compFile[:, 0])

    for q in range(lenloop):
        if compFile.shape[0] == 13 and compFile.size == 13:
            compCoord = SkyCoord(ra=compFile[0] * degree,
                                 dec=compFile[1] * degree)
        else:
            compCoord = SkyCoord(ra=compFile[q][0] * degree,
                                 dec=compFile[q][1] * degree)
        idxcomp, d2dcomp, d3dcomp = compCoord.match_to_catalog_sky(catCoords)
        if d2dcomp < max_sep:
            if not isnan(coords.mag[idxcomp]):
                if compFile.shape[0] == 13 and compFile.size == 13:
                    calibStands.append([
                        compFile[0], compFile[1], compFile[2],
                        coords.mag[idxcomp], coords.emag[idxcomp]
                    ])
                else:
                    calibStands.append([
                        compFile[q][0], compFile[q][1], compFile[q][2],
                        coords.mag[idxcomp], coords.emag[idxcomp]
                    ])
    logger.info('Calibration Stars Identified below')
    logger.info(calibStands)

    # Get the set of least variable stars to use as a comparison to calibrate the files (to eventually get the *ACTUAL* standards)
    #logger.debug(asarray(calibStands).shape[0])
    if asarray(calibStands).shape[0] == 0:
        logger.info(
            "We could not find a suitable match between any of your stars and the calibration catalogue"
        )
        logger.info(
            "You might need to reduce the low value (usually 10000) to get some dimmer stars in script 1"
        )
        raise AstrosourceException("Stars are too dim to calibrate to.")

    varimin = (min(asarray(calibStands)[:, 2])) * variabilityMultiplier

    calibStandsReject = []
    for q in range(len(asarray(calibStands)[:, 0])):
        if calibStands[q][2] > varimin:
            calibStandsReject.append(q)
            #logger.debug(calibStands[q][2])

    calibStands = delete(calibStands, calibStandsReject, axis=0)

    calibStand = asarray(calibStands)

    savetxt(parentPath / "calibStands.csv",
            calibStands,
            delimiter=",",
            fmt='%0.8f')
    # Lets use this set to calibrate each datafile and pull out the calibrated compsused magnitudes
    compUsedFile = genfromtxt(parentPath / 'compsUsed.csv',
                              dtype=float,
                              delimiter=',')

    calibCompUsed = []

    logger.debug("CALIBRATING EACH FILE")
    for file in fileList:
        logger.debug(file)

        #Get the phot file into memory
        photFile = load(parentPath / file)
        photCoords = SkyCoord(ra=photFile[:, 0] * degree,
                              dec=photFile[:, 1] * degree)

        #Convert the phot file into instrumental magnitudes
        for r in range(len(photFile[:, 0])):
            photFile[r, 5] = 1.0857 * (photFile[r, 5] / photFile[r, 4])
            photFile[r, 4] = -2.5 * log10(photFile[r, 4])

        #Pull out the CalibStands out of each file
        tempDiff = []
        for q in range(len(calibStands[:, 0])):
            calibCoord = SkyCoord(ra=calibStand[q][0] * degree,
                                  dec=calibStand[q][1] * degree)
            idx, d2d, d3d = calibCoord.match_to_catalog_sky(photCoords)
            tempDiff.append(calibStand[q, 3] - photFile[idx, 4])

        #logger.debug(tempDiff)
        tempZP = (median(tempDiff))
        #logger.debug(std(tempDiff))

        #Shift the magnitudes in the phot file by the zeropoint
        for r in range(len(photFile[:, 0])):
            photFile[r, 4] = photFile[r, 4] + tempZP

        file = Path(file)
        #Save the calibrated photfiles to the calib directory
        savetxt(calibPath / "{}.calibrated.{}".format(file.stem, file.suffix),
                photFile,
                delimiter=",",
                fmt='%0.8f')

        #Look within photfile for ACTUAL usedcomps.csv and pull them out
        lineCompUsed = []
        if compUsedFile.shape[0] == 3 and compUsedFile.size == 3:
            lenloop = 1
        else:
            lenloop = len(compUsedFile[:, 0])

        #logger.debug(compUsedFile.size)
        for r in range(lenloop):
            if compUsedFile.shape[0] == 3 and compUsedFile.size == 3:
                compUsedCoord = SkyCoord(ra=compUsedFile[0] * degree,
                                         dec=compUsedFile[1] * degree)
            else:
                compUsedCoord = SkyCoord(ra=compUsedFile[r][0] * degree,
                                         dec=compUsedFile[r][1] * degree)
            idx, d2d, d3d = compUsedCoord.match_to_catalog_sky(photCoords)
            lineCompUsed.append(photFile[idx, 4])

        #logger.debug(lineCompUsed)
        calibCompUsed.append(lineCompUsed)
        sys.stdout.write('.')
        sys.stdout.flush()

    # Finalise calibcompsusedfile
    #logger.debug(calibCompUsed)

    calibCompUsed = asarray(calibCompUsed)
    #logger.debug(calibCompUsed[0,:])

    finalCompUsedFile = []
    sumStd = []
    for r in range(len(calibCompUsed[0, :])):
        #Calculate magnitude and stdev
        sumStd.append(std(calibCompUsed[:, r]))
        if compUsedFile.shape[0] == 3 and compUsedFile.size == 3:
            finalCompUsedFile.append([
                compUsedFile[0], compUsedFile[1], compUsedFile[2],
                median(calibCompUsed[:, r]),
                asarray(calibStands[0])[4]
            ])
        else:
            finalCompUsedFile.append([
                compUsedFile[r][0], compUsedFile[r][1], compUsedFile[r][2],
                median(calibCompUsed[:, r]),
                std(calibCompUsed[:, r])
            ])

    #logger.debug(finalCompUsedFile)
    logger.debug(" ")
    sumStd = asarray(sumStd)

    errCalib = median(sumStd) / pow((len(calibCompUsed[0, :])), 0.5)

    #logger.debug(len(calibCompUsed[0,:]))
    if len(calibCompUsed[0, :]) == 1:
        logger.debug(
            "As you only have one comparison, the uncertainty in the calibration is unclear"
        )
        logger.debug(
            "But we can take the catalogue value, although this is really a lower limit on the uncertainty"
        )
        logger.debug("Error/Uncertainty in Calibration: " +
                     str(asarray(calibStands[0])[4]))
    else:
        logger.debug("Median Standard Deviation of any one star: " +
                     str(median(sumStd)))
        logger.debug("Standard Error/Uncertainty in Calibration: " +
                     str(errCalib))

    with open(parentPath / "calibrationErrors.txt", "w") as f:
        f.write("Median Standard Deviation of any one star: " +
                str(median(sumStd)) + "\n")
        f.write("Standard Error/Uncertainty in Calibration: " + str(errCalib))

    #logger.debug(finalCompUsedFile)
    compFile = asarray(finalCompUsedFile)
    savetxt(parentPath / "calibCompsUsed.csv",
            compFile,
            delimiter=",",
            fmt='%0.8f')
    sys.stdout.write('\n')
    return compFile
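The quoted calibration uncertainty is the median per-star scatter divided by the square root of the number of comparison stars; a toy illustration of that calculation:

import numpy as np

# Toy calibrated magnitudes: rows are frames, columns are the comparison stars
calibCompUsed = np.array([[12.01, 12.52, 11.45],
                          [12.03, 12.50, 11.44],
                          [11.99, 12.53, 11.46]])
sumStd = calibCompUsed.std(axis=0)                              # per-star standard deviation
errCalib = np.median(sumStd) / np.sqrt(calibCompUsed.shape[1])  # as in the function above
print(sumStd, errCalib)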
Example #10
def find_stars(targetStars,
               paths,
               fileList,
               acceptDistance=1.0,
               minimumCounts=10000,
               maximumCounts=1000000,
               imageFracReject=0.0,
               starFracReject=0.1,
               rejectStart=7,
               minCompStars=1):
    """
    Finds stars useful for photometry in each photometry/data file

    Parameters
    ----------
    targetStars : list
            List of target tuples in the format (ra, dec, 0, 0). ra and dec must be in decimal degrees
    paths : dict
            Dictionary of paths; 'parent' points to the directory holding the photometry files
    fileList : list
            List of photometry files to try
    acceptDistance : float
            Furthest distance in arcseconds for matches
    minimumCounts : int
            look for comparisons brighter than this
    maximumCounts : int
            look for comparisons dimmer than this
    imageFracReject: float
            This is a value which will reject images based on number of stars detected
    starFracReject : float
            This is a value which will reject images that reject more than this fraction of available stars after the initial rejectStart images
    rejectStart : int
            This many initial images (lots of stars are expected to be rejected in the early images)
    minCompStars : int
            This is the minimum number of comp stars required

    Returns
    -------
    used_file : str
            Path to newly created file containing all images which are usable for photometry
    """

    #Initialisation values
    usedImages = []
    # Generate a blank targetstars.csv file
    targetfile = paths['parent'] / "targetstars.csv"
    np.savetxt(targetfile, targetStars, delimiter=",", fmt='%0.8f')

    # LOOK FOR REJECTING NON-WCS IMAGES
    # If the WCS matching has failed, this function will remove the image from the list
    #wcsReject=[]
    #q=0
    fileSizer = 0
    logger.info(
        "Finding image with most stars detected and reject ones with bad WCS")
    referenceFrame = None

    for file in list(fileList):  # iterate over a copy so in-loop removals do not skip files
        photFile = np.genfromtxt(file, dtype=float, delimiter=',')
        if ((np.asarray(photFile[:, 0]) > 360).sum() > 0):
            logger.debug("REJECT")
            logger.debug(file)
            fileList.remove(file)
        elif ((np.asarray(photFile[:, 1]) > 90).sum() > 0):
            logger.debug("REJECT")
            logger.debug(file)
            fileList.remove(file)
        else:
            # Sort through and find the largest file and use that as the reference file
            if photFile.size > fileSizer:
                phottmparr = np.asarray(photFile)
                if ((phottmparr[:, 0] > 360).sum()
                        == 0) and (phottmparr[0][0] !=
                                   'null') and (phottmparr[0][0] != 0.0):
                    referenceFrame = photFile
                    fileSizer = photFile.size
                    logger.debug("{} - {}".format(photFile.size, file))
    if referenceFrame is None or not referenceFrame.size:
        raise AstrosourceException("No suitable reference files found")

    logger.debug("Setting up reference Frame")
    fileRaDec = SkyCoord(ra=referenceFrame[:, 0] * u.degree,
                         dec=referenceFrame[:, 1] * u.degree)

    logger.debug("Removing stars with low or high counts")
    rejectStars = []
    # Check star has adequate counts
    for j in range(referenceFrame.shape[0]):
        if (referenceFrame[j][4] < minimumCounts
                or referenceFrame[j][4] > maximumCounts):
            rejectStars.append(int(j))
    logger.debug("Number of stars prior")
    logger.debug(referenceFrame.shape[0])

    referenceFrame = np.delete(referenceFrame, rejectStars, axis=0)

    logger.debug("Number of stars post")
    logger.debug(referenceFrame.shape[0])

    imgsize = imageFracReject * fileSizer  # set threshold size
    rejStartCounter = 0
    imgReject = 0  # Number of images rejected due to high rejection rate
    loFileReject = 0  # Number of images rejected due to too few stars in the photometry file
    wcsFileReject = 0
    for file in fileList:
        rejStartCounter = rejStartCounter + 1
        photFile = np.genfromtxt(file, dtype=float, delimiter=',')
        # DUP fileRaDec = SkyCoord(ra=photFile[:,0]*u.degree, dec=photFile[:,1]*u.degree)

        logger.debug('Image Number: ' + str(rejStartCounter))
        logger.debug(file)
        logger.debug("Image threshold size: " + str(imgsize))
        logger.debug("Image catalogue size: " + str(photFile.size))
        if photFile.size > imgsize and photFile.size > 7:
            phottmparr = np.asarray(photFile)
            if ((phottmparr[:, 0] > 360).sum() == 0) and (
                    phottmparr[0][0] != 'null') and (phottmparr[0][0] != 0.0):

                # Checking existance of stars in all photometry files
                rejectStars = []  # A list to hold which stars are to be rejected

                # Find whether star in reference list is in this phot file, if not, reject star.
                for j in range(referenceFrame.shape[0]):
                    photRAandDec = SkyCoord(ra=photFile[:, 0] * u.degree,
                                            dec=photFile[:, 1] * u.degree)
                    testStar = SkyCoord(ra=referenceFrame[j][0] * u.degree,
                                        dec=referenceFrame[j][1] * u.degree)
                    # This is the only line in the whole package which requires scipy
                    idx, d2d, d3d = testStar.match_to_catalog_sky(photRAandDec)
                    if (d2d.arcsecond > acceptDistance):
                        #"No Match! Nothing within range."
                        rejectStars.append(int(j))

            # if the rejectstar list is not empty, remove the stars from the reference List
            if rejectStars != []:

                if not (((len(rejectStars) / referenceFrame.shape[0]) >
                         starFracReject) and rejStartCounter > rejectStart):
                    referenceFrame = np.delete(referenceFrame,
                                               rejectStars,
                                               axis=0)
                    logger.debug('**********************')
                    logger.debug('Stars Removed  : ' + str(len(rejectStars)))
                    logger.debug('Remaining Stars: ' +
                                 str(referenceFrame.shape[0]))
                    logger.debug('**********************')
                    usedImages.append(file)
                else:
                    logger.debug('**********************')
                    logger.debug(
                        'Image Rejected due to too high a fraction of rejected stars'
                    )
                    logger.debug(len(rejectStars) / referenceFrame.shape[0])
                    logger.debug('**********************')
                    imgReject = imgReject + 1
            else:
                logger.debug('**********************')
                logger.debug('All Stars Present')
                logger.debug('**********************')
                usedImages.append(file)

            # If we have removed all stars, we have failed!
            if (referenceFrame.shape[0] == 0):
                logger.error("Problem file - {}".format(file))
                raise AstrosourceException(
                    "All Stars Removed. Try removing problematic files or raising the imageFracReject"
                )

            if (referenceFrame.shape[0] < minCompStars):
                logger.error("Problem file - {}".format(file))
                raise AstrosourceException(
                    "There are fewer than the requested number of Comp Stars. Try removing problematic files or raising the imageFracReject"
                )

        elif photFile.size < 7:
            logger.error('**********************')
            logger.error("WCS Coordinates broken")
            logger.error('**********************')
            wcsFileReject = wcsFileReject + 1
        else:
            logger.error('**********************')
            logger.error("CONTAINS TOO FEW STARS")
            logger.error('**********************')
            loFileReject = loFileReject + 1

    # Construct the output file containing candidate comparison stars
    outputComps = []
    for j in range(referenceFrame.shape[0]):
        outputComps.append([referenceFrame[j][0], referenceFrame[j][1]])

    logger.debug(
        "These are the identified common stars of sufficient brightness that are in every image"
    )
    logger.debug(outputComps)

    logger.info('Images Rejected due to high star rejection rate: {}'.format(
        imgReject))
    logger.info(
        'Images Rejected due to low file size: {}'.format(loFileReject))
    logger.info('Out of this many original images: {}'.format(len(fileList)))

    logger.info("Number of candidate Comparison Stars Detected: " +
                str(len(outputComps)))
    logger.info('Output sent to screenedComps.csv ready for use in Comparison')

    screened_file = paths['parent'] / "screenedComps.csv"
    np.savetxt(screened_file, outputComps, delimiter=",", fmt='%0.8f')
    used_file = paths['parent'] / "usedImages.txt"
    with open(used_file, "w") as f:
        for s in usedImages:
            f.write(str(s) + "\n")

    return usedImages
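
A minimal usage sketch for find_stars; the target coordinates, directory layout and file names below are illustrative assumptions, not values from the package.

from pathlib import Path

# Hypothetical inputs: one target as (ra, dec, 0, 0) in decimal degrees and a
# folder of per-image photometry CSV files produced earlier in the pipeline.
targets = [(140.2921, -29.1875, 0, 0)]
paths = {'parent': Path('/data/my_target')}
file_list = sorted(paths['parent'].glob('*.csv'))

used_images = find_stars(targets, paths, file_list,
                         acceptDistance=1.0,
                         minimumCounts=10000,
                         maximumCounts=1000000)
print("{} images usable for photometry".format(len(used_images)))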
Example #11
def photometric_calculations(targets,
                             paths,
                             acceptDistance=10.0,
                             errorReject=0.5):

    photFileArray, fileList = photometry_files_to_array(paths['parent'])

    if (paths['parent'] / 'calibCompsUsed.csv').exists():
        logger.debug("Calibrated")
        compFile = np.genfromtxt(paths['parent'] / 'calibCompsUsed.csv',
                                 dtype=float,
                                 delimiter=',')
        calibFlag = 1
    else:
        logger.debug("Differential")
        compFile = np.genfromtxt(paths['parent'] / 'compsUsed.csv',
                                 dtype=float,
                                 delimiter=',')
        calibFlag = 0

    # Get total counts for each file
    fileCount = []
    compArray = []
    allCountsArray = []
    for imgs in range(photFileArray.shape[0]):
        allCounts = 0.0
        allCountsErr = 0.0
        photFile = photFileArray[imgs]
        fileRaDec = SkyCoord(ra=photFile[:, 0] * u.degree,
                             dec=photFile[:, 1] * u.degree)
        logger.debug("Calculating total Comparison counts for : {}".format(
            fileList[imgs]))
        #logger.debug(compFile.shape[0])

        if compFile.shape[0] == 5 and compFile.size == 5:
            loopLength = 1
        else:
            loopLength = compFile.shape[0]
        #logger.debug(compFile.size)
        #sys.exit()
        for j in range(loopLength):
            if compFile.size == 2 or (compFile.shape[0] == 5
                                      and compFile.size == 5):
                ##
                ##    for j in range(compFile.shape[0]):
                ##        if compFile.size == 2 or compFile.shape[0] == 5:
                matchCoord = SkyCoord(ra=compFile[0] * u.degree,
                                      dec=compFile[1] * u.degree)
            else:
                matchCoord = SkyCoord(ra=compFile[j][0] * u.degree,
                                      dec=compFile[j][1] * u.degree)
            idx, d2d, d3d = matchCoord.match_to_catalog_sky(fileRaDec)
            allCounts = np.add(allCounts, photFile[idx][4])
            allCountsErr = np.add(allCountsErr, photFile[idx][5])

        allCountsArray.append([allCounts, allCountsErr])

    logger.debug(allCountsArray)

    allcountscount = 0

    if len(targets) == 4:
        loopLength = 1
    else:
        loopLength = targets.shape[0]
    # For each variable calculate all the things
    for q in range(loopLength):
        starErrorRejCount = 0
        starDistanceRejCount = 0
        logger.debug("****************************")
        logger.debug("Processing Variable {}".format(q + 1))
        if int(len(targets)) == 4:
            logger.debug("RA {}".format(targets[0]))
        else:
            logger.debug("RA {}".format(targets[q][0]))
        if int(len(targets)) == 4:
            logger.debug("Dec {}".format(targets[1]))
        else:
            logger.debug("Dec {}".format(targets[q][1]))
        if int(len(targets)) == 4:
            varCoord = SkyCoord(
                targets[0], (targets[1]), frame='icrs',
                unit=u.deg)  # Need to remove target stars from consideration
        else:
            varCoord = SkyCoord(
                targets[q][0], (targets[q][1]), frame='icrs',
                unit=u.deg)  # Need to remove target stars from consideration

        # Grabbing variable rows
        logger.debug(
            "Extracting and Measuring Differential Magnitude in each Photometry File"
        )
        outputPhot = []  # new
        compArray = []
        compList = []
        allcountscount = 0
        for imgs in range(photFileArray.shape[0]):
            compList = []
            fileRaDec = SkyCoord(ra=photFileArray[imgs][:, 0] * u.degree,
                                 dec=photFileArray[imgs][:, 1] * u.degree)
            idx, d2d, _ = varCoord.match_to_catalog_sky(fileRaDec)
            starRejected = 0
            if (np.less(d2d.arcsecond, acceptDistance)):
                magErrVar = 1.0857 * (photFileArray[imgs][idx][5] /
                                      photFileArray[imgs][idx][4])
                if magErrVar < errorReject:

                    magErrEns = 1.0857 * (allCountsArray[allcountscount][1] /
                                          allCountsArray[allcountscount][0])
                    magErrTotal = pow(
                        pow(magErrVar, 2) + pow(magErrEns, 2), 0.5)

                    #templist is a temporary holder of the resulting file.
                    tempList = photFileArray[imgs][idx, :]
                    googFile = Path(fileList[imgs]).name
                    tempList = np.append(
                        tempList,
                        float(googFile.split("_")[5].replace("d", ".")))
                    tempList = np.append(
                        tempList,
                        float(googFile.split("_")[4].replace("a", ".")))
                    tempList = np.append(tempList,
                                         allCountsArray[allcountscount][0])
                    tempList = np.append(tempList,
                                         allCountsArray[allcountscount][1])

                    #Differential Magnitude
                    tempList = np.append(
                        tempList,
                        2.5 * np.log10(allCountsArray[allcountscount][0] /
                                       photFileArray[imgs][idx][4]))
                    tempList = np.append(tempList, magErrTotal)
                    tempList = np.append(tempList, photFileArray[imgs][idx][4])
                    tempList = np.append(tempList, photFileArray[imgs][idx][5])

                    if (compFile.shape[0] == 5 and compFile.size == 5) or (
                            compFile.shape[0] == 3 and compFile.size == 3):
                        loopLength = 1
                    else:
                        loopLength = compFile.shape[0]
                    #logger.debug(compFile.size)
                    #sys.exit()
                    for j in range(loopLength):
                        if compFile.size == 2 or (compFile.shape[0] == 3
                                                  and compFile.size == 3) or (
                                                      compFile.shape[0] == 5
                                                      and compFile.size == 5):
                            matchCoord = SkyCoord(ra=compFile[0] * u.degree,
                                                  dec=compFile[1] * u.degree)
                        else:
                            matchCoord = SkyCoord(ra=compFile[j][0] * u.degree,
                                                  dec=compFile[j][1] *
                                                  u.degree)
                        idx, d2d, d3d = matchCoord.match_to_catalog_sky(
                            fileRaDec)
                        tempList = np.append(tempList,
                                             photFileArray[imgs][idx][4])

                    outputPhot.append(tempList)
                    fileCount.append(allCounts)
                    allcountscount = allcountscount + 1

                else:
                    starErrorRejCount = starErrorRejCount + 1
                    starRejected = 1
            else:
                starDistanceRejCount = starDistanceRejCount + 1
                starRejected = 1

            if (starRejected == 1):

                #templist is a temporary holder of the resulting file.
                tempList = photFileArray[imgs][idx, :]
                googFile = Path(fileList[imgs]).name
                tempList = np.append(
                    tempList, float(googFile.split("_")[5].replace("d", ".")))
                tempList = np.append(
                    tempList, float(googFile.split("_")[4].replace("a", ".")))
                tempList = np.append(tempList,
                                     allCountsArray[allcountscount][0])
                tempList = np.append(tempList,
                                     allCountsArray[allcountscount][1])

                #Differential Magnitude
                tempList = np.append(tempList, np.nan)
                tempList = np.append(tempList, np.nan)
                tempList = np.append(tempList, photFileArray[imgs][idx][4])
                tempList = np.append(tempList, photFileArray[imgs][idx][5])

                if (compFile.shape[0] == 5
                        and compFile.size == 5) or (compFile.shape[0] == 3
                                                    and compFile.size == 3):
                    loopLength = 1
                else:
                    loopLength = compFile.shape[0]
                #logger.debug(compFile.shape[0])
                #sys.exit()
                for j in range(loopLength):
                    if compFile.size == 2 or (compFile.shape[0] == 3
                                              and compFile.size == 3) or (
                                                  compFile.shape[0] == 5
                                                  and compFile.size == 5):
                        matchCoord = SkyCoord(ra=compFile[0] * u.degree,
                                              dec=compFile[1] * u.degree)
                    else:
                        matchCoord = SkyCoord(ra=compFile[j][0] * u.degree,
                                              dec=compFile[j][1] * u.degree)
                    idx, d2d, d3d = matchCoord.match_to_catalog_sky(fileRaDec)
                    tempList = np.append(tempList, photFileArray[imgs][idx][4])
                outputPhot.append(tempList)
                fileCount.append(allCounts)
                allcountscount = allcountscount + 1

        # Check for dud images
        imageReject = []
        for j in range(np.asarray(outputPhot).shape[0]):
            if np.isnan(outputPhot[j][11]):
                imageReject.append(j)
        outputPhot = np.delete(outputPhot, imageReject, axis=0)

        ## REMOVE MAJOR OUTLIERS FROM CONSIDERATION
        stdVar = np.nanstd(np.asarray(outputPhot)[:, 10])
        avgVar = np.nanmean(np.asarray(outputPhot)[:, 10])
        starReject = []
        stdevReject = 0
        for j in range(np.asarray(outputPhot).shape[0]):
            if outputPhot[j][10] > avgVar + (
                    4 * stdVar) or outputPhot[j][10] < avgVar - (4 * stdVar):
                starReject.append(j)
                stdevReject = stdevReject + 1

        logger.info("Rejected Stdev Measurements: : {}".format(stdevReject))
        logger.info(
            "Rejected Error Measurements: : {}".format(starErrorRejCount))
        logger.info("Rejected Distance Measurements: : {}".format(
            starDistanceRejCount))
        logger.info("Variability of Comparisons")
        logger.info("Average : {}".format(avgVar))
        logger.info("Stdev   : {}".format(stdVar))

        outputPhot = np.delete(outputPhot, starReject, axis=0)
        if outputPhot.shape[0] > 2:
            np.savetxt(os.path.join(paths['outcatPath'],
                                    "doerPhot_V" + str(q + 1) + ".csv"),
                       outputPhot,
                       delimiter=",",
                       fmt='%0.8f')
            logger.debug('Saved doerPhot_V')
        else:
            raise AstrosourceException("Photometry not possible")
        return outputPhot
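
The 1.0857 factor that appears in the error terms above is 2.5/ln(10); it converts a fractional count error into a magnitude error, and the target and ensemble errors are then combined in quadrature. A quick check with illustrative numbers:

import math

# sigma_mag is approximately (2.5 / ln 10) * sigma_counts / counts for small fractional errors
print(2.5 / math.log(10))  # ~1.0857

# magErrTotal above is the quadrature sum of the target and ensemble errors
mag_err_var, mag_err_ens = 0.012, 0.004  # illustrative values only
print(math.sqrt(mag_err_var ** 2 + mag_err_ens ** 2))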
Example #12
def plot_bls(paths, startPeriod=0.1, endPeriod=3.0, nf=1000, nb=200, qmi=0.01, qma=0.1):
    '''
     Runs a box-least-squares (BLS) period search on each differential photometry
     catalogue found in paths['outcatPath'] and plots the periodogram and the
     folded light curve.

     Input parameters:
     ~~~~~~~~~~~~~~~~~
     paths       = dict of Path objects; paths['parent'] and paths['outcatPath'] are used
     startPeriod = shortest trial period (days)
     endPeriod   = longest trial period (days)
     nf          = number of trial periods at which the spectrum is computed
     nb          = number of bins in the folded time series at any trial period
     qmi         = minimum fractional transit length to be tested
     qma         = maximum fractional transit length to be tested
    '''
    # Get list of phot files
    trimPath = paths['parent'] / "trimcats"
    eelbsPath = paths['parent'] / "eelbs"
    # check directory structure
    if not trimPath.exists():
        os.makedirs(trimPath)
    if not eelbsPath.exists():
        os.makedirs(eelbsPath)
    fileList = paths['outcatPath'].glob('*diffExcel*csv')
    r=0
    # calculate period range
    fmin = 1/endPeriod
    fmax = 1/startPeriod
    df = (fmax-fmin)/nf
    dp = (endPeriod-startPeriod)/nf
    for file in fileList:
        photFile = np.genfromtxt(file, dtype=float, delimiter=',')
        logger.debug('**********************')
        logger.debug('Testing: ' + str(file))
        t = photFile[:,0]
        f = photFile[:,1]
        res = bls(t, f, qmi, qma, fmin, df, nf, nb, startPeriod, dp)
        if not res:
            raise AstrosourceException("BLS fit failed")
        else: # If it did not fail, then do the rest.
            logger.debug("Best SR: ", res[0], "\nIngress: ", res[1], "\nEgress: ", res[2], "\nq: ", res[3], \
        "\nDepth: ", res[4], "\nPeriod: ", res[5], "\nSDE: ", res[6])
            t1 = t[0]
            u = t - t1
            s = np.mean(f)
            v = f - s
            f0 = 1.0/res[5] #  freq = 1/T
            nbin = nb # number of bin
            n = len(t)
            ibi = np.zeros(nbin)
            y = np.zeros(nbin)
            phase = np.linspace(0.0, 1.0, nbin)
            for i in range(n):
                ph = u[i]*f0
                ph = ph - int(ph)
                j = int(nbin*ph) # data to a bin
                ibi[j] += 1.0 # number of data in a bin
                y[j] = y[j] + v[i] # sum of light in a bin

            plt.figure(figsize=(15,6))

            powerPeriod=np.asarray(res[10])
            plt.subplot(1, 2, 1)
            plt.plot(powerPeriod[:,0], powerPeriod[:,1], 'r.')

            plt.title("EELBS Period Trials")
            plt.xlabel(r"Trialled Period")
            plt.ylabel(r"Likelihood")

            plt.subplot(1, 2, 2)
            plt.plot(phase, np.divide(y, ibi, out=np.zeros_like(y), where=ibi!=0), 'r.')
            fite = np.zeros(nbin) + res[8] # H
            fite[res[1]:res[2]+1] = res[9] # L
            plt.plot(phase, fite)
            plt.gca().invert_yaxis()
            plt.title("\nDepth: "+ str(-res[4]) + "     " + "Period: {0} d  bin: {1}".format(1/f0, nbin))
            plt.xlabel(r"Phase ($\phi$)")
            plt.ylabel(r"Mean value of $x(\phi)$ in a bin")
            plt.tight_layout()
            filebase = str(file).split("/")[-1].split("\\")[-1].replace(".csv","").replace("_calibExcel","")
            plot_filename = "{}_EELBS_Plot.png".format(filebase)
            plt.savefig(eelbsPath / plot_filename)

            logger.info("Saved {}".format(plot_filename))
            plt.clf()
            # Write text file
            texFileName=eelbsPath / '{}_EELBS_Statistics.txt'.format(filebase)
            logger.info("Saved {}".format(texFileName))
            with open(texFileName, "w") as f:
                f.write("Best SR: " +str(res[0])+"\n")
                f.write("Ingress: " + str(res[1])+"\n")
                f.write("Egress: "+ str(res[2])+"\n")
                f.write("nq: "+ str(res[3])+"\n")
                f.write("Depth: "+ str(-res[4])+"\n")
                f.write("Period: "+ str(res[5])+"\n")
                f.write("SDE: "+ str(res[6])+"\n")
    return plot_filename, texFileName
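
A hedged usage sketch for plot_bls; the directory layout in the paths dict is an assumption about how the caller sets things up.

from pathlib import Path

# Hypothetical directories; plot_bls only needs paths['parent'] and paths['outcatPath'].
paths = {'parent': Path('/data/my_target'),
         'outcatPath': Path('/data/my_target/outputcats')}
plot_file, stats_file = plot_bls(paths, startPeriod=0.1, endPeriod=3.0,
                                 nf=1000, nb=200, qmi=0.01, qma=0.1)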
Example #13
def bls(t, x, qmi, qma, fmin, df, nf, nb, startPeriod, dp):
    """First trial, BLS algorithm, only minor modification from author's code
     Output parameters:
     ~~~~~~~~~~~~~~~~~~
     p    = array {p(i)}, containing the values of the BLS spectrum
            at the i-th frequency value -- the frequency values are
            computed as  f = fmin + (i-1)*df
     bper = period at the highest peak in the frequency spectrum
     bpow = value of {p(i)} at the highest peak
     depth= depth of the transit at   *bper*
     qtran= fractional transit length  [ T_transit/bper ]
     in1  = bin index at the start of the transit [ 0 < in1 < nb+1 ]
     in2  = bin index at the end   of the transit [ 0 < in2 < nb+1 ]
     Remarks:
     ~~~~~~~~
     -- *fmin* MUST be greater than  *1/total time span*
     -- *nb*   MUST be lower than  *nbmax*
     -- Dimensions of arrays {y(i)} and {ibi(i)} MUST be greater than
        or equal to  *nbmax*.
     -- The lowest number of points allowed in a single bin is equal
        to   MAX(minbin,qmi*N),  where   *qmi*  is the minimum transit
        length/trial period,   *N*  is the total number of data points,
        *minbin*  is the preset minimum number of the data points per
        bin.
    """
    n = len(t)
    rn = len(x)
    #! use try
    if n != rn:
        raise AstrosourceException("Different size of array, t and x")
    rn = float(rn) # float of n
    minbin = 5
    nbmax = 2000
    if nb > nbmax:
        raise AstrosourceException("Error: NB > NBMAX!")
    tot = t[-1] - t[0] # total time span
    if fmin < 1.0/tot:
        raise AstrosourceException("Error: fmin < 1/T")
    # parameters in binning (after folding)
    kmi = int(qmi*nb) # nb is number of bin -> a single period
    if kmi < 1:
        kmi = 1
    kma = int(qma*nb) + 1
    kkmi = rn*qmi # to check the bin size
    if kkmi < minbin:
        kkmi = minbin
    # For the extension of arrays (edge effect: transit happen at the edge of data set)
    nb1 = nb + 1
    nbkma = nb + kma
    # Data centering
    t1 = t[0]
    u = t - t1
    s = np.median(x) # ! Modified
    v = x - s
    bpow = 0.0
    p = np.zeros(nf)
    # setup array for power vs period plot
    powerPeriod=[]
    # Start period search
    for jf in range(nf):
        #f0 = fmin + df*jf # iteration in frequency not period
        #p0 = 1.0/f0
        # Actually iterate in period
        p0 = startPeriod + dp*jf
        f0 = 1.0/p0
        # Compute folded time series with p0 period
        ibi = np.zeros(nbkma)
        y = np.zeros(nbkma)
        # Median version
        # One row per phase bin (nb+1 rows are read back below); NaN-filled so
        # that empty bins stay NaN when the median is taken
        yMedian = np.full((nb + 1, n), np.nan)
        for i in range(n):
            ph = u[i]*f0 # instead of t mod P, he use t*f then calculate the phase (less computation)
            ph = ph - int(ph)
            j = int(nb*ph) # data to a bin
            ibi[j] = ibi[j] + 1 # number of data in a bin
            y[j] = y[j] + v[i] # sum of light in a bin
            yMedian[j][i]=v[i]
        # Repopulate y[j] and ibi[j] with the median value
        for i in range(nb+1):
            #logger.debug(i)
            ibi[i]=1
            y[i]=np.nanmedian(yMedian[i,:])
        # Extend the arrays  ibi()  and  y() beyond nb by wrapping
        for j in range(nb1, nbkma):
            jnb = j - nb
            ibi[j] = ibi[jnb]
            y[j] = y[jnb]
        # Compute BLS statictics for this trial period
        power = 0.0
        for i in range(nb): # shift the test period
            s = 0.0
            k = 0
            kk = 0
            nb2 = i + kma
            # change the size of test period (from kmi to kma)
            for j in range(i, nb2):
                k = k + 1
                kk = kk + ibi[j]
                s = s + y[j]
                if k < kmi: continue # only calculate SR for test period > kmi
                if kk < kkmi: continue #
                rn1 = float(kk)
                powo = s*s/(rn1*(rn - rn1))
                if powo > power: # save maximum SR in a test period
                    power = powo # SR value
                    jn1 = i #
                    jn2 = j
                    rn3 = rn1
                    s3 = s
        power = np.sqrt(power)
        p[jf] = power
        powerPeriod.append([p0,power])
        if power > bpow:
            # Skip trial periods close to one-day resonances and their common aliases
            if not ((0.95 < p0 < 1.05) or (1.95 < p0 < 2.05) or (2.98 < p0 < 3.02)
                    or (3.32 < p0 < 3.34) or (3.64 < p0 < 3.68) or (6.65 < p0 < 6.67)):
                bpow = power # Save the absolute maximum of SR
                in1 = jn1
                in2 = jn2
                qtran = rn3/rn
                # depth = -s3*rn/(rn3*(rn - rn3))
                # ! Modified
                high = -s3/(rn - rn3)
                low = s3/rn3
                depth = high - low
                bper = p0

    sde = (bpow - np.mean(p))/np.std(p) # signal detection efficiency
    return bpow, in1, in2, qtran, depth, bper, sde, p, high, low, powerPeriod
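
A sketch of calling bls directly on synthetic data with an injected box-shaped transit; all numbers are illustrative, and the frequency/period steps are derived the same way plot_bls derives them.

import numpy as np

np.random.seed(42)
t = np.sort(np.random.uniform(0.0, 10.0, 500))           # ten nights of observations
f = 1.0 + np.random.normal(0.0, 0.002, t.size)           # flat light curve with noise
phase = (t / 1.2345) % 1.0
f[(phase > 0.45) & (phase < 0.50)] -= 0.01                # inject a 1% deep transit

startPeriod, endPeriod, nf, nb = 0.5, 3.0, 1000, 200
fmin = 1.0 / endPeriod
df = (1.0 / startPeriod - fmin) / nf
dp = (endPeriod - startPeriod) / nf
res = bls(t, f, 0.01, 0.1, fmin, df, nf, nb, startPeriod, dp)
print("Best period: {:.4f} d  SDE: {:.2f}".format(res[5], res[6]))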
Example #14
def find_comparisons_calibrated(filterCode,
                                paths=None,
                                max_magerr=0.05,
                                stdMultiplier=2,
                                variabilityMultiplier=2,
                                panStarrsInstead=False):

    parentPath = paths['parent']
    calibPath = parentPath / "calibcats"
    if not calibPath.exists():
        os.makedirs(calibPath)

    Vizier.ROW_LIMIT = -1
    max_sep = 1.0 * u.arcsec

    # Get List of Files Used
    fileList = []
    for line in (parentPath /
                 "usedImages.txt").read_text().strip().split('\n'):
        fileList.append(line.strip())

    logger.debug("Filter Set: " + filterCode)

    # Load compsused
    compFile = np.genfromtxt(parentPath / 'stdComps.csv',
                             dtype=float,
                             delimiter=',')
    logger.debug(compFile.shape[0])

    if compFile.shape[0] == 13:
        compCoords = SkyCoord(ra=compFile[0] * u.degree,
                              dec=compFile[1] * u.degree)
    else:
        compCoords = SkyCoord(ra=compFile[:, 0] * u.degree,
                              dec=compFile[:, 1] * u.degree)

    # Get Average RA and Dec from file
    if compFile.shape[0] == 13:
        logger.debug(compFile[0])
        logger.debug(compFile[1])
        avgCoord = SkyCoord(ra=(compFile[0]) * u.degree,
                            dec=(compFile[1] * u.degree))

    else:
        logger.debug(np.average(compFile[:, 0]))
        logger.debug(np.average(compFile[:, 1]))
        avgCoord = SkyCoord(ra=(np.average(compFile[:, 0])) * u.degree,
                            dec=(np.average(compFile[:, 1])) * u.degree)

    # get results from internetz

    if filterCode == 'B' or filterCode == 'V':
        #collect APASS results
        apassResult = Vizier.query_region(avgCoord,
                                          '0.33 deg',
                                          catalog='APASS')['II/336/apass9']

        logger.debug(apassResult)
        apassResult = apassResult.to_pandas()

        logger.debug(apassResult.keys())

        apassSearchResult = apassResult[[
            'RAJ2000', 'DEJ2000', 'Bmag', 'e_Bmag', 'Vmag', 'e_Vmag'
        ]].to_numpy()

        logger.debug(apassSearchResult)

        raCat = apassSearchResult[:, 0]
        logger.debug(raCat)
        decCat = apassSearchResult[:, 1]
        logger.debug(decCat)
        if filterCode == 'B':
            magCat = apassSearchResult[:, 2]
            logger.debug(magCat)
            emagCat = apassSearchResult[:, 3]
            logger.debug(emagCat)

        if filterCode == 'V':
            magCat = apassSearchResult[:, 4]
            logger.debug(magCat)
            emagCat = apassSearchResult[:, 5]
            logger.debug(emagCat)

    elif filterCode in ['up', 'gp', 'rp', 'ip', 'zs']:
        # Are there entries in SDSS?
        sdssResult = SDSS.query_region(avgCoord, '0.33 deg')
        #print(sdssResult)
        sdssFind = 1

        # If not in SDSS, try Skymapper
        if sdssResult is None:
            sdssFind = 0
            logger.debug("Not found in SDSS, must be in the South.")
            #logger.debug(ConeSearch.URL)
            ConeSearch.URL = 'http://skymapper.anu.edu.au/sm-cone/aus/query?'
            sdssResult = ConeSearch.query_region(avgCoord, '0.33 deg')

            logger.debug(sdssResult)

            searchResult = sdssResult.to_table().to_pandas()

            logger.debug(searchResult)

            sdssSearchResult = searchResult[[
                'raj2000', 'dej2000', 'u_psf', 'e_u_psf', 'g_psf', 'e_g_psf',
                'r_psf', 'e_r_psf', 'i_psf', 'e_i_psf', 'z_psf', 'e_z_psf'
            ]].to_numpy()

            logger.debug(sdssSearchResult[:, 0])

            raCat = sdssSearchResult[:, 0]
            logger.debug(raCat)
            decCat = sdssSearchResult[:, 1]
            logger.debug(decCat)
            if filterCode == 'up':
                magCat = sdssSearchResult[:, 2]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 3]
                logger.debug(emagCat)
            if filterCode == 'gp':
                magCat = sdssSearchResult[:, 4]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 5]
                logger.debug(emagCat)
            if filterCode == 'rp':
                magCat = sdssSearchResult[:, 6]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 7]
                logger.debug(emagCat)
            if filterCode == 'ip':
                magCat = sdssSearchResult[:, 8]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 9]
                logger.debug(emagCat)
            if filterCode == 'zs':
                magCat = sdssSearchResult[:, 10]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 11]
                logger.debug(emagCat)

        elif panStarrsInstead and filterCode != 'up':
            logger.debug("Panstarrs!")

            sdssResult = Vizier.query_region(avgCoord,
                                             '0.33 deg',
                                             catalog='PanStarrs')['II/349/ps1']
            logger.debug(sdssResult)
            logger.debug(sdssResult.keys())

            searchResult = sdssResult.to_pandas()

            logger.debug(searchResult)

            sdssSearchResult = searchResult[[
                'RAJ2000', 'DEJ2000', 'gmag', 'e_gmag', 'gmag', 'e_gmag',
                'rmag', 'e_rmag', 'imag', 'e_imag', 'zmag', 'e_zmag'
            ]].to_numpy()

            logger.debug(sdssSearchResult[:, 0])

            raCat = sdssSearchResult[:, 0]
            logger.debug(raCat)
            decCat = sdssSearchResult[:, 1]
            logger.debug(decCat)
            if filterCode == 'up':
                magCat = sdssSearchResult[:, 2]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 3]
                logger.debug(emagCat)
            if filterCode == 'gp':
                magCat = sdssSearchResult[:, 4]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 5]
                logger.debug(emagCat)
            if filterCode == 'rp':
                magCat = sdssSearchResult[:, 6]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 7]
                logger.debug(emagCat)
            if filterCode == 'ip':
                magCat = sdssSearchResult[:, 8]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 9]
                logger.debug(emagCat)
            if filterCode == 'zs':
                magCat = sdssSearchResult[:, 10]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 11]
                logger.debug(emagCat)

        else:
            logger.debug("goodo lets do the sdss stuff then.")
            sdssResult = Vizier.query_region(avgCoord,
                                             '0.33 deg',
                                             catalog='SDSS')['V/147/sdss12']
            logger.debug(sdssResult)
            logger.debug(sdssResult.keys())

            searchResult = sdssResult.to_pandas()

            logger.debug(searchResult)

            sdssSearchResult = searchResult[[
                'RA_ICRS', 'DE_ICRS', 'umag', 'e_umag', 'gmag', 'e_gmag',
                'rmag', 'e_rmag', 'imag', 'e_imag', 'zmag', 'e_zmag'
            ]].to_numpy()

            logger.debug(sdssSearchResult[:, 0])

            raCat = sdssSearchResult[:, 0]
            logger.debug(raCat)
            decCat = sdssSearchResult[:, 1]
            logger.debug(decCat)
            if filterCode == 'up':
                magCat = sdssSearchResult[:, 2]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 3]
                logger.debug(emagCat)
            if filterCode == 'gp':
                magCat = sdssSearchResult[:, 4]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 5]
                logger.debug(emagCat)
            if filterCode == 'rp':
                magCat = sdssSearchResult[:, 6]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 7]
                logger.debug(emagCat)
            if filterCode == 'ip':
                magCat = sdssSearchResult[:, 8]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 9]
                logger.debug(emagCat)
            if filterCode == 'zs':
                magCat = sdssSearchResult[:, 10]
                logger.debug(magCat)
                emagCat = sdssSearchResult[:, 11]
                logger.debug(emagCat)

    #Setup standard catalogue coordinates
    catCoords = SkyCoord(ra=raCat * u.degree, dec=decCat * u.degree)

    # Get catalogue magnitudes for the least variable IDENTIFIED stars, not necessarily the
    # stars in compsUsed.csv, since the brightest, least variable stars may be too bright for
    # calibration. These are the stars used to calibrate each frame so that the OTHER stars
    # can then be measured.
    calibStands = []
    if compFile.shape[0] == 13:
        lenloop = 1
    else:
        lenloop = len(compFile[:, 0])
    for q in range(lenloop):
        if compFile.shape[0] == 13:
            compCoord = SkyCoord(ra=compFile[0] * u.degree,
                                 dec=compFile[1] * u.degree)
        else:
            compCoord = SkyCoord(ra=compFile[q][0] * u.degree,
                                 dec=compFile[q][1] * u.degree)
        idxcomp, d2dcomp, d3dcomp = compCoord.match_to_catalog_sky(catCoords)

        if d2dcomp < max_sep:
            if not np.isnan(magCat[idxcomp]):
                #logger.debug(idxcomp)
                #logger.debug(d2dcomp)
                #logger.debug(magCat[idxcomp])
                #logger.debug(emagCat[idxcomp])

                if compFile.shape[0] == 13:
                    calibStands.append([
                        compFile[0], compFile[1], compFile[2], magCat[idxcomp],
                        emagCat[idxcomp]
                    ])
                else:
                    calibStands.append([
                        compFile[q][0], compFile[q][1], compFile[q][2],
                        magCat[idxcomp], emagCat[idxcomp]
                    ])

    # Get the set of least variable stars to use as a comparison to calibrate the files (to eventually get the *ACTUAL* standards)
    #logger.debug(np.asarray(calibStands).shape[0])
    if np.asarray(calibStands).shape[0] == 0:
        logger.info(
            "We could not find a suitable match between any of your stars and the calibration catalogue"
        )
        logger.info(
            "You might need to reduce the low value (usually 10000) to get some dimmer stars in script 1"
        )
        raise AstrosourceException(
            "Perhaps try 5000 then 1000. You are trying to find dim stars to calibrate to."
        )

    varimin = (np.min(np.asarray(calibStands)[:, 2])) * variabilityMultiplier

    logger.debug("varimin")
    #logger.debug(np.asarray(calibStands)[:,2])
    logger.debug(varimin)

    calibStandsReject = []
    for q in range(len(np.asarray(calibStands)[:, 0])):
        if calibStands[q][2] > varimin:
            calibStandsReject.append(q)
            #logger.debug(calibStands[q][2])

    calibStands = np.delete(calibStands, calibStandsReject, axis=0)

    calibStand = np.asarray(calibStands)

    np.savetxt(parentPath / "calibStands.csv",
               calibStands,
               delimiter=",",
               fmt='%0.8f')
    # Lets use this set to calibrate each datafile and pull out the calibrated compsused magnitudes
    compUsedFile = np.genfromtxt(parentPath / 'compsUsed.csv',
                                 dtype=float,
                                 delimiter=',')

    calibCompUsed = []

    logger.debug("CALIBRATING EACH FILE")
    for file in fileList:

        logger.debug(file)

        #Get the phot file into memory
        photFile = np.genfromtxt(parentPath / file, dtype=float, delimiter=',')
        photCoords = SkyCoord(ra=photFile[:, 0] * u.degree,
                              dec=photFile[:, 1] * u.degree)

        #Convert the phot file into instrumental magnitudes
        for r in range(len(photFile[:, 0])):
            photFile[r, 5] = 1.0857 * (photFile[r, 5] / photFile[r, 4])
            photFile[r, 4] = -2.5 * np.log10(photFile[r, 4])

        #Pull out the CalibStands out of each file
        tempDiff = []
        for q in range(len(calibStands[:, 0])):
            calibCoord = SkyCoord(ra=calibStand[q][0] * u.degree,
                                  dec=calibStand[q][1] * u.degree)
            idx, d2d, d3d = calibCoord.match_to_catalog_sky(photCoords)
            tempDiff.append(calibStand[q, 3] - photFile[idx, 4])

        #logger.debug(tempDiff)
        tempZP = (np.median(tempDiff))
        #logger.debug(np.std(tempDiff))

        #Shift the magnitudes in the phot file by the zeropoint
        for r in range(len(photFile[:, 0])):
            photFile[r, 4] = photFile[r, 4] + tempZP

        file = Path(file)
        #Save the calibrated photfiles to the calib directory
        np.savetxt(calibPath /
                   "{}.calibrated.{}".format(file.stem, file.suffix),
                   photFile,
                   delimiter=",",
                   fmt='%0.8f')

        #Look within photfile for ACTUAL usedcomps.csv and pull them out
        lineCompUsed = []
        if compUsedFile.shape[0] == 3 and compUsedFile.size == 3:
            lenloop = 1
        else:
            lenloop = len(compUsedFile[:, 0])

        #logger.debug(compUsedFile.size)
        for r in range(lenloop):
            if compUsedFile.shape[0] == 3 and compUsedFile.size == 3:
                compUsedCoord = SkyCoord(ra=compUsedFile[0] * u.degree,
                                         dec=compUsedFile[1] * u.degree)
            else:
                compUsedCoord = SkyCoord(ra=compUsedFile[r][0] * u.degree,
                                         dec=compUsedFile[r][1] * u.degree)
            idx, d2d, d3d = compUsedCoord.match_to_catalog_sky(photCoords)
            lineCompUsed.append(photFile[idx, 4])

        #logger.debug(lineCompUsed)
        calibCompUsed.append(lineCompUsed)

    # Finalise calibcompsusedfile
    #logger.debug(calibCompUsed)

    calibCompUsed = np.asarray(calibCompUsed)
    #logger.debug(calibCompUsed[0,:])

    finalCompUsedFile = []
    sumStd = []
    for r in range(len(calibCompUsed[0, :])):
        #Calculate magnitude and stdev
        #logger.debug(calibCompUsed[:,r])
        #logger.debug(np.median(calibCompUsed[:,r]))
        #logger.debug(np.std(calibCompUsed[:,r]))
        #sumStd=sumStd+np.std(calibCompUsed[:,r])
        sumStd.append(np.std(calibCompUsed[:, r]))
        #logger.debug(calibCompUsed[:,r])
        #logger.debug(np.std(calibCompUsed[:,r]))
        if compUsedFile.shape[0] == 3 and compUsedFile.size == 3:
            finalCompUsedFile.append([
                compUsedFile[0], compUsedFile[1], compUsedFile[2],
                np.median(calibCompUsed[:, r]),
                np.asarray(calibStands[0])[4]
            ])
        else:
            finalCompUsedFile.append([
                compUsedFile[r][0], compUsedFile[r][1], compUsedFile[r][2],
                np.median(calibCompUsed[:, r]),
                np.std(calibCompUsed[:, r])
            ])

    #logger.debug(finalCompUsedFile)
    logger.debug(" ")
    sumStd = np.asarray(sumStd)

    errCalib = np.median(sumStd) / pow((len(calibCompUsed[0, :])), 0.5)

    #logger.debug(len(calibCompUsed[0,:]))
    if len(calibCompUsed[0, :]) == 1:
        logger.debug(
            "As you only have one comparison, the uncertainty in the calibration is unclear"
        )
        logger.debug(
            "But we can take the catalogue value, although we should say this is a lower uncertainty"
        )
        logger.debug("Error/Uncertainty in Calibration: " +
                     str(np.asarray(calibStands[0])[4]))
    else:
        logger.debug("Median Standard Deviation of any one star: " +
                     str(np.median(sumStd)))
        logger.debug("Standard Error/Uncertainty in Calibration: " +
                     str(errCalib))

    with open(parentPath / "calibrationErrors.txt", "w") as f:
        f.write("Median Standard Deviation of any one star: " +
                str(np.median(sumStd)) + "\n")
        f.write("Standard Error/Uncertainty in Calibration: " + str(errCalib))

    #logger.debug(finalCompUsedFile)
    compFile = np.asarray(finalCompUsedFile)
    np.savetxt(parentPath / "calibCompsUsed.csv",
               compFile,
               delimiter=",",
               fmt='%0.8f')
    return compFile
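
The per-frame calibration above reduces to a zero-point shift: instrumental magnitudes (-2.5*log10(counts)) are offset by the median difference between catalogue and instrumental magnitudes of the calibration stars. A toy illustration with made-up numbers:

import numpy as np

cat_mags = np.array([12.10, 13.45, 14.02])         # catalogue magnitudes of calibration stars
counts = np.array([254000.0, 73500.0, 43400.0])    # measured counts of the same stars in one frame
inst_mags = -2.5 * np.log10(counts)

zero_point = np.median(cat_mags - inst_mags)       # plays the role of tempZP above
calibrated = inst_mags + zero_point
print(zero_point, calibrated)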
Example #15
def remove_targets(parentPath, compFile, acceptDistance):
    max_sep = acceptDistance * u.arcsec
    logger.info("Removing Target Stars from potential Comparisons")
    targetFile = np.genfromtxt(parentPath / 'targetstars.csv',
                               dtype=float,
                               delimiter=',')
    fileRaDec = SkyCoord(ra=compFile[:, 0] * u.degree,
                         dec=compFile[:, 1] * u.degree)
    # Remove any nan rows from targetFile
    targetRejecter = []
    if not (targetFile.shape[0] == 4 and targetFile.size == 4):
        for z in range(targetFile.shape[0]):
            if np.isnan(targetFile[z][0]):
                targetRejecter.append(z)
        targetFile = np.delete(targetFile, targetRejecter, axis=0)

    # Remove targets from consideration
    if len(targetFile) == 4:
        loopLength = 1
    else:
        loopLength = targetFile.shape[0]
    targetRejects = []
    tg_file_len = len(targetFile)
    for tf in targetFile:
        if tg_file_len == 4:
            varCoord = SkyCoord(targetFile[0], (targetFile[1]),
                                frame='icrs',
                                unit=u.deg)
        else:
            varCoord = SkyCoord(
                tf[0], (tf[1]), frame='icrs',
                unit=u.deg)  # Need to remove target stars from consideration
        idx, d2d, _ = varCoord.match_to_catalog_sky(fileRaDec)
        if d2d.arcsecond < acceptDistance:
            targetRejects.append(idx)
        if tg_file_len == 4:
            break
    # Remove every matched target from the comparison list, not just the last match
    compFile = np.delete(compFile, targetRejects, axis=0)

    # Get Average RA and Dec from file
    if compFile.shape[0] == 13:
        logger.debug(compFile[0])
        logger.debug(compFile[1])
        avgCoord = SkyCoord(ra=(compFile[0]) * u.degree,
                            dec=(compFile[1] * u.degree))

    else:
        logger.debug(np.average(compFile[:, 0]))
        logger.debug(np.average(compFile[:, 1]))
        avgCoord = SkyCoord(ra=(np.average(compFile[:, 0])) * u.degree,
                            dec=(np.average(compFile[:, 1])) * u.degree)

    # Check VSX for any known variable stars and remove them from the list
    variableResult = Vizier.query_region(avgCoord, '0.33 deg',
                                         catalog='VSX')['B/vsx/vsx']

    logger.debug(variableResult)

    variableResult = variableResult.to_pandas()

    logger.debug(variableResult.keys())

    variableSearchResult = variableResult[['RAJ2000', 'DEJ2000']].to_numpy()

    raCat = variableSearchResult[:, 0]
    logger.debug(raCat)
    decCat = variableSearchResult[:, 1]
    logger.debug(decCat)

    varStarReject = []
    for t in range(raCat.size):
        logger.debug(raCat[t])
        compCoord = SkyCoord(ra=raCat[t] * u.degree, dec=decCat[t] * u.degree)
        logger.debug(compCoord)
        catCoords = SkyCoord(ra=compFile[:, 0] * u.degree,
                             dec=compFile[:, 1] * u.degree)
        idxcomp, d2dcomp, d3dcomp = compCoord.match_to_catalog_sky(catCoords)
        logger.debug(d2dcomp)
        if d2dcomp < max_sep:
            logger.debug("match!")
            varStarReject.append(t)
        else:
            logger.debug("no match!")

    logger.debug("Number of stars prior to VSX reject")
    logger.debug(compFile.shape[0])
    compFile = np.delete(compFile, varStarReject, axis=0)
    logger.debug("Number of stars post to VSX reject")
    logger.debug(compFile.shape[0])

    if (compFile.shape[0] == 1):
        compFile = [[compFile[0][0], compFile[0][1], 0.01]]
        compFile = np.asarray(compFile)
        np.savetxt(parentPath / "compsUsed.csv",
                   compFile,
                   delimiter=",",
                   fmt='%0.8f')
        sortStars = [[
            compFile[0][0], compFile[0][1], 0.01, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
            0.0, 0.0, 0.0, 0.0
        ]]
        sortStars = np.asarray(sortStars)
        np.savetxt("stdComps.csv", sortStars, delimiter=",", fmt='%0.8f')
        raise AstrosourceException(
            "Looks like you have a single comparison star!")
    return compFile
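
A hedged usage sketch for remove_targets; the working directory and the use of screenedComps.csv as input are assumptions about how the surrounding pipeline calls it.

import numpy as np
from pathlib import Path

parent = Path('/data/my_target')                   # hypothetical working directory
comps = np.genfromtxt(parent / 'screenedComps.csv', dtype=float, delimiter=',')
comps = remove_targets(parent, comps, acceptDistance=5.0)
print("{} comparison candidates remain".format(comps.shape[0]))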
Example #16
def photometric_calculations(targets, paths, acceptDistance=5.0, errorReject=0.5, filesave=True):
    fileCount=[]
    photometrydata = []
    sys.stdout.write('🖥 Starting photometric calculations\n')

    photFileArray,fileList = photometry_files_to_array(paths['parent'])

    if (paths['parent'] / 'calibCompsUsed.csv').exists():
        logger.debug("Calibrated")
        compFile=genfromtxt(paths['parent'] / 'calibCompsUsed.csv', dtype=float, delimiter=',')
        calibFlag=1
    else:
        logger.debug("Differential")
        compFile=genfromtxt(paths['parent'] / 'compsUsed.csv', dtype=float, delimiter=',')
        calibFlag=0

    # Get total counts for each file
    if compFile.shape[0]== 5 and compFile.size ==5:
        loopLength=1
    else:
        loopLength=compFile.shape[0]
    allCountsArray = get_total_counts(photFileArray, compFile, loopLength)

    allcountscount=0

    if len(targets)== 4 and targets.size == 4:
        loopLength=1
    else:
        loopLength=targets.shape[0]
    # For each variable calculate all the things
    for q in range(loopLength):
        starErrorRejCount=0
        starDistanceRejCount=0
        logger.debug("****************************")
        logger.debug("Processing Variable {}".format(q+1))
        if int(len(targets)) == 4 and targets.size==4:
            logger.debug("RA {}".format(targets[0]))
        else:
            logger.debug("RA {}".format(targets[q][0]))
        if int(len(targets)) == 4 and targets.size==4:
            logger.debug("Dec {}".format(targets[1]))
        else:
            logger.debug("Dec {}".format(targets[q][1]))
        if int(len(targets)) == 4 and targets.size==4:
            varCoord = SkyCoord(targets[0],(targets[1]), frame='icrs', unit=degree) # Need to remove target stars from consideration
        else:
            varCoord = SkyCoord(targets[q][0],(targets[q][1]), frame='icrs', unit=degree) # Need to remove target stars from consideration

        # Grabbing variable rows
        logger.debug("Extracting and Measuring Differential Magnitude in each Photometry File")
        outputPhot=[] # new
        compArray=[]
        compList=[]
        allcountscount=0
        for imgs, photFile in enumerate(photFileArray):
            sys.stdout.write('.')
            compList=[]
            fileRaDec = SkyCoord(ra=photFile[:,0]*degree, dec=photFile[:,1]*degree)
            idx, d2d, _ = varCoord.match_to_catalog_sky(fileRaDec)
            starRejected=0
            if (less(d2d.arcsecond, acceptDistance)):
                magErrVar = 1.0857 * (photFile[idx][5]/photFile[idx][4])
                if magErrVar < errorReject:

                    magErrEns = 1.0857 * (allCountsArray[allcountscount][1]/allCountsArray[allcountscount][0])
                    magErrTotal = pow( pow(magErrVar,2) + pow(magErrEns,2),0.5)

                    #templist is a temporary holder of the resulting file.
                    tempList=photFile[idx,0:6]
                    # logger.debug(f"{tempList}")
                    googFile = Path(fileList[imgs]).name
                    tempList = append(tempList, float(googFile.split("_")[2].replace("d",".")))
                    tempList = append(tempList, float(googFile.split("_")[4].replace("a",".")))
                    tempList = append(tempList, allCountsArray[allcountscount][0])
                    tempList = append(tempList, allCountsArray[allcountscount][1])

                    #Differential Magnitude
                    tempList = append(tempList, 2.5 * log10(allCountsArray[allcountscount][0]/photFile[idx][4]))
                    tempList = append(tempList, magErrTotal)
                    tempList = append(tempList, photFile[idx][4])
                    tempList = append(tempList, photFile[idx][5])

                    if (compFile.shape[0]== 5 and compFile.size ==5) or (compFile.shape[0]== 3 and compFile.size ==3):
                        loopLength=1
                    else:
                        loopLength=compFile.shape[0]
                    for j in range(loopLength):
                        if compFile.size == 2 or (compFile.shape[0]== 3 and compFile.size ==3) or (compFile.shape[0]== 5 and compFile.size ==5):
                            matchCoord=SkyCoord(ra=compFile[0]*degree, dec=compFile[1]*degree)
                        else:
                            matchCoord=SkyCoord(ra=compFile[j][0]*degree, dec=compFile[j][1]*degree)
                        idx, d2d, d3d = matchCoord.match_to_catalog_sky(fileRaDec)
                        tempList=append(tempList, photFileArray[imgs][idx][4])
                    # logger.debug(f"{tempList}")
                    outputPhot.append(tempList)

                    fileCount.append(allCountsArray[allcountscount][0])
                    allcountscount=allcountscount+1

                else:
                    starErrorRejCount=starErrorRejCount+1
                    starRejected=1
            else:
                starDistanceRejCount=starDistanceRejCount+1
                starRejected=1
            if ( starRejected == 1):

                    #templist is a temporary holder of the resulting file.
                    tempList=photFileArray[imgs][idx,:]
                    googFile = Path(fileList[imgs]).name
                    tempList=append(tempList, float(googFile.split("_")[2].replace("d",".")))
                    tempList=append(tempList, float(googFile.split("_")[4].replace("a",".")))
                    tempList=append(tempList, allCountsArray[allcountscount][0])
                    tempList=append(tempList, allCountsArray[allcountscount][1])

                    #Differential Magnitude
                    tempList=append(tempList,nan)
                    tempList=append(tempList,nan)
                    tempList=append(tempList, photFileArray[imgs][idx][4])
                    tempList=append(tempList, photFileArray[imgs][idx][5])

                    if (compFile.shape[0]== 5 and compFile.size ==5) or (compFile.shape[0]== 3 and compFile.size ==3):
                        loopLength=1
                    else:
                        loopLength=compFile.shape[0]

                    for j in range(loopLength):
                        if compFile.size == 2 or (compFile.shape[0]== 3 and compFile.size ==3) or (compFile.shape[0]== 5 and compFile.size ==5):
                            matchCoord=SkyCoord(ra=compFile[0]*degree, dec=compFile[1]*degree)
                        else:
                            matchCoord=SkyCoord(ra=compFile[j][0]*degree, dec=compFile[j][1]*degree)
                        idx, d2d, d3d = matchCoord.match_to_catalog_sky(fileRaDec)
                        tempList=append(tempList, photFileArray[imgs][idx][4])
                    outputPhot.append(tempList)
                    fileCount.append(allCountsArray[allcountscount][0])
                    allcountscount=allcountscount+1

        # Check for dud images
        imageReject=[]
        for j in range(asarray(outputPhot).shape[0]):
            if isnan(outputPhot[j][11]):
                imageReject.append(j)
        outputPhot=delete(outputPhot, imageReject, axis=0)
        try:
            outputPhot=np.vstack(asarray(outputPhot))
        except ValueError:
            raise AstrosourceException("No target stars were detected in your dataset. Check your input target(s) RA/Dec")

        ## REMOVE MAJOR OUTLIERS FROM CONSIDERATION
        stdVar=nanstd((outputPhot)[:,10])
        avgVar=nanmean((outputPhot)[:,10])
        starReject=[]
        stdevReject=0
        for j in range(asarray(outputPhot).shape[0]):
            if outputPhot[j][10] > avgVar+(4*stdVar) or outputPhot[j][10] < avgVar-(4*stdVar) :
                starReject.append(j)
                stdevReject=stdevReject+1
        sys.stdout.write('\n')
        logger.info("Rejected Stdev Measurements: : {}".format(stdevReject))
        logger.info("Rejected Error Measurements: : {}".format(starErrorRejCount))
        logger.info("Rejected Distance Measurements: : {}".format(starDistanceRejCount))
        logger.info("Variability of Comparisons")
        logger.info("Average : {}".format(avgVar))
        logger.info("Stdev   : {}".format(stdVar))

        outputPhot=delete(outputPhot, starReject, axis=0)

        # Add calibration columns
        outputPhot= np.c_[outputPhot, np.ones(outputPhot.shape[0]),np.ones(outputPhot.shape[0])]

        if outputPhot.shape[0] > 2:
            savetxt(paths['outcatPath'] / f"doerPhot_V{q + 1}.csv", outputPhot, delimiter=",", fmt='%0.8f')
            logger.debug('Saved doerPhot_V')
        else:
            raise AstrosourceException("Photometry not possible")
        logger.debug(array(outputPhot).shape)

        photometrydata.append(outputPhot)
    # photometrydata = trim_catalogue(photometrydata)
    return photometrydata
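
# The routine above relies on differential magnitudes formed from the target's instrumental counts
# and the summed counts of the comparison ensemble. A minimal, self-contained sketch of that
# relation (illustrative only, not part of astrosource; names are assumptions):
from numpy import array, log10

def differential_magnitude(target_counts, comp_counts):
    """Magnitude of the target relative to the summed comparison-star counts."""
    ensemble = comp_counts.sum()
    return -2.5 * log10(target_counts / ensemble)

# Example: 12000 target counts against comparisons totalling 90000 counts
# gives roughly +2.19 mag, i.e. fainter than the combined ensemble.
print(differential_magnitude(12000.0, array([25000.0, 30000.0, 35000.0])))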
Example #17
0
def find_variable_stars(targets, acceptDistance=1.0, errorReject=0.05, parentPath=None):
    '''
    Identify candidate variable stars by measuring the scatter of each sufficiently bright star's
    differential photometry against the comparison-star ensemble.

    Parameters
    ----------
    targets : list
        List of target tuples in the format (ra, dec, 0, 0). ra and dec must be in decimal degrees.
    acceptDistance : float
        Maximum acceptable matching distance, in arcseconds, between the same star in different images.
    errorReject : float
        Reject measurements with instrumental errors larger than this (this is not the total error,
        just the estimated error in a single measurement of the variable).
    parentPath : Path
        Directory containing usedImages.txt, the photometry files and the comparison-star catalogues.

    Returns
    -------
    outputVariableHolder : list
        One entry per candidate: [ra, dec, median differential magnitude, standard deviation,
        number of observations]. The same table is written to starVariability.csv.
    '''
    minimumVariableCounts = 10000  # Do not try to detect variables dimmer than this.
    minimumNoOfObs = 10 # Minimum number of observations to count as a potential variable.

    # Load in list of used files
    fileList = []
    with open(parentPath / "usedImages.txt", "r") as f:
        for line in f:
            fileList.append(line.strip())

    # LOAD Phot FILES INTO LIST
    photFileArray = []
    for file in fileList:
        photFileArray.append(load(parentPath / file))

    if not photFileArray:
        raise AstrosourceException("No input files")

    # LOAD IN COMPARISON FILE
    preFile = genfromtxt(parentPath / 'stdComps.csv', dtype=float, delimiter=',')

    if preFile.shape[0] != 13:
        preFile = preFile[preFile[:, 2].argsort()]

    # GET REFERENCE IMAGE
    # Sort through and find the largest file and use that as the reference file
    fileSizer = 0
    logger.debug("Finding image with most stars detected")
    for photFile in photFileArray:
        if photFile.size > fileSizer:
            referenceFrame = photFile
            fileSizer = photFile.size

    compFile = genfromtxt(parentPath / "compsUsed.csv", dtype=float, delimiter=',')
    logger.debug("Stable Comparison Candidates below variability threshold")
    outputPhot = []

    # Get total counts for each file
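    # allCountsArray holds one entry per image: the summed counts of the comparison stars
    # (index 0) and, presumably, their combined uncertainty (index 1).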
    allCountsArray = get_total_counts(photFileArray, compFile, loopLength=compFile.shape[0])

    # Define targetlist as every star in referenceImage above a count threshold
    logger.debug("Setting up Variable Search List")
    targetFile = referenceFrame
    # ...but remove stars that are below the minimum count rate for variable detection
    starReject = []
    for q in range(targetFile.shape[0]):
        if targetFile[q][4] < minimumVariableCounts:
            starReject.append(q)
    logger.debug("Total number of stars in reference Frame: {}".format(targetFile.shape[0]))
    targetFile = delete(targetFile, starReject, axis=0)
    logger.debug("Total number of stars with sufficient counts: {}".format(targetFile.shape[0]))

    ## NEED TO REMOVE COMPARISON STARS FROM TARGETLIST

    allcountscount = 0
    # For each candidate star, calculate its variability
    outputVariableHolder = []
    q = 0
    for target in targetFile:
        q = q + 1
        logger.debug("*********************")
        logger.debug("Processing Target {}".format(str(q)))
        logger.debug("RA {}".format(target[0]))
        logger.debug("DEC {}".format(target[1]))
        varCoord = SkyCoord(target[0], target[1], frame='icrs', unit=degree)  # Need to remove target stars from consideration
        outputPhot = []
        compArray = []
        compList = []

        diffMagHolder = []

        allcountscount = 0

        for photFile in photFileArray:
            compList = []
            fileRaDec = SkyCoord(ra=photFile[:, 0]*degree, dec=photFile[:, 1]*degree)
            idx, d2d, d3d = varCoord.match_to_catalog_sky(fileRaDec)
            if less(d2d.arcsecond, acceptDistance):
                # Differential magnitude of the candidate against the summed counts of the comparison ensemble
                diffMag = -2.5 * log10(photFile[idx][4] / allCountsArray[allcountscount][0])
                if diffMag != inf:
                    diffMagHolder = append(diffMagHolder, diffMag)
            allcountscount = add(allcountscount, 1)

        ## REMOVE MAJOR OUTLIERS FROM CONSIDERATION
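        # Iteratively clip values more than 4 standard deviations from the mean, repeating
        # until a full pass makes no further rejections.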
        while True:
            stdVar = std(diffMagHolder)
            avgVar = average(diffMagHolder)
            starReject = []
            z = 0
            for j in range(asarray(diffMagHolder).shape[0]):
                if diffMagHolder[j] > avgVar + (4 * stdVar) or diffMagHolder[j] < avgVar - (4 * stdVar):
                    starReject.append(j)
                    logger.debug("REJECT {}".format(diffMagHolder[j]))
                    z = 1
            diffMagHolder = delete(diffMagHolder, starReject, axis=0)
            if z == 0:
                break

        diffmag = asarray(diffMagHolder)
        logger.debug("Standard Deviation in mag: {}".format(std(diffmag)))
        logger.debug("Median Magnitude: {}".format(median(diffmag)))
        logger.debug("Number of Observations: {}".format(diffmag.shape[0]))

        if diffmag.shape[0] > minimumNoOfObs:
            outputVariableHolder.append([target[0], target[1], median(diffmag), std(diffmag), diffmag.shape[0]])

    plot_variability(outputVariableHolder, parentPath)

    savetxt(parentPath / "starVariability.csv", outputVariableHolder, delimiter=",", fmt='%0.8f')

    return outputVariableHolder
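
# The 4-sigma rejection loop above can be written as a small reusable helper. A compact,
# illustrative sketch (not astrosource code); it assumes a 1-D array of differential magnitudes:
import numpy as np

def iterative_sigma_clip(values, nsigma=4.0):
    """Repeatedly drop points further than nsigma standard deviations from the mean."""
    values = np.asarray(values, dtype=float)
    while values.size:
        mean, std = values.mean(), values.std()
        keep = np.abs(values - mean) <= nsigma * std
        if keep.all():
            break
        values = values[keep]
    return values

# e.g. iterative_sigma_clip(np.concatenate([np.zeros(20), [5.0]])) removes the 5.0 outlier.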
Example #18
0
def output_files(paths, photometrydata, mode='diff'):
    if mode == 'calib' and not (paths['parent'] /
                                'calibCompsUsed.csv').exists():
        raise AstrosourceException("No calibrated photometry available")

    for j, outputPhot in enumerate(photometrydata):
        r = j + 1
        logger.info("Outputting files Variable " + str(r))

        if mode == 'calib':
            calibIndex = asarray(outputPhot).shape[1] - 1
            magColumn = outputPhot[:, calibIndex - 1]
            magerrColumn = outputPhot[:, calibIndex]
        else:
            magColumn = outputPhot[:, 10]
            magerrColumn = outputPhot[:, 11]

        outputPeransoCalib = [x for x in zip(outputPhot[:, 6], magColumn, magerrColumn)]

        savetxt(paths['outcatPath'] / f'V{r}_{mode}Peranso.txt',
                outputPeransoCalib,
                delimiter=" ",
                fmt='%0.8f')
        savetxt(paths['outcatPath'] / f'V{r}_{mode}Excel.csv',
                outputPeransoCalib,
                delimiter=",",
                fmt='%0.8f')

        # Output for EXOTIC modelling
        outputEXOTICCalib = [x for x in zip(outputPhot[:, 6], magColumn, magerrColumn, outputPhot[:, 7])]

        outputEXOTICCalib = asarray(outputEXOTICCalib)
        exoMedian = median(outputEXOTICCalib[:, 1])
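        # EXOTIC expects normalised flux rather than magnitudes: the loop below converts each
        # differential magnitude into an (approximately) median-normalised flux, and divides the
        # magnitude error by 1.0857 (about 2.5 / ln 10) to turn it into a fractional flux error.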

        for q in range(outputEXOTICCalib.shape[0]):
            outputEXOTICCalib[q][1] = (1 - pow(10, ((outputEXOTICCalib[q][1] - exoMedian) / 2.5))) + 1
            outputEXOTICCalib[q][2] = (outputEXOTICCalib[q][2] / 1.0857) * outputEXOTICCalib[q][1]

        outputEXOTICCalib = outputEXOTICCalib[outputEXOTICCalib[:, 0].argsort()]

        savetxt(paths['outcatPath'] / f'V{r}_{mode}EXOTIC.csv',
                outputEXOTICCalib,
                delimiter=",",
                fmt='%0.8f')

        # Output Differential astroImageJ file
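        # The 2450000.0 offset gives a truncated Julian Date, keeping the time column compact
        # for AstroImageJ.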
        outputaijCalib = [x for x in zip(outputPhot[:, 6] - 2450000.0, magColumn, magerrColumn)]

        savetxt(paths['outcatPath'] / f'V{r}_{mode}AIJ.txt',
                outputaijCalib,
                delimiter=" ",
                fmt='%0.8f')
        savetxt(paths['outcatPath'] / f'V{r}_{mode}AIJ.csv',
                outputaijCalib,
                delimiter=",",
                fmt='%0.8f')
    return