Example #1
0
def imageToDict(filepath):
    """
    Compute the analysis outputs for a single FITS image and return them
    as a flat dictionary.

    Parameters
    ----------
    filepath : str
        Path to the FITS file to analyze.

    Returns
    -------
    dict
        Image metadata (name, NDCMS, NAXIS1/2), derived noise / entropy /
        dark-current metrics, and selected header variables (-1 when a
        header keyword is missing).
    """

    header, data = readFits.read(filepath)

    # Averaged images ("Avg" in the filename) are histogrammed in reverse.
    reverseHistogram = 0 if "Avg" in filepath else 1
    image = DamicImage.DamicImage(data[:, :, -1], reverse=reverseHistogram)

    dictImage = {}

    ### Define name, skips, size

    dictImage['imgName'] = os.path.basename(filepath)
    dictImage['NDCMS'] = header['NDCMS']
    dictImage['NAXIS1'] = header['NAXIS1']
    dictImage['NAXIS2'] = header['NAXIS2']

    ### Define output variables

    # Find image noise over all skip frames (last frame is the average)
    dictImage['imgNoise'] = pd.computeImageNoise(data[:, :, :-1])

    # Find individual peak noise; skipper images (NDCMS > 1) need a less
    # aggressive moving average.
    nSmoothing = 4 if int(header['NDCMS']) > 1 else 12
    skImageNoise, skImageNoiseErr = pd.computeSkImageNoise(
        image, nMovingAverage=nSmoothing)
    dictImage['skNoise'] = float(skImageNoise)
    dictImage['skNoiseError'] = float(skImageNoiseErr)

    # Entropy of the average image (last frame)
    dictImage['aveImgS'] = pd.imageEntropy(data[:, :, -1])

    # Rate of entropy change as a function of skips
    entropySlope, entropySlopeErr, _ = pd.imageEntropySlope(data[:, :, :-1])
    dictImage['dSdskip'] = entropySlope
    dictImage['dSdskipError'] = entropySlopeErr

    # Dark current estimated from the averaged frame
    darkCurrent, darkCurrentErr = pd.computeDarkCurrent(
        image, nMovingAverage=nSmoothing)
    dictImage['DC'] = float(darkCurrent)
    dictImage['DCError'] = float(darkCurrentErr)

    # Pixel noise metrics from random resampling
    ntrials = 10000
    singlePixelVariance, _ = ps.singlePixelVariance(data[:, :, :-1],
                                                    ntrials=ntrials)
    imageNoiseVariance, _ = ps.imageNoiseVariance(data[:, :, :-1],
                                                  header['NDCMS'] -
                                                  c.SKIPPER_OFFSET,
                                                  ntrials=ntrials)

    # Variance of random single pixels
    dictImage['pixVar'] = singlePixelVariance

    # Variance of random clusters of pixels
    dictImage['clustVar'] = imageNoiseVariance

    # Tail ratio of the pixel distribution
    dictImage['tailRatio'] = pd.computeImageTailRatio(image)

    ### Define input variables

    headervars = [
        'EXP', 'AMPL', 'HCKDIRN', 'VCKDIRN', 'ITGTIME', 'VIDGAIN', 'PRETIME',
        'POSTIME', 'DGWIDTH', 'RGWIDTH', 'OGWIDTH', 'SWWIDTH', 'HWIDTH',
        'HOWIDTH', 'VWIDTH', 'VOWIDTH', 'ONEVCKHI', 'ONEVCKLO', 'TWOVCKHI',
        'TWOVCKLO', 'TGHI', 'TGLO', 'HUHI', 'HULO', 'HLHI', 'HLLO', 'RGHI',
        'RGLO', 'SWLO', 'DGHI', 'DGLO', 'OGHI', 'OGLO', 'BATTR', 'VDD1',
        'VDD2', 'DRAIN1', 'DRAIN2', 'VREF1', 'VREF2', 'OPG1', 'OPG2'
    ]

    for var in headervars:
        try:
            dictImage[var] = header[var]
        except KeyError:
            # Missing header keyword: record a sentinel instead of failing.
            # (Was a bare except, which also swallowed KeyboardInterrupt.)
            dictImage[var] = -1

    return dictImage
Example #2
0
def paramsToList(params):
    """
    Convert lmfit params to a plain list. Only for the poiss + gauss
    function.

    Returns the parameter values in the order:
    [sigma, lamb, offset, ADU, N, npoisson].
    """
    order = ("sigma", "lamb", "offset", "ADU", "N", "npoisson")
    return [params[key].value for key in order]


if __name__ == "__main__":

    # Demo: fit the poisson + gauss model to one averaged image and
    # prepare a plot of the histogram and the fit.
    filename = "../FS_Avg_Img_27.fits"
    # filename = "../Img_00.fits"

    header, data = readFits.read(filename)

    # Test dark current: histogram the averaged (last) frame
    damicimage = DamicImage.DamicImage(data[:, :, -1], reverse=False, minRange=500)
    plt.hist(damicimage.centers, bins=damicimage.edges, weights=damicimage.hpix) # Plot histogram of data


    # Perform poisson gaus fit to data
    minres = computeGausPoissDist(damicimage, )
    params = minres.params
    print(lmfit.fit_report(minres))
    print(parseFitMinimum(minres))

    # Plot fit results: dense x grid spanning the histogram range
    par = paramsToList(params)
    x = np.linspace(damicimage.centers[0], damicimage.centers[-1], 2000)
Example #3
0
            self.reverseHistogram()

    def reverseHistogram(self):
        """Mirror the histogram bin contents end-to-end.

        Reverses the order of the per-bin counts (self.hpix); the bin
        centers and edges are left untouched.
        """
        self.hpix = self.hpix[::-1]


if __name__ == "__main__":

    # Test to see if reversing image works
    imgname = "../Img_11.fits"

    header, data = readFits.read(imgname)

    # Build one forward and one reversed histogram from the same frame.
    # NOTE(review): the labels look swapped ("forward test" on the
    # reversed image) — confirm intended.
    normalImage = DamicImage(data[:, :, -1], False, "normalImage test")
    reverseImage = DamicImage(data[:, :, -1], True, "forward test")

    normalImage.histogramImage(minRange=80)
    reverseImage.histogramImage(minRange=80)
    reverseImage.reverseHistogram()

    # Side-by-side comparison of the two histograms
    fig, axs = plt.subplots(1, 2, figsize=(14, 8))
    axs[0].hist(normalImage.centers, weights=normalImage.hpix, bins=normalImage.edges)
    axs[1].hist(
        reverseImage.centers, weights=reverseImage.hpix, bins=reverseImage.edges
    )
    plt.show()
Example #4
0
def processImage(filename, headerString):
    """
    Run the full image-processing chain on a single FITS file and return
    an AnalysisOutput object.

    Parameters
    ----------
    filename : str
        Path to the FITS image to process.
    headerString : str
        Header description stored on the returned object.

    Returns
    -------
    AnalysisOutput
        Populated with entropy, noise, dark-current, ADU conversion and
        tail-ratio results.
    """

    # Read image cube
    header, data = readFits.read(filename)

    try:
        nskips = header["NDCMS"]
    except KeyError:
        nskips = 1

    # Averaged images ("Avg" in the filename) are histogrammed in reverse
    histogramReversed = 0 if "Avg" in filename else 1
    image = DamicImage.DamicImage(data[:, :, -1], reverse=histogramReversed)

    processedImage = AnalysisOutput(filename,
                                    nskips=nskips,
                                    header=headerString)

    # Entropy of the averaged (last) frame
    processedImage.aveImgS = pd.imageEntropy(data[:, :, -1])

    # Entropy slope across the skip frames
    entropySlope, entropySlopeErr, _ = pd.imageEntropySlope(data[:, :, :-1])
    processedImage.dSdskip = pd.convertValErrToString(
        (entropySlope, entropySlopeErr))

    # Attempt the combined poisson + gauss fit
    minresult = PoissonGausFit.computeGausPoissDist(image)

    if minresult.success:
        # Fit converged: take noise, dark current and ADU straight from it
        fitResults = PoissonGausFit.parseFitMinimum(minresult)

        processedImage.skNoise = pd.convertValErrToString(
            fitResults["sigma"])
        processedImage.darkCurrent = pd.convertValErrToString(
            fitResults["lambda"])
        processedImage.aduConversion = pd.convertValErrToString(
            fitResults["ADU"])

        # Tail ratio implicitly requires the pois + gaus fit to converge
        processedImage.tailRatio = pd.computeImageTailRatio(image)
    else:
        # Fit failed: fall back on individual estimates. Skipper images
        # need a less aggressive moving average.
        nSmoothing = 4 if nskips > 1000 else 8

        skImageNoise, skImageNoiseErr = pd.computeSkImageNoise(
            image, nMovingAverage=nSmoothing)
        processedImage.skNoise = pd.convertValErrToString(
            (skImageNoise, skImageNoiseErr))

        darkCurrent, darkCurrentErr = pd.computeDarkCurrent(
            image, nMovingAverage=nSmoothing)
        processedImage.darkCurrent = pd.convertValErrToString(
            (darkCurrent, darkCurrentErr))

    # Overall image noise (fit to the entire skip stack)
    processedImage.imgNoise = pd.computeImageNoise(data[:, :, :-1])

    return processedImage
Example #5
0
	# files = [ f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f)) ]

	# filepattern = re.compile(filename)
	# filesmatched = list(filter(filepattern.match, files))

	print(filename)


	if parameterScan == None:
		# datafile = os.path.join(directory, filesmatched[0])

		datafile = filename[0]


		# read data
		header, data = readFits.read(datafile)
		data = data[:,:3000,:]


		# plot overall spectrum
		fullImage = DamicImage.DamicImage(np.mean(data[:, :, skipOffset:-1], axis=-1), bw=imagebw, reverse=reverse)
		fig, ax, fit = plotPixelSpectrum(fullImage, reverse=reverse)


		# row dark current
		axrow = plotDarkCurrentRows(fullImage, plotall=True, mask=mask)

	else:
		legend = []
		darkcurrent = []
		paramscan = []
    rowIndex[:, 0] = np.random.uniform(0, nrows - pixelArrayMaxEdgeRow,
                                       ntrials).astype(int)
    colIndex[:, 0] = np.random.uniform(0, ncolumns - pixelArrayMaxEdgeCol,
                                       ntrials).astype(int)

    # Create pixel array to be as much of a square as possible
    for i in range(npixels):
        rowIndex[:, i] = rowIndex[:, 0] + i % pixelArrayMaxEdgeRow
        colIndex[:, i] = colIndex[:, 0] + i // pixelArrayMaxEdgeRow

    return rowIndex, colIndex


if __name__ == "__main__":

    header, data = readFits.read("../Img_20.fits")

    median, dist = singlePixelVariance(data[:, :, :-1], ntrials=100000)

    print("Individual Pixel Variance Median: %0.2f" % median)
    fig, ax = plt.subplots(1, 1, figsize=(12, 8))
    med = np.median(dist)
    mad = scipy.stats.median_absolute_deviation(dist)
    bins = np.linspace(med - 3 * mad, med + 5 * mad, 200)
    ax.hist(dist, bins=bins)
    ax.set_title("Single Pixel Noise", fontsize=18)
    ax.set_xlabel("Variance of Single Pixels", fontsize=16)
    # plt.show()

    pixMed, pixDist = imageNoiseVariance(data[:, :, :-1],
                                         header["NDCMS"] - c.SKIPPER_OFFSET,