Example #1
def default(self, obj):
    # Clamp numbers to +/-CONFIG.BIG_NUMBER so they stay JSON-safe, and
    # convert numpy scalars and arrays to plain Python types.
    try:
        if isinstance(obj, int):
            if obj > CONFIG.BIG_NUMBER:
                return CONFIG.BIG_NUMBER
            if obj < -CONFIG.BIG_NUMBER:
                return -CONFIG.BIG_NUMBER
            return int(obj)
        elif isinstance(obj, float):
            if obj > CONFIG.BIG_NUMBER:
                return CONFIG.BIG_NUMBER
            if obj < -CONFIG.BIG_NUMBER:
                return -CONFIG.BIG_NUMBER
            return float(obj)
        elif isinstance(obj, numpy.integer):
            if obj > CONFIG.BIG_NUMBER:
                return CONFIG.BIG_NUMBER
            if obj < -CONFIG.BIG_NUMBER:
                return -CONFIG.BIG_NUMBER
            return int(obj)
        elif isinstance(obj, numpy.floating):
            if obj > CONFIG.BIG_NUMBER:
                return CONFIG.BIG_NUMBER
            if obj < -CONFIG.BIG_NUMBER:
                return -CONFIG.BIG_NUMBER
            return float(obj)
        elif isinstance(obj, complex):
            # Complex values are reduced to their real part
            return self.default(numpy.real(obj))
        elif isinstance(obj, numpy.ndarray):
            return obj.tolist()
        else:
            return super(NPEncoder, self).default(obj)
    except:
        logging.error(ExHelper.getException('NPEncoder'))
        return None
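For context, default() is the hook json.JSONEncoder calls for objects it cannot serialize natively, so plain ints and floats normally never reach it. Below is a condensed, self-contained sketch of the same idea; the CONFIG stand-in and the clamping bound are assumptions, not the project's actual values.

import json
import numpy

class CONFIG:                     # hypothetical stand-in for the project's CONFIG module
    BIG_NUMBER = 9e15             # assumed JSON-safe bound

class NPEncoder(json.JSONEncoder):
    def default(self, obj):
        # Called only for types json does not handle natively (numpy scalars, arrays, ...)
        if isinstance(obj, numpy.integer):
            return int(min(max(obj, -CONFIG.BIG_NUMBER), CONFIG.BIG_NUMBER))
        if isinstance(obj, numpy.floating):
            return float(min(max(obj, -CONFIG.BIG_NUMBER), CONFIG.BIG_NUMBER))
        if isinstance(obj, numpy.ndarray):
            return obj.tolist()
        return super(NPEncoder, self).default(obj)

payload = {"counts": numpy.arange(3), "pha": numpy.int64(42)}
print(json.dumps(payload, cls=NPEncoder))   # {"counts": [0, 1, 2], "pha": 42}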
Example #2
def saveImage(imageData, ra, dec, scale, fileName):

    try:
        # Initialize WCS information, http://docs.astropy.org/en/stable/wcs/
        wcs = WCS(naxis=2)

        # Use the center of the image as projection center
        wcs.wcs.crpix = [
            imageData.shape[1] / 2. + 0.5, imageData.shape[0] / 2. + 0.5
        ]

        # Set the coordinates of the image center
        wcs.wcs.crval = [ra, dec]  # Aladin RA goes from 0 to 360 (for J2000)

        # Set the pixel scale (in deg/pix)
        wcs.wcs.cdelt = [scale, scale]

        # Set the coordinate system
        wcs.wcs.ctype = ['RA---CAR',
                         'DEC--CAR']  # ['GLON-CAR', 'GLAT-CAR'] # ra, dec

        # And produce a FITS header
        header = wcs.to_header()

        # Avoid transparent color in Aladin HipsGen
        if consts.GEN_ALADIN_READY_FITS:
            imageData[imageData < 1] = 1

        # Write the image data together with the WCS header
        # (overwrite replaces the deprecated clobber keyword in astropy)
        fits.writeto(fileName, imageData, header=header, overwrite=True)

        print('Saved: ' + fileName)

    except:
        print(ExHelper.getException('saveImage'))
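For a self-contained version of the same WCS-plus-writeto flow, the sketch below uses assumed test coordinates and pixel scale; the imports are the astropy modules the snippet above relies on.

import numpy as np
from astropy.io import fits
from astropy.wcs import WCS

# Build a 2-axis celestial WCS in the plate carree (CAR) projection, as above
image = np.random.poisson(10, size=(100, 100)).astype(float)
wcs = WCS(naxis=2)
wcs.wcs.crpix = [image.shape[1] / 2. + 0.5, image.shape[0] / 2. + 0.5]
wcs.wcs.crval = [83.63, 22.01]        # assumed RA/Dec of the image center (deg)
wcs.wcs.cdelt = [1.0 / 3600.0] * 2    # assumed 1 arcsec per pixel
wcs.wcs.ctype = ['RA---CAR', 'DEC--CAR']

# overwrite=True is the current astropy spelling of the old clobber=True flag
fits.writeto('test.fits', image, header=wcs.to_header(), overwrite=True)
print('Saved: test.fits')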
Example #3
def get(key):
    try:
        if contains(key):
            return cached_datasets[key]
    except:
        logging.error(ExHelper.getException('dataset_cache.get'))

    return None
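This and the other dataset_cache snippets below (remove, get_key, add, contains) all appear to operate on a single module-level dict. The following is a self-contained sketch of that pattern; the cached_datasets dict and the simplified bodies are assumptions inferred from the snippets.

import logging

cached_datasets = {}            # assumed module-level store shared by the helpers

def contains(key):
    return key in cached_datasets

def add(key, dataset):
    cached_datasets[key] = dataset

def get(key):
    try:
        if contains(key):
            return cached_datasets[key]
    except Exception:
        logging.error("dataset_cache.get failed")
    return None

add("ds1", {"table": "EVENTS"})
print(get("ds1"))   # {'table': 'EVENTS'}
print(get("ds2"))   # None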
Example #4
def remove(key):
    try:
        if contains(key):
            del cached_datasets[key]
            return True
    except:
        logging.error(ExHelper.getException('dataset_cache.remove'))

    return False
Example #5
def remove_with_prefix(key_prefix):
    try:
        # Collect matching keys first so the dict is not mutated while iterating
        remove_keys = []
        for key in cached_datasets.keys():
            if key.startswith(key_prefix):
                remove_keys.append(key)
        for key in remove_keys:
            remove(key)
    except:
        logging.error(ExHelper.getException('dataset_cache.remove_with_prefix'))
Example #6
File: dave_bulk.py  Project: swapsha96/dave
def get_intermediate_file(filepath, target):
    try:
        stingray_object = DaveReader.get_stingray_object(filepath)
        if stingray_object:
            filename = FileUtils.get_intermediate_filename(
                target, filepath, HEN_FILE_EXTENSION)
            if DaveReader.save_to_intermediate_file(stingray_object, filename):
                return filename
    except:
        logging.error(ExHelper.getException('get_intermediate_file'))

    return None
Example #7
def get_key(value, strict=False):
    try:
        m = hashlib.md5()
        if strict:
            m.update(str(value).encode('utf-8'))
        else:
            # Salt with a random int so repeated calls yield different keys
            m.update((str(value) + str(randint(0, 99999))).encode('utf-8'))
        ugly_key = str(m.digest())
        return "".join(e for e in ugly_key if e.isalnum())
    except:
        logging.error(ExHelper.getException('dataset_cache.get_key'))

    return ""
Example #8
def get_destination(target, filename):
    try:
        if CONFIG.IS_LOCAL_SERVER:
            if filename.startswith('/') and os.path.isfile(filename):
                # This is supposed to be an absolute path
                return filename
            else:
                # Relative path
                return "/".join([target, filename])
        else:
            return "/".join([target, secure_filename(filename)])
    except:
        logging.error(ExHelper.getException('get_destination'))
        return ""
Example #9
def add(key, dataset):
    try:
        cached_datasets[key] = dataset
    except:
        logging.error(ExHelper.getException('dataset_cache.add'))
Example #10
def get_file_dataset(destination, time_offset=0):

    dataset = None
    cache_key = ""

    try:

        if destination:

            cache_key = get_cache_key_for_destination(destination, time_offset)
            if DsCache.contains(cache_key):
                logging.debug(
                    "get_file_dataset: returned cached dataset, cache_key: " +
                    str(cache_key))
                return DsCache.get(cache_key), cache_key

            logging.debug("get_file_dataset: reading destination: " +
                          str(destination))
            filename = os.path.splitext(destination)[0]
            file_extension_from_file = os.path.splitext(destination)[1]
            file_extension = magic.from_file(destination)
            logging.debug("File extension: %s" % file_extension)

            if file_extension.find("ASCII") == 0:

                table_id = "EVENTS"
                header_names = [CONFIG.TIME_COLUMN, "PHA", "Color1", "Color2"]
                dataset = get_txt_dataset(destination, table_id, header_names)

                table = dataset.tables[table_id]
                table.add_columns(["AMPLITUDE"])
                numValues = len(table.columns[CONFIG.TIME_COLUMN].values)
                random_values = np.random.uniform(-1, 1, size=numValues)
                table.columns["AMPLITUDE"].values = random_values

            elif file_extension.find("FITS") == 0 \
                 or file_extension.find("gzip") > -1:

                # Opening Fits
                hdulist = fits.open(destination, memmap=True)

                if get_hdu_string_from_hdulist(CONFIG.EVENTS_STRING,
                                               hdulist) != "":
                    # If EVENTS extension found, consider the Fits as EVENTS Fits
                    dataset = get_events_fits_dataset_with_stingray(
                        destination,
                        hdulist,
                        dsId='FITS',
                        hduname=get_hdu_string_from_hdulist(
                            CONFIG.EVENTS_STRING, hdulist),
                        column=CONFIG.TIME_COLUMN,
                        gtistring=CONFIG.GTI_STRING,
                        extra_colums=['PI', "PHA"],
                        time_offset=time_offset)

                elif 'RATE' in hdulist:
                    # If RATE extension found, consider the Fits as LIGHTCURVE Fits
                    dataset = get_lightcurve_fits_dataset_with_stingray(
                        destination,
                        hdulist,
                        hduname='RATE',
                        column=CONFIG.TIME_COLUMN,
                        gtistring=CONFIG.GTI_STRING,
                        time_offset=time_offset)

                elif 'EBOUNDS' in hdulist:
                    # If EBOUNDS extension found, consider the Fits as RMF Fits
                    dataset = get_fits_dataset(hdulist, "RMF", ["EBOUNDS"])

                elif get_hdu_string_from_hdulist(CONFIG.GTI_STRING,
                                                 hdulist) != "":
                    # If not EVENTS or RATE extension found, check if is GTI Fits
                    dataset = get_gti_fits_dataset_with_stingray(
                        hdulist,
                        gtistring=CONFIG.GTI_STRING,
                        time_offset=time_offset)

                else:
                    logging.warn("Unsupported FITS type! Any table found: " +
                                 CONFIG.EVENTS_STRING + ", RATE, EBOUNDS or " +
                                 CONFIG.GTI_STRING)

            elif file_extension == "data" and (file_extension_from_file
                                               in [".p", ".nc"]):

                # If file is pickle object, tries to parse it as dataset
                dataset = load_dataset_from_intermediate_file(destination)

            else:
                logging.warn("Unknown file extension: " + str(file_extension) +
                             " , " + str(file_extension_from_file))

            if dataset:
                DsCache.add(cache_key, dataset)
                logging.debug(
                    "get_file_dataset, dataset added to cache, cache_key: " +
                    str(cache_key))

        else:
            logging.error("get_file_dataset: Destination is empty")

    except:
        logging.error(ExHelper.getException('get_file_dataset'))

    return dataset, cache_key
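The branching above keys off the description string returned by python-magic (magic.from_file) rather than the filename extension. A small self-contained sketch of that detection step:

import magic

# Write a tiny ASCII file and let libmagic describe it
with open("sample.txt", "w") as f:
    f.write("TIME PHA\n0.1 12\n")

description = magic.from_file("sample.txt")   # e.g. "ASCII text"
if description.find("ASCII") == 0:
    kind = "plain-text table"
elif description.find("FITS") == 0 or description.find("gzip") > -1:
    kind = "FITS (possibly gzip-compressed)"
else:
    kind = "unsupported"
print(description, "->", kind)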
Example #11
def contains(key):
    try:
        return key in cached_datasets
    except:
        logging.error(ExHelper.getException('dataset_cache.contains'))
        return False
Example #12
File: dave_bulk.py  Project: swapsha96/dave
def bulk_analisys(filenames, plot_configs, outdir):
    try:

        results = dict()
        results["outdir"] = outdir
        results["plot_configs"] = []

        # For each plot config, run a bulk analysis
        for plot_config in plot_configs:

            plot_config_outdir = "/".join([outdir, plot_config["id"]])

            dt = plot_config["dt"]
            filters = FltHelper.get_filters_clean_color_filters(
                plot_config["filters"])
            filters = FltHelper.apply_bin_size_to_filters(filters, dt)

            if "class" in plot_config:
                if plot_config["class"] == "LcPlot":

                    args = ['--outdir', plot_config_outdir]
                    args.extend(['--bintime', str(dt)])

                    args = add_filter_to_args(args, filters,
                                              CONFIG.TIME_COLUMN,
                                              '--safe-interval')
                    args = add_filter_to_args(args, filters, "PI",
                                              '--pi-interval')
                    args = add_filter_to_args(args, filters, "E",
                                              '--e-interval')
                    #args = add_filter_to_args(args, filters, "PHA", '--pha-interval')
                    #args = add_filter_to_args(args, filters, "RATE", '--rate-interval')

                    args.extend(filenames)

                    logging.debug("Calling MPlcurve, args: " + str(args))

                    MPlcurve(args)

                    push_plotconfig_results(results["plot_configs"],
                                            plot_config["id"],
                                            plot_config_outdir)

                elif plot_config["class"] == "PDSPlot":

                    logging.error(
                        "PDSPlot not supported yet, still work in progress!!")
                    ''' TOO MANY OPEN QUESTIONS TO IMPLEMENT PDS IN BULK ANALYSIS
                    args = ['--outdir', plot_config_outdir]

                    args.extend(['--kind', "PDS"])
                    args.extend(['--bintime', str(dt)])

                    # Normalization: MaltPyn supports Leahy or rms, now DAVE sends leahy, frac, abs or none
                    args.extend(['--norm', str(plot_config["norm"])])

                    # Rebin: is this
                    args.extend(['--rebin', str(plot_config["norm"])])

                    args.extend(filenames)

                    MPfspec(args)

                    push_plotconfig_results(results["plot_configs"], plot_config["id"], plot_config_outdir)
                    '''

                else:
                    logging.error("PlotConfig.class not supported!!")
            else:
                logging.error("PlotConfig has no class key!!")

        return results

    except:
        logging.error(ExHelper.getException('bulk_analisys'))
        return None
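add_filter_to_args is not shown in these examples; the sketch below is a hypothetical reading of the pattern it suggests, turning a per-column filter into a pair of command-line interval arguments. The filter dict shape is an assumption.

def add_filter_to_args(args, filters, column, option):
    # Append "option from to" for the first filter that targets the given column
    for flt in filters:
        if flt.get("column") == column:
            args.extend([option, str(flt["from"]), str(flt["to"])])
            break
    return args

args = ['--outdir', 'out/lc1', '--bintime', '0.1']
args = add_filter_to_args(args, [{"column": "PI", "from": 35, "to": 1909}],
                          "PI", '--pi-interval')
print(args)   # ['--outdir', 'out/lc1', '--bintime', '0.1', '--pi-interval', '35', '1909']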