Example 1
def get_lightcurve_ds_from_events_ds(destination, axis, dt):

    try:

        if len(axis) != 2:
            logging.warn("Wrong number of axis")
            return ""

        dataset = DaveReader.get_file_dataset(destination)
        lc = get_lightcurve_from_dataset(dataset, axis, "", [], "", dt)

        if lc:
            # Convert the lightcurve to the stingray_addons format
            tmp_lc = {}
            tmp_lc['lc'] = lc.countrate
            tmp_lc['elc'] = []  # TODO: Get error from lightcurve
            tmp_lc['time'] = lc.time
            tmp_lc['GTI'] = lc.gti

            lc_dataset = DataSet.get_lightcurve_dataset_from_stingray_lcurve(tmp_lc,
                                                                             dataset.tables["EVENTS"].header,
                                                                             dataset.tables["EVENTS"].header_comments,
                                                                             "RATE", "TIME")
            dataset = None  # Dispose memory
            lc = None  # Dispose memory

            new_cache_key = DsCache.get_key(destination + "|lightcurve")
            DsCache.add(new_cache_key, lc_dataset)  # Cache the new lightcurve dataset under the new key
            return new_cache_key

    except Exception:
        logging.error(str(sys.exc_info()))

    return ""
Example 2
def append_file_to_dataset(destination, next_destination):
    dataset = DaveReader.get_file_dataset(destination)
    if DsHelper.is_events_dataset(dataset):
        next_dataset = DaveReader.get_file_dataset(next_destination)
        if DsHelper.is_events_dataset(next_dataset):
            # Check which dataset starts earliest
            ds_start_time = DsHelper.get_events_dataset_start(dataset)
            next_ds_start_time = DsHelper.get_events_dataset_start(next_dataset)

            if next_ds_start_time < ds_start_time:
                # Swap so that `dataset` is always the earlier one
                dataset, next_dataset = next_dataset, dataset

            # Join the EVENTS and GTI tables and cache the joined dataset
            dataset.tables["EVENTS"] = dataset.tables["EVENTS"].join(next_dataset.tables["EVENTS"])
            dataset.tables["GTI"] = DsHelper.join_gti_tables(dataset.tables["GTI"], next_dataset.tables["GTI"])

            DsCache.remove(destination)  # Remove the dataset cached under the previous key
            new_cache_key = DsCache.get_key(destination + "|" + next_destination)
            DsCache.add(new_cache_key, dataset)  # Cache the joined dataset under the new key
            return new_cache_key
            return new_cache_key

    return ""
Example 3
def get_divided_lightcurve_ds(lc0_destination, lc1_destination):

    try:

        lc0_ds = DaveReader.get_file_dataset(lc0_destination)
        if not DsHelper.is_lightcurve_dataset(lc0_ds):
            logging.warn("Wrong dataset type for lc0")
            return ""

        count_rate_0 = np.array(lc0_ds.tables["RATE"].columns["RATE"].values)

        lc1_ds = DaveReader.get_file_dataset(lc1_destination)
        if not DsHelper.is_lightcurve_dataset(lc1_ds):
            logging.warn("Wrong dataset type for lc1")
            return ""

        count_rate_1 = np.array(lc1_ds.tables["RATE"].columns["RATE"].values)

        if count_rate_0.shape == count_rate_1.shape:

            ret_lc_ds = lc0_ds.clone(True)

            with np.errstate(all='ignore'):  # Ignore division-by-zero and other floating-point warnings
                count_rate = np.nan_to_num(count_rate_0 / count_rate_1)
            count_rate[count_rate > BIG_NUMBER] = 0

            ret_lc_ds.tables["RATE"].columns["RATE"].clear()
            ret_lc_ds.tables["RATE"].columns["RATE"].add_values(count_rate) # TODO: Set error from lightcurve

            lc0_ds = None  # Dispose memory
            lc1_ds = None  # Dispose memory
            count_rate_1 = None  # Dispose memory
            count_rate_0 = None  # Dispose memory
            count_rate = None  # Dispose memory

            new_cache_key = DsCache.get_key(lc0_destination + "|" + lc1_destination + "|lightcurve")
            DsCache.add(new_cache_key, ret_lc_ds)  # Cache the new dataset under the new key
            return new_cache_key

        else:
            logging.warning("Lightcurves have different shapes.")
            return ""

    except Exception:
        logging.error(str(sys.exc_info()))

    return ""
Example 4
def get_file_dataset(destination, time_offset=0):

    dataset = None
    cache_key = ""

    try:

        if destination:

            cache_key = get_cache_key_for_destination(destination, time_offset)
            if DsCache.contains(cache_key):
                logging.debug(
                    "get_file_dataset: returned cached dataset, cache_key: " +
                    str(cache_key))
                return DsCache.get(cache_key), cache_key

            logging.debug("get_file_dataset: reading destination: " +
                          str(destination))
            filename = os.path.splitext(destination)[0]
            file_extension_from_file = os.path.splitext(destination)[1]
            file_extension = magic.from_file(destination)
            logging.debug("File extension: %s" % file_extension)

            if file_extension.find("ASCII") == 0:

                table_id = "EVENTS"
                header_names = [CONFIG.TIME_COLUMN, "PHA", "Color1", "Color2"]
                dataset = get_txt_dataset(destination, table_id, header_names)

                table = dataset.tables[table_id]
                table.add_columns(["AMPLITUDE"])
                numValues = len(table.columns[CONFIG.TIME_COLUMN].values)
                random_values = np.random.uniform(-1, 1, size=numValues)
                table.columns["AMPLITUDE"].values = random_values

            elif file_extension.find("FITS") == 0 \
                 or file_extension.find("gzip") > -1:

                # Opening Fits
                hdulist = fits.open(destination, memmap=True)

                if get_hdu_string_from_hdulist(CONFIG.EVENTS_STRING,
                                               hdulist) != "":
                    # If an EVENTS extension is found, treat the FITS file as an events file
                    dataset = get_events_fits_dataset_with_stingray(
                        destination,
                        hdulist,
                        dsId='FITS',
                        hduname=get_hdu_string_from_hdulist(
                            CONFIG.EVENTS_STRING, hdulist),
                        column=CONFIG.TIME_COLUMN,
                        gtistring=CONFIG.GTI_STRING,
                        extra_colums=['PI', "PHA"],
                        time_offset=time_offset)

                elif 'RATE' in hdulist:
                    # If a RATE extension is found, treat the FITS file as a lightcurve
                    dataset = get_lightcurve_fits_dataset_with_stingray(
                        destination,
                        hdulist,
                        hduname='RATE',
                        column=CONFIG.TIME_COLUMN,
                        gtistring=CONFIG.GTI_STRING,
                        time_offset=time_offset)

                elif 'EBOUNDS' in hdulist:
                    # If an EBOUNDS extension is found, treat the FITS file as an RMF
                    dataset = get_fits_dataset(hdulist, "RMF", ["EBOUNDS"])

                elif get_hdu_string_from_hdulist(CONFIG.GTI_STRING,
                                                 hdulist) != "":
                    # If no EVENTS or RATE extension is found, check whether it is a GTI FITS file
                    dataset = get_gti_fits_dataset_with_stingray(
                        hdulist,
                        gtistring=CONFIG.GTI_STRING,
                        time_offset=time_offset)

                else:
                    logging.warning("Unsupported FITS type! None of the expected tables found: " +
                                    CONFIG.EVENTS_STRING + ", RATE, EBOUNDS or " +
                                    CONFIG.GTI_STRING)

            elif file_extension == "data" and (file_extension_from_file
                                               in [".p", ".nc"]):

                # If the file is a pickled intermediate file, try to parse it as a dataset
                dataset = load_dataset_from_intermediate_file(destination)

            else:
                logging.warning("Unknown file extension: " + str(file_extension) +
                                ", " + str(file_extension_from_file))

            if dataset:
                DsCache.add(cache_key, dataset)
                logging.debug(
                    "get_file_dataset, dataset added to cache, cache_key: " +
                    str(cache_key))

        else:
            logging.error("get_file_dataset: Destination is empty")

    except Exception:
        logging.error(ExHelper.getException('get_file_dataset'))

    return dataset, cache_key
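
A brief usage sketch for this variant, which returns the dataset together with its cache key (the path is hypothetical):

dataset, cache_key = get_file_dataset("/data/events.fits", time_offset=0)
if dataset is not None:
    events_table = dataset.tables["EVENTS"]  # Parsed EVENTS extension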
Example 5
def get_file_dataset(destination):

    if not destination:
        return None

    if DsCache.contains(destination):
        logging.debug("Returned cached dataset")
        return DsCache.get(destination)

    filename = os.path.splitext(destination)[0]
    file_extension = magic.from_file(destination)
    logging.debug("File extension: %s" % file_extension)

    if file_extension.find("ASCII") == 0:

        table_id = "txt_table"
        header_names = ["Time", "Rate", "color1", "color2"]
        dataset = get_txt_dataset(destination, table_id, header_names)

        table = dataset.tables[table_id]
        table.add_columns(["Amplitude"])
        numValues = len(table.columns["Time"].values)
        random_values = np.random.uniform(-1, 1, size=numValues)
        table.columns["Amplitude"].values = random_values

        DsCache.add(destination, dataset)
        return dataset

    elif file_extension.find("FITS") == 0:

        # ds_id = "fits_table"
        # table_ids = ["Primary", "EVENTS", "GTI"]
        # dataset = get_fits_dataset(destination, ds_id, table_ids)
        # return dataset

        # Opening Fits
        hdulist = fits.open(destination)

        if 'EVENTS' in hdulist:
            # If an EVENTS extension is found, treat the FITS file as an events file
            dataset = get_events_fits_dataset_with_stingray(
                destination,
                hdulist,
                dsId='FITS',
                hduname='EVENTS',
                column='TIME',
                gtistring=gtistring)  # gtistring is assumed to be a module-level default

        elif 'RATE' in hdulist:
            # If a RATE extension is found, treat the FITS file as a lightcurve
            dataset = get_lightcurve_fits_dataset_with_stingray(
                destination,
                hdulist,
                hduname='RATE',
                column='TIME',
                gtistring=gtistring)

        else:
            # If neither an EVENTS nor a RATE extension is found, treat the FITS file as a GTI file
            dataset = get_gti_fits_dataset_with_stingray(hdulist,
                                                         gtistring=gtistring)

        if dataset:
            DsCache.add(destination, dataset)

        return dataset

    else:
        return None
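
A usage sketch for this earlier, simpler variant, which returns the dataset directly instead of a (dataset, cache_key) tuple (the path is hypothetical):

dataset = get_file_dataset("/data/lightcurve.fits")
if dataset is not None:
    rate_table = dataset.tables["RATE"]  # Present when the FITS file has a RATE extension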