Example #1
def get_destination(filename, target):
    if not filename:
        logging.error("No filename or cache key setted for filename %s" % filename)
        return None

    if not SessionHelper.is_file_uploaded(filename):
        if not DsCache.contains(filename):
            if not FileUtils.file_exist(target, filename):
                logging.error("Filename not uploaded or not found in cache for filename %s" % filename)
                return None

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        if not DsCache.contains(filename):
            logging.error("Invalid file or not found in cache filename %s" % filename)
            return None
        else:
            destination = filename # Filename represents only a joined dataset key, not a real file

    return destination
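
A minimal usage sketch for the function above (the "events.fits" filename and "/tmp/uploads" target directory are hypothetical, and SessionHelper, DsCache and FileUtils are assumed to be the surrounding project's own helpers):

import logging

destination = get_destination("events.fits", "/tmp/uploads")  # hypothetical arguments
if destination is None:
    logging.error("Could not resolve a destination for events.fits")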
Example #2
def get_destination(filename, target):
    if not filename:
        logging.error("No filename or cache key setted for filename %s" %
                      filename)
        return None

    if not SessionHelper.is_file_uploaded(filename):
        if not DsCache.contains(filename):
            logging.error(
                "Filename not uploaded or not found in cache for filename %s" %
                filename)
            return None

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        if not DsCache.contains(filename):
            logging.error("Invalid file or not found in cache filename %s" %
                          filename)
            return None
        else:
            destination = filename  # Filename represents only a joined dataset key, not a real file

    return destination
Example #3
def get_file_dataset(destination, time_offset=0):

    dataset = None
    cache_key = ""

    try:

        if destination:

            cache_key = get_cache_key_for_destination(destination, time_offset)
            if DsCache.contains(cache_key):
                logging.debug("get_file_dataset: returned cached dataset, cache_key: " + str(cache_key))
                return DsCache.get(cache_key), cache_key

            logging.debug("get_file_dataset: reading destination: " + str(destination))
            filename = os.path.splitext(destination)[0]
            file_extension_from_file = os.path.splitext(destination)[1]
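            # Note: magic.from_file returns a textual file-type description
            # (e.g. "ASCII text"), not a filename extension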
            file_extension = magic.from_file(destination)
            logging.debug("File extension: %s" % file_extension)

            if file_extension.find("ASCII") == 0:

                table_id = "EVENTS"
                header_names = [CONFIG.TIME_COLUMN, "PHA", "Color1", "Color2"]
                dataset = get_txt_dataset(destination, table_id, header_names)

                table = dataset.tables[table_id]
                table.add_columns(["AMPLITUDE"])
                numValues = len(table.columns[CONFIG.TIME_COLUMN].values)
                random_values = np.random.uniform(-1, 1, size=numValues)
                table.columns["AMPLITUDE"].values = random_values

            elif file_extension.find("FITS") == 0 \
                 or file_extension.find("gzip") > -1:

                # Opening Fits
                hdulist = fits.open(destination, memmap=True)

                if get_hdu_string_from_hdulist(CONFIG.EVENTS_STRING, hdulist) != "":
                    # If EVENTS extension found, consider the Fits as EVENTS Fits
                    dataset = get_events_fits_dataset_with_stingray(destination, hdulist, dsId='FITS',
                                                       hduname=get_hdu_string_from_hdulist(CONFIG.EVENTS_STRING, hdulist),
                                                       column=CONFIG.TIME_COLUMN, gtistring=CONFIG.GTI_STRING,
                                                       extra_colums=['PI', "PHA"], time_offset=time_offset)

                elif 'RATE' in hdulist:
                    # If RATE extension found, consider the Fits as LIGHTCURVE Fits
                    dataset = get_lightcurve_fits_dataset_with_stingray(destination, hdulist, hduname='RATE',
                                                                column=CONFIG.TIME_COLUMN, gtistring=CONFIG.GTI_STRING, time_offset=time_offset)

                elif 'EBOUNDS' in hdulist:
                    # If EBOUNDS extension found, consider the Fits as RMF Fits
                    dataset = get_fits_dataset(hdulist, "RMF", ["EBOUNDS"])

                elif get_hdu_string_from_hdulist(CONFIG.GTI_STRING, hdulist) != "":
                    # If neither EVENTS nor RATE extension is found, check whether it is a GTI Fits
                    dataset = get_gti_fits_dataset_with_stingray(hdulist,gtistring=CONFIG.GTI_STRING, time_offset=time_offset)

                else:
                    logging.warn("Unsupported FITS type! Any table found: " + CONFIG.EVENTS_STRING + ", RATE, EBOUNDS or " + CONFIG.GTI_STRING)

            elif file_extension == "data" and (file_extension_from_file in [".p", ".nc"]):

                # If the file is a pickle object, try to parse it as a dataset
                dataset = load_dataset_from_intermediate_file(destination)

            else:
                logging.warn("Unknown file extension: " + str(file_extension) + " , " + str(file_extension_from_file))

            if dataset:
                DsCache.add(cache_key, dataset)
                logging.debug("get_file_dataset, dataset added to cache, cache_key: " + str(cache_key))

        else:
            logging.error("get_file_dataset: Destination is empty")

    except Exception:
        logging.error(ExHelper.getException('get_file_dataset'))

    return dataset, cache_key
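
A minimal usage sketch for the function above (the FITS path is hypothetical; DsCache and the various get_*_dataset helpers are assumed to come from the surrounding project):

dataset, cache_key = get_file_dataset("/tmp/uploads/obs_events.fits", time_offset=0)  # hypothetical path
if dataset is not None:
    print("Loaded dataset, cached under key: %s" % cache_key)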
Example #4
def get_file_dataset(destination, time_offset=0):

    dataset = None
    cache_key = ""

    try:

        if destination:

            cache_key = get_cache_key_for_destination(destination, time_offset)
            if DsCache.contains(cache_key):
                logging.debug(
                    "get_file_dataset: returned cached dataset, cache_key: " +
                    str(cache_key))
                return DsCache.get(cache_key), cache_key

            logging.debug("get_file_dataset: reading destination: " +
                          str(destination))
            filename = os.path.splitext(destination)[0]
            file_extension_from_file = os.path.splitext(destination)[1]
            file_extension = magic.from_file(destination)
            logging.debug("File extension: %s" % file_extension)

            if file_extension.find("ASCII") == 0:

                table_id = "EVENTS"
                header_names = [CONFIG.TIME_COLUMN, "PHA", "Color1", "Color2"]
                dataset = get_txt_dataset(destination, table_id, header_names)

                table = dataset.tables[table_id]
                table.add_columns(["AMPLITUDE"])
                numValues = len(table.columns[CONFIG.TIME_COLUMN].values)
                random_values = np.random.uniform(-1, 1, size=numValues)
                table.columns["AMPLITUDE"].values = random_values

            elif file_extension.find("FITS") == 0 \
                 or file_extension.find("gzip") > -1:

                # Opening Fits
                hdulist = fits.open(destination, memmap=True)

                if get_hdu_string_from_hdulist(CONFIG.EVENTS_STRING,
                                               hdulist) != "":
                    # If EVENTS extension found, consider the Fits as EVENTS Fits
                    dataset = get_events_fits_dataset_with_stingray(
                        destination,
                        hdulist,
                        dsId='FITS',
                        hduname=get_hdu_string_from_hdulist(
                            CONFIG.EVENTS_STRING, hdulist),
                        column=CONFIG.TIME_COLUMN,
                        gtistring=CONFIG.GTI_STRING,
                        extra_colums=['PI', "PHA"],
                        time_offset=time_offset)

                elif 'RATE' in hdulist:
                    # If RATE extension found, consider the Fits as LIGHTCURVE Fits
                    dataset = get_lightcurve_fits_dataset_with_stingray(
                        destination,
                        hdulist,
                        hduname='RATE',
                        column=CONFIG.TIME_COLUMN,
                        gtistring=CONFIG.GTI_STRING,
                        time_offset=time_offset)

                elif 'EBOUNDS' in hdulist:
                    # If EBOUNDS extension found, consider the Fits as RMF Fits
                    dataset = get_fits_dataset(hdulist, "RMF", ["EBOUNDS"])

                elif get_hdu_string_from_hdulist(CONFIG.GTI_STRING,
                                                 hdulist) != "":
                    # If neither EVENTS nor RATE extension is found, check whether it is a GTI Fits
                    dataset = get_gti_fits_dataset_with_stingray(
                        hdulist,
                        gtistring=CONFIG.GTI_STRING,
                        time_offset=time_offset)

                else:
                    logging.warn("Unsupported FITS type! Any table found: " +
                                 CONFIG.EVENTS_STRING + ", RATE, EBOUNDS or " +
                                 CONFIG.GTI_STRING)

            elif file_extension == "data" and (file_extension_from_file
                                               in [".p", ".nc"]):

                # If the file is a pickle object, try to parse it as a dataset
                dataset = load_dataset_from_intermediate_file(destination)

            else:
                logging.warn("Unknown file extension: " + str(file_extension) +
                             " , " + str(file_extension_from_file))

            if dataset:
                DsCache.add(cache_key, dataset)
                logging.debug(
                    "get_file_dataset, dataset added to cache, cache_key: " +
                    str(cache_key))

        else:
            logging.error("get_file_dataset: Destination is empty")

    except Exception:
        logging.error(ExHelper.getException('get_file_dataset'))

    return dataset, cache_key
Example #5
def get_file_dataset(destination):

    if not destination:
        return None

    if DsCache.contains(destination):
        logging.debug("Returned cached dataset")
        return DsCache.get(destination)

    filename = os.path.splitext(destination)[0]
    file_extension = magic.from_file(destination)
    logging.debug("File extension: %s" % file_extension)

    if file_extension.find("ASCII") == 0:

        table_id = "txt_table"
        header_names = ["Time", "Rate", "color1", "color2"]
        dataset = get_txt_dataset(destination, table_id, header_names)

        table = dataset.tables[table_id]
        table.add_columns(["Amplitude"])
        numValues = len(table.columns["Time"].values)
        random_values = np.random.uniform(-1, 1, size=numValues)
        table.columns["Amplitude"].values = random_values

        DsCache.add(destination, dataset)
        return dataset

    elif file_extension.find("FITS") == 0:

        # ds_id = "fits_table"
        # table_ids = ["Primary", "EVENTS", "GTI"]
        # dataset = get_fits_dataset(destination, ds_id, table_ids)
        # return dataset

        # Opening Fits
        hdulist = fits.open(destination)
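        # Note: gtistring is assumed to be defined at module scope
        # (the GTI extension name passed on to the stingray-based helpers)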

        if 'EVENTS' in hdulist:
            # If EVENTS extension found, consider the Fits as EVENTS Fits
            dataset = get_events_fits_dataset_with_stingray(
                destination,
                hdulist,
                dsId='FITS',
                hduname='EVENTS',
                column='TIME',
                gtistring=gtistring)

        elif 'RATE' in hdulist:
            # If RATE extension found, consider the Fits as LIGHTCURVE Fits
            dataset = get_lightcurve_fits_dataset_with_stingray(
                destination,
                hdulist,
                hduname='RATE',
                column='TIME',
                gtistring=gtistring)

        else:
            # If neither EVENTS nor RATE extension is found, consider the Fits as a GTI Fits
            dataset = get_gti_fits_dataset_with_stingray(hdulist,
                                                         gtistring=gtistring)

        if dataset:
            DsCache.add(destination, dataset)

        return dataset

    else:
        return None
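
A minimal usage sketch for this simpler variant (the path is hypothetical; unlike the previous examples it returns the dataset alone rather than a (dataset, cache_key) tuple):

dataset = get_file_dataset("/tmp/uploads/lightcurve.fits")  # hypothetical path
if dataset is None:
    print("File could not be parsed into a dataset")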