Example #1
def apply_background_to_lc(lc, bck_destination, filters, axis, gti_destination, dt):
    filtered_bck_ds = get_filtered_dataset(bck_destination, filters, gti_destination)
    if DsHelper.is_events_dataset(filtered_bck_ds):

        logging.debug("Create background lightcurve ....")
        bck_eventlist = DsHelper.get_eventlist_from_dataset(filtered_bck_ds, axis)
        if bck_eventlist and len(bck_eventlist.time) > 0:
            bck_lc = bck_eventlist.to_lc(dt)

            if lc.countrate.shape == bck_lc.countrate.shape:
                lc.countrate -= bck_lc.countrate
            else:
                logging.warn("Background counts differs from lc counts, omiting Bck data.")

            bck_lc = None

        else:
            logging.warn("Wrong lightcurve counts for background data...")

        bck_eventlist = None  # Dispose memory
        filtered_bck_ds = None

    else:
        logging.warn("Background dataset is None!, omiting Bck data.")

    return lc
Example #2
def get_divided_lightcurve_ds(lc0_filename, lc1_filename, lc0_bck_filename, lc1_bck_filename, target):
    lc0_destination = get_destination(lc0_filename, target)
    if not lc0_destination:
        return common_error("Invalid file or cache key for lc0 data")

    lc1_destination = get_destination(lc1_filename, target)
    if not lc1_destination:
        return common_error("Invalid file or cache key for lc1 data")

    lc0_bck_destination = ""
    if lc0_bck_filename:
        lc0_bck_destination = get_destination(lc0_bck_filename, target)
        if not lc0_bck_destination:
            return common_error("Invalid file or cache key for lc0_bck data")

    lc1_bck_destination = ""
    if lc1_bck_filename:
        lc1_bck_destination = get_destination(lc1_bck_filename, target)
        if not lc1_bck_destination:
            return common_error("Invalid file or cache key for lc1_bck data")

    logging.debug("get_divided_lightcurve_ds lc0: %s" % lc0_filename)
    logging.debug("get_divided_lightcurve_ds lc1: %s" % lc1_filename)
    logging.debug("get_divided_lightcurve_ds lc0_bck: %s" % lc0_bck_filename)
    logging.debug("get_divided_lightcurve_ds lc1_bck: %s" % lc1_bck_filename)

    cache_key = DaveEngine.get_divided_lightcurve_ds(lc0_destination, lc1_destination,
                                                    lc0_bck_destination, lc1_bck_destination)

    logging.debug("get_divided_lightcurve_ds: Finish! cache_key ->  %s" % cache_key)

    return json.dumps(cache_key, cls=NPEncoder)
Example #3
def get_fits_dataset(hdulist, dsId, table_ids):
    dataset = DataSet.get_empty_dataset(dsId)

    for t in range(len(hdulist)):
        if isinstance(hdulist[t], fits.hdu.table.BinTableHDU):
            if hdulist[t].name in table_ids:
                table_id = hdulist[t].name

                header_names = hdulist[t].columns.names
                tbdata = hdulist[t].data
                dataset.add_table(table_id, header_names)

                header, header_comments = get_header(hdulist, table_id)
                dataset.tables[table_id].set_header_info(header, header_comments)

                for i in range(len(header_names)):
                    header_name = header_names[i]
                    dataset.tables[table_id].columns[header_name].add_values(np.nan_to_num(tbdata.field(i)))
            else:
                logging.warn("Ignored table data: %s" % hdulist[t].name)
        else:
            logging.warn("No valid data on: %s" % t)
            logging.warn("Type of Data: %s" % type(hdulist[t]))

    hdulist.close()

    logging.debug("Read fits file successfully: %s" % dsId)

    return dataset
Example #4
def get_colors_lightcurve(src_destination, bck_destination, gti_destination, filters, axis, dt):

    if len(axis) != 2:
        logging.warn("Wrong number of axis")
        return None

    try:
        filters = FltHelper.apply_bin_size_to_filters(filters, dt)

        count_column_name = "PI"
        color_keys = FltHelper.get_color_keys_from_filters(filters)
        filtered_datasets = split_dataset_with_color_filters(src_destination, filters, color_keys, count_column_name, gti_destination)
        color_axis = get_color_axis_for_ds()

        # Creates lightcurves array applying bck and gtis from each color
        logging.debug("Create color lightcurves ....")
        lightcurves = get_lightcurves_from_datasets_array(filtered_datasets, color_keys, count_column_name, color_axis, bck_destination, filters, gti_destination, dt)
        filtered_datasets = None  # Dispose memory

        # Prepares the result
        logging.debug("Result color lightcurves ....")
        if len(lightcurves) == 4:
            if lightcurves[0]:
                result = push_to_results_array([], lightcurves[0].time)
                result = push_divided_values_to_results_array(result, lightcurves[0].countrate, lightcurves[1].countrate)
                result = push_divided_values_to_results_array(result, lightcurves[2].countrate, lightcurves[3].countrate)
                return result

    except Exception:
        logging.error(str(sys.exc_info()))

    return None
Example #5
def get_fits_dataset(destination, dsId, table_ids):
    hdulist = fits.open(destination)
    dataset = DataSet.get_empty_dataset(dsId)

    for t in range(len(hdulist)):

        if isinstance(hdulist[t], fits.hdu.table.BinTableHDU):
            table_id = table_ids[t]
            header_names = hdulist[t].columns.names
            tbdata = hdulist[t].data
            dataset.add_table(table_id, header_names)

            for i in range(len(header_names)):
                header_name = header_names[i]
                dataset.tables[table_id].columns[header_name].values.append(
                    tbdata.field(i))

        else:
            logging.warn("No valid data on: %s" % t)
            logging.warn("Type of Data: %s" % type(hdulist[t]))

    hdulist.close()

    logging.debug("Read fits file successfully: %s" % destination)

    return dataset
Example #6
def get_fits_dataset(hdulist, dsId, table_ids):
    dataset = DataSet.get_empty_dataset(dsId)

    for t in range(len(hdulist)):
        if isinstance(hdulist[t], fits.hdu.table.BinTableHDU):
            if hdulist[t].name in table_ids:
                table_id = hdulist[t].name

                header_names = hdulist[t].columns.names
                tbdata = hdulist[t].data
                dataset.add_table(table_id, header_names)

                header, header_comments = get_header(hdulist, table_id)
                dataset.tables[table_id].set_header_info(
                    header, header_comments)

                for i in range(len(header_names)):
                    header_name = header_names[i]
                    dataset.tables[table_id].columns[header_name].add_values(
                        np.nan_to_num(tbdata.field(i)))
            else:
                logging.warn("Ignored table data: %s" % hdulist[t].name)
        else:
            logging.warn("No valid data on: %s" % t)
            logging.warn("Type of Data: %s" % type(hdulist[t]))

    hdulist.close()

    logging.debug("Read fits file successfully: %s" % dsId)

    return dataset
Example #7
def get(key):

    if key in cached_datasets:
        dataset_hits[key] += 1
        if LOG_CACHE_HITS:
            logging.debug("DATASET CACHE: n(" + str(len(cached_datasets)) + ") HITS -> " + str(dataset_hits))
        return cached_datasets[key]

    return None
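A minimal usage sketch for the cache accessor above (assuming cached_datasets and dataset_hits are the module-level dicts implied by the code; names and values here are illustrative only):

# Hypothetical illustration: prime the module-level dicts, then read twice.
cached_datasets["obs1"] = object()  # any dataset object would do
dataset_hits["obs1"] = 0

ds = get("obs1")                 # returns the dataset and bumps the hit counter
assert dataset_hits["obs1"] == 1
assert get("missing") is None    # unknown keys fall through to None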
Example #8
def save_file(target, file):

    logging.debug("save_file: %s - %s" % (type(file), file))

    if not os.path.isdir(target):
        os.mkdir(target)

    destination = get_destination(target, file.filename)
    file.save(destination)

    return destination
Example #10
def get_lightcurve_fits_dataset_with_stingray(destination, hdulist, hduname='RATE',
                                            column=CONFIG.TIME_COLUMN, gtistring=CONFIG.GTI_STRING, time_offset=0):

    supported_rate_columns = set(['RATE', 'RATE1', 'COUNTS'])
    found_rate_columns = set(hdulist[hduname].data.names)
    intersection_columns = supported_rate_columns.intersection(found_rate_columns)

    # Check that the HDUCLAS1 = LIGHTCURVE header keyword exists
    logging.debug("Reading Lightcurve Fits columns")
    if "HDUCLAS1" not in hdulist[hduname].header:
        logging.warning("HDUCLAS1 not found in header: " + hduname)
        return None

    elif hdulist[hduname].header["HDUCLAS1"] != "LIGHTCURVE":
        logging.warn("HDUCLAS1 is not LIGHTCURVE")
        return None

    elif len(intersection_columns) == 0:
        logging.warn("RATE, RATE1 or COUNTS columns not found in " + str(hduname) + " HDU, found columns: " + str(hdulist[hduname].data.names))
        return None

    elif len(intersection_columns) > 1:
        logging.warn("RATE, RATE1 or COUNTS ambiguous columns found in " + str(hduname) + " HDU, found columns: " + str(hdulist[hduname].data.names))
        return None

    ratecolumn = list(intersection_columns)[0]
    if len(hdulist[hduname].data[ratecolumn].shape) != 1 \
        or not (isinstance(hdulist[hduname].data[ratecolumn][0], int) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], np.integer) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], float) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], np.floating)):
        logging.warn("Wrong data type found for column: " + str(ratecolumn) + " in " + str(hduname) + " HDU, expected Integer or Float.")
        return None

    header, header_comments = get_header(hdulist, hduname)

    # Reads the lightcurve with HENDRICS
    outfile = lcurve_from_fits(destination, gtistring=get_hdu_string_from_hdulist(gtistring, hdulist),
                             timecolumn=column, ratecolumn=ratecolumn, ratehdu=1,
                             fracexp_limit=CONFIG.FRACEXP_LIMIT)[0]

    lcurve, events_start_time = substract_tstart_from_lcurve(load_data(outfile), time_offset)

    dataset = DataSet.get_lightcurve_dataset_from_stingray_lcurve(lcurve, header, header_comments,
                                                                    hduname, column)

    # Stores the events_start_time in time column extra
    dataset.tables[hduname].columns[column].set_extra("TSTART", events_start_time)

    logging.debug("Read Lightcurve fits with stingray file successfully: " + str(destination) + ", tstart: " + str(events_start_time) + ", rate: " + str(len(lcurve["counts"])))

    return dataset
Example #11
def get_intermediate_files(filepaths, target):
    filenames = []

    for filepath in filepaths:
        if not FileUtils.is_valid_file(filepath):
            logging.error("Filepath not found or invalid: %s" % filepath)
        else:
            filename = DaveBulk.get_intermediate_file(filepath, target)
            logging.debug("get_intermediate_files filename: %s" % filename)
            if filename:
                filenames.append(filename)

    return json.dumps(filenames, cls=NPEncoder)
Example #13
def get_events_fits_dataset_with_stingray(destination,
                                          hdulist,
                                          dsId='FITS',
                                          hduname='EVENTS',
                                          column='TIME',
                                          gtistring='GTI,STDGTI'):

    # Gets columns from fits hdu table
    logging.debug("Reading Events Fits columns")
    columns = get_fits_table_column_names(hdulist, hduname)

    # Gets FITS header properties
    header = dict()
    header_comments = dict()
    for header_column in hdulist[hduname].header:
        header[header_column] = str(hdulist[hduname].header[header_column])
        header_comments[header_column] = str(
            hdulist[hduname].header.comments[header_column])

    # Gets start time of observation
    events_start_time = 0
    if "TSTART" in header:
        events_start_time = hdulist[hduname].header["TSTART"]

    # Closes the FITS file, further file data reads will be done via Stingray
    hdulist.close()

    # Prepares additional_columns
    additional_columns = []
    for i in range(len(columns)):
        if columns[i] != column:
            additional_columns.append(columns[i])

    # Reads fits data
    logging.debug("Reading Events Fits columns's data")
    fits_data = load_events_and_gtis(destination,
                                     additional_columns=additional_columns,
                                     gtistring=gtistring,
                                     hduname=hduname,
                                     column=column)

    gti_start = fits_data.gti_list[:, 0] - events_start_time
    gti_end = fits_data.gti_list[:, 1] - events_start_time

    logging.debug("Read Events fits... gti_start: " + str(len(gti_start)) +
                  ", gti_end: " + str(len(gti_end)))

    event_values = fits_data.ev_list - events_start_time

    dataset = DataSet.get_dataset_applying_gtis(dsId, header, header_comments,
                                                fits_data.additional_data,
                                                event_values, gti_start,
                                                gti_end, None, None, hduname,
                                                column)

    logging.debug("Read Events fits with stingray file successfully: %s" %
                  destination)

    return dataset
Example #14
def get_txt_dataset(destination, table_id, header_names):

    data = np.loadtxt(destination)
    dataset = DataSet.get_hdu_type_dataset(table_id, header_names, hduname="EVENTS")

    # Column1, Column1Err, Column2, Column2Err .. header order expected
    for i in range(len(header_names)):
        header_name = header_names[i]
        column = dataset.tables[table_id].columns[header_name]
        column.values = data[0:len(data), i * 2]
        column.error_values = data[0:len(data), (i * 2) + 1]

    logging.debug("Read txt file successfully: %s" % destination)

    return dataset
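The interleaved value/error layout expected by np.loadtxt above can be seen in a short usage sketch (a hedged illustration: header names, values and the temp path are hypothetical, and DataSet with its helpers is assumed importable):

# Hypothetical input: each header name maps to a value column followed by
# its error column, i.e. TIME TIME_ERR PHA PHA_ERR per row.
import numpy as np

np.savetxt("/tmp/events.txt", np.array([[0.00, 0.01, 12.0, 1.2],
                                        [0.25, 0.01, 15.0, 1.5]]))
ds = get_txt_dataset("/tmp/events.txt", "EVENTS", ["TIME", "PHA"])
# ds.tables["EVENTS"].columns["PHA"].values        -> [12.0, 15.0]
# ds.tables["EVENTS"].columns["PHA"].error_values  -> [1.2, 1.5]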
Example #15
def get_txt_dataset(destination, table_id, header_names):

    data = np.loadtxt(destination)
    dataset = DataSet.get_dataset(table_id, table_id, header_names)

    # Column1, Column1Err, Column2, Column2Err .. header order expected
    for i in range(len(header_names)):
        header_name = header_names[i]
        column = dataset.tables[table_id].columns[header_name]
        column.values = data[0:len(data), i * 2]
        column.error_values = data[0:len(data), (i * 2) + 1]

    logging.debug("Read txt file successfully: %s" % destination)

    return dataset
Example #16
def add_time_offset_to_dataset(dataset, time_offset):
    if time_offset != 0:

        logging.debug("add_time_offset_to_dataset: dataset: " + str(dataset.id) + ", time_offset: " + str(time_offset))

        ds_gti = get_stingray_gti_from_gti_table(dataset.tables["GTI"])
        ds_gti[:, 0] = ds_gti[:, 0] + time_offset
        ds_gti[:, 1] = ds_gti[:, 1] + time_offset
        dataset.tables["GTI"] = get_gti_table_from_stingray_gti(ds_gti)

        hdutable = get_hdutable_from_dataset(dataset)
        if hdutable:
            hdutable.columns[CONFIG.TIME_COLUMN].values = hdutable.columns[CONFIG.TIME_COLUMN].values + time_offset

    return dataset
Example #17
def add_time_offset_to_dataset(dataset, time_offset):
    if time_offset != 0:

        logging.debug("add_time_offset_to_dataset: dataset: " +
                      str(dataset.id) + ", time_offset: " + str(time_offset))

        ds_gti = get_stingray_gti_from_gti_table(dataset.tables["GTI"])
        ds_gti[:, 0] = ds_gti[:, 0] + time_offset
        ds_gti[:, 1] = ds_gti[:, 1] + time_offset
        dataset.tables["GTI"] = get_gti_table_from_stingray_gti(ds_gti)

        hdutable = get_hdutable_from_dataset(dataset)
        if hdutable:
            hdutable.columns[CONFIG.TIME_COLUMN].values = hdutable.columns[
                CONFIG.TIME_COLUMN].values + time_offset

    return dataset
Example #18
def bulk_analisys(filenames, plot_configs, outdir, target):

    logging.debug("bulk_analisys filenames: %s" % filenames)
    logging.debug("bulk_analisys plot_configs: %s" % plot_configs)
    logging.debug("bulk_analisys outdir: %s" % outdir)

    absolute_outdir = "/".join([target, outdir])
    bulk_data = DaveBulk.bulk_analisys(filenames, plot_configs, absolute_outdir)
    logging.debug("bulk_analisys: Finish!")
    return json.dumps(bulk_data, cls=NPEncoder)
Example #19
def create_gti_from_condition(time, condition, safe_interval=0, dt=None):
    """Create a GTI list from a time array and a boolean mask ("condition").
    Parameters
    ----------
    time : array-like
        Array containing times
    condition : array-like
        An array of bools, of the same length of time.
        A possible condition can be, e.g., the result of lc > 0.
    Returns
    -------
    gtis : [[gti0_0, gti0_1], [gti1_0, gti1_1], ...]
        The newly created GTIs
    Other parameters
    ----------------
    safe_interval : float or [float, float]
        A safe interval to exclude at both ends (if single float) or the start
        and the end (if pair of values) of GTIs.
    dt : float
        The width (in sec) of each bin of the time array. Can be irregular.
    """
    import collections.abc

    assert len(time) == len(condition), \
        'The length of the condition and time arrays must be the same.'
    idxs = contiguous_regions(condition)

    if not isinstance(safe_interval, collections.abc.Iterable):
        safe_interval = [safe_interval, safe_interval]

    dt = _assign_value_if_none(dt,
                               np.zeros_like(time) + (time[1] - time[0]) / 2)

    gtis = []
    for idx in idxs:
        logging.debug(idx)
        startidx = idx[0]
        stopidx = idx[1] - 1

        t0 = time[startidx] - dt[startidx] + safe_interval[0]
        t1 = time[stopidx] + dt[stopidx] - safe_interval[1]
        if t1 - t0 < 0:
            continue
        gtis.append([t0, t1])
    return np.array(gtis)
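A quick usage sketch under stated assumptions (numpy available, contiguous_regions and _assign_value_if_none importable from the same module; numbers are illustrative):

import numpy as np

time = np.arange(0.5, 10.5, 1.0)                # ten 1-s bins, centered
rate = np.array([0, 3, 4, 5, 0, 0, 2, 3, 0, 1])
gtis = create_gti_from_condition(time, rate > 0)
# dt defaults to half the first bin width, so GTIs snap to bin edges:
# [[1.0, 4.0], [6.0, 8.0], [9.0, 10.0]]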
Example #20
def check_gtis(gti):
    """Check if GTIs are well-behaved. No start>end, no overlaps.
    Raises
    ------
    AssertionError
        If GTIs are not well-behaved.
    """
    gti_start = gti[:, 0]
    gti_end = gti[:, 1]

    logging.debug('-- GTI: ' + repr(gti))
    # Check that GTIs are well-behaved
    assert np.all(gti_end >= gti_start), 'This GTI is incorrect'
    # Check that there are no overlaps in GTIs
    assert np.all(gti_start[1:] >= gti_end[:-1]), 'This GTI has overlaps'
    logging.debug('-- Correct')

    return
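And a sketch of what check_gtis accepts and rejects (values illustrative):

import numpy as np

check_gtis(np.array([[0.0, 100.0], [150.0, 300.0]]))   # well-behaved: passes
# check_gtis(np.array([[0.0, 200.0], [150.0, 300.0]])) # overlap: AssertionError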
Example #21
def bulk_analisys(filenames, plot_configs, outdir, target):

    logging.debug("bulk_analisys filenames: %s" % filenames)
    logging.debug("bulk_analisys plot_configs: %s" % plot_configs)
    logging.debug("bulk_analisys outdir: %s" % outdir)

    absolute_outdir = "/".join([target, outdir])
    bulk_data = DaveBulk.bulk_analisys(filenames, plot_configs,
                                       absolute_outdir)
    logging.debug("bulk_analisys: Finish!")
    return json.dumps(bulk_data, cls=NPEncoder)
Example #22
def get_joined_lightcurves_from_colors(src_destination, bck_destination, gti_destination, filters, axis, dt):

    if len(axis) != 2:
        logging.warn("Wrong number of axis")
        return None

    try:
        filters = FltHelper.apply_bin_size_to_filters(filters, dt)

        # Prepares SRC_LC
        clean_filters = FltHelper.get_filters_clean_color_filters(filters)
        filtered_ds = get_filtered_dataset(src_destination, clean_filters, gti_destination)

        # Creates src lightcurve applying bck and gtis
        src_lc = get_lightcurve_from_dataset(filtered_ds, axis, bck_destination, clean_filters, gti_destination, dt)
        if not src_lc:
            logging.warn("Cant create lc_src")
            return None

        count_column_name = "PI"
        color_keys = FltHelper.get_color_keys_from_filters(filters)
        filtered_datasets = split_dataset_with_color_filters(src_destination, filters, color_keys, count_column_name, gti_destination)
        color_axis = get_color_axis_for_ds()

        # Creates lightcurves array applying bck and gtis from each color
        logging.debug("Create color lightcurves ....")
        lightcurves = get_lightcurves_from_datasets_array(filtered_datasets, color_keys, count_column_name, color_axis, bck_destination, filters, gti_destination, dt)
        filtered_datasets = None  # Dispose memory

        if len(lightcurves) == 2:

            # Prepares the result
            logging.debug("Result joined lightcurves ....")
            result = push_to_results_array([], src_lc.countrate)
            result = push_divided_values_to_results_array(result, lightcurves[0].countrate, lightcurves[1].countrate)
            result = push_to_results_array(result, src_lc.time)
            return result

    except Exception:
        logging.error(str(sys.exc_info()))

    return None
Example #23
def get_stingray_object(destination, time_offset=0):

    if not destination:
        return None

    filename = os.path.splitext(destination)[0]
    file_extension = magic.from_file(destination)
    logging.debug("File extension: %s" % file_extension)

    if file_extension.find("FITS") == 0:

        # Opening Fits
        hdulist = fits.open(destination, memmap=True)

        if 'EVENTS' in hdulist:
            # If EVENTS extension found, consider the Fits as EVENTS Fits
            fits_data = load_events_and_gtis(destination,
                                             additional_columns=['PI', "PHA"],
                                             gtistring=CONFIG.GTI_STRING,
                                             hduname='EVENTS',
                                             column=CONFIG.TIME_COLUMN)
            return substract_tstart_from_events(fits_data, time_offset)

        elif 'RATE' in hdulist:
            # If RATE extension found, consider the Fits as LIGHTCURVE Fits
            # Reads the lightcurve with hendrics
            outfile = lcurve_from_fits(destination,
                                       gtistring=get_hdu_string_from_hdulist(
                                           CONFIG.GTI_STRING, hdulist),
                                       timecolumn=CONFIG.TIME_COLUMN,
                                       ratecolumn=None,
                                       ratehdu=1,
                                       fracexp_limit=CONFIG.FRACEXP_LIMIT)[0]
            return substract_tstart_from_lcurve(load_lcurve(outfile),
                                                time_offset)

        else:
            logging.error("Unsupported FITS type!")

    else:
        logging.error("Unknown file extension: %s" % file_extension)
        return None
Example #24
def append_file_to_dataset(filename, nextfile, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not nextfile:
        return common_error("No nextfile setted")

    if not SessionHelper.is_file_uploaded(nextfile):
        if not FileUtils.file_exist(target, nextfile):
            logging.error("Filename not uploaded for nextfile %s" % nextfile)
            return common_error("Nextfile not uploaded")

    next_destination = FileUtils.get_destination(target, nextfile)
    if not FileUtils.is_valid_file(next_destination):
        return common_error("Invalid next file")

    logging.debug("append_file_to_dataset, destination: %s" % destination)
    logging.debug("append_file_to_dataset, next_destination: %s" % next_destination)

    new_filename = DaveEngine.append_file_to_dataset(destination, next_destination)

    logging.debug("append_file_to_dataset, cache_key: %s" % new_filename)

    return json.dumps(new_filename)
Example #25
def append_file_to_dataset(filename, nextfile, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not nextfile:
        return common_error("No nextfile setted")

    if not SessionHelper.is_file_uploaded(nextfile):
        if not FileUtils.file_exist(target, nextfile):
            logging.error("Filename not uploaded for nextfile %s" % nextfile)
            return common_error("Nextfile not uploaded")

    next_destination = FileUtils.get_destination(target, nextfile)
    if not FileUtils.is_valid_file(next_destination):
        return common_error("Invalid next file")

    logging.debug("append_file_to_dataset, destination: %s" % destination)
    logging.debug("append_file_to_dataset, next_destination: %s" %
                  next_destination)

    new_filename = DaveEngine.append_file_to_dataset(destination,
                                                     next_destination)

    logging.debug("append_file_to_dataset, cache_key: %s" % new_filename)

    return json.dumps(new_filename)
Example #26
def get_joined_lightcurves(lc0_destination, lc1_destination, filters, axis, dt):

    try:

        if len(axis) != 2:
            logging.warn("Wrong number of axis")
            return None

        filters = FltHelper.get_filters_clean_color_filters(filters)
        filters = FltHelper.apply_bin_size_to_filters(filters, dt)

        lc0_ds = get_filtered_dataset(lc0_destination, filters)
        if not DsHelper.is_lightcurve_dataset(lc0_ds):
            logging.warn("Wrong dataset type for lc0")
            return None

        lc1_ds = get_filtered_dataset(lc1_destination, filters)
        if not DsHelper.is_lightcurve_dataset(lc1_ds):
            logging.warn("Wrong dataset type for lc1")
            return None

        # Probably a stronger check could be used here
        if len(lc0_ds.tables["RATE"].columns["TIME"].values) == len(lc1_ds.tables["RATE"].columns["TIME"].values):

            # Prepares the result
            logging.debug("Result joined lightcurves ....")
            result = push_to_results_array([], lc0_ds.tables["RATE"].columns["RATE"].values)
            result = push_to_results_array(result, lc1_ds.tables["RATE"].columns["RATE"].values)
            result = push_to_results_array(result, lc0_ds.tables["RATE"].columns["TIME"].values)
            return result

        else:
            logging.warn("Lightcurves have different durations.")
            return None

    except Exception:
        logging.error(str(sys.exc_info()))

    return None
Example #27
def get_lightcurve_fits_dataset_with_stingray(destination,
                                              hdulist,
                                              hduname='RATE',
                                              column='TIME',
                                              gtistring='GTI,STDGTI'):

    # Check that the HDUCLAS1 = LIGHTCURVE header keyword exists
    logging.debug("Reading Lightcurve Fits columns")
    if "HDUCLAS1" not in hdulist[hduname].header:
        logging.warning("HDUCLAS1 not found in header: " + hduname)
        return None

    if hdulist[hduname].header["HDUCLAS1"] != "LIGHTCURVE":
        logging.warn("HDUCLAS1 is not LIGHTCURVE")
        return None

    # Gets FITS header properties
    header = dict()
    header_comments = dict()
    for header_column in hdulist[hduname].header:
        header[header_column] = str(hdulist[hduname].header[header_column])
        header_comments[header_column] = str(
            hdulist[hduname].header.comments[header_column])

    lcurve = lcurve_from_fits(destination,
                              gtistring=gtistring,
                              timecolumn=column,
                              ratecolumn=None,
                              ratehdu=1,
                              fracexp_limit=0.9)

    dataset = DataSet.get_lightcurve_dataset_from_stingray_lcurve(
        lcurve, header, header_comments, hduname, column)

    logging.debug("Read Lightcurve fits with stingray file successfully: %s" %
                  destination)

    return dataset
Example #28
def get_events_fits_dataset_with_stingray(destination,
                                          hdulist,
                                          dsId='FITS',
                                          hduname='EVENTS',
                                          column=CONFIG.TIME_COLUMN,
                                          gtistring=CONFIG.GTI_STRING,
                                          extra_colums=[],
                                          time_offset=0):

    # Gets columns from fits hdu table
    logging.debug("Reading Events Fits columns")
    columns = get_fits_table_column_names(hdulist, hduname)

    header, header_comments = get_header(hdulist, hduname)

    # Closes the FITS file, further file data reads will be done via Stingray
    hdulist.close()

    # Prepares additional_columns
    additional_columns = []
    for i in range(len(columns)):
        if columns[i] != column:
            if len(extra_colums) == 0 or columns[i] in extra_colums:
                additional_columns.append(columns[i])

    # Reads fits data
    logging.debug("Reading Events Fits columns's data")
    fits_data = load_events_and_gtis(destination,
                                     additional_columns=additional_columns,
                                     gtistring=gtistring,
                                     hduname=hduname,
                                     column=column)

    event_list, events_start_time = substract_tstart_from_events(
        fits_data, time_offset)

    # Gets PI column data from eventlist if required and PHA not in additional_data
    if "PI" in additional_columns \
        and "PI" not in fits_data.additional_data \
        and "PHA" not in fits_data.additional_data:
        fits_data.additional_data["PI"] = event_list.pi

    dataset = DataSet.get_dataset_applying_gtis(dsId, header, header_comments,
                                                fits_data.additional_data, [],
                                                event_list.time, [],
                                                event_list.gti[:, 0],
                                                event_list.gti[:, 1], None,
                                                None, "EVENTS", column)

    # Stores the events_start_time in time column extra
    dataset.tables["EVENTS"].columns[column].set_extra("TSTART",
                                                       events_start_time)

    logging.debug("Read Events fits with stingray file successfully: " +
                  str(destination) + ", tstart: " + str(events_start_time))

    return dataset
Example #29
def get_stingray_object(destination, time_offset=0):

    if not destination:
        return None

    filename = os.path.splitext(destination)[0]
    file_extension = magic.from_file(destination)
    logging.debug("File extension: %s" % file_extension)

    if file_extension.find("FITS") == 0:

        # Opening Fits
        hdulist = fits.open(destination, memmap=True)

        if 'EVENTS' in hdulist:
            # If EVENTS extension found, consider the Fits as EVENTS Fits
            fits_data = load_events_and_gtis(destination,
                                             additional_columns=['PI', "PHA"],
                                             gtistring=CONFIG.GTI_STRING,
                                             hduname='EVENTS', column=CONFIG.TIME_COLUMN)
            return substract_tstart_from_events(fits_data, time_offset)

        elif 'RATE' in hdulist:
            # If RATE extension found, consider the Fits as LIGHTCURVE Fits
            # Reads the lightcurve with hendrics
            outfile = lcurve_from_fits(destination, gtistring=get_hdu_string_from_hdulist(CONFIG.GTI_STRING, hdulist),
                                     timecolumn=CONFIG.TIME_COLUMN, ratecolumn=None, ratehdu=1,
                                     fracexp_limit=CONFIG.FRACEXP_LIMIT)[0]
            return substract_tstart_from_lcurve(load_lcurve(outfile), time_offset)

        else:
            logging.error("Unsupported FITS type!")

    else:
        logging.error("Unknown file extension: %s" % file_extension)
        return None
Example #30
def get_lightcurve_ds_from_events_ds(filename, target, axis, dt):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    logging.debug("get_lightcurve_ds_from_events_ds filename: %s" % filename)
    logging.debug("get_lightcurve_ds_from_events_ds: axis %s" % axis)
    logging.debug("get_lightcurve_ds_from_events_ds: dt %f" % dt)

    cache_key = DaveEngine.get_lightcurve_ds_from_events_ds(
        destination, axis, dt)

    logging.debug(
        "get_lightcurve_ds_from_events_ds: Finish! cache_key ->  %s" %
        cache_key)

    return json.dumps(cache_key, cls=NPEncoder)
Example #31
def get_divided_lightcurve_ds(lc0_filename, lc1_filename, lc0_bck_filename,
                              lc1_bck_filename, target):
    lc0_destination = get_destination(lc0_filename, target)
    if not lc0_destination:
        return common_error("Invalid file or cache key for lc0 data")

    lc1_destination = get_destination(lc1_filename, target)
    if not lc1_destination:
        return common_error("Invalid file or cache key for lc1 data")

    lc0_bck_destination = ""
    if lc0_bck_filename:
        lc0_bck_destination = get_destination(lc0_bck_filename, target)
        if not lc0_bck_destination:
            return common_error("Invalid file or cache key for lc0_bck data")

    lc1_bck_destination = ""
    if lc1_bck_filename:
        lc1_bck_destination = get_destination(lc1_bck_filename, target)
        if not lc1_bck_destination:
            return common_error("Invalid file or cache key for lc1_bck data")

    logging.debug("get_divided_lightcurve_ds lc0: %s" % lc0_filename)
    logging.debug("get_divided_lightcurve_ds lc1: %s" % lc1_filename)
    logging.debug("get_divided_lightcurve_ds lc0_bck: %s" % lc0_bck_filename)
    logging.debug("get_divided_lightcurve_ds lc1_bck: %s" % lc1_bck_filename)

    cache_key = DaveEngine.get_divided_lightcurve_ds(lc0_destination,
                                                     lc1_destination,
                                                     lc0_bck_destination,
                                                     lc1_bck_destination)

    logging.debug("get_divided_lightcurve_ds: Finish! cache_key ->  %s" %
                  cache_key)

    return json.dumps(cache_key, cls=NPEncoder)
Example #32
def get_plot_data_from_models(models, x_values):

    logging.debug("get_plot_data_from_models models: %s" % models)
    logging.debug("get_plot_data_from_models x_values: %s" % str(len(x_values)))

    data = DaveEngine.get_plot_data_from_models(models, x_values)

    logging.debug("get_plot_data_from_models: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #33
def get_plot_data_from_models(models, x_values):

    logging.debug("get_plot_data_from_models models: %s" % models)
    logging.debug("get_plot_data_from_models x_values: %s" %
                  str(len(x_values)))

    data = DaveEngine.get_plot_data_from_models(models, x_values)

    logging.debug("get_plot_data_from_models: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #34
def get_lightcurve(src_destination, bck_destination, gti_destination, filters, axis, dt):

    time_vals = []
    count_rate = []
    error_values = []

    try:
        if len(axis) != 2:
            logging.warn("Wrong number of axis")
            return None

        filters = FltHelper.get_filters_clean_color_filters(filters)
        filters = FltHelper.apply_bin_size_to_filters(filters, dt)

        filtered_ds = get_filtered_dataset(src_destination, filters, gti_destination)
        if not DsHelper.is_events_dataset(filtered_ds) \
            and not DsHelper.is_lightcurve_dataset(filtered_ds):
            logging.warn("Wrong dataset type")
            return None

        if DsHelper.is_events_dataset(filtered_ds):
            # Creates a lightcurve per GTI and joins them into one
            logging.debug("Create lightcurve ....Event count: " + str(len(filtered_ds.tables["EVENTS"].columns["TIME"].values)))

            lc = get_lightcurve_from_dataset(filtered_ds, axis, bck_destination, filters, gti_destination, dt)
            filtered_ds = None  # Dispose memory

            if lc:
                logging.debug("Result time: " + str(len(lc.time)))
                time_vals = lc.time
                count_rate = lc.countrate
                error_values = []  # TODO: Implement error values on Stingray
                #lc = None  # Dispose memory

        elif DsHelper.is_lightcurve_dataset(filtered_ds):
            # If dataset is LIGHTCURVE type
            time_vals = filtered_ds.tables["RATE"].columns["TIME"].values
            count_rate = filtered_ds.tables["RATE"].columns["RATE"].values
            error_values = filtered_ds.tables["RATE"].columns["ERROR"].values

    except Exception:
        logging.error(str(sys.exc_info()))

    # Prepares the result
    logging.debug("Result lightcurve .... " + str(len(time_vals)))
    result = push_to_results_array([], time_vals)
    result = push_to_results_array(result, count_rate)
    result = push_to_results_array(result, error_values)
    return result
Example #35
def get_events_fits_dataset_with_stingray(destination, hdulist, dsId='FITS',
                                   hduname='EVENTS', column=CONFIG.TIME_COLUMN,
                                   gtistring=CONFIG.GTI_STRING, extra_colums=[], time_offset=0):

    # Gets columns from fits hdu table
    logging.debug("Reading Events Fits columns")
    columns = get_fits_table_column_names(hdulist, hduname)

    header, header_comments = get_header(hdulist, hduname)

    # Closes the FITS file, further file data reads will be done via Stingray
    hdulist.close()

    # Prepares additional_columns
    additional_columns = []
    for i in range(len(columns)):
        if columns[i] != column:
            if len(extra_colums) == 0 or columns[i] in extra_colums:
                additional_columns.append(columns[i])

    # Reads fits data
    logging.debug("Reading Events Fits columns's data")
    fits_data = load_events_and_gtis(destination,
                                     additional_columns=additional_columns,
                                     gtistring=gtistring,
                                     hduname=hduname, column=column)

    event_list, events_start_time = substract_tstart_from_events(fits_data, time_offset)

    # Gets PI column data from eventlist if required and PHA not in additional_data
    if "PI" in additional_columns \
        and "PI" not in fits_data.additional_data \
        and "PHA" not in fits_data.additional_data:
        fits_data.additional_data["PI"] = event_list.pi

    dataset = DataSet.get_dataset_applying_gtis(dsId, header, header_comments,
                                                fits_data.additional_data, [],
                                                event_list.time, [],
                                                event_list.gti[:, 0], event_list.gti[:, 1],
                                                None, None, "EVENTS", column)

    # Stores the events_start_time in time column extra
    dataset.tables["EVENTS"].columns[column].set_extra("TSTART", events_start_time)

    logging.debug("Read Events fits with stingray file successfully: " + str(destination) + ", tstart: " + str(events_start_time))

    return dataset
def get_fit_lomb_scargle_result(src_filename, bck_filename, gti_filename, target,
                    filters, axis, dt, freq_range, nyquist_factor, ls_norm, samples_per_peak,
                    models, priors=None, sampling_params=None):

    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for backgrund data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_fit_lomb_scargle_result src: %s" % src_filename)
    logging.debug("get_fit_lomb_scargle_result bck: %s" % bck_filename)
    logging.debug("get_fit_lomb_scargle_result gti: %s" % gti_filename)
    logging.debug("get_fit_lomb_scargle_result: filters %s" % filters)
    logging.debug("get_fit_lomb_scargle_result: axis %s" % axis)
    logging.debug("get_fit_lomb_scargle_result: dt %s" % dt)
    logging.debug("get_fit_lomb_scargle_result: freq_range %s" % freq_range)
    logging.debug("get_fit_lomb_scargle_result: nyquist_factor %s" % nyquist_factor)
    logging.debug("get_fit_lomb_scargle_result: ls_norm %s" % ls_norm)
    logging.debug("get_fit_lomb_scargle_result: samples_per_peak %s" % samples_per_peak)
    logging.debug("get_fit_lomb_scargle_result: models %s" % models)
    logging.debug("get_fit_lomb_scargle_result: priors %s" % priors)
    logging.debug("get_fit_lomb_scargle_result: sampling_params %s" % sampling_params)

    data = DaveEngine.get_fit_lomb_scargle_result(src_destination, bck_destination, gti_destination,
                                                filters, axis, dt, freq_range, nyquist_factor,
                                                ls_norm, samples_per_peak, models, priors, sampling_params)

    logging.debug("get_fit_lomb_scargle_result: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #37
def get_fit_powerspectrum_result(src_filename, bck_filename, gti_filename,
                                 target, filters, axis, dt, nsegm, segm_size,
                                 norm, pds_type, models):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for backgrund data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_fit_powerspectrum_result src: %s" % src_filename)
    logging.debug("get_fit_powerspectrum_result bck: %s" % bck_filename)
    logging.debug("get_fit_powerspectrum_result gti: %s" % gti_filename)
    logging.debug("get_fit_powerspectrum_result: filters %s" % filters)
    logging.debug("get_fit_powerspectrum_result: axis %s" % axis)
    logging.debug("get_fit_powerspectrum_result: dt %f" % dt)
    logging.debug("get_fit_powerspectrum_result: nsegm %f" % nsegm)
    logging.debug("get_fit_powerspectrum_result: segm_size %f" % segm_size)
    logging.debug("get_fit_powerspectrum_result: norm %s" % norm)
    logging.debug("get_fit_powerspectrum_result: type %s" % pds_type)
    logging.debug("get_fit_powerspectrum_result: models %s" % models)

    data = DaveEngine.get_fit_powerspectrum_result(src_destination,
                                                   bck_destination,
                                                   gti_destination, filters,
                                                   axis, dt, nsegm, segm_size,
                                                   norm, pds_type, models)

    logging.debug("get_fit_powerspectrum_result: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #38
def get_covariance_spectrum(src_filename, bck_filename, gti_filename, filters,
                            target, dt, ref_band_interest, energy_range,
                            n_bands, std):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for backgrund data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_covariance_spectrum src: %s" % src_filename)
    logging.debug("get_covariance_spectrum bck: %s" % bck_filename)
    logging.debug("get_covariance_spectrum gti: %s" % gti_filename)
    logging.debug("get_covariance_spectrum: filters %s" % filters)
    logging.debug("get_covariance_spectrum dt: %s" % dt)
    logging.debug("get_covariance_spectrum ref_band_interest: %s" %
                  ref_band_interest)
    logging.debug("get_phase_lag_spectrum: energy_range %s" % energy_range)
    logging.debug("get_covariance_spectrum n_bands: %s" % n_bands)
    logging.debug("get_covariance_spectrum std: %s" % std)

    data = DaveEngine.get_covariance_spectrum(src_destination, bck_destination,
                                              gti_destination, filters, dt,
                                              ref_band_interest, energy_range,
                                              n_bands, std)

    logging.debug("get_covariance_spectrum: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #39
def get_rms_spectrum(src_filename, bck_filename, gti_filename, target, filters,
                     axis, dt, nsegm, segm_size, norm, pds_type, freq_range,
                     energy_range, n_bands):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for backgrund data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_rms_spectrum src: %s" % src_filename)
    logging.debug("get_rms_spectrum bck: %s" % bck_filename)
    logging.debug("get_rms_spectrum gti: %s" % gti_filename)
    logging.debug("get_rms_spectrum: filters %s" % filters)
    logging.debug("get_rms_spectrum: axis %s" % axis)
    logging.debug("get_rms_spectrum: dt %f" % dt)
    logging.debug("get_rms_spectrum: nsegm %f" % nsegm)
    logging.debug("get_rms_spectrum: segm_size %f" % segm_size)
    logging.debug("get_rms_spectrum: norm %s" % norm)
    logging.debug("get_rms_spectrum: type %s" % pds_type)
    logging.debug("get_rms_spectrum: freq_range %s" % freq_range)
    logging.debug("get_rms_spectrum: energy_range %s" % energy_range)
    logging.debug("get_rms_spectrum: n_bands %s" % n_bands)

    data = DaveEngine.get_rms_spectrum(src_destination, bck_destination,
                                       gti_destination, filters, axis, dt,
                                       nsegm, segm_size, norm, pds_type,
                                       freq_range, energy_range, n_bands)

    logging.debug("get_rms_spectrum: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #40
def get_plot_data(src_filename, bck_filename, gti_filename, target, filters, styles, axis):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for backgrund data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_plot_data src: %s" % src_filename)
    logging.debug("get_plot_data bck: %s" % bck_filename)
    logging.debug("get_plot_data gti: %s" % gti_filename)
    logging.debug("get_plot_data: filters %s" % filters)
    logging.debug("get_plot_data: styles %s" % styles)
    logging.debug("get_plot_data: axis %s" % axis)

    data = DaveEngine.get_plot_data(src_destination, bck_destination, gti_destination, filters, styles, axis)

    logging.debug("get_plot_data: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #41
def get_file_dataset(destination, time_offset=0):

    dataset = None
    cache_key = ""

    try:

        if destination:

            cache_key = get_cache_key_for_destination(destination, time_offset)
            if DsCache.contains(cache_key):
                logging.debug("get_file_dataset: returned cached dataset, cache_key: " + str(cache_key))
                return DsCache.get(cache_key), cache_key

            logging.debug("get_file_dataset: reading destination: " + str(destination))
            filename = os.path.splitext(destination)[0]
            file_extension_from_file = os.path.splitext(destination)[1]
            file_extension = magic.from_file(destination)
            logging.debug("File extension: %s" % file_extension)

            if file_extension.find("ASCII") == 0:

                table_id = "EVENTS"
                header_names = [CONFIG.TIME_COLUMN, "PHA", "Color1", "Color2"]
                dataset = get_txt_dataset(destination, table_id, header_names)

                table = dataset.tables[table_id]
                table.add_columns(["AMPLITUDE"])
                numValues = len(table.columns[CONFIG.TIME_COLUMN].values)
                random_values = np.random.uniform(-1, 1, size=numValues)
                table.columns["AMPLITUDE"].values = random_values

            elif file_extension.find("FITS") == 0 \
                 or file_extension.find("gzip") > -1:

                # Opening Fits
                hdulist = fits.open(destination, memmap=True)

                if get_hdu_string_from_hdulist(CONFIG.EVENTS_STRING, hdulist) != "":
                    # If EVENTS extension found, consider the Fits as EVENTS Fits
                    dataset = get_events_fits_dataset_with_stingray(destination, hdulist, dsId='FITS',
                                                       hduname=get_hdu_string_from_hdulist(CONFIG.EVENTS_STRING, hdulist),
                                                       column=CONFIG.TIME_COLUMN, gtistring=CONFIG.GTI_STRING,
                                                       extra_colums=['PI', "PHA"], time_offset=time_offset)

                elif 'RATE' in hdulist:
                    # If RATE extension found, consider the Fits as LIGHTCURVE Fits
                    dataset = get_lightcurve_fits_dataset_with_stingray(destination, hdulist, hduname='RATE',
                                                                column=CONFIG.TIME_COLUMN, gtistring=CONFIG.GTI_STRING, time_offset=time_offset)

                elif 'EBOUNDS' in hdulist:
                    # If EBOUNDS extension found, consider the Fits as RMF Fits
                    dataset = get_fits_dataset(hdulist, "RMF", ["EBOUNDS"])

                elif get_hdu_string_from_hdulist(CONFIG.GTI_STRING, hdulist) != "":
                    # If not EVENTS or RATE extension found, check if is GTI Fits
                    dataset = get_gti_fits_dataset_with_stingray(hdulist,gtistring=CONFIG.GTI_STRING, time_offset=time_offset)

                else:
                    logging.warn("Unsupported FITS type! Any table found: " + CONFIG.EVENTS_STRING + ", RATE, EBOUNDS or " + CONFIG.GTI_STRING)

            elif file_extension == "data" and (file_extension_from_file in [".p", ".nc"]):

                # If the file is a pickled or netCDF intermediate file, tries to parse it as a dataset
                dataset = load_dataset_from_intermediate_file(destination)

            else:
                logging.warn("Unknown file extension: " + str(file_extension) + " , " + str(file_extension_from_file))

            if dataset:
                DsCache.add(cache_key, dataset)
                logging.debug("get_file_dataset, dataset added to cache, cache_key: " + str(cache_key))

        else:
            logging.error("get_file_dataset: Destination is empty")

    except Exception:
        logging.error(ExHelper.getException('get_file_dataset'))

    return dataset, cache_key
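A hedged usage sketch of the dispatcher above (the path is hypothetical; the module's CONFIG, DsCache and reader helpers are assumed importable):

# Illustrative call: the file type is sniffed with libmagic, the matching
# reader builds the dataset, and the result is memoized under cache_key.
dataset, cache_key = get_file_dataset("/data/obs_events.fits", time_offset=0)
if dataset:
    print("Loaded %s tables, cache key: %s" % (len(dataset.tables), cache_key))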
Example #42
def get_joined_lightcurves_from_colors(src_filename, bck_filename,
                                       gti_filename, target, filters, axis,
                                       dt):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for backgrund data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_joined_lightcurves_from_colors src: %s" % src_filename)
    logging.debug("get_joined_lightcurves_from_colors bck: %s" % bck_filename)
    logging.debug("get_joined_lightcurves_from_colors gti: %s" % gti_filename)
    logging.debug("get_joined_lightcurves_from_colors: filters %s" % filters)
    logging.debug("get_joined_lightcurves_from_colors: axis %s" % axis)
    logging.debug("get_joined_lightcurves_from_colors: dt %f" % dt)

    data = DaveEngine.get_joined_lightcurves_from_colors(
        src_destination, bck_destination, gti_destination, filters, axis, dt)

    logging.debug("get_joined_lightcurves_from_colors: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #43
def get_pulse_search(src_filename, bck_filename, gti_filename, target,
                    filters, axis, dt, freq_range, mode, oversampling,
                    nharm, nbin, segment_size):

    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for backgrund data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_pulse_search src: %s" % src_filename)
    logging.debug("get_pulse_search bck: %s" % bck_filename)
    logging.debug("get_pulse_search gti: %s" % gti_filename)
    logging.debug("get_pulse_search: filters %s" % filters)
    logging.debug("get_pulse_search: axis %s" % axis)
    logging.debug("get_pulse_search: dt %s" % dt)
    logging.debug("get_pulse_search: freq_range %s" % freq_range)
    logging.debug("get_pulse_search: mode %s" % mode)
    logging.debug("get_pulse_search: oversampling %s" % oversampling)
    logging.debug("get_pulse_search: nharm %s" % nharm)
    logging.debug("get_pulse_search: nbin %s" % nbin)
    logging.debug("get_pulse_search: segment_size %s" % segment_size)

    data = DaveEngine.get_pulse_search(src_destination, bck_destination, gti_destination,
                                    filters, axis, dt, freq_range, mode, oversampling,
                                    nharm, nbin, segment_size)

    logging.debug("get_pulse_search: Finish!")

    return json.dumps(data, cls=NPEncoder)
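Each endpoint also dumps its parameters one logging.debug call at a time; a compact alternative is sketched below, where log_params is a hypothetical helper rather than part of this module:

import logging

def log_params(fn_name, **params):
    # Hypothetical helper: one loop instead of a dozen hand-written debug lines.
    for name, value in params.items():
        logging.debug("%s: %s %s" % (fn_name, name, value))

# Usage sketch for the function above:
# log_params("get_pulse_search", src=src_filename, bck=bck_filename,
#            gti=gti_filename, dt=dt, freq_range=freq_range, mode=mode)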
Example #44
0
def get_bootstrap_results(src_filename, bck_filename, gti_filename, target,
                          filters, axis, dt, nsegm, segm_size, norm, pds_type,
                          models, n_iter, mean, red_noise, seed):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_bootstrap_results src: %s" % src_filename)
    logging.debug("get_bootstrap_results bck: %s" % bck_filename)
    logging.debug("get_bootstrap_results gti: %s" % gti_filename)
    logging.debug("get_bootstrap_results: filters %s" % filters)
    logging.debug("get_bootstrap_results: axis %s" % axis)
    logging.debug("get_bootstrap_results: dt %f" % dt)
    logging.debug("get_bootstrap_results: nsegm %f" % nsegm)
    logging.debug("get_bootstrap_results: segm_size %f" % segm_size)
    logging.debug("get_bootstrap_results: norm %s" % norm)
    logging.debug("get_bootstrap_results: type %s" % pds_type)
    logging.debug("get_bootstrap_results: models %s" % models)
    logging.debug("get_bootstrap_results: n_iter %s" % n_iter)
    logging.debug("get_bootstrap_results: mean %s" % mean)
    logging.debug("get_bootstrap_results: red_noise %s" % red_noise)
    logging.debug("get_bootstrap_results: seed %s" % seed)

    data = DaveEngine.get_bootstrap_results(src_destination, bck_destination,
                                            gti_destination, filters, axis, dt,
                                            nsegm, segm_size, norm, pds_type,
                                            models, n_iter, mean, red_noise,
                                            seed)

    logging.debug("get_bootstrap_results: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #45
0
def get_plot_data(filename, target, filters, styles, axis):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    logging.debug("get_plot_data: %s" % filename)
    logging.debug("get_plot_data: filters %s" % filters)
    logging.debug("get_plot_data: styles %s" % styles)
    logging.debug("get_plot_data: axis %s" % axis)

    data = DaveEngine.get_plot_data(destination, filters, styles, axis)

    logging.debug("get_plot_data: json.dumps...")

    jsonData = json.dumps(data, cls=NPEncoder)

    logging.debug("get_plot_data: Finish!")

    return jsonData
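Every endpoint serializes its result with json.dumps(data, cls=NPEncoder), but NPEncoder itself is not shown in this listing. A typical numpy-aware encoder looks like the sketch below; DAVE's actual implementation may differ:

import json
import numpy as np

class NPEncoder(json.JSONEncoder):
    # Sketch of a numpy-aware JSON encoder; the real NPEncoder may differ.
    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return json.JSONEncoder.default(self, obj)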
def get_cross_spectrum(src_filename1, bck_filename1, gti_filename1, filters1, axis1, dt1,
                       src_filename2, bck_filename2, gti_filename2, filters2, axis2, dt2,
                       target, nsegm, segm_size, norm, xds_type):

    src_destination1 = get_destination(src_filename1, target)
    if not src_destination1:
        return common_error("Invalid file or cache key for source data 1")

    bck_destination1 = ""
    if bck_filename1:
        bck_destination1 = get_destination(bck_filename1, target)
        if not bck_destination1:
            return common_error("Invalid file or cache key for background data 1")

    gti_destination1 = ""
    if gti_filename1:
        gti_destination1 = get_destination(gti_filename1, target)
        if not gti_destination1:
            return common_error("Invalid file or cache key for gti data 1")

    src_destination2 = get_destination(src_filename2, target)
    if not src_destination2:
        return common_error("Invalid file or cache key for source data 2")

    bck_destination2 = ""
    if bck_filename2:
        bck_destination2 = get_destination(bck_filename2, target)
        if not bck_destination2:
            return common_error("Invalid file or cache key for background data 2")

    gti_destination2 = ""
    if gti_filename2:
        gti_destination2 = get_destination(gti_filename2, target)
        if not gti_destination2:
            return common_error("Invalid file or cache key for gti data 2")

    logging.debug("get_cross_spectrum src 1: %s" % src_filename1)
    logging.debug("get_cross_spectrum bck 1: %s" % bck_filename1)
    logging.debug("get_cross_spectrum gti 1: %s" % gti_filename1)
    logging.debug("get_cross_spectrum: filters 1 %s" % filters1)
    logging.debug("get_cross_spectrum: axis 1 %s" % axis1)
    logging.debug("get_cross_spectrum: dt 1 %f" % dt1)
    logging.debug("get_cross_spectrum src 2: %s" % src_filename2)
    logging.debug("get_cross_spectrum bck 2: %s" % bck_filename2)
    logging.debug("get_cross_spectrum gti 2: %s" % gti_filename2)
    logging.debug("get_cross_spectrum: filters 2 %s" % filters2)
    logging.debug("get_cross_spectrum: axis 2 %s" % axis2)
    logging.debug("get_cross_spectrum: dt 2 %f" % dt2)
    logging.debug("get_cross_spectrum: nsegm %f" % nsegm)
    logging.debug("get_cross_spectrum: segm_size %f" % segm_size)
    logging.debug("get_cross_spectrum: norm %s" % norm)
    logging.debug("get_cross_spectrum: type %s" % xds_type)

    data = DaveEngine.get_cross_spectrum(src_destination1, bck_destination1, gti_destination1, filters1, axis1, dt1,
                                         src_destination2, bck_destination2, gti_destination2, filters2, axis2, dt2,
                                         nsegm, segm_size, norm, xds_type)

    logging.debug("get_cross_spectrum: Finish!")

    return json.dumps(data, cls=NPEncoder)
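Note that dt1 and dt2 are accepted independently, although a cross spectrum generally requires both light curves binned to the same resolution, so a caller would normally pass equal values. An illustrative call follows; all argument values are placeholders, not DAVE's documented inputs:

# Placeholder values throughout; only the signature comes from the code above.
result_json = get_cross_spectrum(
    "src1.evt", "", "", [], [], 0.1,   # series 1: no bck/gti files, dt1 = 0.1 s
    "src2.evt", "", "", [], [], 0.1,   # series 2: same binning, dt2 = dt1
    "session_key",                     # target / cache namespace
    16, 512.0,                         # nsegm, segm_size
    "leahy",                           # norm (placeholder value)
    "Avg")                             # xds_type (placeholder value)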
def get_covariance_spectrum(src_filename, bck_filename, gti_filename, filters,
                            target, dt, ref_band_interest, energy_range,
                            n_bands, std):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_covariance_spectrum src: %s" % src_filename)
    logging.debug("get_covariance_spectrum bck: %s" % bck_filename)
    logging.debug("get_covariance_spectrum gti: %s" % gti_filename)
    logging.debug("get_covariance_spectrum: filters %s" % filters)
    logging.debug("get_covariance_spectrum dt: %s" % dt)
    logging.debug("get_covariance_spectrum ref_band_interest: %s" % ref_band_interest)
    logging.debug("get_covariance_spectrum: energy_range %s" % energy_range)
    logging.debug("get_covariance_spectrum n_bands: %s" % n_bands)
    logging.debug("get_covariance_spectrum std: %s" % std)

    data = DaveEngine.get_covariance_spectrum(src_destination, bck_destination, gti_destination,
                                            filters, dt, ref_band_interest, energy_range, n_bands, std)

    logging.debug("get_covariance_spectrum: Finish!")

    return json.dumps(data, cls=NPEncoder)
def get_rms_spectrum(src_filename, bck_filename, gti_filename, target,
                    filters, axis, dt, nsegm, segm_size, norm, pds_type, df,
                    freq_range, energy_range, n_bands, white_noise_offset):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_rms_spectrum src: %s" % src_filename)
    logging.debug("get_rms_spectrum bck: %s" % bck_filename)
    logging.debug("get_rms_spectrum gti: %s" % gti_filename)
    logging.debug("get_rms_spectrum: filters %s" % filters)
    logging.debug("get_rms_spectrum: axis %s" % axis)
    logging.debug("get_rms_spectrum: dt %s" % dt)
    logging.debug("get_rms_spectrum: nsegm %f" % nsegm)
    logging.debug("get_rms_spectrum: segm_size %f" % segm_size)
    logging.debug("get_rms_spectrum: norm %s" % norm)
    logging.debug("get_rms_spectrum: type %s" % pds_type)
    logging.debug("get_rms_spectrum: df %s" % df)
    logging.debug("get_rms_spectrum: freq_range %s" % freq_range)
    logging.debug("get_rms_spectrum: energy_range %s" % energy_range)
    logging.debug("get_rms_spectrum: n_bands %s" % n_bands)
    logging.debug("get_rms_spectrum: white_noise_offset %s" % white_noise_offset)

    data = DaveEngine.get_rms_spectrum(src_destination, bck_destination, gti_destination,
                                        filters, axis, dt, nsegm, segm_size, norm, pds_type, df,
                                        freq_range, energy_range, n_bands, white_noise_offset)

    logging.debug("get_rms_spectrum: Finish!")

    return json.dumps(data, cls=NPEncoder)
def get_phaseogram(src_filename, bck_filename, gti_filename, target,
                    filters, axis, dt, f, nph, nt, fdot=0, fddot=0,
                    binary_parameters=None):

    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_phaseogram src: %s" % src_filename)
    logging.debug("get_phaseogram bck: %s" % bck_filename)
    logging.debug("get_phaseogram gti: %s" % gti_filename)
    logging.debug("get_phaseogram: filters %s" % filters)
    logging.debug("get_phaseogram: axis %s" % axis)
    logging.debug("get_phaseogram: dt %s" % dt)
    logging.debug("get_phaseogram: f %s" % f)
    logging.debug("get_phaseogram: nph %s" % nph)
    logging.debug("get_phaseogram: nt %s" % nt)
    logging.debug("get_phaseogram: fdot %s" % fdot)
    logging.debug("get_phaseogram: fddot %s" % fddot)
    logging.debug("get_phaseogram: binary_parameters %s" % binary_parameters)

    data = DaveEngine.get_phaseogram(src_destination, bck_destination, gti_destination,
                                    filters, axis, dt, f, nph, nt, fdot, fddot, binary_parameters)

    logging.debug("get_phaseogram: Finish!")

    return json.dumps(data, cls=NPEncoder)
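An illustrative call for the function above, relying on the fdot/fddot defaults; filenames, filter/axis formats, and timing values are placeholders:

# Placeholder values throughout; only the signature comes from the code above.
phaseogram_json = get_phaseogram(
    "pulsar.evt", "", "",   # source file, no background, no external GTI
    "session_key",          # target / cache namespace
    [], [],                 # filters, axis (placeholders)
    0.01,                   # dt: 10 ms bins
    29.6,                   # f: trial spin frequency in Hz (Crab-like)
    32, 64)                 # nph phase bins, nt time bins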
def get_rms_vs_countrate(src_filename, bck_filename, gti_filename, target,
                    filters, axis, dt, nsegm, df, freq_range, energy_range,
                    white_noise_offset):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_rms_vs_countrate src: %s" % src_filename)
    logging.debug("get_rms_vs_countrate bck: %s" % bck_filename)
    logging.debug("get_rms_vs_countrate gti: %s" % gti_filename)
    logging.debug("get_rms_vs_countrate: filters %s" % filters)
    logging.debug("get_rms_vs_countrate: axis %s" % axis)
    logging.debug("get_rms_vs_countrate: dt %s" % dt)
    logging.debug("get_rms_vs_countrate: nsegm %f" % nsegm)
    logging.debug("get_rms_vs_countrate: df %s" % df)
    logging.debug("get_rms_vs_countrate: freq_range %s" % freq_range)
    logging.debug("get_rms_vs_countrate: energy_range %s" % energy_range)
    logging.debug("get_rms_vs_countrate: white_noise_offset %s" % white_noise_offset)

    data = DaveEngine.get_rms_vs_countrate(src_destination, bck_destination, gti_destination,
                                        filters, axis, dt, nsegm, df, freq_range, energy_range,
                                        white_noise_offset)

    logging.debug("get_rms_vs_countrate: Finish!")

    return json.dumps(data, cls=NPEncoder)
def get_divided_lightcurves_from_colors(src_filename, bck_filename, gti_filename, target, filters, axis, dt):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_divided_lightcurves_from_colors src: %s" % src_filename)
    logging.debug("get_divided_lightcurves_from_colors bck: %s" % bck_filename)
    logging.debug("get_divided_lightcurves_from_colors gti: %s" % gti_filename)
    logging.debug("get_divided_lightcurves_from_colors: filters %s" % filters)
    logging.debug("get_divided_lightcurves_from_colors: axis %s" % axis)
    logging.debug("get_divided_lightcurves_from_colors: dt %s" % dt)

    data = DaveEngine.get_divided_lightcurves_from_colors(src_destination, bck_destination, gti_destination, filters, axis, dt)

    logging.debug("get_divided_lightcurves_from_colors: Finish!")

    return json.dumps(data, cls=NPEncoder)
def get_joined_lightcurves(lc0_filename, lc1_filename, lc0_bck_filename, lc1_bck_filename,
                            target, filters, axis, dt):
    lc0_destination = get_destination(lc0_filename, target)
    if not lc0_destination:
        return common_error("Invalid file or cache key for lc0 data")

    lc1_destination = get_destination(lc1_filename, target)
    if not lc1_destination:
        return common_error("Invalid file or cache key for lc1 data")

    lc0_bck_destination = ""
    if lc0_bck_filename:
        lc0_bck_destination = get_destination(lc0_bck_filename, target)
        if not lc0_bck_destination:
            return common_error("Invalid file or cache key for lc0_bck data")

    lc1_bck_destination = ""
    if lc1_bck_filename:
        lc1_bck_destination = get_destination(lc1_bck_filename, target)
        if not lc1_bck_destination:
            return common_error("Invalid file or cache key for lc1_bck data")

    logging.debug("get_joined_lightcurves lc0: %s" % lc0_filename)
    logging.debug("get_joined_lightcurves lc1: %s" % lc1_filename)
    logging.debug("get_joined_lightcurves lc0_bck: %s" % lc0_bck_filename)
    logging.debug("get_joined_lightcurves lc1_bck: %s" % lc1_bck_filename)
    logging.debug("get_joined_lightcurves: filters %s" % filters)
    logging.debug("get_joined_lightcurves: axis %s" % axis)
    logging.debug("get_joined_lightcurves: dt %s" % dt)

    data = DaveEngine.get_joined_lightcurves(lc0_destination, lc1_destination,
                                             lc0_bck_destination, lc1_bck_destination,
                                             filters, axis, dt)

    logging.debug("get_joined_lightcurves: Finish!")

    return json.dumps(data, cls=NPEncoder)
Example #53
0
def get_joined_lightcurves(lc0_filename, lc1_filename, target, filters, axis,
                           dt):
    lc0_destination = get_destination(lc0_filename, target)
    if not lc0_destination:
        return common_error("Invalid file or cache key for lc0 data")

    lc1_destination = get_destination(lc1_filename, target)
    if not lc1_destination:
        return common_error("Invalid file or cache key for lc1 data")

    logging.debug("get_joined_lightcurves lc0: %s" % lc0_filename)
    logging.debug("get_joined_lightcurves lc1: %s" % lc1_filename)
    logging.debug("get_joined_lightcurves: filters %s" % filters)
    logging.debug("get_joined_lightcurves: axis %s" % axis)
    logging.debug("get_joined_lightcurves: dt %f" % dt)

    data = DaveEngine.get_joined_lightcurves(lc0_destination, lc1_destination,
                                             filters, axis, dt)

    logging.debug("get_joined_lightcurves: Finish!")

    return json.dumps(data, cls=NPEncoder)
def get_lightcurve(src_filename, bck_filename, gti_filename, target, filters, axis, dt,
                    baseline_opts, meanflux_opts, variance_opts):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_lightcurve src: %s" % src_filename)
    logging.debug("get_lightcurve bck: %s" % bck_filename)
    logging.debug("get_lightcurve gti: %s" % gti_filename)
    logging.debug("get_lightcurve: filters %s" % filters)
    logging.debug("get_lightcurve: axis %s" % axis)
    logging.debug("get_lightcurve: dt %s" % dt)
    logging.debug("get_lightcurve: baseline_opts %s" % baseline_opts)
    logging.debug("get_lightcurve: meanflux_opts %s" % meanflux_opts)
    logging.debug("get_lightcurve: variance_opts %s" % variance_opts)

    data = DaveEngine.get_lightcurve(src_destination, bck_destination, gti_destination,
                                    filters, axis, dt, baseline_opts, meanflux_opts, variance_opts)

    logging.debug("get_lightcurve: Finish!")

    return json.dumps(data, cls=NPEncoder)
def get_fit_powerspectrum_result(src_filename, bck_filename, gti_filename, target,
                                filters, axis, dt, nsegm, segm_size, norm, pds_type, df,
                                models, priors=None, sampling_params=None):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_fit_powerspectrum_result src: %s" % src_filename)
    logging.debug("get_fit_powerspectrum_result bck: %s" % bck_filename)
    logging.debug("get_fit_powerspectrum_result gti: %s" % gti_filename)
    logging.debug("get_fit_powerspectrum_result: filters %s" % filters)
    logging.debug("get_fit_powerspectrum_result: axis %s" % axis)
    logging.debug("get_fit_powerspectrum_result: dt %s" % dt)
    logging.debug("get_fit_powerspectrum_result: nsegm %f" % nsegm)
    logging.debug("get_fit_powerspectrum_result: segm_size %f" % segm_size)
    logging.debug("get_fit_powerspectrum_result: norm %s" % norm)
    logging.debug("get_fit_powerspectrum_result: type %s" % pds_type)
    logging.debug("get_fit_powerspectrum_result: df %s" % df)
    logging.debug("get_fit_powerspectrum_result: models %s" % models)
    logging.debug("get_fit_powerspectrum_result: priors %s" % priors)
    logging.debug("get_fit_powerspectrum_result: sampling_params %s" % sampling_params)

    data = DaveEngine.get_fit_powerspectrum_result(src_destination, bck_destination, gti_destination,
                                                filters, axis, dt, nsegm, segm_size, norm, pds_type, df,
                                                models, priors, sampling_params)

    logging.debug("get_fit_powerspectrum_result: Finish!")

    return json.dumps(data, cls=NPEncoder)
def get_bootstrap_results(src_filename, bck_filename, gti_filename, target,
                            filters, axis, dt, nsegm, segm_size, norm, pds_type, df,
                            models, n_iter, mean, red_noise, seed):
    src_destination = get_destination(src_filename, target)
    if not src_destination:
        return common_error("Invalid file or cache key for source data")

    bck_destination = ""
    if bck_filename:
        bck_destination = get_destination(bck_filename, target)
        if not bck_destination:
            return common_error("Invalid file or cache key for background data")

    gti_destination = ""
    if gti_filename:
        gti_destination = get_destination(gti_filename, target)
        if not gti_destination:
            return common_error("Invalid file or cache key for gti data")

    logging.debug("get_bootstrap_results src: %s" % src_filename)
    logging.debug("get_bootstrap_results bck: %s" % bck_filename)
    logging.debug("get_bootstrap_results gti: %s" % gti_filename)
    logging.debug("get_bootstrap_results: filters %s" % filters)
    logging.debug("get_bootstrap_results: axis %s" % axis)
    logging.debug("get_bootstrap_results: dt %s" % dt)
    logging.debug("get_bootstrap_results: nsegm %f" % nsegm)
    logging.debug("get_bootstrap_results: segm_size %f" % segm_size)
    logging.debug("get_bootstrap_results: norm %s" % norm)
    logging.debug("get_bootstrap_results: type %s" % pds_type)
    logging.debug("get_bootstrap_results: df %s" % df)
    logging.debug("get_bootstrap_results: models %s" % models)
    logging.debug("get_bootstrap_results: n_iter %s" % n_iter)
    logging.debug("get_bootstrap_results: mean %s" % mean)
    logging.debug("get_bootstrap_results: red_noise %s" % red_noise)
    logging.debug("get_bootstrap_results: seed %s" % seed)

    data = DaveEngine.get_bootstrap_results(src_destination, bck_destination, gti_destination,
                                            filters, axis, dt, nsegm, segm_size, norm, pds_type, df,
                                            models, n_iter, mean, red_noise, seed)

    logging.debug("get_bootstrap_results: Finish!")

    return json.dumps(data, cls=NPEncoder)
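Finally, the try/except + ExHelper pattern shown in get_file_dataset at the top of this section could wrap any of these endpoints; safe_endpoint below is an illustrative decorator, not part of the module:

def safe_endpoint(fn):
    # Hypothetical decorator generalizing the error handling of get_file_dataset.
    def wrapper(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except Exception:
            logging.error(ExHelper.getException(fn.__name__))
            return common_error("Internal error in " + fn.__name__)
    return wrapper

# Usage sketch:
# @safe_endpoint
# def get_lightcurve(...): ...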