Beispiel #1
0
def append_file_to_dataset(destination, next_destination):
    """Join two events datasets (EVENTS and GTI tables) and cache the result.

    The dataset that starts earliest becomes the base of the join. On
    success the old cache entry for `destination` is dropped and the joined
    dataset is cached under a combined key, which is returned. Returns ""
    when either file is not an events dataset.
    """
    dataset = DaveReader.get_file_dataset(destination)
    if not DsHelper.is_events_dataset(dataset):
        return ""

    next_dataset = DaveReader.get_file_dataset(next_destination)
    if not DsHelper.is_events_dataset(next_dataset):
        return ""

    # Order the pair so `dataset` is the one that starts earliest.
    ds_start_time = DsHelper.get_events_dataset_start(dataset)
    next_ds_start_time = DsHelper.get_events_dataset_start(next_dataset)
    if next_ds_start_time < ds_start_time:
        dataset, next_dataset = next_dataset, dataset

    # Join the tables and cache the joined dataset.
    dataset.tables["EVENTS"] = dataset.tables["EVENTS"].join(next_dataset.tables["EVENTS"])
    dataset.tables["GTI"] = DsHelper.join_gti_tables(dataset.tables["GTI"], next_dataset.tables["GTI"])

    DsCache.remove(destination)  # Removes previous cached dataset for prev key
    new_cache_key = DsCache.get_key(destination + "|" + next_destination)
    DsCache.add(new_cache_key, dataset)  # Adds new cached dataset for new key
    return new_cache_key
Beispiel #2
0
def get_lightcurve_ds_from_events_ds(destination, axis, dt):
    """Build a lightcurve dataset from an events dataset and cache it.

    destination -- key/path of the source events dataset.
    axis -- list of exactly two axis descriptors ({"table", "column"} dicts).
    dt -- lightcurve time-bin size.

    Returns the cache key of the new lightcurve dataset, or "" on failure.
    """
    try:

        if len(axis) != 2:
            logging.warn("Wrong number of axis")
            return ""

        dataset = DaveReader.get_file_dataset(destination)
        lc = get_lightcurve_from_dataset(dataset, axis, "", [], "", dt)

        if lc:
            # Changes lc format to stingray_addons format
            tmp_lc = {}
            tmp_lc['lc'] = lc.countrate
            tmp_lc['elc'] = []  # TODO: Get error from lightcurve
            tmp_lc['time'] = lc.time
            tmp_lc['GTI'] = lc.gti

            lc_dataset = DataSet.get_lightcurve_dataset_from_stingray_lcurve(tmp_lc, dataset.tables["EVENTS"].header, dataset.tables["EVENTS"].header_comments,
                                                                            "RATE", "TIME")
            dataset = None  # Dispose memory
            lc = None  # Dispose memory

            new_cache_key = DsCache.get_key(destination + "|ligthcurve")
            # BUGFIX: cache the newly built lightcurve dataset; the previous
            # code cached `dataset`, which had just been set to None above.
            DsCache.add(new_cache_key, lc_dataset)
            return new_cache_key

    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate.
    except Exception:
        logging.error(str(sys.exc_info()))

    return ""
Beispiel #3
0
def get_plot_html(destination, filters, styles, axis):
    """Return an HTML plot div for the filtered dataset, or an error string.

    destination -- key/path of the source dataset.
    filters -- filters applied to the dataset before plotting.
    styles -- dict with "type" ("2d", "3d" or "scatter") and "labels".
    axis -- list of {"table", "column"} descriptors: x, y, x_error, y_error,
            plus z for "3d" plots.
    """
    dataset = DaveReader.get_file_dataset(destination)

    logging.debug("get_plot_html apply_filters: %d" % len(dataset.tables[axis[0]["table"]].columns[axis[0]["column"]].values))

    filtered_dataset = dataset.apply_filters(filters)

    logging.debug("get_plot_html res apply_filters: %d" % len(filtered_dataset.tables[axis[0]["table"]].columns[axis[0]["column"]].values))

    if "type" not in styles:
        return "No plot type specified on styles"

    if "labels" not in styles:
        return "No plot labels specified on styles"

    if len(styles["labels"]) < 2:
        return "Wrong number of labels specified on styles"

    # BUGFIX: x/y error values are read from axis[2] and axis[3] below, so
    # four entries are required; the previous check (< 2) allowed IndexError.
    if len(axis) < 4:
        return "Wrong number of axis"

    x_values = filtered_dataset.tables[axis[0]["table"]].columns[axis[0]["column"]].values
    y_values = filtered_dataset.tables[axis[1]["table"]].columns[axis[1]["column"]].values
    x_error_values = filtered_dataset.tables[axis[2]["table"]].columns[axis[2]["column"]].values
    y_error_values = filtered_dataset.tables[axis[3]["table"]].columns[axis[3]["column"]].values

    if styles["type"] == "2d":
        return Plotter.get_plotdiv_xy(x_values, y_values,
                                      x_error_values, y_error_values,
                                      styles["labels"][0], styles["labels"][1])

    elif styles["type"] == "3d":

        if len(styles["labels"]) < 3:
            return "Wrong number of labels specified on styles"

        # BUGFIX: the z column is axis[4], so five entries are required;
        # the previous check (< 3) allowed IndexError.
        if len(axis) < 5:
            return "Wrong number of axis"

        z_values = filtered_dataset.tables[axis[4]["table"]].columns[axis[4]["column"]].values
        #z_error_values = filtered_dataset.tables[axis[5]["table"]].columns[axis[5]["column"]]
        # Placeholder color/error data until real z errors are available.
        colorArray = np.random.uniform(-5, 5, size=len(x_values))
        error = np.random.uniform(-8, 8, size=len(x_values))

        return Plotter.get_plotdiv_xyz(x_values, y_values, z_values,
                                        x_error_values, y_error_values, error, #z_error_values
                                        styles["labels"][0], styles["labels"][1],
                                        colorArray)

    elif styles["type"] == "scatter":

        # Placeholder amplitude data until a real third dimension is wired in.
        newAmplitude = np.random.uniform(-5, 5, size=len(x_values))

        return Plotter.get_plotdiv_scatter( x_values, y_values, newAmplitude,
                                            styles["labels"][0], styles["labels"][1])

    else:
        return "Wrong plot type specified on styles"
Beispiel #4
0
def get_divided_lightcurve_ds(lc0_destination, lc1_destination):
    """Divide two lightcurve datasets element-wise (lc0 / lc1) and cache it.

    Both datasets must be lightcurves with RATE columns of the same shape.
    Divisions by zero are mapped to 0, as are quotients larger than
    BIG_NUMBER. Returns the cache key of the result, "" on error/bad input,
    or None when the lightcurves have different shapes.
    """
    try:

        lc0_ds = DaveReader.get_file_dataset(lc0_destination)
        if not DsHelper.is_lightcurve_dataset(lc0_ds):
            logging.warn("Wrong dataset type for lc0")
            return ""

        count_rate_0 = np.array(lc0_ds.tables["RATE"].columns["RATE"].values)

        lc1_ds = DaveReader.get_file_dataset(lc1_destination)
        if not DsHelper.is_lightcurve_dataset(lc1_ds):
            logging.warn("Wrong dataset type for lc1")
            return ""

        count_rate_1 = np.array(lc1_ds.tables["RATE"].columns["RATE"].values)

        if count_rate_0.shape == count_rate_1.shape:

            ret_lc_ds = lc0_ds.clone(True)

            with np.errstate(all='ignore'): # Ignore divisions by 0 and others
                count_rate = np.nan_to_num(count_rate_0 / count_rate_1)
            # Clamp pathological quotients (e.g. x/~0) to 0.
            count_rate[count_rate > BIG_NUMBER]=0

            ret_lc_ds.tables["RATE"].columns["RATE"].clear()
            ret_lc_ds.tables["RATE"].columns["RATE"].add_values(count_rate) # TODO: Set error from lightcurve

            lc0_ds = None  # Dispose memory
            lc1_ds = None  # Dispose memory
            count_rate_1 = None  # Dispose memory
            count_rate_0 = None  # Dispose memory
            count_rate = None  # Dispose memory

            new_cache_key = DsCache.get_key(lc0_destination + "|" + lc1_destination + "|ligthcurve")
            DsCache.add(new_cache_key, ret_lc_ds)  # Adds new cached dataset for new key
            return new_cache_key

        else:
            logging.warn("Lightcurves have different shapes.")
            return None

    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate.
    except Exception:
        logging.error(str(sys.exc_info()))

    return ""
Beispiel #5
0
def get_filtered_dataset(destination, filters, gti_destination=""):
    """Load a dataset, optionally restrict it with a GTI dataset, then filter.

    destination -- key/path of the source dataset.
    filters -- filters to apply after any GTI restriction.
    gti_destination -- optional key/path of a GTI dataset; a load failure is
        logged but non-fatal.

    Returns the filtered dataset, or None when loading/GTI application fails.
    """
    dataset = DaveReader.get_file_dataset(destination)
    if not dataset:
        logging.warn("get_filtered_dataset: destination specified but not loadable.")
        return None

    if gti_destination:
        gti_dataset = DaveReader.get_file_dataset(gti_destination)
        if not gti_dataset:
            logging.warn("get_filtered_dataset: Gti_destination specified but not loadable.")
        else:
            dataset = DsHelper.get_dataset_applying_gti_dataset(dataset, gti_dataset)
            if not dataset:
                logging.warn("get_filtered_dataset: dataset is none after applying gti_dataset.")
                return None

    return dataset.apply_filters(filters)
Beispiel #6
0
def get_filtered_dataset(destination, filters):
    """Load a dataset, apply its own GTIs as filters, then the given filters.

    Returns the filtered dataset, or None when the destination is not
    loadable.
    """
    dataset = DaveReader.get_file_dataset(destination)
    if not dataset:
        return None

    # Restrict to the dataset's good time intervals before user filters.
    gti_filters = DsHelper.get_dataset_gti_as_filters(dataset, filters)
    gti_limited_ds = DsHelper.apply_gti_filters_to_dataset(dataset, gti_filters)
    return gti_limited_ds.apply_filters(filters)
Beispiel #7
0
def test_get_eventlist_from_evt_dataset(s):
    """Check that an event list built from an events file is non-empty."""
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    if not FileUtils.is_valid_file(destination):
        return None

    dataset, cache_key = DaveReader.get_file_dataset(destination)
    if not dataset:
        return None

    event_list = DsHelper.get_eventlist_from_evt_dataset(dataset)

    # Vacuously true when the fixture file is absent.
    assert not os.path.isfile(destination) or len(event_list.time) > 0
Beispiel #8
0
def test_get_dataset_gti_as_filters(s):
    """Check that GTI-derived filters are produced for a time-filtered dataset."""
    destination = FileUtils.get_destination(TEST_RESOURCES, s)

    if not FileUtils.is_valid_file(destination):
        return None

    dataset = DaveReader.get_file_dataset(destination)

    if not dataset:
        return None

    # Renamed from `filter`, which shadowed the builtin of the same name.
    time_filter = FltHelper.createTimeFilter(80000325.0, 80000725.0)
    gti_filters = DsHelper.get_dataset_gti_as_filters(dataset, [time_filter])

    # Vacuously true when the fixture file is absent.
    assert not os.path.isfile(destination) or len(gti_filters) > 0
Beispiel #9
0
def test_get_eventlist_from_dataset(s):
    """Check that an event list built over TIME/PI axes is non-empty."""
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    if not FileUtils.is_valid_file(destination):
        return None

    dataset = DaveReader.get_file_dataset(destination)
    if not dataset:
        return None

    # Select the TIME and PI columns of the EVENTS table as axes.
    axis = [
        {"table": "EVENTS", "column": "TIME"},
        {"table": "EVENTS", "column": "PI"},
    ]

    event_list = DsHelper.get_eventlist_from_dataset(dataset, axis)

    # Vacuously true when the fixture file is absent.
    assert not os.path.isfile(destination) or len(event_list.time) > 0
Beispiel #10
0
def get_dataset_schema(destination):
    """Return the schema of the dataset at `destination`, or None if not loadable."""
    dataset = DaveReader.get_file_dataset(destination)
    return dataset.get_schema() if dataset else None
Beispiel #11
0
def get_plot_html(destination, filters, styles, axis):
    """Return an HTML plot div for the filtered dataset, or an error string.

    destination -- key/path of the source dataset.
    filters -- filters applied to the dataset before plotting.
    styles -- dict with "type" ("2d", "3d" or "scatter") and "labels".
    axis -- list of {"table", "column"} descriptors: x, y, x_error, y_error,
            plus z for "3d" plots.
    """
    dataset = DaveReader.get_file_dataset(destination)

    logging.debug("get_plot_html apply_filters: %d" % len(
        dataset.tables[axis[0]["table"]].columns[axis[0]["column"]].values))

    filtered_dataset = dataset.apply_filters(filters)

    logging.debug("get_plot_html res apply_filters: %d" %
                  len(filtered_dataset.tables[axis[0]["table"]].columns[
                      axis[0]["column"]].values))

    if "type" not in styles:
        return "No plot type specified on styles"

    if "labels" not in styles:
        return "No plot labels specified on styles"

    if len(styles["labels"]) < 2:
        return "Wrong number of labels specified on styles"

    # BUGFIX: x/y error values are read from axis[2] and axis[3] below, so
    # four entries are required; the previous check (< 2) allowed IndexError.
    if len(axis) < 4:
        return "Wrong number of axis"

    x_values = filtered_dataset.tables[axis[0]["table"]].columns[
        axis[0]["column"]].values
    y_values = filtered_dataset.tables[axis[1]["table"]].columns[
        axis[1]["column"]].values
    x_error_values = filtered_dataset.tables[axis[2]["table"]].columns[
        axis[2]["column"]].values
    y_error_values = filtered_dataset.tables[axis[3]["table"]].columns[
        axis[3]["column"]].values

    if styles["type"] == "2d":
        return Plotter.get_plotdiv_xy(x_values, y_values, x_error_values,
                                      y_error_values, styles["labels"][0],
                                      styles["labels"][1])

    elif styles["type"] == "3d":

        if len(styles["labels"]) < 3:
            return "Wrong number of labels specified on styles"

        # BUGFIX: the z column is axis[4], so five entries are required;
        # the previous check (< 3) allowed IndexError.
        if len(axis) < 5:
            return "Wrong number of axis"

        z_values = filtered_dataset.tables[axis[4]["table"]].columns[
            axis[4]["column"]].values
        #z_error_values = filtered_dataset.tables[axis[5]["table"]].columns[axis[5]["column"]]
        # Placeholder color/error data until real z errors are available.
        colorArray = np.random.uniform(-5, 5, size=len(x_values))
        error = np.random.uniform(-8, 8, size=len(x_values))

        return Plotter.get_plotdiv_xyz(
            x_values,
            y_values,
            z_values,
            x_error_values,
            y_error_values,
            error,  #z_error_values
            styles["labels"][0],
            styles["labels"][1],
            colorArray)

    elif styles["type"] == "scatter":

        # Placeholder amplitude data until a real third dimension is wired in.
        newAmplitude = np.random.uniform(-5, 5, size=len(x_values))

        return Plotter.get_plotdiv_scatter(x_values, y_values, newAmplitude,
                                           styles["labels"][0],
                                           styles["labels"][1])

    else:
        return "Wrong plot type specified on styles"
Beispiel #12
0
def test_get_file_dataset(s):
    """Smoke-test reading a known lightcurve file into a one-table dataset."""
    # NOTE(review): `s` is unused; the fixture name is hard-coded — confirm intended.
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
    table_id = "lc_table"
    dataset = DaveReader.get_file_dataset(destination)
    assert dataset and len(dataset.tables) == 1 and table_id in dataset.tables
Beispiel #13
0
def get_dataset_schema(destination):
    """Return the schema of the dataset at `destination`.

    Returns None when the dataset cannot be loaded, matching the guarded
    get_dataset_schema variant elsewhere in this file; previously this
    raised AttributeError on a falsy dataset.
    """
    dataset = DaveReader.get_file_dataset(destination)
    if not dataset:
        return None
    return dataset.get_schema()