Example #1
0
def test_is_valid_file(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    try:
        assert FileUtils.is_valid_file(destination) == os.path.isfile(
            destination)
    except Exception:
        assert not FileUtils.is_valid_file(destination)
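
Note: the tests in this listing receive the filename as a parameter s. A minimal driver for supplying values is sketched below, assuming a pytest-based suite (the driver is not part of the source); "Test_Input_2.lc" is borrowed from Example #6 and "no_such_file.txt" is a made-up name for the invalid case.

import os
import pytest

# Hypothetical parametrization; FileUtils and TEST_RESOURCES are assumed to come
# from the module under test, as in the examples above.
@pytest.mark.parametrize("s", ["Test_Input_2.lc", "no_such_file.txt"])
def test_is_valid_file_parametrized(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    try:
        assert FileUtils.is_valid_file(destination) == os.path.isfile(destination)
    except Exception:
        assert not FileUtils.is_valid_file(destination)
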
Example #2
0
def append_file_to_dataset(filename, nextfile, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not nextfile:
        return common_error("No nextfile setted")

    if not SessionHelper.is_file_uploaded(nextfile):
        if not FileUtils.file_exist(target, nextfile):
            logging.error("Filename not uploaded for nextfile %s" % nextfile)
            return common_error("Nextfile not uploaded")

    next_destination = FileUtils.get_destination(target, nextfile)
    if not FileUtils.is_valid_file(next_destination):
        return common_error("Invalid next file")

    logging.debug("append_file_to_dataset, destination: %s" % destination)
    logging.debug("append_file_to_dataset, next_destination: %s" % next_destination)

    new_filename = DaveEngine.append_file_to_dataset(destination, next_destination)

    logging.debug("append_file_to_dataset, cache_key: %s" % new_filename)

    return json.dumps(new_filename)
Example #3
0
def upload(files, target):

    if len(files) == 0:
        return common_error("No sent files")

    filenames = []

    for file in files:

        # Check whether the same filename was previously uploaded
        if not FileUtils.file_exist(target, file.filename):
            destination = FileUtils.save_file(target, file)

            if not destination:
                return common_error("Error uploading file...")

            if not FileUtils.is_valid_file(destination):
                return common_error("File format is not supported...")

            logging.info("Uploaded filename: %s" % destination)
        else:
            destination = FileUtils.get_destination(target, file.filename)
            logging.info("Previously uploaded filename: %s" % destination)

        SessionHelper.add_uploaded_file_to_session(file.filename)
        filenames.append(file.filename)

    return json.dumps(filenames)
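
Note: upload() expects an iterable of file objects exposing a filename attribute plus a target directory. A hypothetical Flask wiring (an assumption for illustration; the surrounding app code is not part of the source) could look like this:

from flask import Flask, request

app = Flask(__name__)
UPLOADS_TARGET = "/tmp/dave_uploads"  # assumed upload directory

@app.route("/upload", methods=["POST"])
def upload_route():
    # request.files.getlist returns werkzeug FileStorage objects, which expose
    # the .filename attribute used by upload() above.
    return upload(request.files.getlist("file"), UPLOADS_TARGET)
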
Example #4
0
def upload(files, target):

    if len(files) == 0:
        return common_error("No sent files")

    filenames = []

    for file in files:

        # Check whether the same filename was previously uploaded
        if not FileUtils.file_exist(target, file.filename):
            destination = FileUtils.save_file(target, file)

            if not destination:
                return common_error("Error uploading file...")

            if not FileUtils.is_valid_file(destination):
                return common_error("File format is not supported...")

            logging.info("Uploaded filename: %s" % destination)
        else:
            destination = FileUtils.get_destination(target, file.filename)
            logging.info("Previously uploaded filename: %s" % destination)

        SessionHelper.add_uploaded_file_to_session(file.filename)
        filenames.append(file.filename)

    return json.dumps(filenames)
Example #5
0
def test_get_dataset_schema(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    schema = None

    if FileUtils.is_valid_file(destination):
        schema = DaveEngine.get_dataset_schema(destination)
        assert schema is not None
Example #6
0
def test_get_divided_lightcurve_ds(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
    result = ""

    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_divided_lightcurve_ds(destination, destination, "", "")
        assert len(result) > 0
Example #7
0
def append_file_to_dataset(filename, nextfile, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not nextfile:
        return common_error("No nextfile setted")

    if not SessionHelper.is_file_uploaded(nextfile):
        if not FileUtils.file_exist(target, nextfile):
            logging.error("Filename not uploaded for nextfile %s" % nextfile)
            return common_error("Nextfile not uploaded")

    next_destination = FileUtils.get_destination(target, nextfile)
    if not FileUtils.is_valid_file(next_destination):
        return common_error("Invalid next file")

    logging.debug("append_file_to_dataset, destination: %s" % destination)
    logging.debug("append_file_to_dataset, next_destination: %s" %
                  next_destination)

    new_filename = DaveEngine.append_file_to_dataset(destination,
                                                     next_destination)

    logging.debug("append_file_to_dataset, cache_key: %s" % new_filename)

    return json.dumps(new_filename)
Example #8
0
def get_dataset_schema(filename, target):
    if not filename:
        return common_error(error="No filename setted")

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return common_error("Filename not uploaded")

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        return common_error("Invalid file")

    schema = DaveEngine.get_dataset_schema(destination)
    return json.dumps(schema, cls=NPEncoder)
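
Note: several examples serialize results with json.dumps(..., cls=NPEncoder), but the encoder itself is never shown. A minimal numpy-aware sketch along those lines (an assumption, not necessarily the project's actual class) is:

import json
import numpy as np

class NPEncoder(json.JSONEncoder):
    # Convert numpy scalars and arrays into plain Python types so json.dumps
    # can serialize results that contain numpy data.
    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)
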
Example #9
0
def get_intermediate_files(filepaths, target):
    filenames = []

    for filepath in filepaths:
        if not FileUtils.is_valid_file(filepath):
            logging.error("Filepath not found or invalid: %s" % filepath)
        else:
            filename = DaveBulk.get_intermediate_file(filepath, target)
            logging.debug("get_intermediate_files filename: %s" % filename)
            if filename:
                filenames.append(filename)

    return json.dumps(filenames, cls=NPEncoder)
Example #10
0
def get_intermediate_files(filepaths, target):
    filenames = []

    for filepath in filepaths:
        if not FileUtils.is_valid_file(filepath):
            logging.error("Filepath not found or invalid: %s" % filepath)
        else:
            filename = DaveBulk.get_intermediate_file(filepath, target)
            logging.debug("get_intermediate_files filename: %s" % filename)
            if filename:
                filenames.append(filename)

    return json.dumps(filenames, cls=NPEncoder)
Example #11
0
def test_get_power_density_spectrum(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = None

    axis = [dict() for i in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PHA"

    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_power_density_spectrum(destination, "", "", [], axis, 16., 1, 0, 'leahy', 'Sng')
        assert result is not None
Example #12
0
def apply_rmf_file_to_dataset(filename, rmf_filename, column, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not rmf_filename:
        return common_error("No rmf_filename setted")

    rmf_destination = FileUtils.get_destination(target, rmf_filename)
    if not FileUtils.is_valid_file(rmf_destination):
        return common_error("Invalid RMF file")

    result = DaveEngine.apply_rmf_file_to_dataset(destination, rmf_destination, column)
    return json.dumps(result)
Example #13
0
def test_get_lightcurve(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = None

    axis = [dict() for i in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PI"

    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_lightcurve(destination, "", "", [], axis, 16.)

    assert not os.path.isfile(destination) or result is not None
Example #14
0
def test_get_eventlist_from_evt_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)

    if not FileUtils.is_valid_file(destination):
        return None

    dataset, cache_key = DaveReader.get_file_dataset(destination)

    if not dataset:
        return None

    eventList = DsHelper.get_eventlist_from_evt_dataset(dataset)

    assert not os.path.isfile(destination) or len(eventList.time) > 0
Example #15
0
def test_get_dataset_gti_as_filters(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)

    if not FileUtils.is_valid_file(destination):
        return None

    dataset = DaveReader.get_file_dataset(destination)

    if not dataset:
        return None

    time_filter = FltHelper.createTimeFilter(80000325.0, 80000725.0)
    gti_filters = DsHelper.get_dataset_gti_as_filters(dataset, [time_filter])

    assert not os.path.isfile(destination) or len(gti_filters) > 0
Example #16
0
def apply_rmf_file_to_dataset(filename, rmf_filename, column, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not rmf_filename:
        return common_error("No rmf_filename setted")

    rmf_destination = FileUtils.get_destination(target, rmf_filename)
    if not FileUtils.is_valid_file(rmf_destination):
        return common_error("Invalid RMF file")

    result = DaveEngine.apply_rmf_file_to_dataset(destination, rmf_destination,
                                                  column)
    return json.dumps(result)
Example #17
0
def test_get_lightcurve_ds_from_events_ds(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = ""

    axis = [dict() for i in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PI"

    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_lightcurve_ds_from_events_ds(
            destination, axis, 16.)

    assert not os.path.isfile(destination) or len(result) > 0
Example #18
0
def upload(file, target):
    if not file.filename:
        return common_error("No sent file")

    if not FileUtils.is_valid_file(file.filename):
        return common_error("File extension is not supported...")

    destination = FileUtils.save_file(file, target)

    if not destination:
        return common_error("Error uploading file...")

    logging.debug("Uploaded filename: %s" % destination)
    session['uploaded_filename'] = file.filename

    return json.dumps(dict(filename=file.filename))
Example #19
0
def upload(file, target):
    if not file.filename:
        return common_error("No sent file")

    destination = FileUtils.save_file(file, target)

    if not destination:
        return common_error("Error uploading file...")

    if not FileUtils.is_valid_file(destination):
        return common_error("File extension is not supported...")

    logging.debug("Uploaded filename: %s" % destination)
    session['uploaded_filename'] = file.filename

    return json.dumps(dict(filename=file.filename))
Example #20
0
def get_plot_data(filename, target, filters, styles, axis):
    if not filename:
        return "No filename setted"

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return "Filename not uploaded"

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        return "Invalid file"

    logging.debug("get_plot_data: %s" % filename)
    logging.debug("get_plot_data: filters %s" % filters)
    logging.debug("get_plot_data: styles %s" % styles)
    logging.debug("get_plot_data: axis %s" % axis)

    data = DaveEngine.get_plot_data(destination, filters, styles, axis)
    return json.dumps(data, cls=NPEncoder)
Example #21
0
def get_ligthcurve(filename, target, filters, axis, dt):
    if not filename:
        return "No filename setted"

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return "Filename not uploaded"

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        return "Invalid file"

    logging.debug("get_ligthcurve: %s" % filename)
    logging.debug("get_ligthcurve: filters %s" % filters)
    logging.debug("get_ligthcurve: axis %s" % axis)
    logging.debug("get_ligthcurve: dt %f" % dt)

    data = DaveEngine.get_ligthcurve(destination, filters, axis, dt)
    return json.dumps(data, cls=NPEncoder)
Example #22
0
def test_get_lightcurve(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = None

    axis = [dict() for i in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PHA"

    baseline_opts = dict()
    baseline_opts["niter"] = 10
    baseline_opts["lam"] = 1000
    baseline_opts["p"] = 0.01

    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_lightcurve(destination, "", "", [], axis, 16., baseline_opts)

    assert not os.path.isfile(destination) or result is not None
Example #23
0
def append_file_to_dataset(filename, nextfile, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not nextfile:
        return common_error(error="No nextfile setted")

    if not SessionHelper.is_file_uploaded(nextfile):
        return common_error("Nextfile not uploaded")

    next_destination = FileUtils.get_destination(target, nextfile)
    if not FileUtils.is_valid_file(next_destination):
        return common_error("Invalid next file")

    new_filename = DaveEngine.append_file_to_dataset(destination,
                                                     next_destination)

    return json.dumps(new_filename)
Example #24
0
def test_get_eventlist_from_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)

    if not FileUtils.is_valid_file(destination):
        return None

    dataset = DaveReader.get_file_dataset(destination)

    if not dataset:
        return None

    axis = [dict() for i in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PI"

    eventList = DsHelper.get_eventlist_from_dataset(dataset, axis)

    assert not os.path.isfile(destination) or len(eventList.time) > 0
Example #25
0
def get_destination(filename, target):
    if not filename:
        logging.error("No filename or cache key setted for filename %s" % filename)
        return None

    if not SessionHelper.is_file_uploaded(filename):
        if not DsCache.contains(filename):
            if not FileUtils.file_exist(target, filename):
                logging.error("Filename not uploaded or not found in cache for filename %s" % filename)
                return None

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        if not DsCache.contains(filename):
            logging.error("Invalid file or not found in cache filename %s" % filename)
            return None
        else:
            destination = filename # Filename represents only a joined dataset key, not a real file

    return destination
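
Note: the last branch of get_destination returns the filename itself when it is only a joined-dataset cache key rather than a real file. A hypothetical unit test for that fallback, using pytest's monkeypatch to stub the collaborators (SessionHelper, DsCache and FileUtils are assumed to be importable in the test module), might look like:

def test_get_destination_returns_cache_key(monkeypatch):
    # The name was never uploaded, but it is present in the dataset cache...
    monkeypatch.setattr(SessionHelper, "is_file_uploaded", lambda name: False)
    monkeypatch.setattr(DsCache, "contains", lambda name: True)
    # ...and it does not resolve to a valid file on disk.
    monkeypatch.setattr(FileUtils, "get_destination", lambda target, name: "/nonexistent/" + name)
    monkeypatch.setattr(FileUtils, "is_valid_file", lambda path: False)

    # get_destination should fall back to returning the cache key unchanged.
    assert get_destination("joined_ds_key", "/tmp") == "joined_ds_key"
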
Example #26
0
def upload(files, target):

    if len(files) == 0:
        return common_error("No sent files")

    filenames = []

    for file in files:

        destination = FileUtils.save_file(file, target)

        if not destination:
            return common_error("Error uploading file...")

        if not FileUtils.is_valid_file(destination):
            return common_error("File extension is not supported...")

        logging.info("Uploaded filename: %s" % destination)
        SessionHelper.add_uploaded_file_to_session(file.filename)
        filenames.append(file.filename)

    return json.dumps(filenames)
Example #27
0
def get_destination(filename, target):
    if not filename:
        logging.error("No filename or cache key setted for filename %s" %
                      filename)
        return None

    if not SessionHelper.is_file_uploaded(filename):
        if not DsCache.contains(filename):
            logging.error(
                "Filename not uploaded or not found in cache for filename %s" %
                filename)
            return None

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        if not DsCache.contains(filename):
            logging.error("Invalid file or not found in cache filename %s" %
                          filename)
            return None
        else:
            destination = filename  # Filename represents only a joined dataset key, not a real file

    return destination
Example #28
0
def test_is_valid_file(s):
    assert FileUtils.is_valid_file(s) == (s.endswith(".txt")
                                          or s.endswith(".lc"))
Example #29
0
def test_is_valid_file(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    try:
        assert FileUtils.is_valid_file(destination) == os.path.isfile(destination)
    except Exception:
        assert not FileUtils.is_valid_file(destination)