def upload(files, target):
    if len(files) == 0:
        return common_error("No files sent")

    filenames = []
    for file in files:
        # Check whether the same filename was previously uploaded
        if not FileUtils.file_exist(target, file.filename):
            destination = FileUtils.save_file(target, file)
            if not destination:
                return common_error("Error uploading file...")
            if not FileUtils.is_valid_file(destination):
                return common_error("File format is not supported...")
            logging.info("Uploaded filename: %s", destination)
        else:
            destination = FileUtils.get_destination(target, file.filename)
            logging.info("Previously uploaded filename: %s", destination)

        SessionHelper.add_uploaded_file_to_session(file.filename)
        filenames.append(file.filename)

    return json.dumps(filenames)

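# A minimal sketch of how upload() might be wired into a Flask endpoint.
# The route path, the "file" form field name, and UPLOADS_TARGET are
# illustrative assumptions, not taken from this code.
from flask import Flask, request

app = Flask(__name__)
UPLOADS_TARGET = "/tmp/uploads"  # hypothetical upload directory

@app.route("/upload", methods=["POST"])
def upload_endpoint():
    # request.files.getlist returns the FileStorage objects posted
    # under the "file" field; upload() returns a JSON string of names.
    files = request.files.getlist("file")
    return upload(files, UPLOADS_TARGET)
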
def test_get_txt_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_1.txt")
    table_id = "txt_table"
    header_names = ["Time", "Error_time", "Rate", "Error_rate",
                    "color1", "Error_color1", "color2", "Error_color2"]
    dataset = DaveReader.get_txt_dataset(destination, table_id, header_names)
    num_rows = 10
    assert dataset
    assert len(dataset.tables) == 1
    assert table_id in dataset.tables
    assert len(dataset.tables[table_id].columns) == len(header_names)
    assert len(dataset.tables[table_id].columns[header_names[0]].values) == num_rows

def test_is_valid_file(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    try:
        assert FileUtils.is_valid_file(destination) == os.path.isfile(destination)
    except Exception:
        assert not FileUtils.is_valid_file(destination)

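# The test functions in this module take a single argument `s`, which reads
# like a resource filename injected by pytest parametrization. A minimal
# sketch of the assumed wiring (the decorator and the filename list are
# assumptions for illustration, not taken from this code):
import pytest

@pytest.mark.parametrize("s", ["test.evt", "Test_Input_2.lc"])
def test_is_valid_file_example(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    assert FileUtils.is_valid_file(destination) == os.path.isfile(destination)
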
def test_get_dataset_schema(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    schema = None
    if FileUtils.is_valid_file(destination):
        schema = DaveEngine.get_dataset_schema(destination)
    assert schema is not None

def append_file_to_dataset(filename, nextfile, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not nextfile:
        return common_error("No nextfile set")

    if not SessionHelper.is_file_uploaded(nextfile):
        if not FileUtils.file_exist(target, nextfile):
            logging.error("Filename not uploaded for nextfile %s", nextfile)
            return common_error("Nextfile not uploaded")

    next_destination = FileUtils.get_destination(target, nextfile)
    if not FileUtils.is_valid_file(next_destination):
        return common_error("Invalid next file")

    logging.debug("append_file_to_dataset, destination: %s", destination)
    logging.debug("append_file_to_dataset, next_destination: %s", next_destination)

    new_filename = DaveEngine.append_file_to_dataset(destination, next_destination)

    logging.debug("append_file_to_dataset, cache_key: %s", new_filename)

    return json.dumps(new_filename)

def test_get_divided_lightcurve_ds(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
    result = ""
    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_divided_lightcurve_ds(destination, destination, "", "")
    assert len(result) > 0

def test_get_fits_table_column_names(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
    # Open the FITS file
    hdulist = fits.open(destination)
    column_names = DaveReader.get_fits_table_column_names(hdulist, "EVENTS")
    assert len(column_names) == 2

def test_get_dataset_schema(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    schema = None
    if destination:
        schema = DaveEngine.get_dataset_schema(destination)
    assert not os.path.isfile(destination) or schema is not None

def test_get_file_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
    ds_id = "fits_table"
    table_ids = ["Primary", "RATE", "STDGTI"]
    dataset = DaveReader.get_fits_dataset(destination, ds_id, table_ids)
    assert dataset
    assert len(dataset.tables) == 2
    assert table_ids[1] in dataset.tables

def test_get_lightcurve_fits_dataset_with_stingray(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "PN_source_lightcurve_raw.lc")
    # Open the FITS file
    hdulist = fits.open(destination)
    dataset = DaveReader.get_lightcurve_fits_dataset_with_stingray(
        destination, hdulist, hduname='RATE', column='TIME', gtistring='GTI,STDGTI')
    assert dataset

def test_get_file_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
    ds_id = "fits_table"
    table_ids = ["Primary", "RATE", "STDGTI"]
    hdulist = fits.open(destination)
    dataset = DaveReader.get_fits_dataset(hdulist, ds_id, table_ids)
    assert dataset
    assert len(dataset.tables) == 2
    assert table_ids[1] in dataset.tables

def test_get_fits_dataset_with_stingray(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
    ds_id = "fits_table"
    table_ids = ["Primary", "EVENTS", "GTI"]
    dataset = DaveReader.get_fits_dataset_with_stingray(destination)
    assert dataset
    assert len(dataset.tables) == 2
    assert table_ids[1] in dataset.tables
    assert len(dataset.tables[table_ids[1]].columns) == 2

def test_get_fits_dataset_lc(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
    ds_id = "fits_table"
    table_ids = ["Primary", "RATE", "STDGTI"]
    hdulist = fits.open(destination)
    dataset = DaveReader.get_fits_dataset(hdulist, ds_id, table_ids)
    assert dataset
    assert len(dataset.tables) == 2
    assert table_ids[1] in dataset.tables
    assert len(dataset.tables[table_ids[1]].columns) == 4

def test_get_fits_dataset_evt(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
    ds_id = "fits_table"
    table_ids = ["Primary", "EVENTS", "GTI"]
    hdulist = fits.open(destination)
    dataset = DaveReader.get_fits_dataset(hdulist, ds_id, table_ids)
    assert dataset
    assert len(dataset.tables) == 2
    assert table_ids[1] in dataset.tables
    assert len(dataset.tables[table_ids[1]].columns) == 2

def test_get_events_fits_dataset_with_stingray(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
    ds_id = "fits_table"
    table_ids = ["Primary", "EVENTS", "GTI"]
    # Open the FITS file
    hdulist = fits.open(destination)
    dataset = DaveReader.get_events_fits_dataset_with_stingray(destination, hdulist)
    assert dataset
    assert len(dataset.tables) == 2
    assert table_ids[1] in dataset.tables
    assert len(dataset.tables[table_ids[1]].columns) == 2

def get_dataset_schema(filename, target):
    if not filename:
        return common_error(error="No filename set")

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return common_error("Filename not uploaded")

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        return common_error("Invalid file")

    schema = DaveEngine.get_dataset_schema(destination)
    return json.dumps(schema, cls=NPEncoder)

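# NPEncoder is used above but not defined in this excerpt. A minimal sketch
# of what such an encoder presumably does, assuming the usual pattern of
# making NumPy values JSON-safe (this is an assumption, not the project's
# actual implementation):
import json
import numpy as np

class NPEncoder(json.JSONEncoder):
    def default(self, obj):
        # Convert NumPy scalars and arrays to plain Python types
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return json.JSONEncoder.default(self, obj)
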
def get_dataset_schema(filename, target):
    if not filename:
        return common_error(error="No filename set")

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return common_error("Filename not uploaded")

    destination = FileUtils.get_destination(target, filename)
    if not destination:
        return common_error("Error opening file")

    schema = DaveEngine.get_dataset_schema(destination)
    return json.dumps(schema)

def test_get_power_density_spectrum(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = None
    axis = [dict() for _ in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PHA"
    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_power_density_spectrum(destination, "", "", [],
                                                       axis, 16., 1, 0, 'leahy', 'Sng')
    assert result is not None

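# A hedged reading of the undocumented positional arguments in the call
# above: after the destination, the two empty strings and the empty list
# presumably select a background file, a GTI file, and a filter list; the
# trailing values look like dt=16., a segment count of 1, a segment size of
# 0, the 'leahy' normalization (a standard power-spectrum normalization in
# Stingray), and a 'Sng' (single) spectrum type. All of this is inferred
# from the values, not stated in this excerpt.
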
def test_get_lightcurve(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = None
    axis = [dict() for _ in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PI"
    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_lightcurve(destination, "", "", [], axis, 16.)
    assert not os.path.isfile(destination) or result is not None

def apply_rmf_file_to_dataset(filename, rmf_filename, column, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not rmf_filename:
        return common_error("No rmf_filename set")

    rmf_destination = FileUtils.get_destination(target, rmf_filename)
    if not FileUtils.is_valid_file(rmf_destination):
        return common_error("Invalid RMF file")

    result = DaveEngine.apply_rmf_file_to_dataset(destination, rmf_destination, column)
    return json.dumps(result)

def test_get_txt_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_1.txt")
    table_id = "EVENTS"
    header_names = ["TIME", "PHA", "Color1", "Color2"]
    dataset = DaveReader.get_txt_dataset(destination, table_id, header_names)
    num_rows = 10
    assert dataset
    assert len(dataset.tables) == 2
    assert table_id in dataset.tables
    table = dataset.tables[table_id]
    assert len(table.columns) == len(header_names)
    assert len(table.columns[header_names[0]].values) == num_rows

def test_get_eventlist_from_evt_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    if not FileUtils.is_valid_file(destination):
        return None
    dataset, cache_key = DaveReader.get_file_dataset(destination)
    if not dataset:
        return None
    eventList = DsHelper.get_eventlist_from_evt_dataset(dataset)
    assert not os.path.isfile(destination) or len(eventList.time) > 0

def test_get_dataset_gti_as_filters(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    if not FileUtils.is_valid_file(destination):
        return None
    dataset = DaveReader.get_file_dataset(destination)
    if not dataset:
        return None
    # Renamed from `filter` to avoid shadowing the builtin
    time_filter = FltHelper.createTimeFilter(80000325.0, 80000725.0)
    gti_filters = DsHelper.get_dataset_gti_as_filters(dataset, [time_filter])
    assert not os.path.isfile(destination) or len(gti_filters) > 0

def test_get_lightcurve_ds_from_events_ds(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = ""
    axis = [dict() for _ in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PI"
    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_lightcurve_ds_from_events_ds(destination, axis, 16.)
    assert not os.path.isfile(destination) or len(result) > 0

def get_plot_data(filename, target, filters, styles, axis):
    if not filename:
        return "No filename set"

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return "Filename not uploaded"

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        return "Invalid file"

    logging.debug("get_plot_data: %s", filename)
    logging.debug("get_plot_data: filters %s", filters)
    logging.debug("get_plot_data: styles %s", styles)
    logging.debug("get_plot_data: axis %s", axis)

    data = DaveEngine.get_plot_data(destination, filters, styles, axis)
    return json.dumps(data, cls=NPEncoder)

def get_ligthcurve(filename, target, filters, axis, dt):
    if not filename:
        return "No filename set"

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return "Filename not uploaded"

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        return "Invalid file"

    logging.debug("get_ligthcurve: %s", filename)
    logging.debug("get_ligthcurve: filters %s", filters)
    logging.debug("get_ligthcurve: axis %s", axis)
    logging.debug("get_ligthcurve: dt %f", dt)

    data = DaveEngine.get_ligthcurve(destination, filters, axis, dt)
    return json.dumps(data, cls=NPEncoder)

def test_get_lightcurve(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    result = None
    axis = [dict() for _ in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PHA"
    baseline_opts = dict()
    baseline_opts["niter"] = 10
    baseline_opts["lam"] = 1000
    baseline_opts["p"] = 0.01
    if FileUtils.is_valid_file(destination):
        result = DaveEngine.get_lightcurve(destination, "", "", [], axis, 16., baseline_opts)
    assert not os.path.isfile(destination) or result is not None

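# The baseline_opts keys above match the parameters of an asymmetric least
# squares (ALS) baseline fit as implemented by Stingray's
# Lightcurve.baseline(): `lam` controls smoothness, `p` the asymmetry
# weight, and `niter` the number of reweighting iterations. Treat this
# mapping as an inference from the key names rather than documented fact.
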
def get_plot_html(filename, target, filters, styles, axis):
    if not filename:
        return "No filename set"

    if not session['uploaded_filename'] or session['uploaded_filename'] != filename:
        return "Filename not uploaded"

    destination = FileUtils.get_destination(target, filename)
    if not destination:
        return "Error opening file"

    logging.debug("get_plot_html: %s", filename)
    logging.debug("get_plot_html: filters %s", filters)
    logging.debug("get_plot_html: styles %s", styles)
    logging.debug("get_plot_html: axis %s", axis)

    plot_html = DaveEngine.get_plot_html(destination, filters, styles, axis)
    return Markup(plot_html)

def append_file_to_dataset(filename, nextfile, target):
    destination = get_destination(filename, target)
    if not destination:
        return common_error("Invalid file or cache key")

    if not nextfile:
        return common_error(error="No nextfile set")

    if not SessionHelper.is_file_uploaded(nextfile):
        return common_error("Nextfile not uploaded")

    next_destination = FileUtils.get_destination(target, nextfile)
    if not FileUtils.is_valid_file(next_destination):
        return common_error("Invalid next file")

    new_filename = DaveEngine.append_file_to_dataset(destination, next_destination)
    return json.dumps(new_filename)

def test_get_eventlist_from_dataset(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, s)
    if not FileUtils.is_valid_file(destination):
        return None
    dataset = DaveReader.get_file_dataset(destination)
    if not dataset:
        return None
    axis = [dict() for _ in range(2)]
    axis[0]["table"] = "EVENTS"
    axis[0]["column"] = "TIME"
    axis[1]["table"] = "EVENTS"
    axis[1]["column"] = "PI"
    eventList = DsHelper.get_eventlist_from_dataset(dataset, axis)
    assert not os.path.isfile(destination) or len(eventList.time) > 0

def get_destination(filename, target):
    if not filename:
        logging.error("No filename or cache key set for filename %s", filename)
        return None

    if not SessionHelper.is_file_uploaded(filename):
        if not DsCache.contains(filename):
            if not FileUtils.file_exist(target, filename):
                logging.error("Filename not uploaded or not found in cache for filename %s", filename)
                return None

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        if not DsCache.contains(filename):
            logging.error("Invalid file or not found in cache, filename %s", filename)
            return None
        else:
            # Filename represents only a joined dataset key, not a real file
            destination = filename

    return destination

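# Usage sketch for get_destination(): it accepts a client-supplied name if
# it was uploaded in this session, is present in DsCache, or exists on disk
# under `target`, and falls back to returning the cache key itself when the
# name refers to a joined dataset rather than a real file. The call below
# is purely illustrative (UPLOADS_TARGET is a hypothetical constant):
#
#     destination = get_destination("my_events.evt", UPLOADS_TARGET)
#     if destination:
#         schema = DaveEngine.get_dataset_schema(destination)
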
def get_destination(filename, target):
    if not filename:
        logging.error("No filename or cache key set for filename %s", filename)
        return None

    if not SessionHelper.is_file_uploaded(filename):
        if not DsCache.contains(filename):
            logging.error("Filename not uploaded or not found in cache for filename %s", filename)
            return None

    destination = FileUtils.get_destination(target, filename)
    if not FileUtils.is_valid_file(destination):
        if not DsCache.contains(filename):
            logging.error("Invalid file or not found in cache, filename %s", filename)
            return None
        else:
            # Filename represents only a joined dataset key, not a real file
            destination = filename

    return destination

def test_get_fits_table_column_names(s):
    destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
    column_names = DaveReader.get_fits_table_column_names(destination, "EVENTS")
    assert len(column_names) == 2