def drop_audit_trail():
    """Remove the pgMemento audit trail"""
    db = get_database()
    db.exec_sql(relative_path(__file__, "drop-audit.sql"))
    drop_audit_columns(db)
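# Every snippet in this section resolves files with `relative_path`. Its
# implementation is not shown here; a minimal sketch of the ASSUMED
# behavior (join path parts onto the directory containing the given
# module file) might look like this:
from os import path

def relative_path(base, *parts):
    # Resolve `parts` against the directory that contains `base`
    # (typically a module's __file__)
    return path.join(path.dirname(path.abspath(base)), *parts)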
def geojson_view(self):
    db = self.app.database
    p = Path(relative_path(__file__, "geojson.sql"))
    geojson = run_sql_query_file(db.session, p)
    # The query returns one GeoJSON object per row; each fetched row is a
    # one-tuple, so unpack it to return bare objects rather than keyed rows.
    a = [v for (v,) in geojson.fetchall()]
    return JSONResponse(a)  # a list of GeoJSON objects
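# The one-tuple unpacking above is easy to misread: the comprehension
# binds each row's single column directly to `v`. A standalone sketch of
# the same pattern, with made-up rows:
rows = [('{"type": "Feature"}',), ('{"type": "FeatureCollection"}',)]
values = [v for (v,) in rows]
assert values == ['{"type": "Feature"}', '{"type": "FeatureCollection"}']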
def metrics_view(self):
    db = self.app.database
    p = Path(relative_path(__file__, "metrics.sql"))
    # Use a context manager so the file handle is closed after reading
    with open(p, "r") as sqlfile:
        query = sqlfile.read()
    metrics = db.exec_query(query)
    # Round-trip the result through JSON to serialize it as a list of records
    res = metrics.to_json(orient="records")
    return JSONResponse(json.loads(res))
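# The to_json/json.loads round trip above converts a DataFrame-like
# result into a list of per-row dicts. A standalone illustration, with a
# hypothetical metrics table:
import json
import pandas as pd

df = pd.DataFrame({"metric": ["sessions", "samples"], "count": [10, 25]})
records = json.loads(df.to_json(orient="records"))
assert records == [
    {"metric": "sessions", "count": 10},
    {"metric": "samples", "count": 25},
]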
def __init__(self, db, data_dir, **kwargs):
    super().__init__(db)
    metadata_file = data_dir / "Data_Reduction_Sheet.xlsx"
    self.image_folder = data_dir / "Photographs and Measurement Data"
    self.verbose = kwargs.pop("verbose", False)
    spec = relative_path(__file__, "column-spec.yaml")
    with open(spec) as f:
        self.column_spec = load(f)
    self.iterfiles([metadata_file], **kwargs)
def import_agecalc_ml(test=True):
    """Import a MATLAB save file for E2 in bulk."""
    if not test:
        echo("Only test data is supported for now")
        return
    fn = relative_path(__file__, "../test-data/Test_E2_Export.mat")
    print(fn)
    # Load the file as a pre-7.3 MATLAB save file
    mat = loadmat(fn)
    # Drop into an interactive shell to inspect the loaded data
    embed()
def test_webscrape_app_sims(self, db, client, token):
    """Scrape a saved copy of an HTML publications page, originally from
    http://www.geology.wisc.edu/~wiscsims/publications.html.

    The same page could also be fetched over a network connection directly:

        from urllib.request import urlopen as uReq

        page = uReq(sims_pub_url)
        page_html = page.read()
        page.close()
    """
    route = "/api/v2/models/project"
    page = relative_path(__file__, "fixtures/wiscsims_publications.html")
    with open(page, "r") as f:
        page_html = f.read()
    page_soup = soup(page_html, "html.parser")
    content = page_soup.findAll("p", {"class": "item article"})

    title_list = []
    doi_list = []
    for pub in content:
        title = pub.findAll("span", {"class": "body"})[0].text
        title_list.append(title)
        doi = pub.findAll("span", {"class": "doi"})[0].text
        doi_list.append(doi)

    proj_titles = []
    for i, title in enumerate(title_list):
        proj_titles.append(
            {"name": title, "publications": [{"title": title, "doi": doi_list[i]}]}
        )

    res = client.post(route, headers={"Authorization": token}, json=proj_titles)
    up_json = res.json()
    assert len(up_json["data"]) > 0
def on_finalize_database_schema(self, db):
    procedures = []
    if not has_audit_schema(db):
        # Create the schema to hold audited tables.
        # NOTE: this drops all transaction history, so we don't run
        # it if pgMemento tables already exist.
        procedures.append("SCHEMA")
    # Basic setup procedures
    procedures += [
        "SETUP",
        "LOG_UTIL",
        "DDL_LOG",
        "RESTORE",
        "REVERT",
        "SCHEMA_MANAGEMENT",
    ]
    for id in procedures:
        fp = relative_path(__file__, "pg-memento", "src", id + ".sql")
        db.exec_sql(fp)
    db.exec_sql(relative_path(__file__, "start-logging.sql"))
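# `has_audit_schema` is not defined in this section; it is assumed to
# test for an existing pgMemento installation. A minimal sketch, assuming
# pgMemento's standard `pgmemento` schema name and a SQLAlchemy-style
# `db.session` (as used in geojson_view above):
from sqlalchemy import text

def has_audit_schema(db):
    # Look for pgMemento's schema in the database catalog
    res = db.session.execute(
        text(
            "SELECT 1 FROM information_schema.schemata "
            "WHERE schema_name = 'pgmemento'"
        )
    )
    return res.first() is not None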
def import_map(redo=False, stop_on_error=False, verbose=False, show_data=False):
    """Import WiscAr MAP spectrometer data (ArArCalc files) in bulk."""
    data_path = get_data_directory() / "MAP-Irradiations"
    app, db = construct_app(minimal=True)
    importer = MAPImporter(db, verbose=verbose, show_data=show_data)
    importer.iterfiles(data_path.glob("**/*.xls"), redo=redo)
    # Clean up data inconsistencies
    fp = relative_path(__file__, "sql", "clean-data.sql")
    db.exec_sql(fp)
def import_map(redo=False, stop_on_error=False, verbose=False, show_data=False):
    """Import WiscAr MAP spectrometer data (ArArCalc files) in bulk."""
    data_base = get_data_directory()
    data_path = data_base / "MAP-Irradiations"
    # Make sure we are working in the data directory
    # (for some reason this is important)
    # TODO: fix in sparrow
    chdir(str(data_base))
    app = get_sparrow_app()
    db = app.database
    importer = MAPImporter(db, verbose=verbose, show_data=show_data)
    importer.iterfiles(data_path.glob("**/*.xls"), redo=redo)
    # Clean up data inconsistencies
    fp = relative_path(__file__, "sql", "clean-data.sql")
    db.exec_sql(fp)
def on_database_ready(self, db):
    p = Path(relative_path(__file__, "favorite_rock.sql"))
    db.exec_sql(p)
def load_relative(*pth):
    # Resolve the given path parts relative to the caller's module and
    # parse the file with `load`
    fn = relative_path(*pth)
    with open(fn) as fp:
        return load(fp)
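# Example usage, mirroring the column-spec pattern above (assumes `load`
# is a YAML parser such as yaml.safe_load):
#
#     column_spec = load_relative(__file__, "column-spec.yaml")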
def test_large_sims_dataset(self, db):
    fn = relative_path(__file__, "fixtures", "2_20140602_d18O_KennyBefus.json.gz")
    with gzip.open(fn, "rb") as zipfile:
        data = json.loads(zipfile.read())
    db.load_data("session", data)
def import_dz_test_data():
    importer = DetritalZirconTableImporter()
    fn = relative_path(__file__, "fixtures", "detrital-zircon-F-90.csv")
    df = read_csv(fn)
    return importer(df)
def remove_analytical_data():
    """Remove all analytical data from the Sparrow database"""
    db = sparrow.get_database()
    qfile = relative_path(__file__, "remove-analytical-data.sql")
    db.exec_sql(qfile)