def df_to_hdf5(file_name, df):
    """Persist *df* as an HDF file in local storage and return a download link.

    Parameters
    ----------
    file_name : str
        Desired file name; a '.hdf' extension is appended when missing.
    df : pandas.DataFrame (presumably -- TODO confirm against HdfDataStore)
        The data to store.

    Returns
    -------
    FileLink
        An IPython link to the stored file, relative to the working dir.
    """
    if os.path.splitext(file_name)[1] != '.hdf':
        # BUG FIX: the original used os.path.join(file_name, '.hdf'),
        # which yields 'name/.hdf' (a path component, not an extension).
        # Append the extension instead, matching store_csv's handling.
        file_name = file_name + '.hdf'
    # Schema name is the file name without its extension.
    schema_name = os.path.splitext(file_name)[0]
    local_storage_dir = mimic_login.get_local_storage_dir()
    file_path = os.path.join(local_storage_dir, file_name)
    store = HdfDataStore(PartialSchema(schema_name), file_path)
    store.store(df)
    return FileLink(os.path.relpath(file_path),
                    result_html_prefix='Right-click and save: ')
def store_csv(schema, condition=None):
    """Export a table to a CSV file in local storage and return a download link.

    The file is named after ``schema.name`` with a '.csv' extension
    appended when the name does not already carry one.
    """
    csv_name = schema.name
    if os.path.splitext(csv_name)[1] != '.csv':
        csv_name = csv_name + '.csv'
    target = os.path.join(mimic_login.get_local_storage_dir(), csv_name)
    _get_table_loader(schema, condition).to_csv(target)
    return FileLink(os.path.relpath(target),
                    result_html_prefix='Right-click and save: ')
def load_table(schema, condition=None):
    """Load a table, caching it as a fixed HDF file when local storage is set.

    The cache file name is the schema name, plus ``'_' + condition`` when a
    condition is given, with an '.hdf' extension.
    """
    loader = _get_table_loader(schema, condition)
    storage_dir = mimic_login.get_local_storage_dir()
    if storage_dir:
        # Build the cache file name from the schema and optional condition.
        name_parts = [schema.name]
        if condition is not None:
            name_parts.append(condition)
        cache_path = os.path.join(storage_dir, '_'.join(name_parts) + '.hdf')
        cache = HdfDataStore(schema, cache_path, fixed=True)
        loader = CachingDataStore(schema, loader, cache)
    return loader.load()
def load_table(schema, condition=None):
    """Load a table, transparently caching it in a fixed HDF store.

    When a local storage directory is configured, results are cached to
    ``<schema.name>[_<condition>].hdf`` inside it; otherwise the table is
    loaded directly.
    """
    loader = _get_table_loader(schema, condition)
    storage_dir = mimic_login.get_local_storage_dir()
    if storage_dir:
        suffix = '' if condition is None else '_' + condition
        cache_file = os.path.join(storage_dir, schema.name + suffix + '.hdf')
        loader = CachingDataStore(schema, loader,
                                  HdfDataStore(schema, cache_file, fixed=True))
    return loader.load()
def load_table(schema, condition=None):
    """Load a table, caching per-query results in an appendable HDF store.

    Each distinct (slugified) condition is assigned a stable numeric index,
    persisted in the shelve registry under ``'queries_' + schema.name``, so
    repeated queries with the same condition reuse the same cache file.
    """
    loader = _get_table_loader(schema, condition)
    storage_dir = mimic_login.get_local_storage_dir()
    if storage_dir:
        cache_name = schema.name
        if condition is not None:
            with shelve.open(mimic_login.get_db_file()) as db:
                key = 'queries_' + schema.name
                if key not in db:
                    db[key] = ()
                slug = slugify(condition)
                if slug not in db[key]:
                    # Tuple reassignment persists via shelve's __setitem__.
                    db[key] = db[key] + (slug,)
                cache_name += '_query_' + str(db[key].index(slug))
        cache_path = os.path.join(storage_dir, cache_name + '.hdf')
        cache = AppendableHdfDataStore(schema, cache_path)
        loader = CachingDataStore(schema, loader, cache)
    with sql_exception():
        return loader.load()
def load_table(schema, condition=None):
    """Load a table through an appendable per-query HDF cache.

    When local storage is configured, a shelve-backed registry maps each
    slugified condition to a stable index; the cache file is named
    ``<schema.name>_query_<index>.hdf`` (or just ``<schema.name>.hdf`` when
    no condition is given). Loading is wrapped in ``sql_exception()``.
    """
    loader = _get_table_loader(schema, condition)
    storage_dir = mimic_login.get_local_storage_dir()
    if storage_dir:
        base = schema.name
        if condition is not None:
            with shelve.open(mimic_login.get_db_file()) as registry:
                reg_key = 'queries_' + schema.name
                if reg_key not in registry:
                    registry[reg_key] = ()
                condition = slugify(condition)
                known = registry[reg_key]
                if condition not in known:
                    registry[reg_key] = known + (condition,)
                base = '%s_query_%d' % (base,
                                        registry[reg_key].index(condition))
        cache = AppendableHdfDataStore(
            schema, os.path.join(storage_dir, base + '.hdf'))
        loader = CachingDataStore(schema, loader, cache)
    with sql_exception():
        return loader.load()