def download_all(self):
    """Download the full HMDB archive and time the run.

    Skips the actual network download when ``self.fake`` is set (dry-run
    mode).  Opens a DB session, reports elapsed time, commits and closes.

    NOTE(review): the session is committed with no visible writes between
    open and commit — presumably parsed entries are added elsewhere or this
    is scaffolding; confirm against the rest of the file.
    """
    path_fn = 'tmp/'
    t1 = time()
    if not self.fake:
        # assumes url_all_tpl points at the full HMDB dump — TODO confirm
        download_file(self.url_all_tpl, path_fn)
    session = ctx.Session()
    try:
        # save parsed entries into database
        print("Parsing HMDB finished! Took {} seconds".format(round(time() - t1, 2)))
        session.commit()
    finally:
        # fix: close the session even if commit()/print() raises,
        # so the connection is never leaked
        session.close()
# --- tail of a parser/converter function whose `def` lies above this chunk ---
# (incomplete from this view: `meta` and `refs_multiple` are bound earlier)
if refs_multiple:
    # NOTE(review): .copy() is shallow — meta.ref_etc['hmdb_id'] is the SAME
    # list object as refs_multiple['hmdb_id'], so the .remove() below mutates
    # the list being iterated.  Looks like a latent skip-an-element bug;
    # confirm against the full function before changing.
    meta.ref_etc = refs_multiple.copy()
    if 'hmdb_id' in refs_multiple:
        for hmdb_id in refs_multiple['hmdb_id']:
            # A 9-char id (e.g. HMDB12345) whose 11-char form (HMDB0012345)
            # is already present is a duplicate secondary accession.
            if len(hmdb_id) == 9 and 'HMDB00'+hmdb_id[4:] in meta.ref_etc['hmdb_id']:
                # redundant secondary HMDB id
                meta.ref_etc['hmdb_id'].remove(hmdb_id)
return meta

# --- module-level ChEBI bulk-import prototype script ---
path_fn = '../../tmp/ChEBI_complete.sdf'  # assumes the full ChEBI SDF dump — TODO confirm path
i = 0  # presumably a record counter used later in the loop; verify below this chunk
session = ctx.Session()
print("Starting ChEBI bulk import prototype")
t1 = time()
# migrate DB
if not ctx.table_exists('chebi_data'):
    ctx.create_database()
else:
    # NOTE(review): message says "Truncated" before the truncate runs —
    # probably intended as "Truncating"; left untouched (runtime string).
    print("Truncated data")
    ctx.truncate('chebi_data')
for me in parse_iter_sdf(path_fn):
    # Convert one raw SDF record dict into a CHEBIData entity.
    # NOTE(review): loop body presumably continues past this chunk
    # (session.add / commit expected below) — incomplete from this view.
    meta: CHEBIData = sdfdict_to_entity(me)