def __init__(self, taxon=None, max_taxon_dist=None, taxon_dist_scale=None, include_variants=False,
             temperature=37., temperature_std=1., ph=7.5, ph_std=0.3, cache_dirname=None):
    """ Build a protein-abundance query backed by the common schema.

    Args:
        taxon (:obj:`str`, optional): target taxon
        max_taxon_dist (:obj:`int`, optional): maximum taxonomic distance to include
        taxon_dist_scale (:obj:`float`, optional): The scale of the taxonomic distance scoring
            distribution. This determines how quickly the score falls to zero away from zero.
        include_variants (:obj:`bool`, optional): if :obj:`True`, also include observations from
            mutant taxa
        temperature (:obj:`float`, optional): desired temperature to search for
        temperature_std (:obj:`float`, optional): how much to penalize observations from other
            temperatures
        ph (:obj:`float`, optional): desired pH to search for
        ph_std (:obj:`float`, optional): how much to penalize observations from other pHs
        cache_dirname (:obj:`str`, optional): directory for the common-schema cache
    """
    # build the data source first, then hand everything to the base query class
    data_source = common_schema.CommonSchema(cache_dirname=cache_dirname)
    super(ProteinAbundanceQuery, self).__init__(
        taxon=taxon,
        max_taxon_dist=max_taxon_dist,
        taxon_dist_scale=taxon_dist_scale,
        include_variants=include_variants,
        temperature=temperature,
        temperature_std=temperature_std,
        ph=ph,
        ph_std=ph_std,
        data_source=data_source)
def setUpClass(cls):
    """Create a temporary cache, look up L-Proline, and run a concentration query on it."""
    cls.cache_dirname = tempfile.mkdtemp()
    schema = common_schema.CommonSchema(cache_dirname=cls.cache_dirname)
    cls.proline = schema.session.query(models.Metabolite) \
        .filter_by(metabolite_name='L-Proline').first()
    query = metabolite_concentrations.MetaboliteConcentrationQuery(
        cache_dirname=cls.cache_dirname, include_variants=True)
    cls.obs = query.run(cls.proline)
def __init__(self, cache_dirname=DATA_CACHE_DIR, params=None, data=None):
    """Store the query settings, connect to the common schema, and execute the run.

    Args:
        cache_dirname (:obj:`str`, optional): directory for the common-schema cache
        params: query parameters (project-specific; passed through unchanged)
        data: pre-loaded data (project-specific; passed through unchanged)
    """
    self.params = params
    self.data = data
    self.data_source = common_schema.CommonSchema(cache_dirname=cache_dirname)
    # kick off the query immediately on construction
    self.run()
def _default(self):
    """Clear the database and restore it from backup per the CLI arguments."""
    args = self.app.pargs
    common_schema.CommonSchema(
        clear_content=True,
        load_content=False,
        verbose=True,
        restore_backup_data=args.restore_data,
        restore_backup_schema=args.restore_schema,
        restore_backup_exit_on_error=args.exit_on_error)
def setUpClass(cls):
    """Build a cached schema and fetch the ARNT subunit for protein-DNA interaction tests."""
    cls.cache_dirname = tempfile.mkdtemp()
    schema = common_schema.CommonSchema(cache_dirname=cls.cache_dirname)
    subunits = schema.session.query(models.ProteinSubunit)
    cls.arnt = subunits.filter_by(uniprot_id='P53762').first()
    cls.q = dpi.ProteintoDNAInteractionQuery(cache_dirname=cls.cache_dirname)
def setUpClass(cls):
    """Create a small (10-entry), freshly-loaded test instance of the common schema."""
    cls.cache_dirname = tempfile.mkdtemp()
    cls.cs = common_schema.CommonSchema(
        name='TestCommonSchema',
        cache_dirname=cls.cache_dirname,
        test=True,
        clear_content=True,
        load_content=True,
        max_entries=10,
        verbose=True)
def aggregate(self):
    """Clear the database and rebuild its content from the backed-up data."""
    pargs = self.app.pargs
    # todo: set restore_backup_schema=False after fixing Alembic issue with migrations
    # todo: restore_backup_exit_on_error=True after fixing Alembic issue with migrations
    common_schema.CommonSchema(
        clear_content=True,
        load_content=False,
        verbose=True,
        restore_backup_data=True,
        restore_backup_schema=False)
def _default(self):
    """Restore the backup and then load database content per the CLI arguments."""
    args = self.app.pargs
    # todo: set restore_backup_schema=False after fixing Alembic issue with migrations
    # todo: restore_backup_exit_on_error=True after fixing Alembic issue with migrations
    common_schema.CommonSchema(
        load_content=True,
        restore_backup_data=True,
        restore_backup_schema=True,
        restore_backup_exit_on_error=False,
        max_entries=args.max_entries,
        verbose=args.verbose)
def setUpClass(cls):
    """Build a cached schema and fetch the P00323 subunit for abundance-query tests."""
    cls.cache_dirname = tempfile.mkdtemp()
    schema = common_schema.CommonSchema(cache_dirname=cls.cache_dirname)
    subunits = schema.session.query(models.ProteinSubunit)
    cls.protein_P00323 = subunits.filter_by(uniprot_id='P00323').first()
    cls.q = protein_abundance.ProteinAbundanceQuery(cache_dirname=cls.cache_dirname)
def build():
    """Load the full common schema, logging build output to a timestamped file,
    then upload a database backup.

    ``sys.stdout`` is redirected into the log file for the duration of the build
    and is restored even if the build raises (the original code left stdout
    pointing at the closed log file on failure).
    """
    log_filename = pkg_resources.resource_filename(
        'datanator', "builds/logs/{}.txt".format(str(datetime.datetime.now())))
    old_stdout = sys.stdout
    with open(log_filename, "w") as log_file:
        sys.stdout = log_file
        try:
            cs = common_schema.CommonSchema(load_content=True, verbose=True,
                                            load_entire_small_dbs=True)
            cs.upload_backup()
        finally:
            # always restore stdout, even when the build fails mid-way
            sys.stdout = old_stdout
def restore_db(restore_data=True, restore_schema=False, exit_on_error=True):
    """ Restore the content of the database

    Args:
        restore_data (:obj:`bool`, optional): if :obj:`True`, restore the data
        restore_schema (:obj:`bool`, optional): if :obj:`True`, restore the schema
        exit_on_error (:obj:`bool`, optional): if :obj:`True`, exit on error
    """
    # at least one of the two restore targets must be requested
    if not (restore_data or restore_schema):
        raise Exception('One or more of the data and schema must be restored')
    common_schema.CommonSchema(
        clear_content=True,
        load_content=False,
        verbose=True,
        restore_backup_data=restore_data,
        restore_backup_schema=restore_schema,
        restore_backup_exit_on_error=exit_on_error)
def setUpClass(cls):
    """Build a cached schema and fetch the subunits and complexes used by the PPI tests."""
    cls.cache_dirname = tempfile.mkdtemp()
    schema = common_schema.CommonSchema(cache_dirname=cls.cache_dirname)
    subunits = schema.session.query(models.ProteinSubunit)
    complexes = schema.session.query(models.ProteinComplex)
    cls.protein_Q9CWF2 = subunits.filter_by(uniprot_id='Q9CWF2').first()
    cls.protein_p53622 = subunits.filter_by(uniprot_id='P53622').first()
    cls.protein_p49418 = subunits.filter_by(uniprot_id='P49418').first()
    cls.rhino_complex = complexes.filter_by(
        complex_name='Rhino-Deadlock-Cutoff Complex').first()
    cls.collagen = complexes.filter_by(
        complex_name='Collagen type III trimer').first()
    cls.q = ppi.ProteinInteractionandComplexQuery(cache_dirname=cls.cache_dirname)
def setUpClass(cls):
    # Build a cached common schema and a kinetics query, then construct the
    # dihydrofolate-reductase reaction fixture:
    #   Dihydrofolate + NADPH + H+ -> NADP+ + 5,6,7,8-Tetrahydrofolate
    # Species structures are full InChI strings; negative coefficients mark
    # substrates, positive coefficients mark products.
    cls.cache_dirname = tempfile.mkdtemp()
    cls.flk = common_schema.CommonSchema(cache_dirname=cls.cache_dirname)
    cls.q = reaction_kinetics.ReactionKineticsQuery(
        cache_dirname=cls.cache_dirname, include_variants=True)
    cls.reaction = data_model.Reaction(
        participants = [
            data_model.ReactionParticipant(
                specie = data_model.Specie(
                    id = 'Dihydrofolate',
                    structure = 'InChI=1S/C19H21N7O6/c20-19-25-15-14(17(30)26-19)23-11(8-22-15)'+\
                        '7-21-10-3-1-9(2-4-10)16(29)24-12(18(31)32)5-6-13(27)28/h1-4,12,21H,5-8H2,'+\
                        '(H,24,29)(H,27,28)(H,31,32)(H4,20,22,25,26,30)'),
                coefficient = -1),
            data_model.ReactionParticipant(
                specie = data_model.Specie(
                    id = 'NADPH',
                    structure = 'InChI=1S/C21H30N7O17P3/c22-17-12-19(25-7-24-17)28(8-26-12)21-16'+\
                        '(44-46(33,34)35)14(30)11(43-21)6-41-48(38,39)45-47(36,37)40-5-10-13(29)15(31)'+\
                        '20(42-10)27-3-1-2-9(4-27)18(23)32/h1,3-4,7-8,10-11,13-16,20-21,29-31H,2,5-6H2,'+\
                        '(H2,23,32)(H,36,37)(H,38,39)(H2,22,24,25)(H2,33,34,35)'),
                coefficient = -1),
            data_model.ReactionParticipant(
                specie = data_model.Specie(
                    id = 'H+',
                    structure = 'InChI=1S/H'),
                coefficient = -1),
            data_model.ReactionParticipant(
                specie = data_model.Specie(
                    id = 'NADP+',
                    structure = 'InChI=1S/C21H28N7O17P3/c22-17-12-19(25-7-24-17)28(8-26-12)21-16('+\
                        '44-46(33,34)35)14(30)11(43-21)6-41-48(38,39)45-47(36,37)40-5-10-13(29)15(31)20'+\
                        '(42-10)27-3-1-2-9(4-27)18(23)32/h1-4,7-8,10-11,13-16,20-21,29-31H,5-6H2,(H7-,22'+\
                        ',23,24,25,32,33,34,35,36,37,38,39)/p+1'),
                coefficient = 1),
            data_model.ReactionParticipant(
                specie = data_model.Specie(
                    id = '5,6,7,8-Tetrahydrofolate',
                    structure = 'InChI=1S/C19H23N7O6/c20-19-25-15-14(17(30)26-19)23-11(8-22-15)7-21-'+\
                        '10-3-1-9(2-4-10)16(29)24-12(18(31)32)5-6-13(27)28/h1-4,11-12,21,23H,5-8H2,(H,24,29'+\
                        ')(H,27,28)(H,31,32)(H4,20,22,25,26,30)'),
                coefficient = 1)
        ])
# One-off build script: load a small (10-entry) test copy of the common schema.
import sys
sys.path.append("/Users/pochis01/Desktop/GitHub/datanator")
import datetime

from datanator.core import common_schema

cs = common_schema.CommonSchema(load_content=True, clear_content=True,
                                verbose=True, test=True, max_entries=10)
# Build script: load the full common schema, logging all output to a
# timestamped file, then upload a database backup.
#
# Fix: the original opened the log file without a context manager and only
# restored sys.stdout / closed the file on the success path; a failure in the
# build left stdout redirected and the log file open. The with-block plus
# try/finally guarantees cleanup either way.
import sys
sys.path.append("/Users/pochis01/Desktop/GitHub/datanator")
import datetime

from datanator.core import common_schema

old_stdout = sys.stdout
with open("datanator/builds/logs/{}.txt".format(str(datetime.datetime.now())), "w") as log_file:
    sys.stdout = log_file
    try:
        cs = common_schema.CommonSchema(load_content=True, verbose=True,
                                        load_entire_small_dbs=True)
        cs.upload_backup()
    finally:
        # restore stdout even if the build fails; the with-block closes the log
        sys.stdout = old_stdout
def __init__(self, cache_dirname=None):
    """Open a common-schema data source backed by *cache_dirname* (or the default cache)."""
    schema = common_schema.CommonSchema(cache_dirname=cache_dirname)
    self.data_source = schema
def __init__(self, cache_dirname=DATA_CACHE_DIR):
    """Remember the cache directory and connect the common-schema data source."""
    self.cache_dirname = cache_dirname
    self.flask = common_schema.CommonSchema(cache_dirname=self.cache_dirname)
def __init__(self, db_cache_dirname=None):
    """Create a reaction-kinetics query backed by the common schema.

    Args:
        db_cache_dirname (:obj:`str`, optional): directory for the database cache;
            defaults to the current working directory at call time.
    """
    # Fix: `db_cache_dirname=os.getcwd()` as a default would be evaluated once at
    # import time, freezing whatever directory the process happened to start in.
    # Resolve the default at call time instead.
    if db_cache_dirname is None:
        db_cache_dirname = os.getcwd()
    # constructing CommonSchema initializes/opens the cached database
    flaskdb = common_schema.CommonSchema(cache_dirname=db_cache_dirname)
    self.q = reaction_kinetics.ReactionKineticsQuery(
        cache_dirname=db_cache_dirname, include_variants=True)
def __init__(self, cache_dirname=DATA_CACHE_DIR):
    """Open a common-schema data source backed by *cache_dirname*."""
    schema = common_schema.CommonSchema(cache_dirname=cache_dirname)
    self.data_source = schema
def setUpClass(cls):
    """Create a temp cache, a text-search session, and a common-schema instance for the tests."""
    cls.cache_dirname = tempfile.mkdtemp()
    cls.sesh = text_search.TextSearchSession(db_cache_dirname=cls.cache_dirname)
    # constructing CommonSchema initializes the cached database the session reads
    flaskdb = common_schema.CommonSchema(cache_dirname=cls.cache_dirname)
def setUpClass(cls):
    """Attach a default (no explicit cache directory) common-schema instance to the test class."""
    schema = common_schema.CommonSchema()
    cls.flk = schema