def main():
    """Run ParseSearch against a live database for a set of queries and print results.

    Connects via the project's db_connect helper, parses each query string for
    the chosen assembly, and prints every key of the parsed output (expanding
    the "genes" list one gene per line).
    """
    import os

    from dbconnect import db_connect

    testCONN = db_connect(os.path.realpath(__file__))
    assembly = "hg19"
    # assembly = "mm10"
    pw = PostgresWrapper(testCONN)

    # The original accumulated six successive debug reassignments of
    # `queries` ("BAP1", "HBB", "Actin alpha 1", "chr1:10-100", ...);
    # only the final assignment ever took effect, so the dead ones
    # are removed and the effective value kept.
    queries = ["EH37E1177528"]

    for q in queries:
        print("***************", q)
        ps = ParseSearch(pw, assembly, {"q": q})
        output = ps.parse()
        for k in sorted(output.keys()):
            v = output[k]
            if "genes" == k:
                # Gene hits are printed one per line for readability.
                for g in v:
                    print(g)
            else:
                print(k + ':', v)
def main():
    """Report tissues returned by PGsearch that lack a color in CachedObjects.

    Checks gene-expression tissues for hg19 and mm10, then RAMPAGE tissues
    for hg19, printing "missing <tissue>" for any tissue without an entry
    in the cached tissue-color map.
    """
    import sys
    import os

    here = os.path.dirname(__file__)
    for rel in ("..", "../common", "../../common"):
        sys.path.append(os.path.join(here, rel))

    from postgres_wrapper import PostgresWrapper
    from pg import PGsearch
    from dbconnect import db_connect
    from cached_objects import CachedObjects

    conn = db_connect(os.path.realpath(__file__))
    wrapper = PostgresWrapper(conn)

    # Gene-expression tissues (both assemblies).
    for assembly in ["hg19", "mm10"]:
        search = PGsearch(wrapper, assembly)
        colors = CachedObjects(wrapper, assembly).colors["tissues"]
        for tissue in search.geneExpressionTissues():
            if tissue not in colors:
                print("missing", tissue)

    # RAMPAGE tissues (hg19 only).
    for assembly in ["hg19"]:
        search = PGsearch(wrapper, assembly)
        colors = CachedObjects(wrapper, assembly).colors["tissues"]
        for fileID, info in search.rampage_info().items():
            tissue = info["tissue"]
            if tissue not in colors:
                print("missing", tissue)
def __init__(self, DBCONN, assembly):
    """Wire up the Postgres-backed search, cache, and rank-method lookup.

    Wraps the given DB connection, builds PGsearch and CachedObjects for
    the assembly, runs the subclass-provided _load(), and precomputes the
    rank-method -> index -> cell-type mapping via PGcommon.
    """
    self.assembly = assembly
    self.ps = PostgresWrapper(DBCONN)
    self.pgSearch = PGsearch(self.ps, self.assembly)
    self.cache = CachedObjects(self.ps, self.assembly)
    self._load()
    # Precompute once at construction; PGcommon is only needed transiently.
    self.rankMethodToIDxToCellType = PGcommon(
        self.ps, self.assembly).rankMethodToIDxToCellType()
def main():
    """Print each cached mm10 DNase assay-map entry next to its dataset count."""
    conn = db_connect(os.path.realpath(__file__))
    wrapper = PostgresWrapper(conn)
    cache = CachedObjects(wrapper, "mm10")
    search = PGsearch(wrapper, "mm10")
    counts = search.datasets("DNase")
    for key, value in cache.assaymap["dnase"].items():
        print(key, value, counts[key])
def run(args, DBCONN):
    """Run MoreTracks for each configured assembly (or just args.assembly)."""
    # A single assembly on the command line overrides the configured list.
    assemblies = [args.assembly] if args.assembly else Config.assemblies
    for assembly in assemblies:
        print('***********', assembly)
        pg = PostgresWrapper(DBCONN)
        with getcursor(DBCONN, "dropTables") as curs:
            MoreTracks(curs, assembly, pg).run()
def run(args, DBCONN):
    """Run TopAccessions for each configured assembly (or just args.assembly)."""
    assemblies = Config.assemblies
    if args.assembly:
        assemblies = [args.assembly]
    for assembly in assemblies:
        print('***********', assembly)
        pg = PostgresWrapper(DBCONN)
        with getcursor(DBCONN, "dropTables") as curs:
            icg = TopAccessions(curs, assembly, pg)
            icg.run()
        # Removed a dead `if 0:` debug block that opened a fresh single
        # connection per assembly only to skip
        # vacumnAnalyze(conn, assembly + "_cre_all", []) — it did no work
        # and wasted a DB connection on every iteration.
def main():
    """Print every (cell-type name, dataset) pair known to Datasets for hg19."""
    from utils import Utils, eprint, AddPath

    # Path setup must precede the imports that live in those directories.
    AddPath(__file__, '../../common/')
    from dbconnect import db_connect
    from postgres_wrapper import PostgresWrapper

    AddPath(__file__, '../../website/common/')
    from pg import PGsearch
    from cached_objects import CachedObjects
    from pg_common import PGcommon

    conn = db_connect(os.path.realpath(__file__))
    wrapper = PostgresWrapper(conn)
    search = PGsearch(wrapper, "hg19")

    datasets = Datasets("hg19", search)
    for cellTypeName, entries in datasets.byCellType.items():
        for entry in entries:
            print(cellTypeName, entry)
def main():
    """Build all UCSC trackhub tracks for hg19 and mm10."""
    args = parse_args()

    # Path setup must precede the imports that live in those directories.
    AddPath(__file__, '../../common/')
    from dbconnect import db_connect
    from postgres_wrapper import PostgresWrapper

    AddPath(__file__, '../../api/common/')
    from pg import PGsearch
    from cached_objects import CachedObjects
    from pg_common import PGcommon
    from db_trackhub import DbTrackhub
    from cached_objects import CachedObjectsWrapper

    conn = db_connect(os.path.realpath(__file__))
    wrapper = PostgresWrapper(conn)
    cacheWrapper = CachedObjectsWrapper(wrapper)
    trackDb = DbTrackhub(conn)

    hub = TrackhubDb(wrapper, cacheWrapper, trackDb, UCSC)
    for assembly in ["hg19", "mm10"]:
        hub.makeAllTracks(assembly)
def main():
    """Fetch DNase minipeaks for one GRCh38 accession and print the result."""
    import sys
    import os

    base = os.path.dirname(__file__)
    for rel in ("..", "../common", "../../common"):
        sys.path.append(os.path.join(base, rel))

    from postgres_wrapper import PostgresWrapper
    from pg import PGsearch
    from dbconnect import db_connect
    from cached_objects import CachedObjects

    conn = db_connect(os.path.realpath(__file__))
    wrapper = PostgresWrapper(conn)

    assembly = "GRCh38"
    accession = "EH38E1516978"
    search = PGsearch(wrapper, assembly)
    cache = CachedObjects(wrapper, assembly)

    # 0/4 match the original call; their semantics live in MiniPeaks.
    peaks = MiniPeaks(assembly, search, cache, 0, 4)
    result = peaks.getMinipeaksForAssays(["dnase"], [accession])
    print(result)
def main():
    """Configure and launch the CherryPy server hosting the Apis application."""
    args = parse_args()
    # --production wins over --dev if both were given.
    if args.production:
        args.dev = False

    DBCONN = db_connect(os.path.realpath(__file__))
    ps = PostgresWrapper(DBCONN)
    cow = CachedObjectsWrapper(ps)

    wsconfig = WebServerConfig("main", args.production)
    # Renamed from `main` to avoid shadowing this function's own name.
    root = Apis(args, wsconfig.viewDir, wsconfig.staticDir, ps, cow)
    cherrypy.tree.mount(root, '/', wsconfig.getRootConfig())

    # Base settings shared by every mode.
    cherrypy.config.update({
        'server.socket_host': '0.0.0.0',
        'server.socket_port': int(args.port),
        'tools.encode.text_only': False
    })
    if args.dev:
        cherrypy.config.update({
            'server.environment': "development",
            'server.socket_queue_size': 128,
            'server.thread_pool': 8,
        })
    if args.production:
        cherrypy.config.update({
            'server.socket_queue_size': 512,
            'server.thread_pool': 30,
            'log.screen': False,
            'log.access_file': "",
            'log.error_file': wsconfig.errorFnp
        })

    cherrypy.engine.start()
    cherrypy.engine.block()