def betameain():
    """Map the check/correct stage over each study's chunks on the dask client.

    NOTE(review): the mapped futures are never gathered — results are
    discarded, matching the original (apparently experimental) behaviour.
    """
    studies, prefs = meta_studies(path="../config.json")
    index_groups = [[2, 5, 6], [1, 3, 4]]

    for study, group in zip(studies, index_groups):
        print(group)
        chunks = chunker.init(study, pickle=False)
        corrected = client.map(operation.check_n_correct, chunks)
def add_main():
    """Run the full pipeline sequentially, one chunk at a time.

    Stages: check/correct -> liftover -> reference -> write to db.
    Prints the redis db size (db=8) when done.
    """
    # returns 'study' object with metadata as attributes
    studies, prefs = meta_studies(path="../config.json")

    for study in studies:
        for chunk in chunker.init(study, pickle=False):
            checked = operation.check_n_correct(chunk)
            lifted = operation.liftover(checked)
            referenced = operation.reference(lifted)
            operation.write_db(referenced)

    db = redis.StrictRedis(db=8)
    print(db.dbsize())
# Example #3
# 0
def run():
    """WIP driver: iterate the configured studies and run the
    check/correct stage for each.

    BUG FIX: the ``for`` loop had no body (IndentationError made the whole
    file unimportable); the assignment that followed it is now indented
    into the loop, which is the only reading that makes the code valid.
    """
    config_path = "/home/sevvy/PycharmProjects/CardioDiscover/test_config.json"
    studies = reader.meta_studies(config_path)

    for study in studies:
        # NOTE(review): ``init()`` is called with no arguments here while the
        # commented draft below passed ``study`` — confirm the intended call.
        study = check_n_correct.init()

    # # check and correct the GWAS file
    # study = check_n_correct.init_check_correct(study)
    # # make iterator in liftover and 1000ref
    # study = liftover.iterator()
    # stuy = reference_check.iterator()
    # study = ''
    # #call the stuff
def dif_main():
    """Build a delayed (lazy) pipeline per study and compute them all.

    BUG FIX: the original re-bound ``db_insert`` on every loop iteration and
    called ``.compute()`` exactly once after the loop, so only the LAST
    study's pipeline ever executed — and an empty study list raised
    ``NameError`` on ``db_insert``. All delayed write tasks are now
    collected and every one is computed.
    """
    # returns 'study' object with metadata as attributes
    studies, prefs = meta_studies(path="../config.json")

    db_inserts = []
    for study in studies:
        chnk_iterator = chunker.init(study, pickle=False)
        corr_chnk = delayed(operation.check_n_correct)(chnk_iterator)
        lift_chnk = delayed(operation.liftover)(corr_chnk)
        ref_chnk = delayed(operation.reference)(lift_chnk)
        db_inserts.append(delayed(operation.write_db)(ref_chnk))

    # Execute every study's pipeline, not just the last one.
    for db_insert in db_inserts:
        db_insert.compute()

    db = redis.StrictRedis(db=8)
    print(db.dbsize())
def main():
    """Fan the per-study pipeline out over the dask client and wait for it.

    BUG FIX: the futures returned by ``client.map`` were never gathered
    (``start`` was unused), so ``db.dbsize()`` could be read before any
    work had finished. The outer futures are now gathered before the db
    size is reported.
    """
    # returns 'study' object with metadata as attributes
    studies, prefs = meta_studies(path="../config.json")

    def study_tasker(study):
        # NOTE(review): calling the client from inside a mapped task is a
        # known dask anti-pattern; kept because the surrounding design
        # relies on it — confirm against the scheduler setup.
        chnk_iterator = client.submit(chunker.init, study, pickle=False)
        corr_chnk = client.map(operation.check_n_correct, chnk_iterator)
        lift_chnk = client.map(operation.liftover, corr_chnk)
        ref_chnk = client.map(operation.reference, lift_chnk)
        return client.map(operation.write_db, ref_chnk)

    start = client.map(study_tasker, studies)
    # Block until every per-study task has completed before inspecting redis.
    client.gather(start)

    db = redis.StrictRedis(db=8)
    print(db.dbsize())
# Example #6
# 0
def main():
    """Read each study file, classify its headers, and fold the per-study
    metadata back into the original meta document before writing it out.
    """
    # original doc containing the metadata
    meta_doc = read_meta(path="")
    # returns 'study' object with metadata as attributes
    files = meta_studies(path="")

    for entry in files:
        # read the study file, find separator, create
        gwas_input = reader.init_reader(entry)
        # do something with headers, write to df or something
        classifier.init_classifier(gwas_input)
        # additional meta data about study to perform update
        extra_meta = checker.init_check_correct(gwas_input, entry)
        meta_doc = update_meta(meta_doc, extra_meta)

    write_meta(meta_doc)
def alt_main():
    """Run the pipeline for the FIRST study only, writing each chunk to the
    db as its reference-check completes.

    BUG FIX: the ``write_db`` futures were discarded, so ``db.dbsize()``
    could be read before any write finished (and ``db_submit`` was unbound
    when ``ref_chnk`` was empty). The write futures are now collected and
    gathered before the size is reported.
    """
    studies, prefs = meta_studies(path="../config.json")

    study = studies[0]

    chnk_iterator = chunker.init(study, pickle=False)
    corr_chnk = client.map(operation.check_n_correct, chnk_iterator)

    lift_chnk = client.map(operation.liftover, corr_chnk)
    ref_chnk = client.map(operation.reference, lift_chnk)

    write_futures = []
    for future in as_completed(ref_chnk):
        print(future)
        chnk = future.result()
        write_futures.append(client.submit(operation.write_db, chnk))

    # Wait for all db writes before reporting the db size.
    client.gather(write_futures)

    db = redis.StrictRedis(db=8)
    print(db.dbsize())
def an_main():
    """Run the pipeline per study (paired with a hard-coded index group),
    writing chunks to the db as their reference-check completes.

    BUG FIX: the ``write_db`` futures were discarded, so ``db.dbsize()``
    could be read before any write finished. All write futures are now
    collected and gathered before the size is reported.
    """
    studies, prefs = meta_studies(path="../config.json")
    indices = [[2, 5, 6], [1, 3, 4]]

    write_futures = []
    for study, idx in zip(studies, indices):
        print(idx)
        chnk_iterator = chunker.init(study, pickle=False)
        corr_chnk = client.map(operation.check_n_correct, chnk_iterator)

        lift_chnk = client.map(operation.liftover, corr_chnk)
        ref_chnk = client.map(operation.reference, lift_chnk)

        for future in as_completed(ref_chnk):
            print(future)
            chnk = future.result()
            write_futures.append(client.submit(operation.write_db, chnk))

    # Wait for every db write across all studies before inspecting redis.
    client.gather(write_futures)

    db = redis.StrictRedis(db=8)
    print(db.dbsize())