Example #1
 def load_blacklist(self):
     filename = self.blacklist_filename()
     if not os.path.exists(filename):
         blacklist = set()
     else:
         blacklist = utils.load_obj(filename)
     return blacklist
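Every example on this page calls a `load_obj` helper. A minimal sketch of what such a pickle-based loader and its `save_obj` counterpart typically look like; the exact names and signatures are assumptions, not code taken from any of the projects shown here:

import pickle

def save_obj(obj, filename):
    # Hypothetical counterpart: pickle any serializable object to disk.
    with open(filename, 'wb') as f:
        pickle.dump(obj, f)

def load_obj(filename):
    # Hypothetical sketch: restore a previously pickled object.
    with open(filename, 'rb') as f:
        return pickle.load(f)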
Example #2
 def load_blacklist(self):
     filename = self.blacklist_filename()
     if not os.path.exists(filename):
         blacklist = set()
     else:
         blacklist = utils.load_obj(filename)
     return blacklist
Example #3
    def load_dicts(self, variant):
        filename = self.cache_filename(variant)
        if not os.path.exists(filename):
            cache = self.default_cache()
        else:
            cache = utils.load_obj(filename)

        return cache
Example #4
 def load_all_models(self):
     db = self.fetch_model_db()
     # first empty the current live models by saving them
     self.save_all_live_models()
     for name, model_path in db.items():
         model = load_obj(model_path)
         #add to live models
         self.live_models.append(model)
Example #5
 def load_model(self, name):
     #grab model path
     db = self.fetch_model_db()
     model_path = db[name]
     model = load_obj(model_path)
     #add to live models
     self.live_models.append(model)
     return model
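Examples #4 and #5 both assume a `fetch_model_db` helper that returns a name-to-path mapping. A plausible sketch, building on the pickle helpers sketched above; `model_db_path` and `register_model` are hypothetical names:

 def fetch_model_db(self):
     # Hypothetical: the model db is a pickled dict mapping model
     # names to file paths; start empty if it doesn't exist yet.
     if not os.path.exists(self.model_db_path):
         return {}
     return load_obj(self.model_db_path)

 def register_model(self, name, model_path):
     # Hypothetical helper: record a saved model in the db.
     db = self.fetch_model_db()
     db[name] = model_path
     save_obj(db, self.model_db_path)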
Example #6
    def load_dicts(self, variant):
        filename = self.cache_filename(variant)
        if not os.path.exists(filename):
            cache = self.default_cache()
        else:
            cache = utils.load_obj(filename)

        return cache
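Example #6 (like #3) pairs the loader with two helpers that are not shown. A plausible sketch, with hypothetical implementations of `cache_filename` and `default_cache`:

    def cache_filename(self, variant):
        # Hypothetical: one pickle file per cache variant.
        return os.path.join(self.cache_dir, variant + '.pk')

    def default_cache(self):
        # Hypothetical: the empty cache shape used on a cold start.
        return {}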
Example #7
def r2_compare(modeldb_path, impute_dir, y, exportpath=None, SpecialTag=None):
    tag = SpecialTag
    if os.path.isfile(modeldb_path):
        modeldb = load_obj(modeldb_path)
    else:
        print("modeldb not found")
        return

    cols = modeldb.columns.tolist()
    if "test_r2" not in cols:
        curr_db = modeldb
    elif tag:
        query = "r2_test > 0 | SpecialTag == " + str(tag)
        curr_db = modeldb.query(query)
    else:
        curr_db = modeldb.query("r2_test > 0")

    #load imputed data
    cooked_data_file = impute_dir + "/imputed.pk"
    train_fp = impute_dir + "/train.pk"
    test_fp = impute_dir + "/test.pk"
    cooked_df = load_obj(cooked_data_file)
    train_i = load_obj(train_fp)
    train_df = cooked_df.iloc[train_i]
    test_i = load_obj(test_fp)
    test_df = cooked_df.iloc[test_i]

    #get all metrics from DF
    temp_metrics_df = curr_db.apply(
        lambda row: r2_model(row["FullPath"], row["TransformTag"], y, row[
            'ModelNum'], train_df, test_df),
        axis=1)

    new_columns = ['ModelNum', 'r2_test', 'mse_test', 'r2_train', 'mse_train']
    temp_metrics_df.columns = new_columns

    modeldb = pd.merge(modeldb, temp_metrics_df, how='left', on='ModelNum')

    if exportpath:
        # save_obj is assumed to be the pickle counterpart of load_obj
        save_obj(modeldb, exportpath)

    return modeldb
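A hypothetical invocation of `r2_compare`; the paths and target column below are invented for illustration:

updated_db = r2_compare('models/modeldb.pk', 'data/impute_run1', y='price')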
Example #8
 def load_model_object(self):
     model_object = load_obj(self.model_object_path)
     self.model_object = model_object
Example #9
 def load_df(self):
     df = load_obj(self.dataframe_path)
     self.df = df
Example #10
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    args = parser.parse_args()
    args = vars(args)
    in_coord_sys = args['in_coord_sys']
    out_coord_sys = args['out_coord_sys']
    exp_name = args['exp_name']

    assert (in_coord_sys in ['OASIS', 'NYU', 'SNOW'])
    assert (out_coord_sys in ['OASIS', 'NYU', 'SNOW'])

    if args['model_file'].find(".bin") >= 0:
        mode = 'model'
        training_args = load_obj(
            os.path.join(os.path.dirname(os.path.dirname(args['model_file'])),
                         'args.pkl'))

        print(
            "#######################################################################"
        )
        print("Testing a model, args: {}".format(args))
        print(
            "#######################################################################"
        )

        NetworkType = {"NIPSSurface": NIPSSurfaceNetwork}

        model = NetworkType[training_args['model_name']]().to(device)
        model_name = training_args['model_name']
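Example #10 expects `args.pkl` to sit two directory levels above the `.bin` model file. A plausible sketch of the save side during training; the directory layout and the `save_obj` counterpart are assumptions:

# Hypothetical layout: <exp_dir>/args.pkl with <exp_dir>/checkpoints/<name>.bin
save_obj(training_args, os.path.join(exp_dir, 'args.pkl'))
torch.save(model.state_dict(), os.path.join(exp_dir, 'checkpoints', 'model.bin'))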
Example #11
def default_jobs():
    return {
        'match_queue': job_queue.JobQueue(),
        'split_queue': job_queue.JobQueue(),
        'number_of_match_job': 0,
        'number_of_split_job': 0
    }


if __name__ == "__main__":
    try:
        cache_dir = 'match_and_split_text_layer'
        if not os.path.exists(os.path.expanduser('~/cache/' + cache_dir)):
            os.mkdir(os.path.expanduser('~/cache/' + cache_dir))
        # qdel sends a SIGUSR2 if -notify is used when starting the job.
        # import signal
        #signal.signal(signal.SIGUSR2, on_exit)
        try:
            jobs = utils.load_obj("wsdaemon.jobs")
        except:
            jobs = default_jobs()

        thread.start_new_thread(job_thread, (jobs['match_queue'], do_match))
        thread.start_new_thread(job_thread, (jobs['split_queue'], do_split))
        bot_listening()
    except KeyboardInterrupt:
        pywikibot.stopme()
        os._exit(1)
    finally:
        pywikibot.stopme()
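The commented-out SIGUSR2 hook in Example #11 suggests the daemon is meant to persist its queues on shutdown so that `utils.load_obj("wsdaemon.jobs")` can restore them on the next start. A minimal sketch of such a handler; `utils.save_obj` is assumed as the counterpart of `utils.load_obj`:

def on_exit(signum, frame):
    # Hypothetical handler: persist the job queues before exiting so the
    # next start can restore them via utils.load_obj("wsdaemon.jobs").
    utils.save_obj(jobs, "wsdaemon.jobs")
    os._exit(0)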
Example #12
def default_jobs():
    return {
        'match_queue': job_queue.JobQueue(),
        'split_queue': job_queue.JobQueue(),
        'number_of_match_job': 0,
        'number_of_split_job': 0
    }

if __name__ == "__main__":
    try:
        cache_dir = 'match_and_split_text_layer'
        if not os.path.exists(os.path.expanduser('~/cache/' + cache_dir)):
            os.mkdir(os.path.expanduser('~/cache/' + cache_dir))
        # qdel sends a SIGUSR2 if -notify is used when starting the job.
        # import signal
        #signal.signal(signal.SIGUSR2, on_exit)
        try:
            jobs = utils.load_obj("wsdaemon.jobs")
        except:
            jobs = default_jobs()

        thread.start_new_thread(job_thread, (jobs['match_queue'], do_match))
        thread.start_new_thread(job_thread, (jobs['split_queue'], do_split))
        bot_listening()
    except KeyboardInterrupt:
        pywikibot.stopme()
        os._exit(1)
    finally:
        pywikibot.stopme()
Example #13
 def load_dataset(self, name):
     #grab dataset path
     db = self.fetch_dataset_db()
     ds_path = db[name]
     ds = load_obj(ds_path)
     return ds