def analyze_models_from_cache(file_paths):
    """Load pickled models from *file_paths*, keep usable ones, and analyze them.

    A model counts as usable when its ``vm30`` attribute is not ``None``.
    Analysis is fanned out over a dask bag; ``model_analysis`` is run for
    its side effects and the mapped results are discarded.

    Parameters
    ----------
    file_paths : iterable of str
        Paths to pickle files, each containing one cached model.
    """
    models = []
    for path in file_paths:
        # Context manager closes each file promptly — the original
        # `pickle.load(open(path, 'rb'))` leaked open file handles.
        with open(path, 'rb') as fh:
            models.append(pickle.load(fh))
    print('Available Models: ', len(models))
    models = [m for m in models if m.vm30 is not None]
    print('use-able Available Models: ', len(models))
    models_bag = db.from_sequence(models, npartitions=8)
    # .compute() triggers the parallel map; list() forces full evaluation.
    list(models_bag.map(model_analysis).compute())
def run_on_allen(number_d_sets=2):
    """Fetch (or load cached) Allen data sets, build models, and analyze each.

    Data sets are cached in ``allen_data.pkl``; on a cache miss or a
    corrupt cache the data is re-fetched and re-cached.

    NOTE(review): this function is re-defined later in the file; the
    later definition shadows this one.

    Parameters
    ----------
    number_d_sets : int, optional
        Number of data sets to request from ``get_data_sets`` when the
        cache is absent (default 2).
    """
    try:
        with open('allen_data.pkl', 'rb') as f:
            data_sets = pickle.load(f)
    # Narrowed from a bare `except:` — only cache-miss / corrupt-cache
    # conditions should trigger a re-fetch, not e.g. KeyboardInterrupt.
    except (FileNotFoundError, EOFError, pickle.UnpicklingError):
        data_sets = get_data_sets(number_d_sets=number_d_sets)
        with open('allen_data.pkl', 'wb') as f:
            pickle.dump(data_sets, f)
    models = [allen_to_model_and_features(data_set) for data_set in data_sets]
    # allen_to_model_and_features may return None; keep only the model
    # (first element) of each successful conversion.
    models = [mod[0] for mod in models if mod is not None]
    for model in models:
        model_analysis(model)
def run_on_allen(number_d_sets=2):
    """Fetch (or load cached) Allen data sets, build models, analyze uncached ones.

    Data sets are cached in ``allen_data.pkl``. Per-model analysis results
    are assumed cached as ``allen_three_feature_folder/<name>.p``; models
    whose result file already exists are skipped.

    NOTE(review): this is a duplicate definition of ``run_on_allen`` and
    shadows the earlier one in this file.

    Parameters
    ----------
    number_d_sets : int, optional
        Number of data sets to request from ``get_data_sets`` when the
        cache is absent (default 2).
    """
    try:
        with open('allen_data.pkl', 'rb') as f:
            data_sets = pickle.load(f)
    # Narrowed from a bare `except:` — only cache-miss / corrupt-cache
    # conditions should trigger a re-fetch.
    except (FileNotFoundError, EOFError, pickle.UnpicklingError):
        data_sets = get_data_sets(number_d_sets=number_d_sets)
        with open('allen_data.pkl', 'wb') as f:
            pickle.dump(data_sets, f)
    models = [allen_to_model_and_features(data_set) for data_set in data_sets]
    # allen_to_model_and_features may return None; keep only the model
    # (first element) of each successful conversion.
    models = [mod[0] for mod in models if mod is not None]
    for model in models:
        # os.path.join replaces manual '/'-concatenation of str() pieces.
        temp_path = os.path.join('allen_three_feature_folder',
                                 str(model.name) + '.p')
        # Skip models whose analysis output is already on disk.
        if not os.path.exists(temp_path):
            model_analysis(model)
def analyze_models_from_cache(file_paths):
    """Load pickled models from *file_paths* and analyze them in parallel.

    NOTE(review): duplicate of the earlier ``analyze_models_from_cache``
    minus the ``vm30`` filtering and progress prints; being defined
    later, this version shadows the earlier one.

    Parameters
    ----------
    file_paths : iterable of str
        Paths to pickle files, each containing one cached model.
    """
    models = []
    for path in file_paths:
        # Context manager closes each file promptly — the original
        # `pickle.load(open(path, 'rb'))` leaked open file handles.
        with open(path, 'rb') as fh:
            models.append(pickle.load(fh))
    models_bag = db.from_sequence(models, npartitions=8)
    # .compute() triggers the parallel map; list() forces full evaluation.
    list(models_bag.map(model_analysis).compute())