def _cache_locally(self, df: pd.DataFrame, name: str) -> str:
    """Store `df` in the local cache directory and return the file path,
    cleaning the cache when it fills up or exceeds the configured threshold."""

    def clean_cache():
        LOGGER.info('Cleaning cache. This may take a while...')
        # For complete local execution, do not clean the cache: abort here
        # instead (the cleanup below is intentionally skipped).
        exit(-1)
        shutil.rmtree(self.work_dir)
        Path(self.work_dir).mkdir(parents=True, exist_ok=True)
        self.cache_total, self.cache_used, free = shutil.disk_usage(self.work_dir)
        LOGGER.info('Deleted local cache. Using {} of cache'.format(
            self.cache_used / self.cache_total))

    try:
        local_file = store_data(df, self.work_dir, name)
    except IOError:
        # Disk full: clean (or abort, see above) and retry once.
        clean_cache()
        local_file = store_data(df, self.work_dir, name)
    self.cache_used += os.stat(local_file).st_size
    if self.cache_used / self.cache_total > self.cache_percentage:
        clean_cache()
    return local_file
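# Hedged sketch: `store_data` is not defined in this snippet. A minimal
# implementation consistent with the call sites above (returns the path of
# the written file, raises IOError/OSError when the disk is full) might look
# like this; the parquet format is an assumption.
import os

import pandas as pd


def store_data(df: pd.DataFrame, work_dir: str, name: str) -> str:
    """Persist a DataFrame under work_dir and return the written file path."""
    local_file = os.path.join(work_dir, name + '.parquet')
    df.to_parquet(local_file)  # may raise OSError/IOError on a full disk
    return local_file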
def store_output_files():
    data.store_data(cname_dir + cname_dict_filename, cnames)
    data.store_data(cname_dir + auth_dict_filename, auths)
    list_data.store_data(cname_dir + cname_filename, list(cname_list))
    list_data.store_data(cname_dir + auth_filename, list(auth_list))
    data.store_data(cname_dir + fail_cname_dict_filename, fail_cnames)
    data.store_data(cname_dir + fail_auth_dict_filename, fail_auths)
    list_data.store_data(cname_dir + fail_cname_filename, list(fail_cname_list))
    list_data.store_data(cname_dir + fail_auth_filename, list(fail_auth_list))
def store_output_files():
    data.store_data(output_dir + cname_dict_filename, cnames)
    data.store_data(output_dir + auth_dict_filename, auths)
    list_data.store_data(output_dir + cname_filename, list(cname_list))
    list_data.store_data(output_dir + auth_filename, list(auth_list))
    data.store_data(output_dir + fail_cname_dict_filename, fail_cnames)
    data.store_data(output_dir + fail_auth_dict_filename, fail_auths)
    list_data.store_data(output_dir + fail_cname_filename, list(fail_cname_list))
    list_data.store_data(output_dir + fail_auth_filename, list(fail_auth_list))
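# Hedged sketch: the `data` and `list_data` helper modules are not shown in
# this snippet. The call sites above suggest that `data.store_data` pickles an
# arbitrary object while `list_data.store_data` writes one item per line; both
# implementations below are assumptions.
import pickle


def store_data(filename, obj):  # assumed shape of data.store_data
    """Serialize an arbitrary Python object with pickle."""
    with open(filename, 'wb') as f:
        pickle.dump(obj, f)


def store_list_data(filename, items):  # assumed shape of list_data.store_data
    """Write one list item per line as plain text."""
    with open(filename, 'w') as f:
        for item in items:
            f.write('%s\n' % item)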
def store_output_files():
    data.store_data(ips_dir + ip_dict_filename, ips)
    ip_list = dict_ips_to_list(ips)
    list_data.store_data(ips_dir + ip_filename, ip_list)
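# Hedged sketch: `dict_ips_to_list` is not defined in this snippet. Given that
# `ips` appears to map a name to a collection of IPs, a plausible flattening
# (purely an assumption) would be:
def dict_ips_to_list(ips):
    """Flatten a {name: iterable-of-IPs} dict into a sorted, de-duplicated list."""
    return sorted({ip for ip_set in ips.values() for ip in ip_set})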
def store_output_files():
    data.store_data(geo_dir + geo_dict_filename, geo)
    geo_list = geo_dict_to_csv(geo)
    list_data.store_data(geo_dir + geo_filename, geo_list)
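# Hedged sketch: `geo_dict_to_csv` is not shown. Since its result is written
# via list_data.store_data (one record per line), a plausible shape is a list
# of CSV rows; the (lat, lon) value layout and field order are assumptions.
def geo_dict_to_csv(geo):
    """Render {ip: (lat, lon)} as CSV lines; field layout is assumed."""
    return ['%s,%s,%s' % (ip, lat, lon) for ip, (lat, lon) in geo.items()]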
def store_output_files():
    data.store_data(geo_db_dir + ipinfo_db_filename, located_ips)
###################
## Read IP to Geolocation Database
###################
if DEBUG2:
    print "Read IP to Geolocation Database"

if os.path.exists(geo_db_dir + my_geo_db_filename):
    if DEBUG3:
        print "  read from my data structure"
    geo_db = data.load_data(geo_db_dir + my_geo_db_filename)
else:
    if DEBUG3:
        print "  read from orig db"
    geo_db = read_ip2geo_db(geo_db_dir + geo_db_filename)
    data.store_data(geo_db_dir + my_geo_db_filename, geo_db)
located_ips = data.load_data(geo_dir + exist_ip_filename)
if DEBUG3:
    print "  # records=%d" % (len(geo_db))

# try:
#     address = search_in_db("8.8.8.8", geo_db)
#     print address
# except Exception as e:
#     print "  [1] Search Exception " + type(e) + ": " + e

###################
## Locate IP
###################
if DEBUG2:
    print "Locate IP"
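# Hedged sketch: `search_in_db` is not defined in this snippet. Assuming
# `geo_db` is a list of (range_start, range_end, location) tuples sorted by
# range_start -- the natural layout for an IP-to-geo database -- a lookup
# could work as below; the tuple layout is an assumption.
import bisect
import socket
import struct


def ip_to_int(ip):
    """Convert a dotted-quad IPv4 string to its integer value."""
    return struct.unpack('!I', socket.inet_aton(ip))[0]


def search_in_db(ip, geo_db):
    """Binary-search the sorted range list for the range containing `ip`."""
    value = ip_to_int(ip)
    idx = bisect.bisect_right(geo_db, (value, float('inf'), None)) - 1
    if idx >= 0 and geo_db[idx][0] <= value <= geo_db[idx][1]:
        return geo_db[idx][2]
    raise KeyError('IP %s not found in geolocation database' % ip)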
def store_output_files():
    data.store_data(geo_dir + geo_dict_filename, geo)
    geo_list = geo_dict_to_csv(geo)
    list_data.store_data(geo_dir + geo_filename, geo_list)
    data.store_data(geo_dir + exist_ip_filename, located_ips)
def store_output_files():
    # ips = merge_ips(ips, data.load_data(ips_dir + ip_dict_filename))
    data.store_data(ips_dir + ip_dict_filename, ips)
    ip_list = dict_ips_to_list(ips)
    list_data.store_data(ips_dir + ip_filename, ip_list)
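# Hedged sketch: `merge_ips` appears only in the commented-out call above.
# Assuming `ips` maps a name to a set of IPs, a union-style merge might be:
def merge_ips(new_ips, old_ips):
    """Union two {name: set-of-IPs} dicts without mutating the inputs."""
    merged = {k: set(v) for k, v in old_ips.items()}
    for name, ip_set in new_ips.items():
        merged.setdefault(name, set()).update(ip_set)
    return merged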
plt.savefig(os.path.join(args.evalf, 'cor_raw_%d.pdf' % idx_rel))
plt.show()

# store data for plotting
if args.env in ['Ball']:
    # edge_acc_over_time: n_roll x n_timestep
    record_names = ['edge_acc_over_time', 'edge_cor_over_time', 'fwd_loss_mse']
    if args.baseline == 1:
        record_path = os.path.join(args.evalf, 'rec_%d_baseline.h5' % args.n_kp)
    else:
        record_path = os.path.join(args.evalf, 'rec_%d.h5' % args.n_kp)
    store_data(
        record_names,
        [edge_acc_over_time_record, edge_cor_over_time_record, fwd_loss_mse],
        record_path)

    print()
    print('Edge Accuracy')
    print('%.2f%%, std: %.6f' % (
        np.mean(edge_acc_over_time_record[:, -1]) * 100.,
        np.std(edge_acc_over_time_record[:, -1])))

    print()
    print('Correlation on Attributes')
    for i in range(len(edge_cor_over_time_record)):
        print('#%d:' % i, edge_cor_over_time_record[i][-1])

elif args.env in ['Cloth']:
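# Hedged sketch: this `store_data(names, data, path)` variant is not shown.
# The .h5 extension above suggests an HDF5 writer along these lines (the use
# of h5py and one dataset per name are assumptions):
import h5py
import numpy as np


def store_data(names, data, path):
    """Write each array in `data` to `path` as an HDF5 dataset named by `names`."""
    with h5py.File(path, 'w') as hf:
        for name, array in zip(names, data):
            hf.create_dataset(name, data=np.asarray(array))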