def test_catalog_creation(self):
    """get_file_info should report a truthy size and date for an existing file."""
    # The utils module's own source file is guaranteed to exist on disk.
    module_dir = os.path.dirname(utils.__file__)
    target = os.path.join(module_dir, 'utils.py')
    size, date = utils.get_file_info(target)
    self.assertTrue(size)
    self.assertTrue(date)
def draw_lineplot(filename, save_dir, type='mean', node_no=0,
                  save_plots=False, plot_time=5):
    """Plot the per-recording average of one node's mean (or std) over epochs.

    Args:
        filename: path to a recording file readable by ``utils.get_file_info``
            and ``utils.load_mean_std_from_file``.
        save_dir: directory prefix the figure filename is concatenated onto.
        type: ``'mean'`` to plot means; any other value plots stds.
        node_no: flat index of the node within the layer.
        save_plots: when True, save the figure as a .jpg under ``save_dir``.
        plot_time: seconds to keep the non-blocking plot window open.
    """
    file_desc = utils.get_file_info(filename)
    layer = file_desc['layer_name']
    # BUGFIX: `freq` was used below (`1 / freq`) without ever being defined,
    # raising NameError at runtime.  Read it from the file descriptor, the
    # same way draw_distributions does.
    freq = file_desc['recording_frequency_per_epoch']
    means, stds = utils.load_mean_std_from_file(filename)
    data = means if type == 'mean' else stds
    # Average the chosen statistic over the batch for the requested node,
    # one value per recording.
    node_values = [
        np.mean(sample.reshape((file_desc['batch_size'], -1))[:, node_no])
        for sample in data
    ]
    # One x tick per recording: `freq` recordings per epoch.
    x = np.arange(0, file_desc['number_of_epochs'], 1 / freq)
    sns.lineplot(x, node_values)
    plt.title(f'Average value of {type} for node {node_no} of {layer}')
    plt.xlabel('Epoch Number')
    plt.ylabel(f'Average {type}s')
    plt.show(block=False)
    plt.pause(plot_time)
    if save_plots:
        plt.savefig(save_dir + f'{layer}-node_{node_no}-{type}-lineplot.jpg')
def create_factory(url):
    """Register a factory for *url* in the database and return the db result."""
    info = utils.get_file_info(url)
    size = info.get('file_size')
    name = info.get('file_name')
    # Work size is derived from the file size by the utils helper.
    work = utils.get_work_size(size)
    result = db.init_factory(url, name, size, work)
    print('[INTERFACE CREATE FACTORY] done.')
    return result
def updateLocalPath(self, newCwd=None):
    """Refresh the local file listing, optionally changing to *newCwd* first."""
    if newCwd is not None:
        # Silently ignore the request if the target is not a directory.
        if not os.path.isdir(newCwd):
            return
        self.cwd = newCwd
    self.pathEditSetText(self.cwd)
    self.files.clear()
    self.completeWordList = []
    # Offer a '..' entry for navigating to the parent, except at the root.
    if self.cwd != '/':
        parent_row = QTreeWidgetItem()
        parent_row.setIcon(0, QIcon('icons/folder.png'))
        parent_row.setText(0, '..')
        for col in range(1, 4):
            parent_row.setText(col, '')
        self.addItem(parent_row)
    for entry in os.listdir(self.cwd):
        # Hidden files are not shown.
        if entry.startswith('.'):
            continue
        full_path = os.path.join(self.cwd, entry)
        details = utils.get_file_info(full_path)
        if details.mode.startswith('d'):
            # Only directories participate in path auto-completion.
            self.completeWordList.append(full_path)
            entry_icon = QIcon('icons/folder.png')
        else:
            entry_icon = QIcon('icons/file.png')
        row = QTreeWidgetItem()
        row.setIcon(0, entry_icon)
        row.setText(0, details.filename)
        row.setText(1, details.size)
        row.setText(2, details.mode)
        row.setText(3, details.date)
        self.addItem(row)
    self.completerModel.setStringList(self.completeWordList)
def generate_design(save_folder, hab_path, metrics, bins, mask_path, nsp):
    """Build, display, and save a uniform sampling design from habitat and metric rasters."""
    # Per-run output folder under results/; created on first use.
    save_path = 'results/{}'.format(save_folder)
    if not os.path.exists(save_path):
        os.mkdir(save_path)
    # Habitat map plus georeferencing info from the tif file.
    habmap, n_bins, res, geo_t, prj_info = get_file_info(hab_path)
    # Optional mask raster; default is an all-ones mask of the same extent.
    if mask_path is None:
        mask = np.ones((habmap.shape[0], habmap.shape[1]))
    else:
        mask = extract_raster(mask_path)
    # Habitat layer first, then every additional metric with its bin count.
    metric_list = [habmap]
    bins_list = [n_bins]
    for i, metric_path in enumerate(metrics):
        metric_list.append(extract_raster(metric_path))
        bins_list.append(int(bins[i]))
    binned_metrics, combo_df, bin_breaks = bin_metrics(metric_list, mask, bins_list)
    all_layers, id_df, s_opt = generate_all_layers(binned_metrics, mask, combo_df, nsp)
    id_im, unique_ids = generate_id_im(all_layers, id_df)
    id_mix, id_df = generate_id_list(unique_ids, s_opt, nsp, id_df)
    print(id_df.head())
    # Generate the design, show it in a pop-up, and persist it as csv.
    x_unif, y_unif = generate_uniform_design(id_mix, id_im)
    plot_uniform(id_im, mask, x_unif, y_unif)
    save_uniform(x_unif, y_unif, id_mix, id_df, id_im, prj_info, geo_t, save_path)
def generate_design(save_folder, mask_path, nsp):
    """Create, display, and save a stratified sampling design from a mask tif."""
    # Per-run output folder under results/; created on first use.
    save_path = 'results/{}'.format(save_folder)
    if not os.path.exists(save_path):
        os.mkdir(save_path)
    print('Results will be saved to {}'.format(save_path))
    # Mask raster plus georeferencing info from the tif file.
    mask, n_bins, res, geo_t, prj_info = get_file_info(mask_path)
    # Draw the design, show it (the plot window must be closed to continue),
    # then write the coordinates out as csv.
    x_strat, y_strat = generate_stratified_design(mask, nsp)
    plot_stratified(mask, x_strat, y_strat)
    save_stratified(x_strat, y_strat, prj_info, geo_t, save_path)
def generate_design(save_folder, original_mask_path, csv_path, radius,
                    npz_path=None):
    """Update a uniform sampling design after some sites have been sampled.

    Args:
        save_folder: name of the subfolder under results/ to write output to.
        original_mask_path: tif file providing the original mask and geo info.
        csv_path: csv file of already-sampled sites.
        radius: exclusion radius applied around sampled sites in update_mask.
        npz_path: npz archive holding 'ID_im', 'immetrics' and 'binned_metrics'
            saved by the original design run.

    Raises:
        ValueError: if npz_path is not supplied.
    """
    # make results folder to save output
    save_path = 'results/{}'.format(save_folder)
    if not os.path.exists(save_path):
        os.mkdir(save_path)
    # BUGFIX: `npz_path` was used below without being a parameter or a local,
    # which raised NameError at runtime (unless a module-level global happened
    # to exist).  It is now an explicit keyword parameter; the default keeps
    # the call signature backward compatible.
    if npz_path is None:
        raise ValueError('npz_path is required: pass the .npz archive saved '
                         'by the original design run')
    savefiles = np.load(npz_path)
    ID_im = savefiles['ID_im']
    metrics = savefiles['immetrics']
    binned_metrics = savefiles['binned_metrics']
    site_df = pd.read_csv(csv_path)
    # get geo info and mask from path
    original_mask, nbins, res, GeoT, auth_code = get_file_info(
        original_mask_path)
    updated_mask = update_mask(site_df, original_mask, radius, res)
    sampled_df, nsp, id_mix_unsampled, save_IDs, unique_IDs, nsampled = \
        get_sampling_info(csv_path)
    store_masks = store_layers(ID_im, updated_mask, unique_IDs)
    # generate design
    x_adpt, y_adpt = update_uniform_design(sampled_df, id_mix_unsampled,
                                           store_masks)
    # plot design in pop up
    plot_design(updated_mask, x_adpt, y_adpt)
    # NOTE(review): `nsampled` is computed above but the call passes
    # nsampled=0 — confirm whether that is intentional before changing it.
    save_uniform(x_adpt, y_adpt, GeoT, auth_code, save_path,
                 nsampled=0, updated='')
    return
def generate_design(save_folder, updated_mask_path, csv_path):
    """Extend a stratified design with new sites, given an updated mask and prior samples."""
    # Per-run output folder under results/; created on first use.
    save_path = 'results/{}'.format(save_folder)
    if not os.path.exists(save_path):
        os.mkdir(save_path)
    print('Results will be saved to {}'.format(save_path))
    # Updated mask raster plus georeferencing info from the tif file.
    updated_mask, n_bins, res, geo_t, prj_info = get_file_info(
        updated_mask_path)
    sampled_csv = pd.read_csv(csv_path)
    # New design points that account for what was already sampled.
    x_adpt, y_adpt = update_stratified_design(updated_mask, sampled_csv)
    # Show the design in a pop-up, then persist everything as csv.
    plot_adapted_stratified(updated_mask, x_adpt, y_adpt, sampled_csv)
    save_stratified(x_adpt, y_adpt, prj_info, geo_t, save_path, sampled_csv)
def inline_buttons_handler(bot, update):
    """Handle an inline-keyboard callback: parse the action, run it on the
    user's file, and reply with the processed archive or an error message.

    Parameters:
        bot: the telegram bot instance used for all replies.
        update: the telegram update carrying the callback_query.

    Returns:
        "OK" in every non-raising path.

    Raises:
        Re-raises any exception from parameter parsing or file processing
        after notifying the user (the error text includes the raw query data).
    """
    from app import app, db
    query = update.callback_query
    chat_id = query.message.chat_id
    logger.debug("Got an inline button action: %s" % query.data)
    # Show "typing..." while we work.
    bot.send_chat_action(chat_id=chat_id, action=telegram.ChatAction.TYPING)
    # Try to get params: callback data is a JSON payload with the action name
    # and the user-file id under the short key "uf".
    try:
        params = json.loads(query.data)
        action = params.get("action")
        userfile_id = int(params.get("uf"))
    except Exception as e:
        logger.error(e)
        # Tell the user something broke, including debug info for the admin,
        # then propagate the original exception.
        bot.send_message(
            chat_id=chat_id,
            text="\n".join(
                [
                    "Упс! Что-то пошло не так 😱",
                    "Передайте это администратору, чтобы он все исправил:",
                    "Query data: %s" % query.data,
                    "Exception: %s" % e,
                ]
            ),
        )
        raise
    # Try to get info about file from db
    file_info = get_file_info(bot, userfile_id)
    if action in ACTIONS_MAPPING:
        # Output archive path: "<name> <userfile_id> <action>.zip" inside the
        # configured processed directory.
        outfile = os.path.join(
            app.config["PROCESSED_DIR"],
            "%s %s %s.zip"
            % (
                remove_extension(file_info["filename"]),
                file_info["userfile_id"],
                action,
            ),
        )
        bot.send_message(text="Сейчас посмотрю...⏳", chat_id=chat_id)
        try:
            extract_file(bot, chat_id, file_info)
            # Run the selected action over the extracted files; `statuses`
            # maps each file path to a success flag.
            statuses = ACTIONS_MAPPING[action](file_info["extract_path"])
            if any(statuses.values()):
                # At least one file succeeded: zip the results and send them
                # back as a reply to the original upload message.
                zipdir(file_info["extract_path"], outfile)
                bot.send_message(chat_id=chat_id, text="Готово!🚀")
                # NOTE(review): the file handle from open() is never closed
                # explicitly — consider a `with` block here.
                bot.send_document(
                    chat_id=chat_id,
                    document=open(outfile, "rb"),
                    filename=os.path.basename(outfile),
                    reply_to_message_id=file_info["message_id"],
                )
                if not all(statuses.values()):
                    # Some files failed: list them, splitting into several
                    # messages to respect Telegram's 4096-char message limit.
                    message = "⚠️ Следующие файлы не удалось обработать: ⚠️\n"
                    for file, status in statuses.items():
                        if not status:
                            file_path = os.path.relpath(
                                file, file_info["extract_path"]
                            )
                            # Telegram has limit for message length, so we
                            # split the message in case it is too long (> 4096)
                            if len(message) + len(file_path) + 10 < 4096:
                                message += f"\n ❌ {file_path}"
                            else:
                                bot.send_message(chat_id=chat_id, text=message)
                                message = f" ❌ {file_path}"
                    bot.send_message(chat_id=chat_id, text=message)
            else:
                # Nothing could be processed at all.
                bot.send_message(
                    chat_id=chat_id,
                    text="Не удалось обработать данные. \nПроверьте, что файлы предоставлены в нужном формате.",
                )
        except Exception as e:
            logger.error(e)
            # Same admin-facing error report as above, then re-raise.
            bot.send_message(
                chat_id=chat_id,
                text="\n".join(
                    [
                        "Упс! Что-то пошло не так 😱",
                        "Передайте это администратору, чтобы он все исправил:",
                        "Query data: %s" % query.data,
                        "Exception: %s" % e,
                    ]
                ),
            )
            raise
    else:
        # Unknown/unimplemented action.
        bot.send_message(
            chat_id=chat_id,
            text="Данная команда в процессе реализации и пока не доступна 😞",
        )
    return "OK"
def draw_distributions(filename, save_dir, type='mean', node_no=0,
                       save_plots=False, plot_time=0.5):
    """Animate per-recording distributions for one node of a recorded layer.

    Parameters:
        filename: recording file readable by utils.get_file_info and
            utils.load_mean_std_from_file.
        save_dir: directory prefix the output .gif name is concatenated onto.
        type: 'mean' or 'std' to animate that statistic's batch distribution;
            'both' to animate a normal curve built from mean and std together.
        node_no: flat index of the node within the layer.
        save_plots: when True, capture each frame and write an animated gif.
        plot_time: seconds per gif frame (gif fps = 1 / plot_time).
    """
    file_desc = utils.get_file_info(filename)
    layer = file_desc['layer_name']
    batch_size = file_desc['batch_size']
    # Recordings per epoch — used to label frames as Epoch/Iteration.
    freq = file_desc['recording_frequency_per_epoch']
    means, stds = utils.load_mean_std_from_file(filename)
    frames = []
    if type == 'both':
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for i in range(len(means)):
            # Batch-average mean for this node at recording i.
            mean = np.mean(means[i].reshape((batch_size, -1))[:, node_no])
            # NOTE(review): this is the mean of the squared stds over the
            # batch (an average variance), passed directly as `scale` —
            # confirm a sqrt is not intended here.
            std = np.sum(
                np.square(stds[i].reshape(
                    (batch_size, -1))[:, node_no])) / batch_size
            # Visualize as a smooth density of a matching normal sample.
            sns.distplot(np.random.normal(loc=mean, scale=std, size=1000),
                         ax=ax, hist=False)
            ax.axvline(mean, color='r', linestyle='-')
            iteration = i % freq
            epoch = i // freq
            plt.title(
                f'Distribution for {layer} node {node_no}: Epoch-{epoch} Iteration-{iteration}'
            )
            plt.xlabel(f'Value')
            plt.ylabel('Density')
            # Force a render so the canvas buffer holds the current frame.
            fig.canvas.draw()
            if save_plots:
                frame = np.array(fig.canvas.renderer.buffer_rgba())
                frames.append(frame)
            plt.pause(0.1)
            ax.clear()
        plt.close()
    else:
        # Animate the raw batch distribution of the chosen statistic.
        data = means if type == 'mean' else stds
        fig = plt.figure()
        ax = fig.add_subplot(111)
        for i in range(len(data)):
            sample = data[i].reshape((batch_size, -1))
            sample = sample[:, node_no]
            sns.distplot(sample, norm_hist=True, ax=ax)
            ax.axvline(np.mean(sample), color='r', linestyle='-')
            iteration = i % freq
            epoch = i // freq
            plt.title(
                f'Distribution for {layer} node {node_no}: Epoch-{epoch} Iteration-{iteration}'
            )
            plt.xlabel(f'Value of {type}')
            plt.ylabel('Density')
            # Force a render so the canvas buffer holds the current frame.
            fig.canvas.draw()
            if save_plots:
                frame = np.array(fig.canvas.renderer.buffer_rgba())
                frames.append(frame)
            plt.pause(0.1)
            ax.clear()
        plt.close()
    if save_plots:
        imageio.mimsave(save_dir + f'{layer}-node_{node_no}-{type}-distplot.gif',
                        frames, fps=1 / plot_time)