def run(config, num_checkpoint, epoch_end, output_filename):
    task = get_task(config)
    preprocess_opt = task.get_preprocess_opt()
    dataloader = get_dataloader(config, 'train',
                                get_transform(config, 'dev', **preprocess_opt))

    model = task.get_model()
    checkpoints = get_checkpoints(config, num_checkpoint, epoch_end)
    print('checkpoints:')
    print('\n'.join(checkpoints))

    utils.checkpoint.load_checkpoint(model, None, checkpoints[0])
    for i, checkpoint in enumerate(checkpoints[1:]):
        model2 = get_task(config).get_model()
        last_epoch, _ = utils.checkpoint.load_checkpoint(model2, None, checkpoint)
        swa.moving_average(model, model2, 1. / (i + 2))

    with torch.no_grad():
        swa.bn_update(dataloader, model)

    output_name = '{}.{}.{:03d}'.format(output_filename, num_checkpoint, last_epoch)
    print('save {}'.format(output_name))
    utils.checkpoint.save_checkpoint(config, model, None, 0, 0,
                                     name=output_name,
                                     weights_dict={'state_dict': model.state_dict()})
def setup_graph(args, graph, x_region, y_region, region, source=None, gpu=None,
                do_output=True, index=0, make_reader=True):
    backproject = get_task('general-backproject', processing_node=gpu)

    if do_output:
        if args.dry_run:
            sink = get_task('null', processing_node=gpu, download=True)
        else:
            sink = get_writer(args)
            sink.props.filename = '{}-{:>03}-%04i.tif'.format(args.output, index)

    backproject.props.parameter = args.z_parameter
    if args.burst:
        backproject.props.burst = args.burst
    backproject.props.z = args.z
    backproject.props.region = region
    backproject.props.x_region = x_region
    backproject.props.y_region = y_region
    backproject.props.center_position_x = (args.center_position_x or [args.width / 2.])
    backproject.props.center_position_z = (args.center_position_z or [args.height / 2.])
    backproject.props.source_position_x = args.source_position_x
    backproject.props.source_position_y = args.source_position_y
    backproject.props.source_position_z = args.source_position_z
    backproject.props.detector_position_x = args.detector_position_x
    backproject.props.detector_position_y = args.detector_position_y
    backproject.props.detector_position_z = args.detector_position_z
    backproject.props.detector_angle_x = args.detector_angle_x
    backproject.props.detector_angle_y = args.detector_angle_y
    backproject.props.detector_angle_z = args.detector_angle_z
    backproject.props.axis_angle_x = args.axis_angle_x
    backproject.props.axis_angle_y = args.axis_angle_y
    backproject.props.axis_angle_z = args.axis_angle_z
    backproject.props.volume_angle_x = args.volume_angle_x
    backproject.props.volume_angle_y = args.volume_angle_y
    backproject.props.volume_angle_z = args.volume_angle_z
    backproject.props.num_projections = args.number
    backproject.props.compute_type = args.compute_type
    backproject.props.result_type = args.result_type
    backproject.props.store_type = args.store_type
    backproject.props.overall_angle = args.overall_angle
    backproject.props.addressing_mode = args.genreco_padding_mode
    backproject.props.gray_map_min = args.slice_gray_map[0]
    backproject.props.gray_map_max = args.slice_gray_map[1]

    source = create_preprocessing_pipeline(args, graph, source=source,
                                           processing_node=gpu,
                                           cone_beam_weight=not args.disable_cone_beam_weight,
                                           make_reader=make_reader)

    if source:
        graph.connect_nodes(source, backproject)
    else:
        source = backproject

    if do_output:
        graph.connect_nodes(backproject, sink)
        last = sink
    else:
        last = backproject

    return (source, last)
def test_update_task():
    task_id = tasks.save_task({"description": "before update", "status": "1"})
    task = tasks.get_task(task_id)
    assert task['description'] == "before update"

    tasks.update_task(task_id, "after update")
    task = tasks.get_task(task_id)
    assert task['description'] == "after update"
def export_backup_status():
    exports = dbstat.query(
        'select * from stats where TYPE="EXPORT" and backup_start>="%s"' % mindate)
    error = ""
    finish = not runnings_backups()
    if get_task() is not None and finish:
        status = get_task().get()
        if status != "ok":
            error = "Export failing with error: " + status
    return jsonify(data=exports, finish=finish, error=error)
def _setup_task_data(self, task_enum, mols):
    """Setup target labels and attributions."""
    task = att_tasks.get_task(task_enum)
    if task_enum in TASKS_SKIP_DATA_LOAD:
        proxy_task = att_tasks.get_task(att_tasks.Task.benzene)
        y_true = np.zeros((len(mols), task.n_outputs))
        y_true[:, 0] = 1.0
        att_true = proxy_task.get_true_attributions(mols)
    else:
        y_true = task.get_true_predictions(mols)
        att_true = task.get_true_attributions(mols)
    return task, y_true, att_true
def run(config):
    train_dir = config.train.dir

    task = get_task(config)
    optimizer = get_optimizer(config, task.get_model().parameters())

    checkpoint = utils.checkpoint.get_initial_checkpoint(config)
    if checkpoint is not None:
        last_epoch, step = utils.checkpoint.load_checkpoint(
            task.get_model(), optimizer, checkpoint)
    else:
        last_epoch, step = -1, -1
    print('from checkpoint: {} last epoch:{}'.format(checkpoint, last_epoch))

    scheduler = get_scheduler(config, optimizer, last_epoch)

    preprocess_opt = task.get_preprocess_opt()
    dataloaders = {
        split: get_dataloader(config, split,
                              get_transform(config, split, **preprocess_opt))
        for split in ['train', 'dev']
    }

    writer = SummaryWriter(config.train.dir)
    train(config, task, dataloaders, optimizer, scheduler, writer, last_epoch + 1)
def test_update():
    """ test updating a record """
    tasks.add_task(datetime.date(2020, 4, 1), "Wake up")
    tasks.edit_task(datetime.date(2020, 4, 1), 1, "Make coffee")
    assert tasks.get_task(datetime.date(2020, 4, 1), 0) == "Make coffee"
def start_one(index):
    gpu_index, region = regions[index]
    scheduler = Ufo.FixedScheduler()
    scheduler.set_resources(resources[index])
    graph = Ufo.TaskGraph()
    gpu = scheduler.get_resources().get_gpu_nodes()[gpu_index]
    region_index = run_number * len(resources) + index
    geometry = CTGeometry(args)
    if (len(args.center_position_z) == 1 and
            np.modf(args.center_position_z[0])[0] == 0 and
            geometry.is_simple_parallel_tomo):
        LOG.info('Simple tomography with integer z center, changing to center_position_z + 0.5 '
                 'to avoid interpolation')
        geometry.args.center_position_z = (geometry.args.center_position_z[0] + 0.5,)
    if not args.disable_projection_crop:
        if not args.dry_run and (args.y or args.height):
            LOG.debug('--y or --height specified, not optimizing projection region')
        else:
            geometry.optimize_args(region=region)
    opt_args = geometry.args
    if args.dry_run:
        source = get_task('dummy-data', number=args.number,
                          width=args.width, height=args.height)
    else:
        source = None
    setup_graph(opt_args, graph, x_region, y_region, region, source=source,
                gpu=gpu, index=region_index, make_reader=True)
    LOG.debug('Pass: %d, device: %d, region: %s', run_number + 1, gpu_index, region)
    scheduler.run(graph)

    return scheduler.props.time
def tweet_progress():
    tasks = request.get_json()
    finished = 0
    for task_id in tasks:
        res = get_task(task_id)
        # print(res.status)
        if res.ready():
            finished += 1
    return jsonify(finished)
def handler(chat_id, text):
    if text.lstrip().startswith("/"):
        if text == "/new_task":
            task, answer = tasks.get_task()
            _stored_chats[chat_id] = answer
            return task
    elif chat_id in _stored_chats:
        return check_answer(chat_id, text)
    return config.TEXT_NO_HANDLER
def __reset(self) -> None:
    if self.__context:
        self.__context.task.close()
    task = get_task(self.__task_name, True)
    self.__genotype = Genotype(
        0, FunctionSet(task.settings.action_number, task.settings.perception_number))
    self.__genotype.create_from_nodes(self.__chromosome)
    self.__context = Phenotype.get_context(self.__genotype, task)
def tweet_result():
    count = Counter({})
    tasks = request.get_json()
    for task_id in tasks:
        res = get_task(task_id)
        count += Counter(res.get())
    labels = list(dict(count).keys())
    total = dict(count)["total"]
    data = [d / total for d in dict(count).values()]
    return jsonify({"label": labels, "data": data})
async def on_message(message):
    if message.content.startswith(BOT_SIGN):
        command = message.content[len(BOT_SIGN):].strip()
        reply = 'Unknown command: "' + command + '"'
        if command == '':
            reply = get_task()
        await client.send_message(message.channel,
                                  embed=discord.Embed(description=reply))
def airfoil_result(task_id):
    task = get_task(task_id)
    if task.ready():
        data = task.get()
        persisted_result = f"{RESULT_FOLDER}/a{data['angle']}n{data['n_nodes']}l{data['n_levels']}s{data['speed']}t{data['time']}.json"
        with open(persisted_result, 'w') as outfile:
            json.dump(data, outfile)
        return jsonify({"status": "RESULT", "data": data})
    else:
        return jsonify({"status": task.status})
def __best_changed_function(self, algorithm: IAlgorithm) -> None:
    if not self.best or not isinstance(self.best, IGenotype):
        raise Exception('Unexpected Error')
    genotype: IGenotype = cast(IGenotype, self.best)
    self.__cloned_task = get_task(self.__task_name)
    Phenotype.while_end(genotype, Phenotype.get_context(genotype, self.__cloned_task))
    for func in self.__best_changed:
        if callable(func):
            func(algorithm, self.__cloned_task, self.best)
def test_get_task():
    task_id = tasks.save_task({
        "description": "this is a test task",
        "status": "1"
    })
    assert type(task_id) is str

    task = tasks.get_task(task_id)
    print(task)
    print(type(task))
    assert task['description'] == "this is a test task"
def main(port):
    collateral = os.getenv("COLLATERAL").split(",")
    df = os.getenv("DF").split(",")
    name = os.getenv("NAME").split(",")
    task = os.getenv("TASK")
    num_layers = int(os.getenv("NUM_LAYERS"))

    hostname = socket.gethostname()
    ip_address = socket.gethostbyname(hostname)

    model_names = name if name and len(name) == len(collateral) else ""
    Task = get_task(task, num_layers, model_names, collateral, df)

    app = appConfiguration.configureApp(Task)
    appConfiguration.printPageLink(hostname, port)
    app.server.run(debug=True, threaded=True, host=ip_address, port=int(port))
def evaluate(args: Tuple[str, List[int], float]) -> Tuple[float, float, float]:
    task_name, chromosomes, sleep = args
    task = get_task(task_name)
    dataset = TestDataset(task.ga_settings.test_number, TestData(task_name))
    functions = FunctionSet(task.settings.action_number, task.settings.perception_number)
    genotype = Genotype(0, functions)
    genotype.create_from_nodes(chromosomes)
    step, action_step, fitness = Phenotype.run_episodes(genotype, dataset)
    time.sleep(sleep)
    return fitness, step, action_step
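# Hedged usage sketch (not part of the original code): because `evaluate` takes a
# single (task_name, chromosomes, sleep) tuple, it maps cleanly over a population
# in a process pool. The task name and chromosome encodings below are placeholders.
#
#   from multiprocessing import Pool
#
#   population = [[0, 3, 1, 2], [1, 1, 2, 0]]            # placeholder chromosomes
#   jobs = [("cartpole", chromo, 0.0) for chromo in population]
#   with Pool() as pool:
#       results = pool.map(evaluate, jobs)                # [(fitness, step, action_step), ...]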
def __init__(self, task_name: str,
             *best_changed: Callable[[IAlgorithm, ITask, IChromosome], None]) -> None:
    self.__task_name = task_name
    self.__best_changed = best_changed
    task = get_task(task_name)
    settings = task.ga_settings
    super().__init__(
        1 if isinstance(task, AbstractAtariTask) else 3,  # type: ignore
        self.__best_changed_function,
        self.__get_islands(task_name, task),
        Termination(settings.terminate_offspring_number),
        Migration(settings.migration_rate, settings.migration_interval)
    )
    self.__cloned_task = None
def run(config, split, checkpoint_name, output_path):
    train_dir = config.train.dir

    task = get_task(config)
    checkpoint = utils.checkpoint.get_checkpoint(config, checkpoint_name)
    last_epoch, step = utils.checkpoint.load_checkpoint(task.get_model(), None, checkpoint)
    print('from checkpoint: {} last epoch:{}'.format(checkpoint, last_epoch))

    preprocess_opt = task.get_preprocess_opt()
    dataloader = get_dataloader(config, split,
                                get_transform(config, split, **preprocess_opt))

    df = inference(config, task, dataloader)
    df.to_csv(output_path, index=False)
def _setup_experiment(self):
    """Setup graphs and smiles if needed."""
    smiles = ['CO', 'CCC', 'CN1C=NC2=C1C(=O)N(C(=O)N2C)C']
    n = len(smiles)
    smiles_to_mol = functools.partial(
        featurization.smiles_to_mol, infer_hydrogens=True)
    tensorizer = featurization.MolTensorizer(preprocess_fn=smiles_to_mol)
    train_index, test_index = np.arange(n - 1), np.arange(n - 1, n)
    mol_list = [smiles_to_mol(smi) for smi in smiles]
    x = graph_utils.smiles_to_graphs_tuple(smiles, tensorizer)
    task = tasks.get_task(tasks.Task.crippen)
    y = task.get_true_predictions(mol_list)
    atts = task.get_true_attributions(mol_list)
    exp = experiments.ExperimentData.from_data_and_splits(
        x, y, atts, train_index, test_index)
    model = experiments.GNN(5, 3, 10, 1, models.BlockType.gcn, 'relu',
                            templates.TargetType.globals, 2)
    model(x)
    method = techniques.CAM()
    return exp, model, task, method
def generate_multireport(tasks, output_dir, task_kwargs=None, **kwargs):
    """Make a pdf report of experiments on multiple tasks, either by loading
    them or by recomputing them.

    Args:
        tasks (list of Task or str): list of helper objects containing meta
            information of the task. If a list of str, each element should be
            the name of a task, which will be passed to `get_task`.
        output_dir (str): directory containing the different models.
        task_kwargs (list of dictionaries, optional): list of task-specific
            arguments that update the kwargs for a specific task.
        kwargs: additional arguments to `generate_report` and `train`.

    Returns:
        models (dictionary): dictionary containing the trained model of the
            last run for each task.
        others (dictionary): dictionary containing additional information for
            the last run of each task.
    """
    models = {}
    others = {}
    pdf = None
    print()
    for i, task in enumerate(tasks):
        if isinstance(task, str):
            task = get_task(task)

        print("----- TASK : {} -----".format(task.name))
        print()

        task_kwarg = task.task_kwargs
        task_kwarg.update(kwargs)
        if task_kwargs is not None:
            task_kwarg.update(task_kwargs[i])

        models[task.name], pdf, others[task.name] = generate_report(
            task, output_dir, _is_multiple_tasks=True, _pdf=pdf, **task_kwarg)

    pdf.close()
    return models, others
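# Hedged usage sketch (not part of the original code): generate_multireport can be
# driven either with Task objects or with plain task names resolved via get_task.
# The task names and output directory below are hypothetical placeholders.
#
#   models, others = generate_multireport(
#       ["long lookup", "scan"],               # names forwarded to get_task
#       "reports/",                            # directory containing the models
#       task_kwargs=[{"is_small": True}, {}],  # per-task overrides
#   )
#   # models["long lookup"] then holds the trained model of the last run for that task.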
def _run(self):
    while not empty():
        url, kwargs, callback, retries = get_task()

        # Default request method to GET
        kwargs.setdefault("method", "GET")

        # Include our OAuth hook
        kwargs.update(self.hooks)

        try:
            # Construct and send request
            request = requests.Request(url, **kwargs)
            request.send()
            response = request.response
        except requests.RequestException as e:
            sys.stderr.write("Error requesting %s: %s, " % (request.full_url, e.message))
            if retries < self.max_retries:
                # Retry...
                sys.stderr.write("retrying...\n")
                add_task(url, kwargs, callback, retries + 1)
            else:
                # Give up
                sys.stderr.write("giving up...\n")
        else:
            # Invoke callback
            if callable(callback):
                callback(response, self)

            # Stay within rate limits
            throttle(response)

    sys.stderr.write("%s exiting...\n" % str(self))
def construct_main_loop(name, task_name, batch_size, max_epochs,
                        patience_epochs, learning_rate,
                        hyperparameters, **kwargs):
    task = tasks.get_task(**hyperparameters)
    hyperparameters["n_channels"] = task.n_channels

    extensions = []

    print "constructing graphs..."
    graphs, outputs, updates = construct_graphs(task=task, **hyperparameters)

    print "setting up main loop..."

    from blocks.model import Model
    model = Model(outputs["train"]["cost"])

    from blocks.algorithms import GradientDescent, CompositeRule, StepClipping, Adam
    algorithm = GradientDescent(
        cost=outputs["train"]["cost"],
        parameters=graphs["train"].parameters,
        step_rule=CompositeRule([Adam(learning_rate=learning_rate),
                                 StepClipping(1e3)]),
        on_unused_sources="warn")
    algorithm.add_updates(updates["train"])

    extensions.extend(construct_monitors(
        algorithm=algorithm, task=task, model=model, graphs=graphs,
        outputs=outputs, updates=updates, **hyperparameters))

    from blocks.extensions import FinishAfter, Printing, ProgressBar, Timing
    from blocks.extensions.stopping import FinishIfNoImprovementAfter
    from blocks.extensions.training import TrackTheBest
    from blocks.extensions.saveload import Checkpoint
    from dump import DumpBest, LightCheckpoint, PrintingTo
    extensions.extend([
        TrackTheBest("valid_error_rate", "best_valid_error_rate"),
        FinishIfNoImprovementAfter("best_valid_error_rate", epochs=patience_epochs),
        FinishAfter(after_n_epochs=max_epochs),
        DumpBest("best_valid_error_rate", name+"_best.zip"),
        Checkpoint(hyperparameters["checkpoint_save_path"],
                   on_interrupt=False, every_n_epochs=5,
                   before_training=True, use_cpickle=True),
        ProgressBar(),
        Timing(),
        Printing(),
        PrintingTo(name+"_log")])

    from blocks.main_loop import MainLoop
    main_loop = MainLoop(data_stream=task.get_stream("train"),
                         algorithm=algorithm,
                         extensions=extensions,
                         model=model)

    # note blocks will crash and burn because it cannot deal with an
    # already-initialized Algorithm, so this should be enabled only for
    # debugging
    if False:
        with open("graph", "w") as graphfile:
            algorithm.initialize()
            theano.printing.debugprint(algorithm._function, file=graphfile)

    from tabulate import tabulate
    print "parameter sizes:"
    print tabulate((key, "x".join(map(str, value.get_value().shape)), value.get_value().size)
                   for key, value in main_loop.model.get_parameter_dict().items())

    return main_loop
def construct_main_loop(name, task_name, patch_shape, batch_size,
                        n_spatial_dims, n_patches, max_epochs,
                        patience_epochs, learning_rate,
                        gradient_limiter, hyperparameters, **kwargs):
    task = tasks.get_task(**hyperparameters)
    hyperparameters["n_channels"] = task.n_channels

    extensions = []

    # let theta noise decay as training progresses
    for key in "location_std scale_std".split():
        hyperparameters[key] = theano.shared(hyperparameters[key], name=key)
        extensions.append(util.ExponentialDecay(
            hyperparameters[key],
            hyperparameters["%s_decay" % key],
            after_batch=True))

    print "constructing graphs..."
    graphs, outputs, updates = construct_graphs(task=task, **hyperparameters)

    print "setting up main loop..."

    from blocks.model import Model
    model = Model(outputs["train"]["cost"])

    from blocks.algorithms import GradientDescent, CompositeRule, StepClipping, Adam, RMSProp
    from extensions import Compressor
    if gradient_limiter == "clip":
        limiter = StepClipping(1.)
    elif gradient_limiter == "compress":
        limiter = Compressor()
    else:
        raise ValueError()

    algorithm = GradientDescent(
        cost=outputs["train"]["cost"],
        parameters=graphs["train"].parameters,
        step_rule=CompositeRule([limiter, Adam(learning_rate=learning_rate)]))
    algorithm.add_updates(updates["train"])

    extensions.extend(construct_monitors(
        algorithm=algorithm, task=task, model=model, graphs=graphs,
        outputs=outputs, updates=updates, **hyperparameters))

    from blocks.extensions import FinishAfter, Printing, ProgressBar, Timing
    from blocks.extensions.stopping import FinishIfNoImprovementAfter
    from blocks.extensions.training import TrackTheBest
    from blocks.extensions.saveload import Checkpoint
    from dump import DumpBest, LightCheckpoint, PrintingTo, DumpGraph, DumpLog
    extensions.extend([
        TrackTheBest("valid_error_rate", "best_valid_error_rate"),
        FinishIfNoImprovementAfter("best_valid_error_rate", epochs=patience_epochs),
        FinishAfter(after_n_epochs=max_epochs),
        DumpBest("best_valid_error_rate", name + "_best.zip"),
        Checkpoint(hyperparameters["checkpoint_save_path"],
                   on_interrupt=False, every_n_epochs=10,
                   use_cpickle=True),
        DumpLog("log.pkl", after_epoch=True),
        ProgressBar(),
        Timing(),
        Printing(),
        PrintingTo(name + "_log"),
        DumpGraph(name + "_grad_graph")])

    from blocks.main_loop import MainLoop
    main_loop = MainLoop(data_stream=task.get_stream("train"),
                         algorithm=algorithm,
                         extensions=extensions,
                         model=model)

    from tabulate import tabulate
    print "parameter sizes:"
    print tabulate((key, "x".join(map(str, value.get_value().shape)), value.get_value().size)
                   for key, value in main_loop.model.get_parameter_dict().items())

    return main_loop
def hello():
    return render_template('dash.html', tasks=tasks.get_task())
def prepare_tasks():
    result = tasks.get_task()
    print result
    return simplejson.dumps({"result": result})
def main(_):
    # set up logger
    tf.logging.set_verbosity(tf.logging.INFO)

    tf.logging.info("Welcome Using Zero :)")

    params = global_params

    # try loading parameters
    # priority: command line > saver > default
    # 1. load latest path to load parameters
    if os.path.exists(flags.FLAGS.config):
        params.override_from_dict(eval(open(flags.FLAGS.config).read()))
    params = load_parameters(params, params.output_dir)
    # 2. refine with command line parameters
    if os.path.exists(flags.FLAGS.config):
        params.override_from_dict(eval(open(flags.FLAGS.config).read()))
    params.parse(flags.FLAGS.parameters)

    # set up random seed
    random.seed(params.random_seed)
    np.random.seed(params.random_seed)
    tf.set_random_seed(params.random_seed)

    # loading vocabulary
    tf.logging.info("Begin Loading Vocabulary")
    start_time = time.time()
    full_task = get_task(params, True)
    if not os.path.exists(params.word_vocab_file):
        params.word_vocab = Vocab(lower=params.lower)
        params.word_vocab.make_vocab(
            full_task, use_char=False,
            embedding_path=params.pretrain_word_embedding_file)
    else:
        params.word_vocab = Vocab(lower=params.lower,
                                  vocab_file=params.word_vocab_file)
    if params.use_char:
        if not os.path.exists(params.char_vocab_file):
            params.char_vocab = Vocab(lower=False)
            params.char_vocab.make_vocab(full_task, use_char=True, embedding_path=None)
        else:
            params.char_vocab = Vocab(lower=False, vocab_file=params.char_vocab_file)
    tf.logging.info("End Loading Vocabulary, Word Vocab Size {}, "
                    "Char Vocab Size {}, within {} seconds".format(
                        params.word_vocab.size(),
                        params.char_vocab.size() if params.use_char else 0,
                        time.time() - start_time))

    if flags.FLAGS.mode == "vocab":
        save_parameters(params, params.output_dir)
        return

    # save parameters
    if flags.FLAGS.mode == "train":
        save_parameters(params, params.output_dir)

    # loading bert config
    if params.enable_bert:
        bert_config = bert.load_config(params.bert_dir)
        params.bert = tc.training.HParams(**bert_config)

        # loading vocabulary
        tf.logging.info("Begin Loading Vocabulary")
        start_time = time.time()
        params.bert.vocab = bert.load_vocab(params.bert_dir)
        tf.logging.info(
            "End Loading Vocabulary, Vocab Size {}, within {} seconds".format(
                params.bert.vocab.size, time.time() - start_time))

    # loading task label information
    params.label_size = full_task.get_label_size()

    # print parameters
    print_parameters(params)

    # print the used datasets
    tf.logging.info("Task {} is performed with data {}".format(
        params.task, full_task.data_path))

    mode = flags.FLAGS.mode
    if mode == "train":
        # load the recorder
        params = setup_recorder(params)
        graph.train(params)
    elif mode == "test":
        graph.evaluate(params)
    else:
        tf.logging.error("Invalid mode: {}".format(mode))
def test_create():
    con = psycopg2.connect(**DATABASE)
    tasks.add_task(con, datetime.date(2020, 4, 1), "Wake up")
    assert tasks.get_task(con, datetime.date(2020, 4, 1), 1) == "Wake up"
def test_delete():
    con = psycopg2.connect(**DATABASE)
    tasks.add_task(con, datetime.date(2020, 4, 1), "Wake up")
    tasks.delete_task(con, datetime.date(2020, 4, 1), 1)
    with pytest.raises(TypeError):
        tasks.get_task(con, datetime.date(2020, 4, 1), 1)
def test_update():
    con = psycopg2.connect(**DATABASE)
    tasks.add_task(con, datetime.date(2020, 4, 1), "Wake up")
    tasks.edit_task(con, datetime.date(2020, 4, 1), 1, "Make coffee")
    assert tasks.get_task(con, datetime.date(2020, 4, 1), 1) == "Make coffee"
def runnings_backups():
    task = get_task()
    is_running = (task is not None)
    finish = (is_running and task.get() is not None)
    return is_running and not finish
def args2tasks(args):
    """Given the parsed arguments, return the correct list of tasks."""
    kwargs = {}
    if args.mode == "small":
        kwargs["is_small"] = True
    elif args.mode == "mini":
        kwargs["is_mini"] = True

    if args.tasks == "important":
        tasks = [
            get_task("long lookup", **kwargs),
            get_task("long lookup reverse", **kwargs),
            get_task("noisy long lookup single", **kwargs),
            get_task("long lookup intermediate noise", **kwargs),
            get_task("noisy long lookup multi", **kwargs),
            get_task("scan", **kwargs),
            get_task("symbol rewriting", is_small=True)
        ]
    elif args.tasks == "all":
        tasks = [
            get_task("lookup", **kwargs),
            get_task("long lookup", **kwargs),
            get_task("long lookup jump", **kwargs),
            get_task("long lookup oneshot", **kwargs),
            get_task("long lookup reverse", **kwargs),
            get_task("noisy long lookup single", **kwargs),
            get_task("long lookup intermediate noise", **kwargs),
            get_task("noisy long lookup multi", **kwargs),
            get_task("scan", **kwargs),
            get_task("symbol rewriting", **kwargs)
        ]
    else:
        tasks = [
            get_task(task.replace("_", " "), **kwargs) for task in args.tasks
        ]

    # these will be removed later because they are None
    args.tasks = None
    args.mode = None

    return tasks
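# Hedged usage sketch (not part of the original code): args2tasks expects an
# argparse-style namespace with `mode` and `tasks` attributes, and clears both
# once the task list has been built. The values below are placeholders.
#
#   import argparse
#
#   args = argparse.Namespace(mode="small", tasks="important")
#   task_list = args2tasks(args)   # the seven "important" tasks, built with is_small=True
#   assert args.tasks is None and args.mode is None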
def test_create():
    tasks.add_task(datetime.date(2020, 4, 1), "Wake up")
    assert tasks.get_task(datetime.date(2020, 4, 1), 0) == "Wake up"
def test_delete():
    tasks.add_task(datetime.date(2020, 4, 1), "Wake up")
    tasks.delete_task(datetime.date(2020, 4, 1), 0)
    with pytest.raises(KeyError):
        tasks.get_task(datetime.date(2020, 4, 1), 0)