def create_experiments(hyperparam_tuples):
    experiments = []
    # experiment_names = {}
    for hyperparam_tuple in hyperparam_tuples:
        experiment_name = experiment_name_base
        experiment_name += "__opt__%s__%s" % (rand_string(32),
                                              int(time.time()))
        experiment_name = experiment_name.replace('.', '_')

        # Give each experiment its own copy of the workspace so that
        # per-experiment parameter substitution does not clobber the original.
        workspace_new = fs_tracker.get_artifact_cache(
            'workspace', experiment_name)

        current_artifacts = artifacts.copy()
        current_artifacts.update({
            'workspace': {
                'local': workspace_new,
                'mutable': True
            }
        })

        rsync_cp(workspace_orig, workspace_new, ignore_arg, logger)
        # shutil.copytree(workspace_orig, workspace_new)

        for param_name, param_value in six.iteritems(hyperparam_tuple):
            if isinstance(param_value, np.ndarray):
                # Array-valued parameters are attached as immutable artifacts
                # instead of being substituted into the script text.
                array_filepath = '/tmp/%s.npy' % rand_string(32)
                np.save(array_filepath, param_value)
                assert param_name not in current_artifacts
                current_artifacts[param_name] = {'local': array_filepath,
                                                 'mutable': False}
            else:
                # Scalar parameters are substituted directly into the script,
                # replacing bare occurrences of the parameter name.
                with open(os.path.join(workspace_new, exec_filename), 'r') as f:
                    script_text = f.read()

                script_text = re.sub(
                    '\\b' + param_name + '\\b(?=[^=]*\\n)',
                    str(param_value),
                    script_text)

                with open(os.path.join(workspace_new, exec_filename), 'w') as f:
                    f.write(script_text)

        experiments.append(create_experiment(
            filename=exec_filename,
            args=other_args,
            experiment_name=experiment_name,
            project=project,
            artifacts=current_artifacts,
            resources_needed=resources_needed,
            metric=runner_args.metric,
            max_duration=runner_args.max_duration,
        ))
    return experiments
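# Hedged usage sketch (not part of the original module): create_experiments expects
# an iterable of {param_name: value} dicts, one per experiment, and relies on the
# enclosing scope for experiment_name_base, artifacts, workspace_orig, exec_filename
# and friends. Assuming that context, the tuples themselves could be built as a
# simple grid like this; make_hyperparam_tuples is an illustrative helper, not an
# API from the source.
import itertools

def make_hyperparam_tuples(param_grid):
    # param_grid: e.g. {'lr': [0.1, 0.01], 'batch_size': [32, 64]}
    names = sorted(param_grid.keys())
    return [dict(zip(names, values))
            for values in itertools.product(*(param_grid[n] for n in names))]

# experiments = create_experiments(make_hyperparam_tuples(
#     {'lr': [0.1, 0.01], 'batch_size': [32, 64]}))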
def insert_user_startup_script(user_startup_script,
                               startup_script_str, logger):
    if user_startup_script is None:
        return startup_script_str

    try:
        with open(os.path.abspath(
                os.path.expanduser(user_startup_script))) as f:
            user_startup_script_lines = f.read().splitlines()
    except BaseException:
        if user_startup_script is not None:
            logger.warn("User startup script (%s) cannot be loaded"
                        % user_startup_script)
        return startup_script_str

    startup_script_lines = startup_script_str.splitlines()
    new_startup_script_lines = []
    whitespace = " " * INDENT
    for line in startup_script_lines:
        if line.startswith("studio remote worker") or \
           line.startswith("studio-remote-worker"):
            # Wrap the worker invocation in a generated shell function that
            # first runs the user's startup script from the home directory,
            # then returns to the original working directory before launching
            # the worker.
            curr_working_dir = "curr_working_dir_%s" % rand_string(32)
            func_name = "user_script_%s" % rand_string(32)

            new_startup_script_lines.append("%s=$(pwd)\n" % curr_working_dir)
            new_startup_script_lines.append("cd ~\n")
            new_startup_script_lines.append("%s()(\n" % func_name)

            for user_line in user_startup_script_lines:
                if user_line.startswith("#!"):
                    continue
                new_startup_script_lines.append("%s%s\n"
                                                % (whitespace, user_line))

            new_startup_script_lines.append("%scd $%s\n"
                                            % (whitespace, curr_working_dir))
            new_startup_script_lines.append("%s%s\n" % (whitespace, line))
            new_startup_script_lines.append(")\n")
            new_startup_script_lines.append("%s\n" % func_name)
        else:
            new_startup_script_lines.append("%s\n" % line)

    new_startup_script = "".join(new_startup_script_lines)
    logger.info('Inserting the following user startup script'
                ' into the default startup script:')
    logger.info("\n".join(user_startup_script_lines))

    # with open("/home/jason/Desktop/script.sh", 'wb') as f:
    #     f.write(new_startup_script)
    # sys.exit()

    return new_startup_script
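# Hedged illustration (assumes insert_user_startup_script, INDENT and rand_string
# are importable from the surrounding module): each "studio remote worker" line of
# the default startup script ends up wrapped in a generated shell function that
# runs the user's script from $HOME, cds back, and only then starts the worker.
# _demo_insert is an illustrative helper, not part of the original code.
import logging
import tempfile

def _demo_insert(user_script_text, default_script_text):
    logger = logging.getLogger('startup_script_demo')
    logger.addHandler(logging.NullHandler())
    with tempfile.NamedTemporaryFile('w', suffix='.sh', delete=False) as f:
        f.write(user_script_text)
        user_script_path = f.name
    return insert_user_startup_script(user_script_path,
                                      default_script_text, logger)

# merged = _demo_insert("#!/bin/bash\npip install my_deps\n",
#                       "studio remote worker --queue=work_queue\n")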
def refresh_token(self, email, refresh_token):
    api_key = os.path.join(TOKEN_DIR, self.firebase.api_key)
    self.user = self.firebase.auth().refresh(refresh_token)
    self.user['email'] = email
    self.expired = False

    # Rename to ensure atomic writes to json file
    # (technically more safe, but slower)
    tmp_api_key = os.path.join(tempfile.gettempdir(),
                               "api_key_%s" % rand_string(32))
    with open(tmp_api_key, 'w') as f:
        json.dump(self.user, f)
        f.flush()
        os.fsync(f.fileno())
        f.close()
    os.rename(tmp_api_key, api_key)
def refresh_token(self, email, refresh_token):
    api_key = os.path.join(TOKEN_DIR, self.firebase.api_key)
    self.user = self.firebase.auth().refresh(refresh_token)
    self.user['email'] = email
    self.user['expiration'] = time.time() + API_KEY_COOLDOWN
    self.expired = False

    # if not os.path.exists(api_key) or \
    #         time.time() - os.path.getmtime(api_key) > HALF_HOUR:

    # Rename to ensure atomic writes to json file
    # (technically more safe, but slower)
    tmp_api_key = os.path.join(tempfile.gettempdir(),
                               "api_key_%s" % rand_string(32))
    with open(tmp_api_key, 'w') as f:
        f.write(json.dumps(self.user))
        f.flush()
        os.fsync(f.fileno())
        f.close()
    os.rename(tmp_api_key, api_key)
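# Hedged sketch of the atomic-write pattern used by both refresh_token variants
# above, pulled out as a standalone helper (the names here are illustrative and not
# part of the original class): write to a temporary file, flush and fsync it, then
# os.rename() over the destination so readers never observe a partially written
# token file. Placing the temp file in the destination directory keeps the rename
# on one filesystem, where it is atomic on POSIX.
import json
import os
import tempfile

def atomic_write_json(obj, dest_path):
    fd, tmp_path = tempfile.mkstemp(prefix='api_key_',
                                    dir=os.path.dirname(dest_path) or '.')
    with os.fdopen(fd, 'w') as f:
        json.dump(obj, f)
        f.flush()
        os.fsync(f.fileno())
    os.rename(tmp_path, dest_path)  # atomic when tmp and dest share a filesystem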
def test_error_received_on_exception_in_execute(self):
    mock_exception_return = (False, util.rand_string(10))

    error_strat = AbstractErrorStrategy()
    error_strat.handle_exception = MagicMock(
        return_value=mock_exception_return
    )

    with patch('hermes.components.Component.execute',
               side_effect=Exception):
        self.component.error_strategy = error_strat

        # Start the component and let it settle
        self.component.start()
        sleep(1)

        self.notif_queue.put(True)

        exception = self.error_queue.get(timeout=1)

        self.assertEqual(mock_exception_return, exception)
def test_execute_done_called_on_notification(self):
    error_string = util.rand_string(10)

    def mock_func(*args, **kwargs):
        """
        The process will have isolated this function, as well as
        the error queue.
        """
        self.component.error_queue.put(error_string)

    self.component.post_execute = MagicMock()
    self.component.post_execute.side_effect = mock_func
    self.component.execute = MagicMock()

    self.component.start()

    self.notif_queue.put(1)

    return_string = self.error_queue.get()

    self.assertEqual(error_string, return_string)
def test_execute_called_on_notification(self):
    error_string = util.rand_string(10)

    def mock_func(*args, **kwargs):
        """
        The process will have isolated this function, as well as
        the error queue.
        """
        self.component.error_queue.put(error_string)

    with patch('hermes.components.Component.execute') as mock_execute:
        mock_execute.side_effect = mock_func
        self.component.start()
        sleep(2)

        self.assertTrue(self.component.is_alive())

        self.notif_queue.put(1)

        return_string = self.error_queue.get(timeout=2)

        self.assertEqual(error_string, return_string)
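# Hedged, self-contained illustration of the inter-process pattern these tests rely
# on (standard library only, no hermes imports; the names below are illustrative):
# the component runs in a child process, so the only way a test can observe what
# happened inside execute()/post_execute() is to have the child put a marker on a
# shared multiprocessing queue and read it back with a timeout in the parent.
from multiprocessing import Process, Queue


def _child(notif_queue, error_queue):
    notif_queue.get()                  # block until the test sends a notification
    error_queue.put('executed')        # report back across the process boundary


if __name__ == '__main__':
    notif_q, error_q = Queue(), Queue()
    worker = Process(target=_child, args=(notif_q, error_q))
    worker.start()
    notif_q.put(1)                     # analogous to self.notif_queue.put(1)
    assert error_q.get(timeout=2) == 'executed'
    worker.join()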