def setup(self, add_bad_host=False):
    """Start the local ZK server, create cell roots, and register cells.

    Args:
      add_bad_host: if True, append an unresolvable host to the test_ca
        address list to exercise bad-host handling.
    """
    from environment import run, binary_args, vtlogroot  # pylint: disable=g-import-not-at-top,g-multiple-import
    import utils  # pylint: disable=g-import-not-at-top

    self.assign_ports()
    run(binary_args('zkctl') +
        ['-log_dir', vtlogroot,
         '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
         'init'])

    # Create toplevel directories for global ZK, and one per cell.
    for zk_path in ('/global', '/test_nj', '/test_ny', '/test_ca'):
        run(binary_args('zk') + ['-server', self.addr, 'touch', '-p', zk_path])

    # Register each cell in the topology via 'vtctl AddCellInfo'.
    for cell_root, cell in (('/test_nj', 'test_nj'), ('/test_ny', 'test_ny')):
        utils.run_vtctl_vtctl(['AddCellInfo', '-root', cell_root,
                               '-server_address', self.addr, cell])

    ca_addr = self.addr
    if add_bad_host:
        ca_addr += ',does.not.exists:1234'
    utils.run_vtctl_vtctl(['AddCellInfo', '-root', '/test_ca',
                           '-server_address', ca_addr, 'test_ca'])
def setup(self, add_bad_host=False):
    """Start the local ZK server and register the three test cells.

    Args:
      add_bad_host: if True, append an unresolvable host to the test_ca
        address list to exercise bad-host handling.
    """
    from environment import run, binary_args, vtlogroot  # pylint: disable=g-import-not-at-top,g-multiple-import
    import utils  # pylint: disable=g-import-not-at-top

    self.assign_ports()
    init_args = ['-log_dir', vtlogroot,
                 '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
                 'init']
    run(binary_args('zkctl') + init_args)

    # Register the cells via 'vtctl AddCellInfo'.
    for cell_root, cell in (('/test_nj', 'test_nj'), ('/test_ny', 'test_ny')):
        utils.run_vtctl_vtctl(['AddCellInfo', '-root', cell_root,
                               '-server_address', self.addr, cell])

    ca_addr = self.addr
    if add_bad_host:
        ca_addr += ',does.not.exists:1234'
    # Use UpdateCellInfo for this one, more coverage.
    utils.run_vtctl_vtctl(['UpdateCellInfo', '-root', '/test_ca',
                           '-server_address', ca_addr, 'test_ca'])
def setup(self):
    """Start the local ZK server and register the three test cells."""
    from environment import run, binary_args, vtlogroot  # pylint: disable=g-import-not-at-top,g-multiple-import
    import utils  # pylint: disable=g-import-not-at-top

    self.assign_ports()
    run(binary_args('zkctl') +
        ['-log_dir', vtlogroot,
         '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
         'init'])

    # Register the cells via 'vtctl AddCellInfo'.
    for cell_root, cell in (('/test_nj', 'test_nj'), ('/test_ny', 'test_ny')):
        utils.run_vtctl_vtctl(['AddCellInfo', '-root', cell_root,
                               '-server_address', self.addr, cell])

    # Use UpdateCellInfo for this one, more coverage.
    utils.run_vtctl_vtctl(['UpdateCellInfo', '-root', '/test_ca',
                           '-server_address', self.addr, 'test_ca'])
def teardown(self):
    """Stop the ZK server; keep its files for inspection when --keep_logs is set."""
    from environment import run, binary_args, vtlogroot  # pylint: disable=g-import-not-at-top,g-multiple-import
    import utils  # pylint: disable=g-import-not-at-top

    self.assign_ports()
    # 'shutdown' stops the server but leaves its data; 'teardown' removes it too.
    action = 'shutdown' if utils.options.keep_logs else 'teardown'
    run(binary_args('zkctl') +
        ['-log_dir', vtlogroot,
         '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
         action],
        raise_on_error=False)
def teardown(self):
    """Stop the ZK server; keep its files when --keep_logs is set."""
    from environment import run, binary_args, vtlogroot
    import utils

    # Errors are tolerated: teardown must proceed even if ZK already died.
    zkctl_args = [
        '-log_dir', vtlogroot,
        '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
        'shutdown' if utils.options.keep_logs else 'teardown',
    ]
    run(binary_args('zkctl') + zkctl_args, raise_on_error=False)
def teardown(self):
    """Stop the ZK server; keep its files when --keep_logs is set."""
    from environment import run, binary_args, vtlogroot
    import utils

    self.assign_ports()
    if utils.options.keep_logs:
        # Stop the server but preserve its data directory for inspection.
        final_action = 'shutdown'
    else:
        final_action = 'teardown'
    run(binary_args('zkctl') + ['-log_dir', vtlogroot,
                                '-zk.cfg',
                                '1@%s:%s' % (self.hostname, self.zk_ports),
                                final_action],
        raise_on_error=False)
def test_environment_mismatch(monkeypatch, capsys):
    """On an environment mismatch the driver must still annotate the
    deployment with the desired mode and report 'environment-mismatch'."""
    adjust_stdin_json_obj["control"]["environment"]["mode"] = "mainline"
    payload = json.dumps(adjust_stdin_json_obj)
    with monkeypatch.context() as patched:
        patched.setattr(sys, 'stdin', io.StringIO(payload))
        run()
    updated_dep = pytest.apps_client.read_namespaced_deployment('web-main', 'opsani')
    desired_mode = updated_dep.metadata.annotations.get('opsani-desired-mode')
    assert desired_mode == "mainline", \
        'Desired mode annotation not found in updated deployment metadata. Dep: {}'.format(updated_dep)
    status = json.loads(capsys.readouterr().out)['status']
    assert status == 'environment-mismatch'
def setup(self, add_bad_host=False):
    """Start the local ZK server and write the cell->address client config.

    Args:
      add_bad_host: if True, append an unresolvable host to the test_ca
        server list to exercise bad-host handling in the client.
    """
    from environment import run, binary_args, vtlogroot, tmproot
    self.assign_ports()
    run(binary_args('zkctl') + [
        '-log_dir', vtlogroot,
        '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
        'init'])
    config = tmproot + '/test-zk-client-conf.json'
    with open(config, 'w') as f:
        # '%d' replaces the obsolete '%u' conversion; the output is identical
        # and matches the sibling setup() implementation.
        ca_server = 'localhost:%d' % (self.zk_client_port)
        if add_bad_host:
            ca_server += ',does.not.exists:1234'
        zk_cell_mapping = {
            'test_nj': 'localhost:%d' % (self.zk_client_port),
            'test_ny': 'localhost:%d' % (self.zk_client_port),
            'test_ca': ca_server,
            'global': 'localhost:%d' % (self.zk_client_port),
        }
        json.dump(zk_cell_mapping, f)
    # Point the ZK client library at the config we just wrote.
    os.environ['ZK_CLIENT_CONFIG'] = config
    logging.debug('Using ZK_CLIENT_CONFIG=%s', str(config))
    run(binary_args('zk') + ['touch', '-p', '/zk/test_nj/vt'])
    run(binary_args('zk') + ['touch', '-p', '/zk/test_ny/vt'])
    run(binary_args('zk') + ['touch', '-p', '/zk/test_ca/vt'])
def setup(self, add_bad_host=False):
    """Start the local ZK server and write the cell->address client config.

    Args:
      add_bad_host: if True, append an unresolvable host to the test_ca
        server list to exercise bad-host handling in the client.
    """
    from environment import run, binary_args, vtlogroot, tmproot  # pylint: disable=g-import-not-at-top,g-multiple-import
    self.assign_ports()
    run(
        binary_args('zkctl') + [
            '-log_dir', vtlogroot,
            '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
            'init'
        ])
    config = tmproot + '/test-zk-client-conf.json'
    local_addr = 'localhost:%d' % (self.zk_client_port)
    ca_server = local_addr
    if add_bad_host:
        ca_server += ',does.not.exists:1234'
    with open(config, 'w') as f:
        json.dump(
            {
                'test_nj': local_addr,
                'test_ny': local_addr,
                'test_ca': ca_server,
                'global': local_addr,
            }, f)
    # Point the ZK client library at the config we just wrote.
    os.environ['ZK_CLIENT_CONFIG'] = config
    logging.debug('Using ZK_CLIENT_CONFIG=%s', str(config))
    for vt_root in ('/zk/test_nj/vt', '/zk/test_ny/vt', '/zk/test_ca/vt'):
        run(binary_args('zk') + ['touch', '-p', vt_root])
def wipe(self):
    """Clear topology data while keeping the cell registrations."""
    from environment import run, binary_args  # pylint: disable=g-import-not-at-top,g-multiple-import
    # Only delete keyspaces/ in the global topology service, to keep
    # the 'cells' directory. So we don't need to re-add the CellInfo records.
    for target in ('/global/keyspaces', '/test_nj/*', '/test_ny/*', '/test_ca/*'):
        run(binary_args('zk') + ['-server', self.addr, 'rm', '-rf', target])
def wipe(self):
    """Recursively delete the per-cell vt subtrees in ZK."""
    from environment import run, binary_args  # pylint: disable=g-import-not-at-top,g-multiple-import
    for subtree in ('/zk/test_nj/vt/*', '/zk/test_ny/vt/*', '/zk/global/vt/*'):
        run(binary_args('zk') + ['-server', self.addr, 'rm', '-rf', subtree])
def setup(self, add_bad_host=False):
    """Reserve ports, start the local ZK server, and write the client config.

    Reserves three ZK ports and three zkocc ports, writes a cell->address
    JSON mapping consumed via ZK_CLIENT_CONFIG, and creates the per-cell
    vt roots.

    Args:
      add_bad_host: if True, append an unresolvable host to the test_ca
        server list to exercise bad-host handling in the client.
    """
    from environment import reserve_ports, run, binary_args, vtlogroot, tmproot
    self.zk_port_base = reserve_ports(3)
    self.zkocc_port_base = reserve_ports(3)
    self.hostname = socket.gethostname()
    self.zk_ports = ':'.join(str(self.zk_port_base + i) for i in range(3))
    # The client port is the last of the three reserved ZK ports.
    self.zk_client_port = self.zk_port_base + 2
    run(
        binary_args('zkctl') + [
            '-log_dir', vtlogroot,
            '-zk.cfg', '1@%s:%s' % (self.hostname, self.zk_ports),
            'init'
        ])
    config = tmproot + '/test-zk-client-conf.json'
    with open(config, 'w') as f:
        # '%d' replaces the obsolete '%u' conversion; the output is identical.
        ca_server = 'localhost:%d' % (self.zk_client_port)
        if add_bad_host:
            ca_server += ',does.not.exists:1234'
        zk_cell_mapping = {
            'test_nj': 'localhost:%d' % (self.zk_client_port),
            'test_ny': 'localhost:%d' % (self.zk_client_port),
            'test_ca': ca_server,
            'global': 'localhost:%d' % (self.zk_client_port),
            'test_nj:_zkocc': 'localhost:%d,localhost:%d,localhost:%d' %
                              tuple(self.zkocc_port_base + i for i in range(3)),
            'test_ny:_zkocc': 'localhost:%d' % (self.zkocc_port_base),
            'test_ca:_zkocc': 'localhost:%d' % (self.zkocc_port_base),
            'global:_zkocc': 'localhost:%d' % (self.zkocc_port_base),
        }
        json.dump(zk_cell_mapping, f)
    # Point the ZK client library at the config we just wrote.
    os.environ['ZK_CLIENT_CONFIG'] = config
    logging.debug('Using ZK_CLIENT_CONFIG=%s', str(config))
    run(binary_args('zk') + ['touch', '-p', '/zk/test_nj/vt'])
    run(binary_args('zk') + ['touch', '-p', '/zk/test_ny/vt'])
    run(binary_args('zk') + ['touch', '-p', '/zk/test_ca/vt'])
def wipe(self):
    """Recursively delete the per-cell vt subtrees in ZK."""
    from environment import run, binary_args  # pylint: disable=g-import-not-at-top,g-multiple-import
    base_cmd = binary_args('zk') + ['-server', self.addr, 'rm', '-rf']
    run(base_cmd + ['/zk/test_nj/vt/*'])
    run(base_cmd + ['/zk/test_ny/vt/*'])
    run(base_cmd + ['/zk/global/vt/*'])
def wipe(self):
    """Delete each cell's vt subtree, then the vt node itself."""
    from environment import run, binary_args
    vt_roots = ('/zk/test_nj/vt', '/zk/test_ny/vt', '/zk/global/vt')
    # Work around safety check on recursive delete: first clear the
    # children, then remove the (now empty) node non-recursively.
    for root in vt_roots:
        run(binary_args('zk') + ['rm', '-rf', root + '/*'])
    for root in vt_roots:
        run(binary_args('zk') + ['rm', '-f', root])
def wipe(self):
    """Delete each cell's vt subtree, then the vt node itself."""
    from environment import run, binary_args  # pylint: disable=g-import-not-at-top,g-multiple-import
    # Work around safety check on recursive delete: clear children first,
    # then remove the emptied node with a plain (non-recursive) delete.
    for flag, suffix in (('-rf', '/*'), ('-f', '')):
        run(binary_args('zk') + ['rm', flag, '/zk/test_nj/vt' + suffix])
        run(binary_args('zk') + ['rm', flag, '/zk/test_ny/vt' + suffix])
        run(binary_args('zk') + ['rm', flag, '/zk/global/vt' + suffix])
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 21 10:30:43 2019

@author: Wei-Jie
"""
import dirlister
import environment

# Use the print() function form: the original Python 2 'print expr'
# statements are a SyntaxError on Python 3, while print(single_arg)
# behaves identically on both.
print(dirlister.run())
print(environment.run())
def train(self):
    """Train self.model with an ARS-style finite-difference scheme.

    Each batch draws Gaussian noise, evaluates the perturbed model in
    both the +noise and -noise directions against the unperturbed model,
    and sets parameter gradients from the reward difference before an
    SGD step. Loosely based on
    https://github.com/kayuksel/pytorch-ars/blob/master/ars_multiprocess.py
    """
    # NOTE(review): 'env' is never used below — confirm whether
    # constructing SplendorEnv has required side effects.
    env = environment.SplendorEnv(self.batch_size)
    # Fresh instances of the same model class: one to perturb, one to
    # hold the sampled noise.
    perturbed_model = type(self.model)()
    noise_model = type(self.model)()
    if cuda_on:
        perturbed_model = perturbed_model.cuda()
        noise_model = noise_model.cuda()
    if self.half:
        self.model = self.model.half()
        perturbed_model = perturbed_model.half()
        noise_model = noise_model.half()
    # Running estimate of |reward_delta|, used to normalize step sizes.
    ave_delta = .1
    # opt = torch.optim.AdamW(self.model.parameters(), lr=self.lr, weight_decay = 1e-3, eps=1e-3)
    opt = torch.optim.SGD(self.model.parameters(), lr=self.lr, weight_decay=0)
    for epoch in range(self.epochs):
        print("Epoch:", epoch)
        total_reward = 0.0
        total_game_length = 0.0
        total_cards = 0.0
        total_points = 0.0
        for _ in trange(self.batches_per_epoch):
            # Start each direction pair from the current model weights.
            perturbed_model.load_state_dict(self.model.state_dict())
            # Randomize which seat the perturbed model plays.
            perturbed_player = random.randrange(2)
            player_1, player_2 = (
                self.model,
                perturbed_model) if perturbed_player else (perturbed_model,
                                                           self.model)
            init_score = 6 * random.randrange(2)
            top = random.randrange(2)
            with torch.no_grad():
                # based on https://github.com/kayuksel/pytorch-ars/blob/master/ars_multiprocess.py
                # Sample noise and apply it (+noise direction).
                for p_param, n_param in zip(perturbed_model.parameters(),
                                            noise_model.parameters()):
                    n_param.data.normal_(std=self.noise_scale)
                    p_param.add_(n_param.data)
                add_result, add_turns, add_cards, add_points = environment.run(
                    player_1, player_2, size=self.batch_size,
                    init_score=init_score, top=top)
                # Mean reward for the perturbed seat in the +noise run.
                add_score = add_result[perturbed_player].sum(
                ) / self.batch_size
                # Flip to the -noise direction (subtract the noise twice).
                for p_param, n_param in zip(perturbed_model.parameters(),
                                            noise_model.parameters()):
                    p_param.sub_(2 * n_param.data)
                sub_result, sub_turns, sub_cards, sub_points = environment.run(
                    player_1, player_2, size=self.batch_size,
                    init_score=init_score, top=top)
                sub_score = sub_result[perturbed_player].sum(
                ) / self.batch_size
                total_reward += add_score + sub_score
                total_game_length += add_turns.sum() + sub_turns.sum()
                total_cards += add_cards + sub_cards
                # Subtract the injected init_score from both runs
                # (2 players x 2 runs = 4 x batch_size).
                total_points += add_points + sub_points - init_score * 4 * self.batch_size
                # sub - add: SGD subtracts the gradient, so parameters
                # move toward the direction that scored higher.
                reward_delta = sub_score - add_score
                step_size = reward_delta / (ave_delta + 1e-5)
                # Exponential moving average of |reward_delta|.
                ave_delta = self.ave_delta_rate * ave_delta + (
                    1 - self.ave_delta_rate) * abs(reward_delta)
                for param, n_param in zip(self.model.parameters(),
                                          noise_model.parameters()):
                    param.grad = ((step_size / self.noise_scale) * n_param.data)
                # print((param.grad**2).mean())
                # exit()
                # Anneal the exploration noise each batch.
                self.noise_scale *= self.noise_scale_decay
                opt.step()
        # for param in self.model.parameters():
        #     print ((param.data**2).mean())
        # Per-epoch averages over both directions of every batch.
        print("Average Reward:", total_reward / (2 * self.batches_per_epoch))
        print(
            "Average Game Length:",
            total_game_length.float() /
            (2 * self.batches_per_epoch * self.batch_size))
        print(
            "Average Cards:",
            total_cards.float() /
            (2 * self.batches_per_epoch * self.batch_size))
        print(
            "Average Points:",
            total_points.float() /
            (2 * self.batches_per_epoch * self.batch_size))
        # Checkpoint the unperturbed model once per epoch.
        fname = os.path.join(self.checkpoints_dir,
                             "epoch_" + str(epoch) + ".pkl")
        torch.save(self.model, fname)
from sys import path as syspath, argv
from os import path

# Make the shared helpers importable relative to this script.
syspath.append(path.join(path.dirname(__file__), '..', 'shared'))
import environment
from configurationCustomized import location
from platform import node, system
from utilities import message

message(mode='INFO', text='On {} {} {}'.format(location, node(), system()))

# Build the environment from positional CLI arguments; DB credentials
# are intentionally left empty here. Bind to 'env' so the module name
# 'environment' is not shadowed.
env = environment.Environment(computer=argv[1],
                              user=argv[2],
                              code=argv[3],
                              branch=argv[4],
                              id_local_process=argv[5],
                              type_list=[argv[6]],
                              case_list=[[argv[7]]],
                              configuration_list=[argv[8]],
                              flow_process_list=[argv[9]],
                              element_type_list=[[argv[10]]],
                              operation_type=argv[11],
                              operation=argv[12],
                              db_user=str(),
                              db_password=str(),
                              db_host=str(),
                              db_schema=str())
env.run()
def train(self):
    """Train self.model using seeded-noise perturbations via PerturbedModel.

    Each batch seeds the wrapper's noise, plays the perturbed model
    against the base model, splits the batch results into opposite
    noise directions, and sets gradients from the reward difference
    before an AdamW step.
    """
    self.model.batch_size = self.batch_size
    if half_precision:
        self.model = self.model.half()
    # Wrapper that applies seeded noise to the base model's parameters.
    perturbed_model = model.PerturbedModel(self.model)
    # Running estimate of |reward_delta|, used to normalize step sizes.
    ave_delta = .005 * self.batch_size
    opt = torch.optim.AdamW(self.model.parameters(),
                            lr=self.lr,
                            weight_decay=0,
                            eps=1e-3)
    # opt = torch.optim.SGD(self.model.parameters(), lr=self.lr, weight_decay=0)
    for epoch in range(self.epochs):
        print("Epoch:", epoch)
        total_reward = 0.0
        total_game_length = 0.0
        total_cards = 0.0
        total_points = 0.0
        for _ in trange(self.batches_per_epoch):
            # Randomize which seat the perturbed model plays.
            perturbed_player = random.randrange(2)
            player_1, player_2 = (
                self.model,
                perturbed_model) if perturbed_player else (perturbed_model,
                                                           self.model)
            init_score = 6 * random.randrange(2)
            top = random.randrange(2)
            with torch.no_grad():
                # Fresh noise seed and scale for this batch.
                perturbed_model.set_seed()
                perturbed_model.set_noise_scale(self.noise_scale)
                result, turns, cards, points = environment.run(
                    player_1, player_2, size=self.batch_size,
                    init_score=init_score, top=top)
                # Keep only the perturbed seat's rewards.
                if cuda_on:
                    result = result[perturbed_player].cuda().float()
                else:
                    result = result[perturbed_player].float()
                total_reward += result.mean()
                total_game_length += turns.sum()
                total_cards += cards.sum()
                # Subtract the injected init_score (2 players x batch_size).
                total_points += points.sum(
                ) - init_score * 2 * self.batch_size
                # One row per noise direction; the second half of each row
                # is assumed to be the opposite-sign perturbation.
                result = result.view(self.directions, -1)
                repeat_size = result.size(1)
                reward_delta = result[:, repeat_size // 2:].sum(
                    dim=1) - result[:, :repeat_size // 2].sum(dim=1)
                step_size = reward_delta / (
                    (ave_delta + 1e-5) * self.noise_scale)
                # Exponential moving average of the delta magnitude.
                ave_delta = self.ave_delta_rate * ave_delta + (
                    1 - self.ave_delta_rate) * (reward_delta.norm(p=1))
                # Wrapper translates per-direction step sizes into grads.
                perturbed_model.set_grad(step_size)
                # for param in self.model.parameters():
                #     if param.grad is not None:
                #         print(param.grad.abs().mean())
                # Anneal the exploration noise each batch.
                self.noise_scale *= self.noise_scale_decay
                opt.step()
        # for param in self.model.parameters():
        #     print(param.data.abs().mean())
        # Per-epoch averages over all batches.
        print("Average Reward:", total_reward / (self.batches_per_epoch))
        print(
            "Average Game Length:",
            total_game_length.float() /
            (self.batches_per_epoch * self.batch_size))
        print(
            "Average Cards:",
            total_cards.float() /
            (self.batches_per_epoch * self.batch_size))
        print(
            "Average Points:",
            total_points.float() /
            (self.batches_per_epoch * self.batch_size))
        # Strip noise before checkpointing the base model.
        fname = os.path.join(self.checkpoints_dir,
                             "epoch_" + str(epoch) + ".pkl")
        perturbed_model.clear_noise()
        torch.save(self.model, fname)
def test_environment_ok(monkeypatch, capsys):
    """The driver reports status 'ok' for an unmodified input payload."""
    payload = json.dumps(adjust_stdin_json_obj)
    with monkeypatch.context() as patched:
        patched.setattr(sys, 'stdin', io.StringIO(payload))
        run()
    output = json.loads(capsys.readouterr().out)
    assert output['status'] == 'ok'