def test_gettimeofday_failed_call(self, mock_log, mock_apply, mock_cint,
                                  mock_noop):
    """Ensure NotImplementedError is raised when the recorded
    gettimeofday call has a return value indicating failure (-1).

    A failed call must not be replayed, so none of the replay helpers
    (noop, register peeks, timeval population, return-condition apply)
    may be invoked.

    Fix: removed the dead local ``mock_populate_timeval_structure`` —
    it was never attached to ``mock_cint``, so it had no effect on the
    ``mock_cint.populate_timeval_structure`` assertion below.
    """
    mock_cint.EBX = 5
    mock_cint.peek_register_unsigned = mock.Mock(return_value=666)
    syscall_id = 4
    # Fake strace object: gettimeofday({tv_sec=..., tv_usec=...}, NULL) = -1
    syscall_object = bunch.Bunch()
    syscall_object.args = [None, None, None]
    arg0_obj = bunch.Bunch()
    arg0_obj.value = '{tv_sec=11223344, '
    arg1_obj = bunch.Bunch()
    arg1_obj.value = 'tv_usec=55667788}'
    arg2_obj = bunch.Bunch()
    arg2_obj.value = 'NULL'
    syscall_object.args[0] = arg0_obj
    syscall_object.args[1] = arg1_obj
    syscall_object.args[2] = arg2_obj
    syscall_object.ret = (-1, )
    pid = 555
    # We don't want to hard code in the debug message here in case it
    # changes
    with self.assertRaises(NotImplementedError):
        syscallreplay.time_handlers.gettimeofday_entry_handler(
            syscall_id, syscall_object, pid)
    mock_log.assert_called()
    mock_noop.assert_not_called()
    mock_cint.peek_register_unsigned.assert_not_called()
    mock_cint.populate_timeval_structure.assert_not_called()
    mock_apply.assert_not_called()
def test_namebunch():
    """py.test for namebunch"""
    # Each case: (bunch to rename, requested name, expected resulting Name).
    cases = (
        (bunch.Bunch(dict(Name="", a=5)), "yay", "yay"),
        (bunch.Bunch(dict(Name=None, a=5)), "yay", None),
    )
    for abunch, aname, expected_name in cases:
        renamed = modeleditor.namebunch(abunch, aname)
        assert renamed.Name == expected_name
def GetParams(filename, mode, expdir):
    """Load experiment parameters as a Bunch.

    In ``'train'`` mode the parameters are read from ``filename`` and a
    copy is written to ``<expdir>/params.json`` for later reuse; in any
    other mode the previously saved copy in ``expdir`` is loaded.
    """
    param_filename = os.path.join(expdir, 'params.json')
    if mode == 'train':
        with open(filename, 'r') as f:
            param_dict = json.load(f)
        # Persist a snapshot alongside the experiment outputs.
        with open(param_filename, 'w') as f:
            json.dump(param_dict, f)
        return bunch.Bunch(param_dict)
    with open(param_filename, 'r') as f:
        return bunch.Bunch(json.load(f))
def test_readlink_happy_case(self, mock_log, mock_apply, mock_cint,
                             mock_cleanup, mock_noop):
    """Test that the readlink entry handler works under usual conditions.

    Fix: removed the dead local ``mock_populate_char_buffer`` — it was
    never attached to ``mock_cint``, so the later assertion already
    relied on the auto-created ``mock_cint.populate_char_buffer``.
    """
    mock_cint.EBX = 5
    mock_cint.ECX = 6
    mock_cint.ORIG_EAX = 1

    def _peek_register(pid, reg):
        # fake filename buffer
        if reg == mock_cint.EBX:
            return 6666
        # fake output buffer
        if reg == mock_cint.ECX:
            return 7777

    mock_cint.peek_register = mock.Mock(side_effect=_peek_register)
    mock_cint.copy_string = mock.Mock(return_value='test_filename.txt')
    syscall_id = 85
    # Fake strace object: readlink("test_filename.txt", <buf>, ...) = 0
    syscall_object = bunch.Bunch()
    syscall_object.args = [None, None, None]
    arg0_obj = bunch.Bunch()
    arg0_obj.value = '\"test_filename.txt\"'
    syscall_object.args[0] = arg0_obj
    arg1_obj = bunch.Bunch()
    arg1_obj.value = '\"test_filename.txt\"'
    syscall_object.args[1] = arg1_obj
    syscall_object.ret = (0, )
    pid = 555
    syscallreplay.file_handlers.readlink_entry_handler(
        syscall_id, syscall_object, pid)
    # We don't want to hard code in the debug message here in case it
    # changes
    mock_log.assert_called()
    mock_noop.assert_called_with(pid)
    peek_register_calls = [
        mock.call(pid, mock_cint.EBX),
        mock.call(pid, mock_cint.ECX)
    ]
    mock_cint.peek_register.assert_has_calls(peek_register_calls)
    mock_cleanup.assert_called_with('\"test_filename.txt\"')
    mock_cint.populate_char_buffer.assert_called_with(
        pid, 7777, 'test_filename.txt')
    mock_apply.assert_called_with(pid, syscall_object)
def get_eigen(self):
    """Compute the eigen decomposition of the (centered) training data.

    Populates ``self.eigen`` (lamb, u, variance_value, mean_vector) and
    ``self.K`` with the number of components needed to preserve
    ``self.variance_pct`` of the variance.
    """
    # Center the data: each column of vector_matrix is one flattened
    # sample vector. NOTE: this mutates self.vector_matrix in place.
    mean_vector = self.vector_matrix.mean(axis=1)
    for ii in range(self.vector_matrix.shape[1]):
        self.vector_matrix[:, ii] -= mean_vector
    shape = self.vector_matrix.shape
    if (shape[0] < shape[1]):
        # Fewer features than samples: eigendecompose the feature-space
        # matrix A A^T directly via SVD.
        _, lamb, u = np.linalg.svd(
            np.dot(self.vector_matrix, self.vector_matrix.T))
        u = u.T
    else:
        # More features than samples: decompose the smaller A^T A, then
        # map eigenvectors back into feature space via u = A v
        # (the "snapshot" trick used in eigenface-style PCA).
        _, lamb, v = np.linalg.svd(
            np.dot(self.vector_matrix.T, self.vector_matrix))
        v = v.T
        u = np.dot(self.vector_matrix, v)
    # Normalize each eigenvector column to unit length.
    norm = np.linalg.norm(u, axis=0)
    u = u / norm
    # lamb are singular values of the (squared) covariance-like matrix;
    # NOTE(review): dividing by len(lamb) makes this a scaled quantity,
    # not a textbook standard deviation — confirm intended semantics.
    standard_deviation = lamb**2 / float(len(lamb))
    variance_value = standard_deviation / np.sum(standard_deviation)
    eigen = bunch.Bunch()
    eigen.lamb = lamb
    eigen.u = u
    eigen.variance_value = variance_value
    eigen.mean_vector = mean_vector
    self.eigen = eigen
    # K = number of leading components covering variance_pct of variance.
    self.K = self.get_n_components_2_variance(self.variance_pct)
    print("Get the n_components to preserve variance: var=%.2f, K=%d"
          % (self.variance_pct, self.K))
def run(self, function, recipe_attributes=None, input=None):
    """It runs the chef executor.

    :param function: The function
    :param recipe_attributes: recipe attributes
    :param input: unused; kept for executor-interface compatibility
    :returns: a Bunch wrapping the chef-solo command result
    :raises muranoagent.exceptions.CustomException: when configuring
        chef or generating the manifest fails
    """
    self._valid_module_name()
    try:
        self._configure_chef()
        self._generate_manifest(self.module_name, self.module_recipe,
                                recipe_attributes)
    except Exception as e:
        # Fix: use str(e), not e.strerror — only OSError and friends
        # carry .strerror, so any other exception type here previously
        # raised AttributeError and masked the real failure.
        error_message = str(e)
        result = {'exitCode': 2, 'stdout': None, 'stderr': error_message}
        raise muranoagent.exceptions.CustomException(
            0,
            message='Cookbook {0} returned error code {1}: {2}'.format(
                self.module_name, self.module_recipe, error_message,
            ),
            additional_data=result)
    solo_file = os.path.join(self._path, "files", "solo.rb")
    command = 'chef-solo -j node.json -c {0}'.format(solo_file)
    result = self._execute_command(command)
    return bunch.Bunch(result)
def get_template_downloable_git(self):
    """Build a fixture execution-plan template containing one
    downloadable git file and one Chef deploy script."""
    files = {
        'mycoockbook': {
            'Name': 'mycoockbook.txt',
            'URL': 'https://github.com/tomcat.git',
            'Type': 'Downloadable',
        },
    }
    scripts = {
        'deploy': {
            'EntryPoint': 'cookbook/recipe',
            'Files': [
                'https://github.com/tomcat.git',
                {'java': 'https://github.com/java.git'},
            ],
            'Options': {
                'captureStderr': True,
                'captureStdout': True,
            },
            'Type': 'Chef',
            'Version': '1.0.0',
        },
    }
    return bunch.Bunch(ID='ID', Files=files, Scripts=scripts)
def cli(ctx, host, timeout, ports_config_file, color, verbose):
    '''
    Command line tools for Open/R.
    '''
    # Seed the shared context object with the default ports/options;
    # subcommands read everything from ctx.obj.
    defaults = {
        'client_id': platform_types.FibClient.OPENR,
        'config_store_url': Consts.CONFIG_STORE_URL,
        'decision_rep_port': Consts.DECISION_REP_PORT,
        'enable_color': color,
        'fib_agent_port': Consts.FIB_AGENT_PORT,
        'fib_rep_port': Consts.FIB_REP_PORT,
        'health_checker_cmd_port': Consts.HEALTH_CHECKER_CMD_PORT,
        'host': host,
        'kv_pub_port': Consts.KVSTORE_PUB_PORT,
        'kv_rep_port': Consts.KVSTORE_REP_PORT,
        'lm_cmd_port': Consts.LINK_MONITOR_CMD_PORT,
        'monitor_pub_port': Consts.MONITOR_PUB_PORT,
        'monitor_rep_port': Consts.MONITOR_REP_PORT,
        'prefix_mgr_cmd_port': Consts.PREFIX_MGR_CMD_PORT,
        'proto_factory': Consts.PROTO_FACTORY,
        'timeout': timeout,
        'verbose': verbose,
        'zmq_ctx': zmq.Context(),
    }
    ctx.obj = bunch.Bunch(defaults)
    # Apply per-key overrides from the optional ports config file.
    if ports_config_file:
        with open(ports_config_file, 'r') as f:
            for key, value in json.load(f).items():
                ctx.obj[key] = value
def _parse_intf_info(info):
    """Convert a thrift interface-info object into a Bunch with
    human-readable address strings."""
    v4_addrs = [sprint_addr(entry.addr) for entry in info.v4Addrs]
    v6_addrs = [sprint_addr(entry.addr) for entry in info.v6LinkLocalAddrs]
    return bunch.Bunch(
        isUp=info.isUp,
        ifIndex=info.ifIndex,
        v4Addrs=v4_addrs,
        v6Addrs=v6_addrs,
    )
def __call__(self):
    '''Returns a possible cause of death at a possible date'''
    record = {
        'ADDRESS': self._possibleStreetAddress(),
        'CAUSE': self._possibleCauseOfDeath(),
        'DATE': self._possibleDateOfDeath(),
    }
    return bunch.Bunch(record)
def from_build_info_to_build_short_info(result):
    """Reduce a full Jenkins build-info dict to a short Bunch summary,
    flattening the build's real parameters into ``parametersReal``."""
    build_info = bunch.bunchify(result)

    def _is_param_action(action):
        return action.get('_class', None) == "hudson.model.ParametersAction"

    # First ParametersAction wins; fall back to an empty mapping.
    param_lists = (a.parameters for a in build_info.actions
                   if _is_param_action(a))
    params = next(param_lists, {})
    parameters_real = {p.name: p.value for p in params}
    return bunch.Bunch({
        'queueId': result['queueId'],
        'number': result['number'],
        'timestamp': result['timestamp'],
        'displayName': result['displayName'],
        'artifacts': result['artifacts'],
        'duration': result['duration'],
        'result': result['result'],
        'fullDisplayName': result['fullDisplayName'],
        'parametersReal': parameters_real,
        'url': result['url']
    })
def interface_db_to_dict(value):
    '''
    Convert a thrift::Value representation of InterfaceDatabase to bunch
    object
    '''

    def _parse_intf_info(info):
        # Render raw thrift addresses as printable strings.
        return bunch.Bunch(
            isUp=info.isUp,
            ifIndex=info.ifIndex,
            v4Addrs=[sprint_addr(a.addr) for a in info.v4Addrs],
            v6Addrs=[sprint_addr(a.addr) for a in info.v6LinkLocalAddrs],
        )

    assert (isinstance(value, kv_store_types.Value))
    intf_db = deserialize_thrift_object(value.value,
                                        lsdb_types.InterfaceDatabase)
    interfaces = {name: _parse_intf_info(info)
                  for name, info in intf_db.interfaces.items()}
    return bunch.Bunch(thisNodeName=intf_db.thisNodeName,
                       interfaces=interfaces)
def load_config(config_file_path, selected_show):
    """Load the configuration file and validate format.

    :param config_file_path: path to the YAML config file
    :param selected_show: if not None, only this show id is loaded
    :returns: list of Bunch objects, one per selected show
    :raises ValueError: on bad top-level format, bad show id, or
        missing required keys
    """
    with open(config_file_path, 'rt', encoding='utf8') as fh:
        # Fix: yaml.load without an explicit Loader is deprecated and
        # (with older PyYAML) can instantiate arbitrary Python objects;
        # safe_load restricts input to plain YAML data.
        from_config_file = yaml.safe_load(fh)
    if not isinstance(from_config_file, dict):
        raise ValueError("Bad general config format, must be a dict/map.")
    base_keys = {
        'name', 'description', 'station', 'cron', 'timezone', 'duration',
        'image_url'
    }
    config_data = []
    for show_id, show_data in from_config_file.items():
        if not show_id.isalnum():
            raise ValueError(
                "Bad format for show id {!r} (must be alphanumerical)".format(
                    show_id))
        if selected_show is not None and selected_show != show_id:
            logger.warning("Ignoring config because not selected show: %r",
                           show_id)
            continue
        missing = base_keys - set(show_data)
        if missing:
            raise ValueError("Missing keys {} for show id {}".format(
                missing, show_id))
        config_data.append(bunch.Bunch(show_data, id=show_id))
    return config_data
def setUp(self):
    super(ExPlanDownloableWrongFormat, self).setUp()
    # Execution plan whose FormatVersion is deliberately unsupported.
    plan = bunch.Bunch()
    plan.ID = 'ID'
    plan.FormatVersion = '0.0.0'
    self.execution_plan = plan
    self.addCleanup(delattr, self, 'execution_plan')
def new_node(self, name=None, tokens=None):
    """Create a fresh parse-tree node.

    :param name: optional node name
    :param tokens: optional list of tokens; a fresh list is created
        when omitted. (Fix: the previous default ``tokens=list()`` was
        the classic mutable-default-argument bug — the same list object
        was shared by every call that omitted ``tokens``.)
    :returns: a Bunch with name, tokens, attrs and children fields
    """
    return bunch.Bunch(
        name=name,
        tokens=[] if tokens is None else tokens,
        attrs=[],
        children=[],
    )
def get_eigen(self):
    """Compute the eigen decomposition (eigenfaces) of the training data.

    Populates ``self.eigen`` (lamb, u, variance_proportion, mean_vector)
    and ``self.K`` with the number of components needed to preserve
    ``self.variance_pct`` of the variance.
    """
    # Center the data: each column of vector_matrix is one flattened
    # training image. NOTE: mutates self.vector_matrix in place.
    mean_vector = self.vector_matrix.mean(axis=1)
    for ii in range(self.vector_matrix.shape[1]):
        self.vector_matrix[:, ii] -= mean_vector
    shape = self.vector_matrix.shape
    # if there is less number of training images. Usually go for 'else' branch.
    if (shape[0] < shape[1]):
        _, lamb, u = np.linalg.svd(
            np.dot(self.vector_matrix, self.vector_matrix.T))
        u = u.T
    else:
        # Decompose the smaller A^T A instead, then map the eigenvectors
        # back to image space with u = A v (the "snapshot" trick).
        _, lamb, v = np.linalg.svd(
            np.dot(self.vector_matrix.T, self.vector_matrix))
        v = v.T
        u = np.dot(self.vector_matrix, v)
    # Normalizing u to ||u||=1
    norm = np.linalg.norm(u, axis=0)
    u = u / norm
    # NOTE(review): lamb**2 / n is a scaled quantity, not a textbook
    # standard deviation — confirm intended semantics.
    standard_deviation = lamb**2 / float(len(lamb))
    variance_proportion = standard_deviation / np.sum(standard_deviation)
    eigen = bunch.Bunch()
    eigen.lamb = lamb
    eigen.u = u
    eigen.variance_proportion = variance_proportion
    eigen.mean_vector = mean_vector
    self.eigen = eigen
    # The top K eigen value that represent 'most' of the variance in the training data
    self.K = self.get_number_of_components_to_preserve_variance(
        self.variance_pct)
    print("Get_number_of_components_to_preserve_variance: var=%.2f, K=%d"
          % (self.variance_pct, self.K))
def _lm_client():
    # Link-monitor client pointed at the local test server.
    client_config = {
        "ctx": zmq.Context(),
        "host": "localhost",
        "lm_cmd_port": 5000,
    }
    lm_client_inst = lm_client.LMClient(bunch.Bunch(client_config))
    self.assertEqual(lm_client_inst.dump_links(), dump_links_cache)
def run(self, function, recipe_attributes=None, input=None):
    """It runs the puppet executor.

    :param function: The function
    :param recipe_attributes: recipe attributes
    :param input: unused; kept for executor-interface compatibility
    :returns: a Bunch wrapping the puppet-apply command result
    :raises muranoagent.exceptions.CustomException: when configuring
        puppet or generating the files fails
    """
    self._valid_module_name()
    try:
        self._configure_puppet()
        self._generate_files(self.module_name, self.module_recipe,
                             recipe_attributes)
    except Exception as e:
        # Fix: use str(e), not e.strerror — only OSError and friends
        # carry .strerror, so any other exception type here previously
        # raised AttributeError and masked the real failure.
        error_message = str(e)
        result = {'exitCode': 2, 'stdout': None, 'stderr': error_message}
        raise muranoagent.exceptions.CustomException(
            0,
            message='Module %s returned error code %s: %s' %
                    (self.module_name, self.module_recipe, error_message),
            additional_data=result)
    path = os.path.abspath(self._path + "/files/")
    command = 'puppet apply --hiera_config=hiera.yaml --modulepath ' \
              '{0} manifest.pp'.format(path)
    result = self._execute_command(command)
    return bunch.Bunch(result)
def _prefix_mgr_client():
    # End-to-end exercise of add / view / withdraw against a local
    # PrefixMgr test server. Statement order matters: each assertion
    # checks the state left by the previous client call.
    prefix_mgr_client_inst = prefix_mgr_client.PrefixMgrClient(
        bunch.Bunch({
            "ctx": zmq.Context(),
            "host": "localhost",
            "prefix_mgr_cmd_port": 5000,
        }))
    # Advertise a loopback prefix and confirm success.
    resp = prefix_mgr_client_inst.add_prefix(
        ["2620:0:1cff:dead:bef1:ffff:ffff:4/128"], "LOOPBACK")
    self.assertTrue(resp.success)
    # The advertised prefix must appear in the view.
    resp = prefix_mgr_client_inst.view_prefix()
    prefix_entry4 = lsdb_types.PrefixEntry(
        prefix=ip_str_to_prefix(
            "2620:0:1cff:dead:bef1:ffff:ffff:4/128"),
        type=network_types.PrefixType.LOOPBACK,
    )
    self.assertTrue(resp.success)
    self.assertTrue(prefix_entry4 in resp.prefixes)
    # Withdraw it and confirm it disappears from the view.
    resp = prefix_mgr_client_inst.withdraw_prefix(
        ["2620:0:1cff:dead:bef1:ffff:ffff:4/128"])
    self.assertTrue(resp.success)
    resp = prefix_mgr_client_inst.view_prefix()
    self.assertTrue(resp.success)
    self.assertFalse(prefix_entry4 in resp.prefixes)
    # Withdrawing a prefix that was never advertised must fail.
    resp = prefix_mgr_client_inst.withdraw_prefix(
        ["2620:0:1cff:dead:bef1:ffff:ffff:5/128"])
    self.assertFalse(resp.success)
def cli(ctx, host, timeout, ports_config_file, color, verbose):
    """
    Command line tools for Open/R.
    """
    # Seed the shared context object with the default ports/options;
    # subcommands read everything from ctx.obj.
    default_options = {
        "client_id": platform_types.FibClient.OPENR,
        "config_store_url": Consts.CONFIG_STORE_URL,
        "decision_rep_port": Consts.DECISION_REP_PORT,
        "enable_color": color,
        "fib_agent_port": Consts.FIB_AGENT_PORT,
        "fib_rep_port": Consts.FIB_REP_PORT,
        "health_checker_cmd_port": Consts.HEALTH_CHECKER_CMD_PORT,
        "host": host,
        "kv_pub_port": Consts.KVSTORE_PUB_PORT,
        "kv_rep_port": Consts.KVSTORE_REP_PORT,
        "lm_cmd_port": Consts.LINK_MONITOR_CMD_PORT,
        "monitor_pub_port": Consts.MONITOR_PUB_PORT,
        "monitor_rep_port": Consts.MONITOR_REP_PORT,
        "prefix_mgr_cmd_port": Consts.PREFIX_MGR_CMD_PORT,
        "proto_factory": Consts.PROTO_FACTORY,
        "timeout": timeout,
        "verbose": verbose,
        "zmq_ctx": zmq.Context(),
    }
    ctx.obj = bunch.Bunch(default_options)
    # Apply per-key overrides from the optional ports config file.
    if ports_config_file:
        with open(ports_config_file, "r") as f:
            for key, value in json.load(f).items():
                ctx.obj[key] = value
def handle_request_data(self, request, response, error=False):
    """Queue a record of one request/response exchange.

    The record carries the timestamp, the outgoing request ('o'), the
    incoming response ('i') and an error flag ('e').
    """
    now = datetime.datetime.now()
    # A status outside the 2xx/3xx range counts as an error, unless the
    # caller already flagged one.
    if not error and (response.status < 200 or response.status >= 400):
        error = True
    self.q.put(bunch.Bunch({'t': now, 'o': request, 'i': response,
                            'e': error}))
def test_verify_execution_plan_wrong_format(self):
    # A plan with an unsupported FormatVersion must be rejected.
    template = bunch.Bunch()
    template.ID = 'ID'
    template.FormatVersion = '0.0.0'
    self.assertRaises(exc.IncorrectFormat,
                      self.agent._verify_plan, template)
def run_action(name, params=None, log_level=logging.DEBUG, output_length=1000):
    """Run the named action, capturing its combined log/stdout output.

    :param name: key into the global ``actions`` registry
    :param params: optional dict of command-line-style parameters.
        (Fix: the previous default ``params={}`` was a shared
        mutable-default-argument hazard.)
    :param log_level: level attached to the capturing log handler
    :param output_length: number of trailing output characters returned
    :returns: the last ``output_length`` characters of captured output
    :raises ActionError: with the captured output when the action fails
    """
    db = db_factory.get_database()
    action = actions[name]
    cmdline = bunch.Bunch({} if params is None else params)
    with captured_output_combined() as cap_stdout:
        # Logger and fake stdout to capture the output of the action
        root = logging.getLogger()
        ch = logging.StreamHandler(cap_stdout)
        ch.setLevel(log_level)
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        ch.setFormatter(formatter)
        root.addHandler(ch)
        # NOTE(review): ch is never removed from the root logger, so
        # repeated calls accumulate handlers; consider
        # root.removeHandler(ch) in a finally block.
        try:
            action.run(cmdline, db)
        except Exception as e:
            logging.exception(e)
            db.session.rollback()
            output = cap_stdout.getvalue()
            # There seems to be no way to pass a FAILURE status other than
            # raising an exception. self.update_status(status="FAILURE")
            # didn't work.
            raise ActionError(output[-output_length:])
        db.session.rollback()
        output = cap_stdout.getvalue()
    return output[-output_length:]
def run_conf(d, conf_filename: str):
    """Run every automation step described by a YAML config file."""
    d.healthcheck()
    # Auto-dismiss permission dialogs while the run is in progress.
    d.xpath.when("允许").click()
    d.xpath.watch_background(2.0)

    raw_conf = yaml.load(read_file_content(conf_filename),
                         Loader=yaml.SafeLoader)
    defaults = {
        "output_directory": "output",
        "action_before_delay": 0,
        "action_after_delay": 0,
        "skip_cleanup": False,
    }
    for key, value in defaults.items():
        raw_conf.setdefault(key, value)
    cf = bunch.Bunch(raw_conf)

    print("Author:", cf.author)
    print("Description:", cf.description)
    print("Package:", cf.package)
    logger.debug("action_delay: %.1f / %.1f", cf.action_before_delay,
                 cf.action_after_delay)
    app = d.session(cf.package)
    # Run each step with the configured before/after pauses.
    for step in cf.steps:
        time.sleep(cf.action_before_delay)
        run_step(cf, app, step)
        time.sleep(cf.action_after_delay)
    if not cf.skip_cleanup:
        app.close()
def interface_db_to_dict(value):
    """
    Convert a thrift::Value representation of InterfaceDatabase to bunch
    object
    """

    def _parse_intf_info(info):
        # Newer schema exposes `networks`; fall back to the legacy
        # v4Addrs / v6LinkLocalAddrs fields when it is absent.
        if info.networks is not None:
            addrs = [ipnetwork.sprint_addr(v.prefixAddress.addr)
                     for v in info.networks]
        else:
            addrs = ([ipnetwork.sprint_addr(v.addr) for v in info.v4Addrs] +
                     [ipnetwork.sprint_addr(v.addr)
                      for v in info.v6LinkLocalAddrs])
        return bunch.Bunch(isUp=info.isUp, ifIndex=info.ifIndex, Addrs=addrs)

    assert isinstance(value, kv_store_types.Value)
    intf_db = deserialize_thrift_object(value.value,
                                        lsdb_types.InterfaceDatabase)
    interfaces = {name: _parse_intf_info(info)
                  for name, info in intf_db.interfaces.items()}
    return bunch.Bunch(thisNodeName=intf_db.thisNodeName,
                       interfaces=interfaces)
def _kv_store_client():
    # Exercises get/dump against a local KvStore test server. Statement
    # order matters: assertions check state from the fixture values
    # (value1, value3, value5) set up by the enclosing test.
    # NOTE(review): this config uses the key "kv_store_rep_port" while
    # other configs in this codebase use "kv_rep_port" — confirm which
    # key KvStoreClient actually reads.
    kv_store_client_inst = kvstore_client.KvStoreClient(
        bunch.Bunch({
            "ctx": zmq.Context(),
            "host": "localhost",
            "kv_store_rep_port": 5000,
        }))
    # Targeted get: only the keys that exist should come back.
    publication = kv_store_client_inst.get_keys(
        ["san jose 1", "san francisco 1", "virginia"])
    key_values = publication.keyVals
    self.assertEqual(key_values, {
        "san jose 1": value1,
        "san francisco 1": value5
    })
    # Exact-prefix dump returns the single matching key.
    publication = kv_store_client_inst.dump_all_with_filter(
        "san jose 3")
    key_values = publication.keyVals
    self.assertEqual(key_values, {"san jose 3": value3})
    # Broader prefix matches several keys.
    publication = kv_store_client_inst.dump_all_with_filter("san jose")
    key_values = publication.keyVals
    self.assertEqual(len(key_values), 4)
    # No filter: the full store is returned.
    publication = kv_store_client_inst.dump_all_with_filter()
    key_values = publication.keyVals
    self.assertEqual(len(key_values), 5)
    # Non-matching filter yields nothing.
    publication = kv_store_client_inst.dump_all_with_filter("virginia")
    key_values = publication.keyVals
    self.assertEqual(len(key_values), 0)
def read_config(fp):
    """Merge all YAML documents read from ``fp`` into one Bunch config.

    Later documents override keys from earlier ones.

    Fix: the original used a Python-2-only ``print`` statement, which is
    a SyntaxError on Python 3; it also called ``bunch.bunchify`` twice
    per document.
    """
    config = bunch.Bunch()
    for document in yaml.safe_load_all(fp):
        bunched = bunch.bunchify(document)
        # Debug print kept for parity with the original behavior.
        print(bunched)
        config.update(bunched)
    return config
def _analyse(function):
    """ Return complexity analysis for function. """
    # Raw metrics.
    raw = radon.raw.analyze(function.da_text)
    # McCabe cyclomatic complexity (exactly one result expected for a
    # single function node).
    cc_results = radon.complexity.cc_visit_ast(function)
    assert len(cc_results) == 1
    mccabe = cc_results[0]
    # Halstead maintainability index.
    halstead = radon.metrics.h_visit_ast(function)
    # Per-comment-line ("pcl") ratios; the +1 guards against division
    # by zero when there are no comments.
    comment_divisor = float(raw.comments) + 1.0
    ratios = bunch.Bunch({
        'lloc_pcl': float(raw.lloc) / comment_divisor,
        'mccabe_pcl': float(mccabe.complexity) / comment_divisor,
        'effort_pcl': float(halstead.effort) / comment_divisor
    })
    return (raw, mccabe, halstead, ratios)
def _fib_client():
    # FIB client wired to the local test server on port 5000.
    client_config = {
        "ctx": zmq.Context(),
        "host": "localhost",
        "fib_rep_port": 5000,
    }
    fib_client_inst = fib_client.FibClient(bunch.Bunch(client_config))
    self.assertEqual(fib_client_inst.get_route_db(), route_db_cache)
def test_identity_params_v2(self, mock_get_project, mock_api_version):
    """Keystone v2 identity params must carry tenant_id and no domain."""
    mock_get_project.return_value = bunch.Bunch(id=1234)
    mock_api_version.return_value = '2'
    params = self.cloud._get_identity_params(domain_id='foo', project='bar')
    self.assertIn('tenant_id', params)
    self.assertEqual(1234, params['tenant_id'])
    self.assertNotIn('domain', params)