def Load(cls, filename):
  """Reads a QA configuration file and builds a configuration object.

  @type filename: string
  @param filename: Path to configuration file
  @rtype: L{_QaConfig}

  """
  data = serializer.LoadJson(utils.ReadFile(filename))

  # Apply a JSON Patch (RFC6902) from the file _PATCH_JSON when present;
  # a missing patch file is simply ignored
  try:
    patch_doc = serializer.LoadJson(utils.ReadFile(_PATCH_JSON))
    if patch_doc:
      jsonpatch_mod = __import__("jsonpatch", fromlist=[])
      data = jsonpatch_mod.apply_patch(data, patch_doc)
  except IOError:
    pass
  except ImportError:
    raise qa_error.Error(
        "If you want to use the QA JSON patching feature,"
        " you need to install Python modules"
        " 'jsonpatch' and 'jsonpointer'.")

  config = cls(dict(_ConvertResources(item) for item in data.items()))
  # pylint: disable=E1103
  config.Validate()

  return config
def Load(cls, filename):
  """Loads a configuration file and produces a configuration object.

  @type filename: string
  @param filename: Path to configuration file
  @rtype: L{_QaConfig}

  """
  data = serializer.LoadJson(utils.ReadFile(filename))

  # Apply JSON Patches (RFC6902), if any are available
  try:
    patch_map = _QaConfig.LoadPatches()
    # Only pull in the module when at least one non-empty patch exists
    if any(patch_map.values()):
      jsonpatch_mod = __import__("jsonpatch", fromlist=[])
      _QaConfig.ApplyPatches(data, jsonpatch_mod, patch_map)
  except IOError:
    pass
  except ImportError:
    raise qa_error.Error(
        "For the QA JSON patching feature to work, you "
        "need to install Python modules 'jsonpatch' and "
        "'jsonpointer'.")

  config = cls(dict(_ConvertResources(item) for item in data.items()))
  # pylint: disable=E1103
  config.Validate()

  return config
def HandleRequest(self, req):
  """Handles a request.

  Deserializes the JSON request body (when one is present), then invokes
  the handler function chosen for this request context, translating
  RPC-level failures into HTTP error responses.

  """
  ctx = self._GetRequestContext(req)

  # Deserialize request parameters
  if req.request_body:
    # RFC2616, 7.2.1: Any HTTP/1.1 message containing an entity-body SHOULD
    # include a Content-Type header field defining the media type of that
    # body. [...] If the media type remains unknown, the recipient SHOULD
    # treat it as type "application/octet-stream".
    req_content_type = req.request_headers.get(http.HTTP_CONTENT_TYPE,
                                               http.HTTP_APP_OCTET_STREAM)
    # Only JSON bodies are accepted
    if req_content_type.lower() != http.HTTP_APP_JSON.lower():
      raise http.HttpUnsupportedMediaType()

    try:
      ctx.body_data = serializer.LoadJson(req.request_body)
    except Exception:
      raise http.HttpBadRequest(message="Unable to parse JSON data")
  else:
    ctx.body_data = None

  try:
    result = ctx.handler_fn()
  except rpcerr.TimeoutError:
    # Backend did not answer in time
    raise http.HttpGatewayTimeout()
  except rpcerr.ProtocolError, err:
    raise http.HttpBadGateway(str(err))
def ParseRequest(msg):
  """Parses a request message.

  @type msg: string
  @param msg: JSON-encoded request
  @return: tuple of (method, args, version)
  @raise ProtocolError: if the message is not valid JSON or lacks the
    mandatory fields

  """
  try:
    parsed = serializer.LoadJson(msg)
  except ValueError as exc:
    raise ProtocolError("Invalid RPC request (parsing error): %s" % exc)

  logging.debug("RPC request: %s", parsed)

  # Anything other than a mapping is a malformed request
  if not isinstance(parsed, dict):
    logging.error("RPC request not a dict: %r", msg)
    raise ProtocolError("Invalid RPC request (not a dict)")

  # pylint: disable=E1103
  method = parsed.get(KEY_METHOD, None)
  args = parsed.get(KEY_ARGS, None)
  version = parsed.get(KEY_VERSION, None)

  # Method and arguments are mandatory, the version is optional
  if method is None or args is None:
    logging.error("RPC request missing method or arguments: %r", msg)
    raise ProtocolError(("Invalid RPC request (no method or arguments"
                         " in request): %r") % msg)

  return (method, args, version)
def testPreProc(self):
  """Checks that a call definition's pre-processing function is applied
  to each argument before the request is sent.

  """
  def _VerifyRequest(req):
    # Echo the (already pre-processed) request body back as the payload
    req.success = True
    req.resp_status_code = http.HTTP_OK
    req.resp_body = serializer.DumpJson((True, req.post_data))

  resolver = rpc._StaticResolver([
    "192.0.2.30",
    "192.0.2.35",
    ])

  nodes = [
    "node30.example.com",
    "node35.example.com",
    ]

  def _PreProc(node, data):
    # One positional argument per call; append the node name so the
    # effect of pre-processing is observable in the echoed payload
    self.assertEqual(len(data), 1)
    return data[0] + node

  cdef = ("test_call", NotImplemented, None, constants.RPC_TMO_NORMAL, [
    ("arg0", None, NotImplemented),
    ], _PreProc, None, NotImplemented)

  http_proc = _FakeRequestProcessor(_VerifyRequest)
  client = rpc._RpcClientBase(resolver, NotImplemented,
                              _req_process_fn=http_proc)
  for prefix in ["foo", "bar", "baz"]:
    result = client._Call(cdef, nodes, [prefix])
    self.assertEqual(len(result), len(nodes))
    for (idx, (node, res)) in enumerate(result.items()):
      self.assertFalse(res.fail_msg)
      # The echoed argument must be the original prefix plus the node name
      self.assertEqual(serializer.LoadJson(res.payload), prefix + node)
def _CombineResults(results, requests, procedure):
  """Merges per-node HTTP call outcomes into the results dictionary.

  Successful HTTP responses are decoded into L{RpcResult} payloads; any
  failure is recorded as a failed result carrying the best available
  error message.

  """
  for node_name, http_req in requests.items():
    if http_req.success and http_req.resp_status_code == http.HTTP_OK:
      node_result = RpcResult(data=serializer.LoadJson(http_req.resp_body),
                              node=node_name, call=procedure)
    else:
      # TODO: Better error reporting
      errmsg = http_req.error or http_req.resp_body

      logging.error("RPC error in %s on node %s: %s",
                    procedure, node_name, errmsg)
      node_result = RpcResult(data=errmsg, failed=True, node=node_name,
                              call=procedure)

    results[node_name] = node_result

  return results
def _GetBodyTestResponse(self, test_data, req):
  # Verify the request was dispatched to the expected endpoint...
  self.assertEqual(req.host, "192.0.2.84")
  self.assertEqual(req.port, 18700)
  self.assertEqual(req.path, "/upload_file")
  # ...and carries the expected JSON-encoded body
  self.assertEqual(serializer.LoadJson(req.post_data), test_data)
  # Fake a successful (status, payload) reply
  req.success = True
  req.resp_status_code = http.HTTP_OK
  req.resp_body = serializer.DumpJson((True, None))
def ParseRequest(msg):
  """Parses a request message.

  @type msg: string
  @param msg: JSON-encoded request
  @raise ProtocolError: if the message is not valid JSON

  """
  try:
    request = serializer.LoadJson(msg)
  except ValueError, err:
    raise ProtocolError("Invalid RPC request (parsing error): %s" % err)
def perspective_run_oob(params):
  """Runs oob on node.

  """
  oob_output = backend.RunOob(params[0], params[1], params[2], params[3])
  # An empty output means the command produced no parseable result
  return serializer.LoadJson(oob_output) if oob_output else None
def get_instance_nics(instance, logger):
  """Query Ganeti to get the instance's NICs.

  Get instance's NICs from Ganeti configuration data. If running on master,
  query Ganeti via Ganeti CLI client. Otherwise, get the nics from Ganeti
  configuration file.

  @type instance: string
  @param instance: the name of the instance
  @rtype: List of dicts
  @return: Dictionary containing the instance's NICs. Each dictionary
           contains the following keys: 'network', 'ip', 'mac', 'mode',
           'link' and 'firewall'

  """
  try:
    client = cli.GetClient()
    fields = ["nic.networks", "nic.ips", "nic.macs", "nic.modes",
              "nic.links", "tags"]
    info = client.QueryInstances([instance], fields, use_locking=False)
    networks, ips, macs, modes, links, tags = info[0]
    nic_keys = ["network", "ip", "mac", "mode", "link"]
    nics = zip(networks, ips, macs, modes, links)
    nics = map(lambda x: dict(zip(nic_keys, x)), nics)
  except ganeti_errors.OpPrereqError:
    # Not running on master! Load the conf file
    raw_data = utils.ReadFile(constants.CLUSTER_CONF_FILE)
    config = serializer.LoadJson(raw_data)
    i = config["instances"][instance]
    nics = []
    for nic in i["nics"]:
      params = nic.pop("nicparams")
      nic["mode"] = params["mode"]
      nic["link"] = params["link"]
      nics.append(nic)
    tags = i.get("tags", [])

  # Get firewall from instance Tags
  # Tags are of the form synnefo:network:N:firewall_mode
  for tag in tags:
    t = tag.split(":")
    if t[0:2] == ["synnefo", "network"]:
      if len(t) != 4:
        # Fixed typo in log message: was "Malformed synefo tag"
        logger.error("Malformed synnefo tag %s", tag)
        continue
      try:
        index = int(t[2])
        nics[index]["firewall"] = t[3]
      except ValueError:
        logger.error("Malformed synnefo tag %s", tag)
      except IndexError:
        # index is always bound here: int() succeeded before indexing failed
        logger.error("Found tag %s for non-existent NIC %d", tag, index)

  return nics
def testFormatRequest(self):
  """Verifies the wire format of requests without an explicit version.

  """
  cases = [("a", []), ("b", [1, 2, 3])]
  for method, args in cases:
    decoded = serializer.LoadJson(client.FormatRequest(method, args))

    # The mandatory keys must be present; no version key without one given
    self.assertTrue(client.KEY_METHOD in decoded)
    self.assertTrue(client.KEY_ARGS in decoded)
    self.assertTrue(client.KEY_VERSION not in decoded)

    self.assertEqualValues(decoded, {
      client.KEY_METHOD: method,
      client.KEY_ARGS: args,
      })
def ParseResponse(msg): """Parses a response message. """ # Parse the result try: data = serializer.LoadJson(msg) except KeyboardInterrupt: raise except Exception, err: raise ProtocolError("Error while deserializing response: %s" % str(err))
def BuildFromJsonString(json_string):
  """Deserializes a JSON-encoded string into a L{QmpMessage}.

  @type json_string: str
  @param json_string: JSON string representing the message
  @rtype: L{QmpMessage}
  @return: a L{QmpMessage} built from json_string

  """
  return QmpMessage(serializer.LoadJson(json_string))
def testFormatRequestWithVersion(self):
  """Checks that an explicitly given version appears in the request.

  """
  for method, args, version in [("fn1", [], 123), ("fn2", [1, 2, 3], 999)]:
    msg = client.FormatRequest(method, args, version=version)

    msgdata = serializer.LoadJson(msg)

    # assertTrue replaces the deprecated assert_ alias (removed in newer
    # Python versions); matches the style used by testFormatRequest
    self.assertTrue(client.KEY_METHOD in msgdata)
    self.assertTrue(client.KEY_ARGS in msgdata)
    self.assertTrue(client.KEY_VERSION in msgdata)

    self.assertEqualValues(msgdata, {
      client.KEY_METHOD: method,
      client.KEY_ARGS: args,
      client.KEY_VERSION: version,
      })
def testFormatResponse(self):
  """Verifies the wire format of responses without an explicit version.

  """
  for success, result in [(False, "error"), (True, "abc"), (True, {
    "a": 123,
    "b": None,
    })]:
    msg = client.FormatResponse(success, result)

    msgdata = serializer.LoadJson(msg)

    # assertTrue replaces the deprecated assert_ alias (removed in newer
    # Python versions)
    self.assertTrue(client.KEY_SUCCESS in msgdata)
    self.assertTrue(client.KEY_RESULT in msgdata)
    self.assertTrue(client.KEY_VERSION not in msgdata)

    self.assertEqualValues(msgdata, {
      client.KEY_SUCCESS: success,
      client.KEY_RESULT: result,
      })
def testFormatResponseWithVersion(self):
  """Checks that an explicitly given version appears in the response.

  """
  for success, result, version in [(False, "error", 123), (True, "abc", 999),
                                   (True, {
    "a": 123,
    "b": None,
    }, 2010)]:
    msg = client.FormatResponse(success, result, version=version)

    msgdata = serializer.LoadJson(msg)

    # assertTrue replaces the deprecated assert_ alias (removed in newer
    # Python versions)
    self.assertTrue(client.KEY_SUCCESS in msgdata)
    self.assertTrue(client.KEY_RESULT in msgdata)
    self.assertTrue(client.KEY_VERSION in msgdata)

    self.assertEqualValues(msgdata, {
      client.KEY_SUCCESS: success,
      client.KEY_RESULT: result,
      client.KEY_VERSION: version,
      })
def _VerifyRequest(req):
  # NOTE(review): closure — tmpfile, nodes, data, st and self come from the
  # enclosing test method
  # The upload call sends a single-element tuple of file metadata
  (uldata, ) = serializer.LoadJson(req.post_data)
  self.assertEqual(len(uldata), 7)
  self.assertEqual(uldata[0], tmpfile.name)
  self.assertEqual(list(uldata[1]), list(rpc._Compress(nodes[0], data)))
  self.assertEqual(uldata[2], st.st_mode)
  self.assertEqual(uldata[3], "user%s" % os.getuid())
  self.assertEqual(uldata[4], "group%s" % os.getgid())
  self.assertTrue(uldata[5] is not None)
  self.assertEqual(uldata[6], st.st_mtime)
  # Fake a successful reply
  req.success = True
  req.resp_status_code = http.HTTP_OK
  req.resp_body = serializer.DumpJson((True, None))
def LoadPatch(patch_dict, rel_path):
  """Reads one patch file and records it under its relative path.

  @type patch_dict: dict of string to dict
  @param patch_dict: A dictionary storing patches by relative path.
  @type rel_path: string
  @param rel_path: The relative path to the patch, might or might not exist.

  """
  full_path = os.path.join(_QA_BASE_PATH, rel_path)
  try:
    patch_dict[rel_path] = serializer.LoadJson(utils.ReadFile(full_path))
  except IOError:
    # A missing patch file is silently ignored
    pass
def _Test(self, method, path, headers, reqbody, user_fn=NotImplemented,
          luxi_client=NotImplemented, reqauth=False):
  """Sends one request through a mocked RAPI server and decodes the reply.

  Returns a tuple of (status code, response headers, parsed JSON body).

  """
  rm = rapi.testutils._RapiMock(BasicAuthenticator(user_fn), luxi_client,
                                reqauth=reqauth)

  (resp_code, resp_headers, resp_body) = \
    rm.FetchResponse(path, method, http.ParseHeaders(StringIO(headers)),
                     reqbody)

  # Every RAPI response must carry these headers regardless of outcome
  self.assertTrue(resp_headers[http.HTTP_DATE])
  self.assertEqual(resp_headers[http.HTTP_CONNECTION], "close")
  self.assertEqual(resp_headers[http.HTTP_CONTENT_TYPE], http.HTTP_APP_JSON)
  self.assertEqual(resp_headers[http.HTTP_SERVER], http.HTTP_GANETI_VERSION)

  return (resp_code, resp_headers, serializer.LoadJson(resp_body))
def _ParseRbdShowmappedJson(output, volume_name):
  """Parse the json output of `rbd showmapped'.

  This method parses the json output of `rbd showmapped' and returns the rbd
  block device path (e.g. /dev/rbd0) that matches the given rbd volume.

  @type output: string
  @param output: the json output of `rbd showmapped'
  @type volume_name: string
  @param volume_name: the name of the volume whose device we search for
  @rtype: string or None
  @return: block device path if the volume is mapped, else None

  """
  try:
    devices = serializer.LoadJson(output)
  except ValueError, err:
    base.ThrowError("Unable to parse JSON data: %s" % err)
def _ParseRbdShowmappedJson(output, volume_pool, volume_name):
  """Parse the json output of `rbd showmapped'.

  This method parses the json output of `rbd showmapped' and returns the rbd
  block device path (e.g. /dev/rbd0) that matches the given rbd volume.

  @type output: string
  @param output: the json output of `rbd showmapped'
  @type volume_pool: string
  @param volume_pool: name of the pool in which we search
  @type volume_name: string
  @param volume_name: name of the volume whose device we search for
  @rtype: string or None
  @return: block device path if the volume is mapped, else None

  """
  try:
    devices = serializer.LoadJson(output)
  except ValueError as err:
    base.ThrowError("Unable to parse JSON data: %s" % err)

  # since ceph mimic the json output changed from dict to list
  if isinstance(devices, dict):
    devices = list(devices.values())

  rbd_dev = None
  for d in devices:
    try:
      name = d["name"]
    except KeyError:
      base.ThrowError("'name' key missing from json object %s", devices)

    try:
      pool = d["pool"]
    except KeyError:
      base.ThrowError("'pool' key missing from json object %s", devices)

    if name == volume_name and pool == volume_pool:
      # A volume mapped twice indicates an inconsistent mapping state
      if rbd_dev is not None:
        base.ThrowError("rbd volume %s is mapped more than once",
                        volume_name)
      rbd_dev = d["device"]

  return rbd_dev
def ParseResponse(msg):
  """Parses a response message.

  @type msg: string
  @param msg: JSON-encoded response
  @return: tuple of (success, result, version)
  @raise ProtocolError: if the message cannot be deserialized or lacks
    the mandatory keys

  """
  # Parse the result
  try:
    response = serializer.LoadJson(msg)
  except KeyboardInterrupt:
    # Never swallow a user interrupt
    raise
  except Exception as exc:
    raise ProtocolError("Error while deserializing response: %s" % str(exc))

  # Validate response: it must be a mapping carrying at least the
  # success flag and the result
  is_valid = (isinstance(response, dict) and
              KEY_SUCCESS in response and
              KEY_RESULT in response)
  if not is_valid:
    raise ProtocolError("Invalid response from server: %r" % response)

  # pylint: disable=E1103
  return (response[KEY_SUCCESS], response[KEY_RESULT],
          response.get(KEY_VERSION, None))
def testArgumentEncoder(self):
  """Checks that per-argument encoders are applied before transmission.

  """
  (AT1, AT2) = range(1, 3)

  resolver = rpc._StaticResolver([
    "192.0.2.5",
    "192.0.2.6",
    ])

  nodes = [
    "node5.example.com",
    "node6.example.com",
    ]

  encoders = {
    AT1: lambda _, value: hex(value),
    AT2: lambda _, value: hash(value),
    }

  # NOTE(review): the last two arguments both carry the name "arg1" —
  # presumably a copy/paste slip; the names appear unused by the encoder
  # lookup, which keys on the argument type. Confirm before renaming.
  cdef = ("test_call", NotImplemented, None, constants.RPC_TMO_NORMAL, [
    ("arg0", None, NotImplemented),
    ("arg1", AT1, NotImplemented),
    ("arg1", AT2, NotImplemented),
    ], None, None, NotImplemented)

  def _VerifyRequest(req):
    # Echo the encoded request body back as the payload
    req.success = True
    req.resp_status_code = http.HTTP_OK
    req.resp_body = serializer.DumpJson((True, req.post_data))

  http_proc = _FakeRequestProcessor(_VerifyRequest)

  for num in [0, 3796, 9032119]:
    client = rpc._RpcClientBase(resolver, encoders.get,
                                _req_process_fn=http_proc)
    result = client._Call(cdef, nodes, ["foo", num, "Hello%s" % num])
    self.assertEqual(len(result), len(nodes))
    for res in result.values():
      self.assertFalse(res.fail_msg)
      # arg0 passes through unchanged, arg1/arg2 via their encoders
      self.assertEqual(serializer.LoadJson(res.payload),
                       ["foo", hex(num), hash("Hello%s" % num)])
def test(self): (c2pr, c2pw) = os.pipe() # Start child process child = os.fork() if child == 0: try: data = serializer.DumpJson(_GetSocketCredentials( self.sockpath)) os.write(c2pw, data) os.close(c2pw) os._exit(0) finally: os._exit(1) os.close(c2pw) # Wait for one connection (conn, _) = self.listener.accept() conn.recv(1) conn.close() # Wait for result result = os.read(c2pr, 4096) os.close(c2pr) # Check child's exit code (_, status) = os.waitpid(child, 0) self.assertFalse(os.WIFSIGNALED(status)) self.assertEqual(os.WEXITSTATUS(status), 0) # Check result (pid, uid, gid) = serializer.LoadJson(result) self.assertEqual(pid, os.getpid()) self.assertEqual(uid, os.getuid()) self.assertEqual(gid, os.getgid())
def HandleRequest(self, req):
  """Handle a request.

  Dispatches a POST request to the matching C{perspective_*} method and
  returns its JSON-serialized (success, payload) result.

  """
  # Only POST carries an RPC payload for the node daemon
  if req.request_method.upper() != http.HTTP_POST:
    raise http.HttpBadRequest("Only the POST method is supported")

  path = req.request_path
  if path.startswith("/"):
    path = path[1:]

  # The request path selects the backend function to run
  method = getattr(self, "perspective_%s" % path, None)
  if method is None:
    raise http.HttpNotFound()

  try:
    result = (True, method(serializer.LoadJson(req.request_body)))

  except backend.RPCFail as err:
    # our custom failure exception; str(err) works fine if the
    # exception was constructed with a single argument, and in
    # this case, err.message == err.args[0] == str(err)
    result = (False, str(err))
  except errors.QuitGanetiException as err:
    # Tell parent to quit
    logging.info("Shutting down the node daemon, arguments: %s",
                 str(err.args))
    os.kill(self.noded_pid, signal.SIGTERM)
    # And return the error's arguments, which must be already in
    # correct tuple format
    result = err.args
  except Exception as err:  # pylint: disable=W0703
    logging.exception("Error in RPC call")
    result = (False, "Error while executing backend function: %s" % str(err))

  return serializer.DumpJson(result)
def HandleRequest(self, req):
  """Handle a request.

  Dispatches a POST request to the matching C{perspective_*} method.

  """
  # Only POST carries an RPC payload for the node daemon
  if req.request_method.upper() != http.HTTP_POST:
    raise http.HttpBadRequest("Only the POST method is supported")

  path = req.request_path
  if path.startswith("/"):
    path = path[1:]

  # The request path selects the backend function to run
  method = getattr(self, "perspective_%s" % path, None)
  if method is None:
    raise http.HttpNotFound()

  try:
    result = (True, method(serializer.LoadJson(req.request_body)))

  except backend.RPCFail, err:
    # our custom failure exception; str(err) works fine if the
    # exception was constructed with a single argument, and in
    # this case, err.message == err.args[0] == str(err)
    result = (False, str(err))
def main():
  """Entry point for running a single queued job to completion.

  Loads the job from disk, installs signal handlers for termination,
  configuration reload and priority changes, then drives the job
  processor until the job is finished.

  """
  debug = int(os.environ["GNT_DEBUG"])

  logname = pathutils.GetLogFilename("jobs")
  utils.SetupLogging(logname, "job-startup", debug=debug)

  (job_id, llock, secret_params_serialized) = _SetupJob()

  # Re-wrap any secret OS parameters that were passed in serialized form
  secret_params = ""
  if secret_params_serialized:
    secret_params_json = serializer.LoadJson(secret_params_serialized)
    secret_params = RestorePrivateValueWrapping(secret_params_json)

  utils.SetupLogging(logname, "job-%s" % (job_id,), debug=debug)

  try:
    logging.debug("Preparing the context and the configuration")
    context = masterd.GanetiContext(llock)

    logging.debug("Registering signal handlers")

    # Single-element lists so the nested handlers can mutate the flags
    cancel = [False]
    prio_change = [False]

    def _TermHandler(signum, _frame):
      logging.info("Killed by signal %d", signum)
      cancel[0] = True
    signal.signal(signal.SIGTERM, _TermHandler)

    def _HupHandler(signum, _frame):
      logging.debug("Received signal %d, old flag was %s, will set to True",
                    signum, mcpu.sighupReceived)
      mcpu.sighupReceived[0] = True
    signal.signal(signal.SIGHUP, _HupHandler)

    def _User1Handler(signum, _frame):
      logging.info("Received signal %d, indicating priority change", signum)
      prio_change[0] = True
    signal.signal(signal.SIGUSR1, _User1Handler)

    job = context.jobqueue.SafeLoadJobFromDisk(job_id, False)

    job.SetPid(os.getpid())

    # Re-attach unwrapped secret parameters to the opcodes that take them
    if secret_params:
      for i in range(0, len(secret_params)):
        if hasattr(job.ops[i].input, "osparams_secret"):
          job.ops[i].input.osparams_secret = secret_params[i]

    execfun = mcpu.Processor(context, job_id, job_id).ExecOpCode
    proc = _JobProcessor(context.jobqueue, execfun, job)
    result = _JobProcessor.DEFER
    while result != _JobProcessor.FINISHED:
      result = proc()
      if result == _JobProcessor.WAITDEP and not cancel[0]:
        # Normally, the scheduler should avoid starting a job where the
        # dependencies are not yet finalised. So warn, but wait and continue.
        logging.warning("Got started despite a dependency not yet finished")
        time.sleep(5)
      if cancel[0]:
        logging.debug("Got cancel request, cancelling job %d", job_id)
        r = context.jobqueue.CancelJob(job_id)
        # Reload the job so the processor sees the cancelled state
        job = context.jobqueue.SafeLoadJobFromDisk(job_id, False)
        proc = _JobProcessor(context.jobqueue, execfun, job)
        logging.debug("CancelJob result for job %d: %s", job_id, r)
        cancel[0] = False
      if prio_change[0]:
        logging.debug("Received priority-change request")
        try:
          # The new priority is communicated via a per-job message file
          fname = os.path.join(pathutils.LUXID_MESSAGE_DIR,
                               "%d.prio" % job_id)
          new_prio = int(utils.ReadFile(fname))
          utils.RemoveFile(fname)
          logging.debug("Changing priority of job %d to %d",
                        job_id, new_prio)
          r = context.jobqueue.ChangeJobPriority(job_id, new_prio)
          job = context.jobqueue.SafeLoadJobFromDisk(job_id, False)
          proc = _JobProcessor(context.jobqueue, execfun, job)
          logging.debug("Result of changing priority of %d to %d: %s",
                        job_id, new_prio, r)
        except Exception:  # pylint: disable=W0703
          logging.warning("Informed of priority change, but could not"
                          " read new priority")
        prio_change[0] = False
  except Exception:  # pylint: disable=W0703
    logging.exception("Exception when trying to run job %d", job_id)
  finally:
    logging.debug("Job %d finalized", job_id)
    logging.debug("Removing livelock file %s", llock.GetPath())
    os.remove(llock.GetPath())

  sys.exit(0)
def _LoadTestDataConfig(self, filename):
  """Returns the deserialized JSON contents of the given test-data file.

  """
  raw = testutils.ReadTestData(filename)
  return serializer.LoadJson(raw)
def _LoadConfig(self):
  """Reads and deserializes the configuration file at C{self.config_path}.

  """
  contents = utils.ReadFile(self.config_path)
  return serializer.LoadJson(contents)
def Run(self):
  """Main program.

  Validates the options and environment, upgrades (or downgrades) the
  configuration data in place, rewrites the configuration file and
  re-creates missing cluster certificates.

  """
  self._ComposePaths()

  self.SetupLogging()

  # Option checking
  if self.args:
    raise Error("No arguments expected")
  if self.opts.downgrade and not self.opts.no_verify:
    # Verification is skipped on downgrades: the older format cannot be
    # checked by this tool's verifier
    self.opts.no_verify = True

  # Check master name
  if not (self.CheckHostname(self.opts.SSCONF_MASTER_NODE) or
          self.opts.ignore_hostname):
    logging.error("Aborting due to hostname mismatch")
    sys.exit(constants.EXIT_FAILURE)

  self._AskUser()

  # Check whether it's a Ganeti configuration directory
  if not (os.path.isfile(self.opts.CONFIG_DATA_PATH) and
          os.path.isfile(self.opts.SERVER_PEM_PATH) and
          os.path.isfile(self.opts.KNOWN_HOSTS_PATH)):
    raise Error(("%s does not seem to be a Ganeti configuration"
                 " directory") % self.opts.data_dir)

  if not os.path.isdir(self.opts.conf_dir):
    raise Error("Not a directory: %s" % self.opts.conf_dir)

  self.config_data = serializer.LoadJson(
      utils.ReadFile(self.opts.CONFIG_DATA_PATH))

  try:
    config_version = self.config_data["version"]
  except KeyError:
    raise Error("Unable to determine configuration version")

  (config_major, config_minor, config_revision) = \
    version.SplitVersion(config_version)

  logging.info("Found configuration version %s (%d.%d.%d)",
               config_version, config_major, config_minor, config_revision)

  if "config_version" in self.config_data["cluster"]:
    raise Error("Inconsistent configuration: found config_version in"
                " configuration file")

  # Downgrade to the previous stable version
  if self.opts.downgrade:
    self._Downgrade(config_major, config_minor, config_version,
                    config_revision)

  # Upgrade from 2.0-2.16 to 3.0
  # TODO: handle upgrades from 2.17beta
  elif config_major == 2 and config_minor in range(0, LAST_V2_MINOR + 1):
    if config_revision != 0:
      logging.warning("Config revision is %s, not 0", config_revision)
    if not self.UpgradeAll():
      raise Error("Upgrade failed:\n%s" % '\n'.join(self.errors))

  elif config_major == TARGET_MAJOR and config_minor == TARGET_MINOR:
    logging.info("No changes necessary")

  else:
    raise Error("Configuration version %d.%d.%d not supported by this tool" %
                (config_major, config_minor, config_revision))

  try:
    logging.info("Writing configuration file to %s",
                 self.opts.CONFIG_DATA_PATH)
    utils.WriteFile(file_name=self.opts.CONFIG_DATA_PATH,
                    data=serializer.DumpJson(self.config_data),
                    mode=0600,
                    dry_run=self.opts.dry_run,
                    backup=True)

    if not self.opts.dry_run:
      # This creates the cluster certificate if it does not exist yet.
      # In this case, we do not automatically create a client certificate
      # as well, because if the cluster certificate did not exist before,
      # no client certificate will exist on any node yet. In this case
      # all client certificate should be renewed by 'gnt-cluster
      # renew-crypto --new-node-certificates'. This will be enforced
      # by a nagging warning in 'gnt-cluster verify'.
      bootstrap.GenerateClusterCrypto(
        False, False, False, False, False, False, None,
        nodecert_file=self.opts.SERVER_PEM_PATH,
        rapicert_file=self.opts.RAPI_CERT_FILE,
        spicecert_file=self.opts.SPICE_CERT_FILE,
        spicecacert_file=self.opts.SPICE_CACERT_FILE,
        hmackey_file=self.opts.CONFD_HMAC_KEY,
        cds_file=self.opts.CDS_FILE)

  except Exception:
    logging.critical("Writing configuration failed. It is probably in an"
                     " inconsistent state and needs manual intervention.")
    raise

  self._TestLoadingConfigFile()