def _prepare_request(event, payload, _secret=None, headers=None):
    """Build a FakeRequest simulating a GitHub webhook delivery.

    ``payload`` is either raw JSON bytes (sent as the request body with a
    JSON content type) or a list (sent as the urlencoded ``payload``
    argument).  When ``_secret`` is given, an HMAC-SHA1 signature header
    is attached.
    """
    if headers is None:
        headers = {}

    request = FakeRequest()
    request.uri = b"/change_hook/github"
    request.method = b"GET"
    request.received_headers = {_HEADER_EVENT: event}

    assert isinstance(payload, (bytes, list)), \
        "payload can only be bytes or list, not {}".format(type(payload))

    if isinstance(payload, bytes):
        request.content = BytesIO(payload)
        request.received_headers[_HEADER_CT] = _CT_JSON
        if _secret is not None:
            mac = hmac.new(unicode2bytes(_secret),
                           msg=unicode2bytes(payload),
                           digestmod=sha1)
            request.received_headers[_HEADER_SIGNATURE] = \
                'sha1={}'.format(mac.hexdigest())
    else:
        request.args[b'payload'] = payload
        request.received_headers[_HEADER_CT] = _CT_ENCODED

    request.received_headers.update(headers)
    return request
def __init__(self, port, username, password, ssh_hostkey_dir):
    """
    @type port: string or int
    @param port: what port should the Manhole listen on? This is a
    strports specification string, like 'tcp:12345' or
    'tcp:12345:interface=127.0.0.1'. Bare integers are treated as a
    simple tcp port.

    @param username:
    @param password: username= and password= form a pair of strings to
    use when authenticating the remote user.

    @type ssh_hostkey_dir: str
    @param ssh_hostkey_dir: directory which contains ssh host keys for
    this server
    """
    if not manhole_ssh:
        # fixed typo in user-facing error message: "mahole" -> "manhole"
        config.error("cryptography required for ssh manhole.")
    self.username = username
    self.password = password
    self.ssh_hostkey_dir = ssh_hostkey_dir
    # credentials checker holds the single username/password pair (bytes)
    c = checkers.InMemoryUsernamePasswordDatabaseDontUse()
    c.addUser(unicode2bytes(username), unicode2bytes(password))
    super().__init__(port, c, ssh_hostkey_dir)
def _prepare_request(event, payload, _secret=None, headers=None):
    """Build a FakeRequest mimicking a GitHub webhook (text payload).

    A ``str`` payload becomes the request body (JSON content type);
    anything else is passed through as the urlencoded ``payload`` arg.
    """
    if headers is None:
        headers = {}
    request = FakeRequest()
    request.uri = "/change_hook/github"
    request.method = "GET"
    request.received_headers = {_HEADER_EVENT: event}
    if isinstance(payload, str):
        request.content = NativeStringIO(payload)
        request.received_headers[_HEADER_CT] = _CT_JSON
        if _secret is not None:
            digest = hmac.new(unicode2bytes(_secret),
                              msg=unicode2bytes(payload),
                              digestmod=sha1).hexdigest()
            request.received_headers[_HEADER_SIGNATURE] = 'sha1=%s' % (digest,)
    else:
        request.args['payload'] = payload
        request.received_headers[_HEADER_CT] = _CT_ENCODED
    request.received_headers.update(headers)
    return request
def encodeRaw(self, data, request):
    """Write the raw ``data`` dict to ``request`` as a downloadable attachment."""
    mime = unicode2bytes(data['mime-type'])
    filename = unicode2bytes(data['filename'])
    request.setHeader(b"content-type", mime + b'; charset=utf-8')
    request.setHeader(b"content-disposition",
                      b'attachment; filename=' + filename)
    request.write(unicode2bytes(data['raw']))
def getChanges(self, req):
    """Force the pollers named in the request (or all of them when no
    ``poller`` argument is given), honoring the configured allow-list.

    Raises ValueError if a requested poller name cannot be found.
    Returns an empty change list (pollers report their changes async).
    """
    change_svc = req.site.master.change_svc
    poll_all = b"poller" not in req.args

    # restrict to an explicit allow-list when one is configured
    allow_all = True
    allowed = []
    if isinstance(self.options, dict) and b"allowed" in self.options:
        allow_all = False
        allowed = self.options[b"allowed"]

    pollers = []
    for source in change_svc:
        if not isinstance(source, PollingChangeSource):
            continue
        if not hasattr(source, "name"):
            continue
        name = unicode2bytes(source.name)
        if not poll_all and name not in req.args[b'poller']:
            continue
        if not allow_all and name not in allowed:
            continue
        pollers.append(source)

    if not poll_all:
        missing = (set(req.args[b'poller']) -
                   set(unicode2bytes(s.name) for s in pollers))
        if missing:
            raise ValueError("Could not find pollers: {}".format(
                bytes2unicode(b",".join(missing))))

    for p in pollers:
        p.force()

    return [], None
def reconfigResource(self, new_config):
    """Pick up avatar configuration from the new master config."""
    self.avatarMethods = new_config.www.get('avatar_methods', [])
    self.defaultAvatarFullUrl = urljoin(
        unicode2bytes(new_config.buildbotURL),
        unicode2bytes(self.defaultAvatarUrl))
    self.cache = {}
    # wrap a single AvatarBase instance so avatarMethods is always iterable
    if isinstance(self.avatarMethods, AvatarBase):
        self.avatarMethods = (self.avatarMethods,)
def test_push_with_skip_message(self):
    """Pushes whose commit message carries a skip marker are ignored."""
    for skip in ('[bb skip]', '[ bb skip ]'):
        payload = unicode2bytes(gitJsonPayloadCiSkipTemplate % {'skip': skip})
        self._check_push_with_skip_message(payload)
def __init__(self, header=None, headerRegex=None, **kwargs):
    """Optionally override the hook's header name and matching regex.

    Both overrides are coerced to bytes; a default UserInfoProviderBase
    is installed when none was configured by the superclass.
    """
    super().__init__(**kwargs)
    if self.userInfoProvider is None:
        self.userInfoProvider = UserInfoProviderBase()
    if header is not None:
        self.header = unicode2bytes(header)
    if headerRegex is not None:
        self.headerRegex = re.compile(unicode2bytes(headerRegex))
def test_versions(self):
    """All supported API versions plus 'latest' must be exposed."""
    master = self.make_master(url='h:/a/b/')
    rsrc = rest.RestRootResource(master)
    # unicode2bytes already yields bytes here; the original ran a second,
    # redundant unicode2bytes pass over the list (cf. test_versions_limited,
    # which has no such pass)
    versions = [unicode2bytes('v{}'.format(v))
                for v in range(2, self.maxVersion + 1)]
    versions.append(b'latest')
    self.assertEqual(sorted(rsrc.listNames()), sorted(versions))
def deliverJob(self):
    """Deliver the job file to the master.

    Returns a Deferred that fires when the job has been delivered.
    Two transports are supported: "ssh" (spawn an ssh process running
    'buildbot tryserver' remotely) and "pb" (Perspective Broker login).
    Raises RuntimeError for an unknown connect type or a missing ssh
    executable.
    """
    if self.connect == "ssh":
        tryhost = self.getopt("host")
        tryport = self.getopt("port")
        tryuser = self.getopt("username")
        trydir = self.getopt("jobdir")
        buildbotbin = self.getopt("buildbotbin")
        ssh_command = self.getopt("ssh")
        if not ssh_command:
            ssh_commands = which("ssh")
            if not ssh_commands:
                raise RuntimeError("couldn't find ssh executable, make sure "
                                   "it is available in the PATH")
            argv = [ssh_commands[0]]
        else:
            # Split the string on whitespace to allow passing options in
            # ssh command too, but preserving whitespace inside quotes to
            # allow using paths with spaces in them which is common under
            # Windows. And because Windows uses backslashes in paths, we
            # can't just use shlex.split there as it would interpret them
            # specially, so do it by hand.
            if runtime.platformType == 'win32':
                # Note that regex here matches the arguments, not the
                # separators, as it's simpler to do it like this. And then we
                # just need to get all of them together using the slice and
                # also remove the quotes from those that were quoted.
                # BUG FIX: the Python 3 'string' module has no strip()
                # function; use the str method instead.
                argv = [a.strip('"')
                        for a in re.split(r'''([^" ]+|"[^"]+")''',
                                          ssh_command)[1::2]]
            else:
                # Do use standard tokenization logic under POSIX.
                argv = shlex.split(ssh_command)

        if tryuser:
            argv += ["-l", tryuser]
        if tryport:
            argv += ["-p", tryport]
        argv += [tryhost, buildbotbin, "tryserver", "--jobdir", trydir]
        pp = RemoteTryPP(self.jobfile)
        reactor.spawnProcess(pp, argv[0], argv, os.environ)
        return pp.d

    if self.connect == "pb":
        user = self.getopt("username")
        passwd = self.getopt("passwd")
        master = self.getopt("master")
        tryhost, tryport = master.split(":")
        tryport = int(tryport)
        f = pb.PBClientFactory()
        d = f.login(credentials.UsernamePassword(unicode2bytes(user),
                                                 unicode2bytes(passwd)))
        reactor.connectTCP(tryhost, tryport, f)
        d.addCallback(self._deliverJob_pb)
        return d

    raise RuntimeError("unknown connecttype '{}', "
                       "should be 'ssh' or 'pb'".format(self.connect))
def __init__(self, users, **kwargs):
    """Accept users as {name: password} or [(name, password)] with the
    passwords coerced to bytes, and wire up digest + basic auth."""
    if isinstance(users, dict):
        users = {name: unicode2bytes(pw) for name, pw in users.items()}
    elif isinstance(users, list):
        users = [(name, unicode2bytes(pw)) for name, pw in users]
    credential_factories = [DigestCredentialFactory(b"md5", b"buildbot"),
                            BasicCredentialFactory(b"buildbot")]
    user_checkers = [InMemoryUsernamePasswordDatabaseDontUse(**dict(users))]
    super().__init__(credential_factories, user_checkers, **kwargs)
def getName(cls, *args, **kwargs):
    """Derive a stable name from the class name plus a SHA-1 digest of
    the stringified positional and (sorted) keyword arguments."""
    digest = hashlib.sha1()
    for arg in args:
        digest.update(unicode2bytes(str(arg)))
    for key, value in sorted(kwargs.items()):
        digest.update(unicode2bytes(str(key)))
        digest.update(unicode2bytes(str(value)))
    return cls.__name__ + "_" + digest.hexdigest()
def test_listen_add_then_remove(self):
    """A consumer stops receiving events once its filter is removed."""
    self.render_resource(self.sse, b'/listen')
    request = self.request
    uid = unicode2bytes(self.readUUID(request))
    self.render_resource(self.sse, b'/add/' + uid + b"/changes/*/*")
    self.assertReceivesChangeNewMessage(request)
    self.assertEqual(request.finished, False)
    self.render_resource(self.sse, b'/remove/' + uid + b"/changes/*/*")
    self.assertRaises(AssertionError,
                      self.assertReceivesChangeNewMessage, request)
def _get_payload(self, request):
    """Extract and verify the JSON payload of a GitHub webhook request.

    Raises ValueError when the signature is missing (in strict mode),
    malformed, uses an unknown hash type, does not match, or when the
    content type is unrecognized.
    """
    content = request.content.read()
    content = bytes2unicode(content)
    signature = request.getHeader(_HEADER_SIGNATURE)
    signature = bytes2unicode(signature)
    if not signature and self._strict:
        raise ValueError('Request has no required signature')
    if self._secret and signature:
        # signature header has the form "sha1=<hexdigest>"
        try:
            hash_type, hexdigest = signature.split('=')
        except ValueError:
            raise ValueError(
                'Wrong signature format: {}'.format(signature))
        if hash_type != 'sha1':
            raise ValueError('Unknown hash type: {}'.format(hash_type))
        mac = hmac.new(unicode2bytes(self._secret),
                       msg=unicode2bytes(content),
                       digestmod=sha1)

        def _cmp(a, b):
            try:
                # try the more secure compare_digest() first
                from hmac import compare_digest
                return compare_digest(a, b)
            except ImportError:  # pragma: no cover
                # and fallback to the insecure simple comparison otherwise
                return a == b

        if not _cmp(bytes2unicode(mac.hexdigest()), hexdigest):
            raise ValueError('Hash mismatch')
    content_type = request.getHeader(b'Content-Type')
    if content_type == b'application/json':
        payload = json.loads(content)
    elif content_type == b'application/x-www-form-urlencoded':
        payload = json.loads(bytes2unicode(request.args[b'payload'][0]))
    else:
        raise ValueError('Unknown content type: {}'.format(content_type))
    log.msg("Payload: {}".format(payload), logLevel=logging.DEBUG)
    return payload
def test_mode_incremental_p4base_with_p4extra_views(self):
    # incremental mode with p4extra_views: the extra (exclusion) mappings
    # are appended to the generated client spec, with paths containing
    # spaces quoted
    self.setupStep(P4(p4port='localhost:12000',
                      mode='incremental',
                      p4base='//depot', p4branch='trunk',
                      p4extra_views=[('-//depot/trunk/test', 'test'),
                                     ('-//depot/trunk/doc', 'doc'),
                                     ('-//depot/trunk/white space',
                                      'white space')],
                      p4user='******', p4client='p4_client1',
                      p4passwd='pass'))

    root_dir = '/home/user/workspace/wkdir'
    if _is_windows:
        root_dir = r'C:\Users\username\Workspace\wkdir'
    client_spec = textwrap.dedent('''\
    Client: p4_client1

    Owner: user

    Description:
    \tCreated by user

    Root:\t%s

    Options:\tallwrite rmdir

    LineEnd:\tlocal

    View:
    \t//depot/trunk/... //p4_client1/...
    \t-//depot/trunk/test/... //p4_client1/test/...
    \t-//depot/trunk/doc/... //p4_client1/doc/...
    \t"-//depot/trunk/white space/..." "//p4_client1/white space/..."
    ''' % root_dir)
    client_spec = unicode2bytes(client_spec)
    self._incremental(client_stdin=client_spec)
def test_mode_incremental_parent_workdir(self):
    # a workdir above the builder dir ('../another_wkdir') must be
    # reflected in the generated client spec's Root:
    self.setupStep(P4(p4port='localhost:12000',
                      mode='incremental',
                      p4base='//depot', p4branch='trunk',
                      p4user='******', p4client='p4_client1',
                      p4passwd='pass',
                      workdir='../another_wkdir'))

    root_dir = '/home/user/another_wkdir'
    if _is_windows:
        root_dir = r'C:\Users\username\another_wkdir'
    client_spec = textwrap.dedent('''\
    Client: p4_client1

    Owner: user

    Description:
    \tCreated by user

    Root:\t%s

    Options:\tallwrite rmdir

    LineEnd:\tlocal

    View:
    \t//depot/trunk/... //p4_client1/...
    ''' % root_dir)
    client_spec = unicode2bytes(client_spec)
    self._incremental(client_stdin=client_spec, workdir='../another_wkdir')
def test_mode_full_p4base_not_obfuscated(self):
    # with an old worker version (2.15) the password is sent in the clear
    # rather than obfuscated
    self.setupStep(
        P4(p4port='localhost:12000',
           mode='full', p4base='//depot', p4branch='trunk',
           p4user='******', p4client='p4_client1', p4passwd='pass'),
        worker_version={'*': '2.15'})

    root_dir = '/home/user/workspace/wkdir'
    if _is_windows:
        root_dir = r'C:\Users\username\Workspace\wkdir'
    client_stdin = textwrap.dedent('''\
    Client: p4_client1

    Owner: user

    Description:
    \tCreated by user

    Root:\t%s

    Options:\tallwrite rmdir

    LineEnd:\tlocal

    View:
    \t//depot/trunk/... //p4_client1/...\n''' % root_dir)
    client_stdin = unicode2bytes(client_stdin)
    self._full(client_stdin=client_stdin, obfuscated_pass=False)
def getAndSubmitChanges(self, request):
    """Collect changes from the request, submit them, and report the count."""
    changes, src = yield self.getChanges(request)
    if changes:
        yield self.submitChanges(changes, request, src)
        request.write(unicode2bytes("{} change found".format(len(changes))))
    else:
        request.write(b"no change found")
def test_mode_full_renderable_p4branch(self):
    # Note that the config check skips checking p4base if it's a renderable
    self.setupStep(
        P4(p4port='localhost:12000',
           mode='full', p4base='//depot',
           p4branch=ConstantRenderable('render_branch'),
           p4user='******', p4client='p4_client1', p4passwd='pass'))

    root_dir = '/home/user/workspace/wkdir'
    if _is_windows:
        root_dir = r'C:\Users\username\Workspace\wkdir'
    client_stdin = textwrap.dedent('''\
    Client: p4_client1

    Owner: user

    Description:
    \tCreated by user

    Root:\t%s

    Options:\tallwrite rmdir

    LineEnd:\tlocal

    View:
    \t//depot/render_branch/... //p4_client1/...\n''' % root_dir)
    client_stdin = unicode2bytes(client_stdin)
    self._full(client_stdin=client_stdin)
def test_mode_full_renderable_p4viewspec(self):
    # a renderable p4viewspec entry is rendered into the client spec; the
    # step is run as a different user to confirm Owner/Description follow
    self.setupStep(
        P4(p4port='localhost:12000',
           mode='full',
           p4viewspec=[(ConstantRenderable('//depot/render_trunk/'), '')],
           p4user='******', p4client='p4_client1', p4passwd='pass'))

    root_dir = '/home/user/workspace/wkdir'
    if _is_windows:
        root_dir = r'C:\Users\username\Workspace\wkdir'
    client_stdin = textwrap.dedent('''\
    Client: p4_client1

    Owner: different_user

    Description:
    \tCreated by different_user

    Root:\t%s

    Options:\tallwrite rmdir

    LineEnd:\tlocal

    View:
    \t//depot/render_trunk/... //p4_client1/...\n''' % root_dir)
    client_stdin = unicode2bytes(client_stdin)
    self._full(client_stdin=client_stdin, p4user=b'different_user')
def test_mode_full_p4viewspec_suffix(self):
    # p4viewspec_suffix=None: view entries are used verbatim (no '...'
    # suffix added); entries containing spaces must come out quoted
    self.setupStep(P4(p4port='localhost:12000',
                      mode='full',
                      p4viewspec_suffix=None,
                      p4viewspec=[('//depot/trunk/foo.xml', 'bar.xml'),
                                  ('//depot/trunk/white space/...',
                                   'white space/...'),
                                  ('-//depot/trunk/white space/excluded/...',
                                   'white space/excluded/...')],
                      p4user='******', p4client='p4_client1',
                      p4passwd='pass'))

    root_dir = '/home/user/workspace/wkdir'
    if _is_windows:
        root_dir = r'C:\Users\username\Workspace\wkdir'
    client_spec = textwrap.dedent('''\
    Client: p4_client1

    Owner: user

    Description:
    \tCreated by user

    Root:\t%s

    Options:\tallwrite rmdir

    LineEnd:\tlocal

    View:
    \t//depot/trunk/foo.xml //p4_client1/bar.xml
    \t"//depot/trunk/white space/..." "//p4_client1/white space/..."
    \t"-//depot/trunk/white space/excluded/..." "//p4_client1/white space/excluded/..."
    ''' % root_dir)
    client_spec = unicode2bytes(client_spec)
    self._full(client_stdin=client_spec)
def test_mode_full_p4extra_args(self):
    # p4extra_args are forwarded to the p4 command line (as bytes)
    self.setupStep(P4(p4port='localhost:12000',
                      mode='full',
                      p4base='//depot',
                      p4branch='trunk',
                      p4user='******',
                      p4client='p4_client1',
                      p4passwd='pass',
                      p4extra_args=['-Zproxyload']))

    root_dir = '/home/user/workspace/wkdir'
    if _is_windows:
        root_dir = r'C:\Users\username\Workspace\wkdir'
    client_spec = textwrap.dedent('''\
    Client: p4_client1

    Owner: user

    Description:
    \tCreated by user

    Root:\t%s

    Options:\tallwrite rmdir

    LineEnd:\tlocal

    View:
    \t//depot/trunk/... //p4_client1/...
    ''' % root_dir)
    client_spec = unicode2bytes(client_spec)
    self._full(client_stdin=client_spec, extra_args=[b'-Zproxyload'])
def writeError(msg, errcode=400):
    """Send ``msg`` back to the client as a JSON error with ``errcode``."""
    msg = bytes2NativeString(msg)
    if self.debug:
        log.msg("HTTP error: %s" % (msg,))
    request.setResponseCode(errcode)
    request.setHeader(b'content-type', b'text/plain; charset=utf-8')
    if request.method == b'POST':
        # jsonRPC callers want the error message in error.message
        body = dict(error=dict(message=msg))
    else:
        body = dict(error=msg)
    request.write(unicode2bytes(json.dumps(body)))
    request.finish()
def render_GET(self, request):
    """Echo the decoded request arguments (plus any JSON body) as JSON."""
    def decode(value):
        # recursively turn bytes into str throughout the args structure
        if isinstance(value, bytes):
            return bytes2unicode(value)
        if isinstance(value, (list, tuple)):
            return [bytes2unicode(item) for item in value]
        if isinstance(value, dict):
            return {decode(k): decode(v) for k, v in value.items()}
        return value

    args = decode(request.args)

    if request.getHeader(b'content-type') == b"application/json":
        body = bytes2unicode(request.content.read())
        args['json_received'] = json.loads(body)

    data = unicode2bytes(json.dumps(args))
    request.setHeader(b'content-type', b'application/json')
    request.setHeader(b'content-length', intToBytes(len(data)))
    if request.method == b'HEAD':
        return b''
    return data
def check_passwd(guess, passwd):
    """
    Tests to see if the guess, after salting and hashing, matches the
    passwd from the database.

    @param guess: incoming password trying to be used for authentication
    @param passwd: already encrypted password from the database
    @returns: boolean
    """
    # salt_len * 2 because the stored salt is hex-encoded
    salt = passwd[:salt_len * 2]
    digest = sha1()
    digest.update(unicode2bytes(guess) + unicode2bytes(salt))
    crypted_guess = bytes2NativeString(salt) + digest.hexdigest()
    return crypted_guess == bytes2NativeString(passwd)
def onMessage(self, event, data):
    """Forward an mq message to the SSE client as an 'event' event."""
    request = self.request
    key = [bytes2NativeString(e) for e in event]
    body = unicode2bytes(json.dumps(dict(key=key, message=data),
                                    default=toJson))
    # same bytes as the original b"event: " + b"event" + b"\n" split
    request.write(b"event: event\n")
    request.write(b"data: " + body + b"\n")
    request.write(b"\n")
def render(self, request):
    """Return the mapping of supported REST API versions as JSON."""
    request.setHeader(b"content-type", JSON_ENCODED)
    min_vers = self.master.config.www.get('rest_minimum_version', 0)
    # only advertise versions at or above the configured minimum
    api_versions = {'v%d' % v: '%sapi/v%d' % (self.base_url, v)
                    for v in self.version_classes
                    if v > min_vers}
    return unicode2bytes(json.dumps(dict(api_versions=api_versions)))
def test_post(self):
    """A json= kwarg is serialized and sent with a JSON content type."""
    self._http.post('/bar', json={'foo': 'bar'})
    expected_body = unicode2bytes(json.dumps(dict(foo='bar')))
    self._http._session.request.assert_called_once_with(
        'post', 'http://foo/bar',
        background_callback=mock.ANY,
        data=expected_body,
        headers={'Content-Type': 'application/json'})
def assertEncodedIn(self, text, mail):
    """Assert ``text`` occurs in ``mail``, accounting for base64 bodies."""
    # python 2.6 default transfer in base64 for utf-8
    if "base64" not in mail:
        self.assertIn(text, mail)
        return
    # b64encode and strip the '=' padding, since padding may land on a
    # line break in the encoded body
    raw = base64.b64encode(unicode2bytes(text)).rstrip(b"=")
    self.assertIn(bytes2unicode(raw), mail)
def test_versions_limited(self):
    """Versions below rest_minimum_version must not be listed."""
    master = self.make_master(url='h:/a/b/')
    master.config.www['rest_minimum_version'] = 2
    rsrc = rest.RestRootResource(master)
    expected = [unicode2bytes('v{}'.format(v))
                for v in range(2, self.maxVersion + 1)]
    expected.append(b'latest')
    self.assertEqual(sorted(rsrc.listNames()), sorted(expected))
def __init__(self, s):
    """Wrap ``s`` (str or bytes) as an in-memory bytes stream."""
    super().__init__(BytesIO(unicode2bytes(s)))
def assertResult(self, result):
    """Assert the response carries ``result`` as data with no errors."""
    body = json.dumps({"data": result, "errors": None})
    self.assertRequest(content=unicode2bytes(body), responseCode=200)
def setupSite(self, new_config):
    """Build the Twisted Site: mount every configured www plugin plus the
    built-in auth/avatar/api/ws/sse/change_hook resources, then create
    the (optionally logging) BuildbotSite."""
    self.reconfigurableResources = []

    # we're going to need at least the base plugin (buildbot-www)
    if 'base' not in self.apps:
        raise RuntimeError("could not find buildbot-www; is it installed?")

    root = self.apps.get('base').resource
    # MODERNIZED: iterate the plugin names directly instead of the py2-era
    # iteritems() (the mapped value was never used)
    for key in new_config.www.get('plugins', {}):
        log.msg("initializing www plugin %r" % (key, ))
        if key not in self.apps:
            raise RuntimeError(
                "could not find plugin %s; is it installed?" % (key, ))
        self.apps.get(key).setMaster(self.master)
        root.putChild(unicode2bytes(key), self.apps.get(key).resource)

    known_plugins = set(new_config.www.get('plugins', {})) | set(['base'])
    for plugin_name in set(self.apps.names) - known_plugins:
        log.msg("NOTE: www plugin %r is installed but not "
                "configured" % (plugin_name, ))

    # /
    root.putChild(
        b'',
        wwwconfig.IndexResource(self.master,
                                self.apps.get('base').static_dir))
    # /auth
    root.putChild(b'auth', auth.AuthRootResource(self.master))
    # /avatar
    root.putChild(b'avatar', avatar.AvatarResource(self.master))
    # /api
    root.putChild(b'api', rest.RestRootResource(self.master))
    # /ws
    root.putChild(b'ws', ws.WsResource(self.master))
    # /sse
    root.putChild(b'sse', sse.EventResource(self.master))

    # /change_hook
    resource_obj = change_hook.ChangeHookResource(master=self.master)
    # FIXME: this does not work with reconfig
    change_hook_auth = new_config.www.get('change_hook_auth')
    if change_hook_auth is not None:
        resource_obj = self.setupProtectedResource(resource_obj,
                                                   change_hook_auth)
    root.putChild(b"change_hook", resource_obj)

    self.root = root

    # fall back to the master-wide log rotation settings
    rotateLength = (new_config.www.get('logRotateLength')
                    or self.master.log_rotation.rotateLength)
    maxRotatedFiles = (new_config.www.get('maxRotatedFiles')
                       or self.master.log_rotation.maxRotatedFiles)

    httplog = None
    if new_config.www['logfileName']:
        httplog = os.path.abspath(
            os.path.join(self.master.basedir, new_config.www['logfileName']))
    self.site = BuildbotSite(root, logPath=httplog,
                             rotateLength=rotateLength,
                             maxRotatedFiles=maxRotatedFiles)

    self.site.sessionFactory = None

    # Make sure site.master is set. It is required for poller change_hook
    self.site.master = self.master

    # convert this to a tuple so it can't be appended anymore (in
    # case some dynamically created resources try to get reconfigs)
    self.reconfigurableResources = tuple(self.reconfigurableResources)
def test_push_no_ci_skip(self):
    # user overrode the skip pattern already,
    # so the default patterns should not work.
    payload = unicode2bytes(
        gitJsonPayloadCiSkipTemplate % {'skip': '[ci skip]'})
    self._check_push_no_ci_skip(payload)
def failHttpError(f):
    """Trap twisted.web Error failures and report them via writeError."""
    f.trap(Error)
    exc = f.value
    writeError(unicode2bytes(exc.message), errcode=int(exc.status))
def render(self, request):
    """Dispatch SSE requests: /listen opens an event stream, /add and
    /remove manage the event-path subscriptions of an existing stream
    (identified by the uuid handed out at handshake time)."""
    command = b"listen"
    path = request.postpath
    # drop a trailing slash
    if path and path[-1] == b'':
        path = path[:-1]
    if path and path[0] in (b"listen", b"add", b"remove"):
        command = path[0]
        path = path[1:]

    if command == b"listen":
        # new stream: allocate a uuid identifying this consumer
        cid = unicode2bytes(str(uuid.uuid4()))
        consumer = Consumer(request)
    elif command in (b"add", b"remove"):
        if path:
            cid = path[0]
            path = path[1:]
            if cid not in self.consumers:
                return self.finish(request, 400, b"unknown uuid")
            consumer = self.consumers[cid]
        else:
            return self.finish(request, 400, b"need uuid")

    pathref = b"/".join(path)
    path = self.decodePath(path)

    if command == b"add" or (command == b"listen" and path):
        options = request.args
        for k in options:
            if len(options[k]) == 1:
                # BUG FIX: single-valued query args were unwrapped with
                # options[k][1], which raises IndexError on a one-element
                # list; take element [0] instead
                options[k] = options[k][0]
        try:
            d = self.master.mq.startConsuming(
                consumer.onMessage,
                tuple([bytes2unicode(p) for p in path]))

            @d.addCallback
            def register(qref):
                consumer.registerQref(pathref, qref)
            d.addErrback(log.err, "while calling startConsuming")
        except NotImplementedError:
            return self.finish(request, 404, b"not implemented")
        except InvalidPathError:
            return self.finish(request, 404, b"not implemented")
    elif command == b"remove":
        try:
            consumer.stopConsuming(pathref)
        except KeyError:
            return self.finish(request, 404,
                               b"consumer is not listening to this event")

    if command == b"listen":
        self.consumers[cid] = consumer
        request.setHeader(b"content-type", b"text/event-stream")
        request.write(b"")
        request.write(b"event: handshake\n")
        request.write(b"data: " + cid + b"\n")
        request.write(b"\n")
        d = request.notifyFinish()

        @d.addBoth
        def onEndRequest(_):
            consumer.stopConsuming()
            del self.consumers[cid]

        return server.NOT_DONE_YET

    # add/remove requests complete immediately
    self.finish(request, 200, b"ok")
    return
def parseJob(self, f):
    """Parse a serialized 'try' jobfile into a dict of build parameters.

    Raises BadJobfile on oversized, unparsable, or unknown-version input.
    """
    # jobfiles are serialized build requests. Each is a list of
    # serialized netstrings, in the following order:
    #  format version number:
    #  "1" the original
    #  "2" introduces project and repository
    #  "3" introduces who
    #  "4" introduces comment
    #  "5" introduces properties and JSON serialization of values after
    #      version
    #  "6" sends patch_body as base64-encoded string in the patch_body_base64 attribute
    #  jobid: arbitrary string, used to find the buildSet later
    #  branch: branch name, "" for default-branch
    #  baserev: revision, "" for HEAD
    #  patch_level: usually "1"
    #  patch_body: patch to be applied for build (as string)
    #  patch_body_base64: patch to be applied for build (as base64-encoded bytes)
    #  repository
    #  project
    #  who: user requesting build
    #  comment: comment from user about diff and/or build
    #  builderNames: list of builder names
    #  properties: dict of build properties
    p = netstrings.NetstringParser()
    # seek to the end to measure the file before feeding the parser
    f.seek(0, 2)
    if f.tell() > basic.NetstringReceiver.MAX_LENGTH:
        raise BadJobfile(
            "The patch size is greater that NetStringReceiver.MAX_LENGTH. "
            "Please Set this higher in the master.cfg")
    f.seek(0, 0)
    try:
        p.feed(f.read())
    except basic.NetstringParseError as e:
        raise BadJobfile("unable to parse netstrings") from e
    if not p.strings:
        raise BadJobfile("could not find any complete netstrings")
    ver = bytes2unicode(p.strings.pop(0))

    v1_keys = ['jobid', 'branch', 'baserev', 'patch_level', 'patch_body']
    v2_keys = v1_keys + ['repository', 'project']
    v3_keys = v2_keys + ['who']
    v4_keys = v3_keys + ['comment']
    keys = [v1_keys, v2_keys, v3_keys, v4_keys]
    # v5 introduces properties and uses JSON serialization

    parsed_job = {}

    def extract_netstrings(p, keys):
        # netstring at index i corresponds to keys[i]; the patch body is
        # kept as raw bytes, everything else is decoded to unicode
        for i, key in enumerate(keys):
            if key == 'patch_body':
                parsed_job[key] = p.strings[i]
            else:
                parsed_job[key] = bytes2unicode(p.strings[i])

    def postprocess_parsed_job():
        # apply defaults and handle type casting
        parsed_job['branch'] = parsed_job['branch'] or None
        parsed_job['baserev'] = parsed_job['baserev'] or None
        parsed_job['patch_level'] = int(parsed_job['patch_level'])
        for key in 'repository project who comment'.split():
            parsed_job[key] = parsed_job.get(key, '')
        parsed_job['properties'] = parsed_job.get('properties', {})

    if ver <= "4":
        i = int(ver) - 1
        extract_netstrings(p, keys[i])
        # remaining netstrings are the builder names
        parsed_job['builderNames'] = [
            bytes2unicode(s) for s in p.strings[len(keys[i]):]
        ]
        postprocess_parsed_job()
    elif ver == "5":
        try:
            data = bytes2unicode(p.strings[0])
            parsed_job = json.loads(data)
            parsed_job['patch_body'] = unicode2bytes(
                parsed_job['patch_body'])
        except ValueError as e:
            raise BadJobfile("unable to parse JSON") from e
        postprocess_parsed_job()
    elif ver == "6":
        try:
            data = bytes2unicode(p.strings[0])
            parsed_job = json.loads(data)
            parsed_job['patch_body'] = base64.b64decode(
                parsed_job['patch_body_base64'])
            del parsed_job['patch_body_base64']
        except ValueError as e:
            raise BadJobfile("unable to parse JSON") from e
        postprocess_parsed_job()
    else:
        raise BadJobfile(f"unknown version '{ver}'")
    return parsed_job
def __init__(self, s):
    """Wrap ``s`` (str or bytes) as an in-memory bytes FileReader.

    MODERNIZED: use super() rather than the explicit old-style
    ``FileReader.__init__(self, ...)`` call, consistent with the sibling
    BytesIO-wrapping initializer elsewhere in this file.
    """
    s = unicode2bytes(s)
    super().__init__(BytesIO(s))
def perspective_try(self, branch, revision, patch, repository, project,
                    builderNames, who="", comment="", properties=None):
    """Remote 'try' entry point: schedule a buildset for the given patch.

    ``patch`` is a (level, body) tuple.  Returns a RemoteBuildSetStatus
    for the new buildset, or None when no requested builder survives
    filtering against the scheduler's configured list.
    """
    log.msg(
        f"user {self.username} requesting build on builders {builderNames}"
    )
    if properties is None:
        properties = {}
    # build the intersection of the request and our configured list
    builderNames = self.scheduler.filterBuilderList(builderNames)
    if not builderNames:
        return None

    branch = bytes2unicode(branch)
    revision = bytes2unicode(revision)
    patch_level = patch[0]
    patch_body = unicode2bytes(patch[1])
    repository = bytes2unicode(repository)
    project = bytes2unicode(project)
    who = bytes2unicode(who)
    comment = bytes2unicode(comment)

    reason = "'try' job"

    if who:
        reason += f" by user {bytes2unicode(who)}"

    if comment:
        reason += f" ({bytes2unicode(comment)})"

    sourcestamp = dict(
        branch=branch,
        revision=revision,
        repository=repository,
        project=project,
        patch_level=patch_level,
        patch_body=patch_body,
        patch_subdir='',
        patch_author=who or '',
        patch_comment=comment or '',
        codebase='',
    )  # note: no way to specify patch subdir - #1769

    requested_props = Properties()
    requested_props.update(properties, "try build")
    (bsid, brids) = yield self.scheduler.addBuildsetForSourceStamps(
        sourcestamps=[sourcestamp],
        reason=reason,
        properties=requested_props,
        builderNames=builderNames)

    # return a remotely-usable BuildSetStatus object
    bss = RemoteBuildSetStatus(self.scheduler.master, bsid, brids)
    return bss
def deliverJob(self):
    """Deliver the job file to the master.

    Returns a Deferred that fires when the job has been delivered.
    Supports the "ssh" transport (spawn an ssh process running
    'buildbot tryserver' remotely) and the "pb" transport (Perspective
    Broker login).  Raises RuntimeError for an unknown connect type or a
    missing ssh executable.
    """
    if self.connect == "ssh":
        tryhost = self.getopt("host")
        tryport = self.getopt("port")
        tryuser = self.getopt("username")
        trydir = self.getopt("jobdir")
        buildbotbin = self.getopt("buildbotbin")
        ssh_command = self.getopt("ssh")
        if not ssh_command:
            ssh_commands = which("ssh")
            if not ssh_commands:
                raise RuntimeError(
                    "couldn't find ssh executable, make sure "
                    "it is available in the PATH")
            argv = [ssh_commands[0]]
        else:
            # Split the string on whitespace to allow passing options in
            # ssh command too, but preserving whitespace inside quotes to
            # allow using paths with spaces in them which is common under
            # Windows. And because Windows uses backslashes in paths, we
            # can't just use shlex.split there as it would interpret them
            # specially, so do it by hand.
            if runtime.platformType == 'win32':
                # Note that regex here matches the arguments, not the
                # separators, as it's simpler to do it like this. And then we
                # just need to get all of them together using the slice and
                # also remove the quotes from those that were quoted.
                # BUG FIX: the Python 3 'string' module has no strip()
                # function; use the str method instead.
                argv = [
                    a.strip('"')
                    for a in re.split(r'''([^" ]+|"[^"]+")''',
                                      ssh_command)[1::2]
                ]
            else:
                # Do use standard tokenization logic under POSIX.
                argv = shlex.split(ssh_command)

        if tryuser:
            argv += ["-l", tryuser]

        if tryport:
            argv += ["-p", tryport]

        argv += [tryhost, buildbotbin, "tryserver", "--jobdir", trydir]
        pp = RemoteTryPP(self.jobfile)
        reactor.spawnProcess(pp, argv[0], argv, os.environ)
        return pp.d

    if self.connect == "pb":
        user = self.getopt("username")
        passwd = self.getopt("passwd")
        master = self.getopt("master")
        tryhost, tryport = master.split(":")
        tryport = int(tryport)
        f = pb.PBClientFactory()
        d = f.login(
            credentials.UsernamePassword(unicode2bytes(user),
                                         unicode2bytes(passwd)))
        reactor.connectTCP(tryhost, tryport, f)
        d.addCallback(self._deliverJob_pb)
        return d

    raise RuntimeError("unknown connecttype '{}', "
                       "should be 'ssh' or 'pb'".format(self.connect))
def __init__(self, master, auth=('change', 'changepw'), encoding='utf8'):
    """Record the master's host:port and credentials for sending changes."""
    self.username = unicode2bytes(auth[0])
    self.password = unicode2bytes(auth[1])
    host, port = master.split(":")
    self.host = host
    self.port = int(port)
    self.encoding = encoding
def setConfiguration(self, config):
    """Mount each configured dashboard app as a WSGI child resource."""
    for dashboard in config:
        dashboard['app'].buildbot_api = self
        child = WSGIResource(reactor, reactor.getThreadPool(),
                             dashboard['app'])
        self.resource.putChild(unicode2bytes(dashboard['name']), child)
def test_cors_origin_mismatch_post(self):
    """POST from a non-matching origin is rejected with a 400 JSON error."""
    yield self.render_resource(self.rsrc, b'/', method=b'POST',
                               origin=b'h://bad')
    expected = unicode2bytes(
        json.dumps({'error': {'message': 'invalid origin'}}))
    self.assertRequest(content=expected, responseCode=400)
class LogWatcher(LineOnlyReceiver):
    """Tail a buildbot master logfile and report (via Deferred) whether a
    start/reconfig succeeded, failed, or timed out."""

    POLL_INTERVAL = 0.1
    TIMEOUT_DELAY = 10.0
    # LineOnlyReceiver wants a bytes delimiter; use the platform separator
    delimiter = unicode2bytes(os.linesep)

    def __init__(self, logfile, timeout=None, _reactor=reactor):
        self.logfile = logfile
        self.in_reconfig = False
        self.transport = FakeTransport()
        self.pp = TailProcess()
        self.pp.lw = self
        self.timer = None
        self._reactor = _reactor
        self._timeout_delay = timeout or self.TIMEOUT_DELAY

    def start(self):
        # If the log file doesn't exist, create it now.
        if not os.path.exists(self.logfile):
            open(self.logfile, 'a').close()

        # return a Deferred that fires when the reconfig process has
        # finished. It errbacks with TimeoutError if the startup has not
        # progressed for 10 seconds, and with ReconfigError if the error
        # line was seen. If the logfile could not be opened, it errbacks with
        # an IOError.
        if platform.system().lower() == 'sunos' and os.path.exists(
                '/usr/xpg4/bin/tail'):
            tailBin = "/usr/xpg4/bin/tail"
        else:
            tailBin = "/usr/bin/tail"

        args = ("tail", "-f", "-n", "0", self.logfile)
        self.p = self._reactor.spawnProcess(self.pp, tailBin, args,
                                            env=os.environ)
        self.running = True
        d = defer.maybeDeferred(self._start)
        return d

    def _start(self):
        self.d = defer.Deferred()
        self.startTimer()
        return self.d

    def startTimer(self):
        self.timer = self._reactor.callLater(self._timeout_delay,
                                             self.timeout)

    def timeout(self):
        # was the timeout set to be ignored? if so, restart it
        if not self.timer:
            self.startTimer()
            return

        self.timer = None
        e = BuildmasterTimeoutError()
        self.finished(Failure(e))

    def finished(self, results):
        # kill the tail process and resolve the start() Deferred
        try:
            self.p.signalProcess("KILL")
        except error.ProcessExitedAlready:
            pass
        if self.timer:
            self.timer.cancel()
            self.timer = None
        self.running = False
        self.in_reconfig = False
        self.d.callback(results)

    def lineReceived(self, line):
        if not self.running:
            return
        if b"Log opened." in line:
            self.in_reconfig = True
        if b"beginning configuration update" in line:
            self.in_reconfig = True

        if self.in_reconfig:
            print(line)

        # certain lines indicate progress, so we "cancel" the timeout
        # and it will get re-added when it fires
        PROGRESS_TEXT = [
            b'Starting BuildMaster', b'Loading configuration from',
            b'added builder', b'adding scheduler', b'Loading builder',
            b'Starting factory'
        ]
        for progressText in PROGRESS_TEXT:
            if progressText in line:
                self.timer = None
                break

        if b"message from master: attached" in line:
            return self.finished("worker")
        if b"reconfig aborted" in line or b'reconfig partially applied' in line:
            return self.finished(Failure(ReconfigError()))
        if b"Server Shut Down" in line:
            return self.finished(Failure(ReconfigError()))
        if b"configuration update complete" in line:
            return self.finished("buildmaster")
        if b"BuildMaster is running" in line:
            return self.finished("buildmaster")
        if b"BuildMaster startup failed" in line:
            return self.finished(Failure(BuildmasterStartupError()))
def setUp(self): table_names = [ 'objects', 'object_state', 'masters', 'workers', 'configured_workers', 'connected_workers', 'builder_masters', 'builders' ] master = fakemaster.make_master(self, wantRealReactor=True) yield self.setUpRealDatabaseWithConnector(master, table_names=table_names, sqlite_memory=False) master.data = dataconnector.DataConnector() yield master.data.setServiceParent(master) master.config.mq = dict(type='simple') master.mq = mqconnector.MQConnector() yield master.mq.setServiceParent(master) yield master.mq.setup() yield master.mq.startService() master.config.www = dict( port='tcp:0:interface=127.0.0.1', debug=True, auth=auth.NoAuth(), authz=authz.Authz(), avatar_methods=[], logfileName='http.log') master.www = wwwservice.WWWService() yield master.www.setServiceParent(master) yield master.www.startService() yield master.www.reconfigServiceWithBuildbotConfig(master.config) session = mock.Mock() session.uid = "0" master.www.site.sessionFactory = mock.Mock(return_value=session) # now that we have a port, construct the real URL and insert it into # the config. The second reconfig isn't really required, but doesn't # hurt. self.url = 'http://127.0.0.1:%d/' % master.www.getPortnum() self.url = unicode2bytes(self.url) master.config.buildbotURL = self.url yield master.www.reconfigServiceWithBuildbotConfig(master.config) self.master = master self.agent = client.Agent(reactor) # create a telegram bot service tb = master.config.services['TelegramBot'] = telegram.TelegramBot( bot_token='12345:secret', useWebhook=True, chat_ids=[-123456], notify_events=['worker'] ) tb._get_http = self.get_http yield tb.setServiceParent(self.master) self.bot_url = self.url + b"telegram12345:secret" yield tb.startService() self.sent_messages = [] def send_message(chat, message, **kwargs): self.sent_messages.append((chat, message)) tb.bot.send_message = send_message
def sendComment(self, pr_url, text): path = urlparse(unicode2bytes(pr_url)).path payload = {'text': text} return self._http.post( COMMENT_API_URL.format(path=bytes2unicode(path)), json=payload)
def __init__(self, url): Error.__init__(self, 302, "redirect") self.url = protect_redirect_url(unicode2bytes(url))
def connectionMade(self): self.transport.write(unicode2bytes(self.job)) self.transport.closeStdin()
def content(self): content = unicode2bytes(self._content) return defer.succeed(content)
def feed(self, data): data = unicode2bytes(data) self.dataReceived(data) # dataReceived handles errors unusually quietly! if self.brokenPeer: raise basic.NetstringParseError
def stopService(self): try: super().stopService() except AttributeError: pass self._root.delEntity(unicode2bytes(self.path))
def renderRest(self, request):
    """Handle a REST request end-to-end: resolve the endpoint, fetch the
    data, wrap it with metadata, and write a JSON response.

    Errors surface through writeError as a JSON {"error": ...} body.
    """
    def writeError(msg, errcode=404, jsonrpccode=None):
        # jsonrpccode is accepted for signature compatibility but unused here
        if self.debug:
            log.msg("REST error: {}".format(msg))
        request.setResponseCode(errcode)
        request.setHeader(b'content-type', b'text/plain; charset=utf-8')
        msg = bytes2unicode(msg)
        data = json.dumps(dict(error=msg))
        data = unicode2bytes(data)
        request.write(data)

    with self.handleErrors(writeError):
        ep, kwargs = yield self.getEndpoint(request,
                                            bytes2unicode(request.method), {})

        rspec = self.decodeResultSpec(request, ep)
        data = yield ep.get(rspec, kwargs)
        if data is None:
            msg = ("not found while getting from {} with "
                   "arguments {} and {}").format(repr(ep),
                                                 repr(rspec), str(kwargs))
            msg = unicode2bytes(msg)
            writeError(msg, errcode=404)
            return

        if ep.isRaw:
            # raw endpoints write their own body/headers
            self.encodeRaw(data, request)
            return

        # post-process any remaining parts of the resultspec
        data = rspec.apply(data)

        # annotate the result with some metadata
        meta = {}
        if ep.isCollection:
            offset, total = data.offset, data.total
            if offset is None:
                offset = 0

            # add total, if known
            if total is not None:
                meta['total'] = total

            # get the real list instance out of the ListResult
            data = data.data
        else:
            # single resource: still wrapped in a one-element list below
            data = [data]

        typeName = ep.rtype.plural
        data = {typeName: data, 'meta': meta}

        # set up the content type and formatting options; if the request
        # accepts text/html or text/plain, the JSON will be rendered in a
        # readable, multiline format.

        if b'application/json' in (request.getHeader(b'accept') or b''):
            compact = True
            request.setHeader(b"content-type",
                              b'application/json; charset=utf-8')
        else:
            compact = False
            request.setHeader(b"content-type",
                              b'text/plain; charset=utf-8')

        # set up caching
        if self.cache_seconds:
            now = datetime.datetime.utcnow()
            expires = now + datetime.timedelta(seconds=self.cache_seconds)
            expiresBytes = unicode2bytes(
                expires.strftime("%a, %d %b %Y %H:%M:%S GMT"))
            request.setHeader(b"Expires", expiresBytes)
            request.setHeader(b"Pragma", b"no-cache")

        # filter out blanks if necessary and render the data
        if compact:
            data = json.dumps(data, default=toJson,
                              sort_keys=True, separators=(',', ':'))
        else:
            data = json.dumps(data, default=toJson,
                              sort_keys=True, indent=2)

        if request.method == b"HEAD":
            # HEAD: advertise the length but send no body
            request.setHeader(b"content-length",
                              unicode2bytes(str(len(data))))
        else:
            data = unicode2bytes(data)
            request.write(data)
def startService(self): self._root.putChild(unicode2bytes(self.path), self) try: super().startService() except AttributeError: pass
def assertNotOk(self, message): content = json.dumps({'error': message}) content = unicode2bytes(content) self.assertRequest(content=content, responseCode=400)
def _write_config(self, config_str): config_bytes = unicode2bytes(config_str) configfile = FilePath(self.mktemp()) configfile.setContent(config_bytes) return configfile
def assertSimpleError(self, message, responseCode): content = json.dumps({'error': message}) self.assertRequest(content=unicode2bytes(content), responseCode=responseCode)
def _process_changes(self, newRev, branch):
    """
    Read changes since last change.

    - Read list of commit hashes.
    - Extract details from each commit.
    - Add changes to database.
    """

    # initial run, don't parse all history
    if not self.lastRev:
        return

    rebuild = False
    if newRev in itervalues(self.lastRev):
        if self.buildPushesWithNoCommits:
            existingRev = self.lastRev.get(branch)
            if existingRev is None:
                # This branch was completely unknown, rebuild
                log.msg('gitpoller: rebuilding %s for new branch "%s"' %
                        (newRev, branch))
                rebuild = True
            elif existingRev != newRev:
                # This branch is known, but it now points to a different
                # commit than last time we saw it, rebuild.
                log.msg('gitpoller: rebuilding %s for updated branch "%s"' %
                        (newRev, branch))
                rebuild = True

    # get the change list: newRev and everything not reachable from the
    # previously-seen revisions.
    # NOTE(review): r'%s' % newRev is a str while the surrounding args are
    # bytes -- presumably _dovccmd tolerates the mix; confirm.
    revListArgs = ([b'--format=%H', r'%s' % newRev] +
                   [b'^' + unicode2bytes(rev, 'ascii', 'ignore')
                    for rev in sorted(itervalues(self.lastRev))] +
                   [b'--'])
    self.changeCount = 0
    results = yield self._dovccmd('log', revListArgs, path=self.workdir)

    # process oldest change first
    revList = results.split()
    revList.reverse()

    if rebuild and not revList:
        # forced-push / re-pointed branch with no new commits: re-announce
        # the head revision anyway
        revList = [newRev]

    self.changeCount = len(revList)
    self.lastRev[branch] = newRev

    if self.changeCount:
        log.msg('gitpoller: processing %d changes: %s from "%s" branch "%s"'
                % (self.changeCount, revList, self.repourl, branch))

    for rev in revList:
        # gather commit details in parallel; consumeErrors so one failure
        # doesn't leave unhandled errbacks
        dl = defer.DeferredList([
            self._get_commit_timestamp(rev),
            self._get_commit_author(rev),
            self._get_commit_files(rev),
            self._get_commit_comments(rev),
        ], consumeErrors=True)

        results = yield dl

        # check for failures
        failures = [r[1] for r in results if not r[0]]
        if failures:
            for failure in failures:
                log.err(
                    failure,
                    "while processing changes for {} {}".format(
                        newRev, branch))
            # just fail on the first error; they're probably all related!
            failures[0].raiseException()

        timestamp, author, files, comments = [r[1] for r in results]

        yield self.master.data.updates.addChange(
            author=author, revision=ascii2unicode(rev), files=files,
            comments=comments, when_timestamp=timestamp,
            branch=ascii2unicode(self._removeHeads(branch)),
            project=self.project,
            repository=ascii2unicode(self.repourl),
            category=self.category, src=u'git')
def ensureLength(self, col, value): assert col.type.length, "column %s does not have a length" % (col,) if value and len(value) > col.type.length: value = value[:col.type.length // 2] + hashlib.sha1(unicode2bytes(value)).hexdigest()[:col.type.length // 2] return value
class TestLogWatcher(unittest.TestCase, dirs.DirsMixin, TestReactorMixin):
    """Tests for LogWatcher using a mocked reactor and spawned process."""

    # same platform-dependent line delimiter the watcher itself uses
    delimiter = unicode2bytes(os.linesep)

    def setUp(self):
        self.setUpDirs('workdir')
        self.addCleanup(self.tearDownDirs)
        self.setUpTestReactor()

        # intercept process spawning so no real `tail` is launched
        self.spawned_process = mock.Mock()
        self.reactor.spawnProcess = mock.Mock(
            return_value=self.spawned_process)

    def test_start(self):
        lw = MockedLogWatcher('workdir/test.log', _reactor=self.reactor)
        lw._start = mock.Mock()

        lw.start()
        self.reactor.spawnProcess.assert_called()
        # the missing logfile is created before tailing starts
        self.assertEqual(lw.created_paths, ['workdir/test.log'])
        self.assertTrue(lw.running)

    @defer.inlineCallbacks
    def test_success_before_timeout(self):
        lw = MockedLogWatcher('workdir/test.log', timeout=5,
                              _reactor=self.reactor)
        d = lw.start()
        # just inside the 5s window: the success line must still be seen
        self.reactor.advance(4.9)
        lw.lineReceived(b'BuildMaster is running')
        res = yield d
        self.assertEqual(res, 'buildmaster')

    @defer.inlineCallbacks
    def test_failure_after_timeout(self):
        lw = MockedLogWatcher('workdir/test.log', timeout=5,
                              _reactor=self.reactor)
        d = lw.start()
        # past the window: the late success line no longer matters
        self.reactor.advance(5.1)
        lw.lineReceived(b'BuildMaster is running')
        with self.assertRaises(BuildmasterTimeoutError):
            yield d

    @defer.inlineCallbacks
    def test_progress_restarts_timeout(self):
        lw = MockedLogWatcher('workdir/test.log', timeout=5,
                              _reactor=self.reactor)
        d = lw.start()
        # a progress line ('added builder') resets the stall timer, so two
        # sub-5s waits in a row still succeed
        self.reactor.advance(4.9)
        lw.lineReceived(b'added builder')
        self.reactor.advance(4.9)
        lw.lineReceived(b'BuildMaster is running')
        res = yield d
        self.assertEqual(res, 'buildmaster')

    @defer.inlineCallbacks
    def test_handles_very_long_lines(self):
        lw = MockedLogWatcher('workdir/test.log', timeout=5,
                              _reactor=self.reactor)
        d = lw.start()
        # over-length line is dropped with a warning; following line works
        lw.dataReceived(b't' * lw.MAX_LENGTH * 2 + self.delimiter +
                        b'BuildMaster is running' + self.delimiter)
        res = yield d
        self.assertEqual(lw.printed_output, [
            'Got an a very long line in the log (length 32768 bytes), '
            'ignoring'
        ])
        self.assertEqual(res, 'buildmaster')

    @defer.inlineCallbacks
    def test_handles_very_long_lines_separate_packet(self):
        lw = MockedLogWatcher('workdir/test.log', timeout=5,
                              _reactor=self.reactor)
        d = lw.start()
        # same as above but the delimiter arrives in a later packet
        lw.dataReceived(b't' * lw.MAX_LENGTH * 2)
        lw.dataReceived(self.delimiter + b'BuildMaster is running' +
                        self.delimiter)
        res = yield d
        self.assertEqual(lw.printed_output, [
            'Got an a very long line in the log (length 32768 bytes), '
            'ignoring'
        ])
        self.assertEqual(res, 'buildmaster')

    @defer.inlineCallbacks
    def test_handles_very_long_lines_separate_packet_with_newline(self):
        lw = MockedLogWatcher('workdir/test.log', timeout=5,
                              _reactor=self.reactor)
        d = lw.start()
        # oversized line ends exactly at a packet boundary
        lw.dataReceived(b't' * lw.MAX_LENGTH * 2 + self.delimiter)
        lw.dataReceived(b'BuildMaster is running' + self.delimiter)
        res = yield d
        self.assertEqual(lw.printed_output, [
            'Got an a very long line in the log (length 32768 bytes), '
            'ignoring'
        ])
        self.assertEqual(res, 'buildmaster')

    @defer.inlineCallbacks
    def test_matches_lines(self):
        # each known log line maps to either a result string or a Failure
        lines_and_expected = [
            (b'reconfig aborted without making any changes',
             ReconfigError()),
            (b'WARNING: reconfig partially applied; master may malfunction',
             ReconfigError()),
            (b'Server Shut Down', ReconfigError()),
            (b'BuildMaster startup failed', BuildmasterStartupError()),
            (b'message from master: attached', 'worker'),
            (b'configuration update complete', 'buildmaster'),
            (b'BuildMaster is running', 'buildmaster'),
        ]

        for line, expected in lines_and_expected:
            lw = MockedLogWatcher('workdir/test.log', timeout=5,
                                  _reactor=self.reactor)
            d = lw.start()
            lw.lineReceived(line)

            if isinstance(expected, Exception):
                with self.assertRaises(type(expected)):
                    yield d
            else:
                res = yield d
                self.assertEqual(res, expected)
def sendJsonMessage(self, **msg): return self.sendMessage(unicode2bytes(json.dumps(msg, default=toJson, separators=(',', ':'))))