def test_stopConsuming(self):
    """Subscribing then unsubscribing from a path must both be ACKed."""
    # Subscribe to all build events.
    start_msg = json.dumps(dict(cmd="startConsuming",
                                path="builds/*/*", _id=1))
    self.proto.onMessage(start_msg, False)
    self.proto.sendMessage.assert_called_with(
        '{"msg":"OK","code":200,"_id":1}')
    # Now unsubscribe from the same path.
    stop_msg = json.dumps(dict(cmd="stopConsuming",
                               path="builds/*/*", _id=2))
    self.proto.onMessage(stop_msg, False)
    self.proto.sendMessage.assert_called_with(
        '{"msg":"OK","code":200,"_id":2}')
def testDefaultDialectWithChange(self):
    """Render the base change hook and check every submitted field."""
    self.request.uri = "/change_hook/"
    self.request.method = "GET"
    self.request.args = {
        "category": ["mycat"],
        "files": [json.dumps(['file1', 'file2'])],
        "repository": ["myrepo"],
        "when": [1234],
        "author": ["Santa Claus"],
        "number": [2],
        "comments": ["a comment"],
        "project": ["a project"],
        "at": ["sometime"],
        "branch": ["a branch"],
        "revlink": ["a revlink"],
        "properties": [json.dumps({"prop1": "val1", "prop2": "val2"})],
        "revision": [99],
    }
    d = self.request.test_render(self.changeHook)

    def check_changes(r):
        # Exactly one change should have been added, with each field
        # faithfully copied from the request arguments.
        self.assertEquals(len(self.request.addedChanges), 1)
        change = self.request.addedChanges[0]
        self.assertEquals(change["category"], "mycat")
        self.assertEquals(change["repository"], "myrepo")
        self.assertEquals(change["when"], 1234)
        self.assertEquals(change["author"], "Santa Claus")
        self.assertEquals(change["src"], None)
        self.assertEquals(change["revision"], 99)
        self.assertEquals(change["comments"], "a comment")
        self.assertEquals(change["project"], "a project")
        self.assertEquals(change["branch"], "a branch")
        self.assertEquals(change["revlink"], "a revlink")
        self.assertEquals(change['properties'],
                          dict(prop1='val1', prop2='val2'))
        self.assertEquals(change['files'], ['file1', 'file2'])

    d.addCallback(check_changes)
    return d
def setProperty(self, name, value, source, runtime=False):
    """Store property *name* = *value*, remembering its *source*.

    Raises if *value* is not JSON-serializable.
    """
    name = util.ascii2unicode(name)
    # Let the exception propagate if value is not JSON-able.
    json.dumps(value)
    source = util.ascii2unicode(source)
    self.properties[name] = (value, source)
    if runtime:
        self.runtime.add(name)
def __new__(cls, v):
    """Wrap dicts as Namespace instances; recurse into lists.

    Any other value is returned unchanged.
    """
    if pedantic:
        # We raise an exception here if v is not JSON-able.
        json.dumps(v)
    if type(v) == dict:
        return dict.__new__(cls, v)
    elif type(v) == list:
        return [Namespace(item) for item in v]
    return v
def handle():
    """Authorize, fetch and JSON-encode the resource, then write it.

    Known REST errors are mapped to their HTTP status codes; anything
    else becomes a 500. Rendering failures are reported via twisted's
    request.processingFailed.
    """
    try:
        try:
            authAction = self.getRequiredAuthAction(request)
            if authAction is not None:
                authz = self.getAuthz(request)
                res = yield authz.actionAllowed(authAction, request)
                if not res:
                    logger.info("Auth action '%s' is not allowed: %s"
                                % (authAction, request.uri))
                    raise Forbidden('Not allowed: %s' % request.uri)
            data = yield self.asDict(request)
            if data is None:
                raise NotFound("Not found: %s" % request.uri)
        except NotFound as e:
            data = dict(message=str(e), _httpCode=404)
        except Forbidden as e:
            data = dict(message=str(e), _httpCode=403)
        except Conflict as e:
            data = dict(message=str(e), _httpCode=409)
        except NeedUpdate as e:
            data = dict(message=str(e), _httpCode=410)
        except BadRequest as e:
            data = dict(message=str(e), _httpCode=400)
        except Exception as e:
            log.err()
            data = dict(message=str(e), _httpCode=500)
        assert isinstance(data, dict)
        # _httpCode is transport metadata, not payload: apply and strip it.
        httpCode = data.get('_httpCode', None)
        if httpCode is not None:
            request.setResponseCode(httpCode)
            del data['_httpCode']
        compact = RequestArgToBool(request, 'compact', False)
        if compact:
            data = json.dumps(data, sort_keys=True, separators=(',', ':'))
        else:
            data = json.dumps(data, sort_keys=True, indent=2)
        data = data.encode("utf-8")
        request.setHeader("Access-Control-Allow-Origin", "*")
        request.setHeader("content-type", "application/json")
        if httpCode is None or httpCode == 200:
            if RequestArgToBool(request, 'as_file', False):
                request.setHeader(
                    "content-disposition",
                    "attachment; filename=\"%s.json\"" % request.path)
        # Make sure we get fresh pages.
        request.setHeader("Pragma", "no-cache")
        request.write(data)
        request.finish()
    except Exception as e:
        request.processingFailed(Failure(e))
    return
def writeError(msg, errcode=400):
    """Finish the request with a JSON error body and status *errcode*."""
    if self.debug:
        log.msg("HTTP error: %s" % (msg,))
    request.setResponseCode(errcode)
    request.setHeader('content-type', 'text/plain; charset=utf-8')
    if request.method == 'POST':
        # jsonRPC callers want the error message in error.message
        body = json.dumps(dict(error=dict(message=msg)))
    else:
        body = json.dumps(dict(error=msg))
    request.write(body)
    request.finish()
def validate(self, name, object):
    """Yield error strings if *object* is not a JSON-able dictionary."""
    if not isinstance(object, dict):
        yield ("%s (%r) is not a dictionary (got type %s)"
               % (name, object, type(object)))
        return
    # Make sure JSON can represent it.
    try:
        json.dumps(object)
    except Exception as e:
        yield "%s is not JSON-able: %s" % (name, e)
        return
def setProperty(self, name, value, source, runtime=False):
    """Store property *name* = *value*, remembering its *source*.

    Values that cannot be serialized to JSON are deprecated: they emit
    a DeprecationWarning but are still stored for now.
    """
    try:
        json.dumps(value)
    except TypeError:
        # BUG FIX: the original concatenated "...and" + "will..." with no
        # separating space, producing "andwill" in the warning text.
        warnings.warn(
            "Non jsonable properties are not explicitly supported and "
            "will be explicitly disallowed in a future version.",
            DeprecationWarning, stacklevel=2)
    self.properties[name] = (value, source)
    if runtime:
        # Track properties that were set at runtime (vs. configuration).
        self.runtime.add(name)
def setBuildProperty(self, buildid, name, value, source):
    """Fake setBuildProperty: validate argument types, then succeed."""
    validation.verifyType(self.testcase, 'buildid', buildid,
                          validation.IntValidator())
    validation.verifyType(self.testcase, 'name', name,
                          validation.StringValidator())
    # The value must round-trip through JSON.
    try:
        json.dumps(value)
    except (TypeError, ValueError):
        self.testcase.fail("Value for %s is not JSON-able" % name)
    validation.verifyType(self.testcase, 'source', source,
                          validation.StringValidator())
    return defer.succeed(None)
def content(self, request):
    """Renders the json dictionaries, honoring the request's flags."""
    # Supported flags.
    select = request.args.get('select')
    as_text = RequestArgToBool(request, 'as_text', False)
    filter_out = RequestArgToBool(request, 'filter', as_text)
    compact = RequestArgToBool(request, 'compact', not as_text)
    callback = request.args.get('callback')
    if select is not None:
        # Implement filtering at global level and every child.
        del request.args['select']
        # Do not render self.asDict()!
        data = {}
        # Remove superfluous '/' and handle the deepest paths first.
        select = [s.strip('/') for s in select]
        select.sort(cmp=lambda x, y: cmp(x.count('/'), y.count('/')),
                    reverse=True)
        for item in select:
            # Start back at root.
            node = data
            # Implementation similar to
            # twisted.web.resource.getChildForRequest, but with a
            # hacked up request.
            child = self
            prepath = request.prepath[:]
            postpath = request.postpath[:]
            request.postpath = filter(None, item.split('/'))
            while request.postpath and not child.isLeaf:
                pathElement = request.postpath.pop(0)
                node[pathElement] = {}
                node = node[pathElement]
                request.prepath.append(pathElement)
                child = child.getChildWithDefault(pathElement, request)
            node.update(child.asDict(request))
            request.prepath = prepath
            request.postpath = postpath
    else:
        data = self.asDict(request)
    if filter_out:
        data = FilterOut(data)
    if compact:
        data = json.dumps(data, sort_keys=True, separators=(',', ':'))
    else:
        data = json.dumps(data, sort_keys=True, indent=2)
    if callback:
        # Only accept things that look like identifiers for now.
        callback = callback[0]
        if re.match(r'^[a-zA-Z$][a-zA-Z$0-9.]*$', callback):
            data = '%s(%s);' % (callback, data)
    return data
def assertProperties(self, sourced, properties):
    """Assert *properties* is a valid properties dict.

    When *sourced* is true, each value must be a (value, source) pair.
    """
    self.testcase.assertIsInstance(properties, dict)
    for k, v in properties.iteritems():
        self.testcase.assertIsInstance(k, unicode)
        if sourced:
            self.testcase.assertIsInstance(v, tuple)
            self.testcase.assertEqual(len(v), 2)
            propval, propsrc = v
            self.testcase.assertIsInstance(propsrc, unicode)
        else:
            propval = v
        # Every property value must survive JSON serialization.
        try:
            json.dumps(propval)
        except (TypeError, ValueError):
            self.testcase.fail("value for %s is not JSON-able" % (k,))
def render(self, request):
    """Return a JSON map of supported REST API versions to their URLs."""
    request.setHeader("content-type", JSON_ENCODED)
    min_vers = self.master.config.www.get('rest_minimum_version', 0)
    api_versions = dict(
        ('v%d' % v, '%sapi/v%d' % (self.base_url, v))
        for v in self.version_classes
        if v > min_vers)
    return json.dumps(dict(api_versions=api_versions))
def create_buildset(self, ssid, reason, properties, builderNames, t,
                    external_idstring=None):
    """Create a BuildSet plus its properties and BuildRequests.

    Returns the new buildset id; notifies listeners of the additions.
    """
    now = self._getCurrentTime()
    # Insert the buildset itself.
    t.execute(self.quoteq("INSERT INTO buildsets"
                          " (external_idstring, reason,"
                          " sourcestampid, submitted_at)"
                          " VALUES (?,?,?,?)"),
              (external_idstring, reason, ssid, now))
    bsid = t.lastrowid
    # Attach the JSON-encoded properties.
    for propname, propvalue in properties.properties.items():
        encoded_value = json.dumps(propvalue)
        t.execute(self.quoteq("INSERT INTO buildset_properties"
                              " (buildsetid, property_name, property_value)"
                              " VALUES (?,?,?)"),
                  (bsid, propname, encoded_value))
    # One build request per requested builder.
    brids = []
    for bn in builderNames:
        t.execute(self.quoteq("INSERT INTO buildrequests"
                              " (buildsetid, buildername, submitted_at)"
                              " VALUES (?,?,?)"),
                  (bsid, bn, now))
        brids.append(t.lastrowid)
    self.notify("add-buildset", bsid)
    self.notify("add-buildrequest", *brids)
    return bsid
def render_control_resource(self, rsrc, path='/', params=None,
                            requestJson=None, action="notfound", id=None,
                            content_type='application/json'):
    """POST a JSON-RPC control request to *rsrc* and check the reply.

    Pass either *requestJson* (a raw request body, used for invalid
    requests) or *action*/*params*/*id* to build a valid JSON-RPC 2.0
    request.

    BUG FIX: the original used the mutable default ``params={}``, which
    is shared between calls; ``None`` plus an explicit fallback is safe
    and behaves identically for callers.
    """
    if params is None:
        params = {}
    # pass *either* a request or postpath
    id = id or self.UUID
    request = self.make_request(path)
    request.method = "POST"
    request.content = StringIO(requestJson or json.dumps(
        {"jsonrpc": "2.0", "method": action, "params": params, "id": id}))
    request.input_headers = {'content-type': content_type}
    rv = rsrc.render(request)
    if rv != server.NOT_DONE_YET:
        d = defer.succeed(rv)
    else:
        d = request.deferred

    @d.addCallback
    def check(_json):
        res = json.loads(_json)
        self.assertIn("jsonrpc", res)
        self.assertEqual(res["jsonrpc"], "2.0")
        if not requestJson:
            # requestJson is used for invalid requests, so don't expect ID
            self.assertIn("id", res)
            self.assertEqual(res["id"], id)
    return d
def writeError(msg, errcode=400):
    """Finish the request with a plain-text JSON error and *errcode*."""
    if self.debug:
        log.msg("HTTP error: %s" % (msg,))
    request.setResponseCode(errcode)
    request.setHeader('content-type', 'text/plain; charset=utf-8')
    request.write(json.dumps(dict(error=msg)))
    request.finish()
def thd(conn):
    """Verify the buildset, its property, and both buildrequest rows."""
    self.assertEqual(len(brids), 2)
    # Should see one buildset row.
    r = conn.execute(self.db.model.buildsets.select())
    rows = [(row.id, row.external_idstring, row.reason,
             row.sourcestampid, row.complete, row.complete_at,
             row.results)
            for row in r.fetchall()]
    self.assertEqual(rows,
                     [(bsid, None, u'because', 234, 0, None, -1)])
    # One property row.
    r = conn.execute(self.db.model.buildset_properties.select())
    rows = [(row.buildsetid, row.property_name, row.property_value)
            for row in r.fetchall()]
    self.assertEqual(rows,
                     [(bsid, 'prop', json.dumps([['list'], 'test']))])
    # And two buildrequests rows (and don't re-check the default
    # columns).
    r = conn.execute(self.db.model.buildrequests.select())
    rows = [(row.buildsetid, row.id, row.buildername)
            for row in r.fetchall()]
    # We don't know which of the brids is assigned to which
    # buildername, but either one will do.
    self.assertEqual(sorted(rows),
                     [(bsid, brids['a'], 'a'), (bsid, brids['b'], 'b')])
def test_createJobfile_v5(self):
    """A v5 jobfile is a netstring wrapping a JSON payload."""
    jobid = '123-456'
    branch = 'branch'
    baserev = 'baserev'
    patch_level = 0
    patch_body = 'diff...'
    repository = 'repo'
    project = 'proj'
    who = 'someuser'
    comment = 'insightful comment'
    builderNames = ['runtests']
    properties = {'foo': 'bar'}
    job = tryclient.createJobfile(
        jobid, branch, baserev, patch_level, patch_body, repository,
        project, who, comment, builderNames, properties)
    expected = self.makeNetstring(
        '5',
        json.dumps({
            'jobid': jobid,
            'branch': branch,
            'baserev': baserev,
            'patch_level': patch_level,
            'patch_body': patch_body,
            'repository': repository,
            'project': project,
            'who': who,
            'comment': comment,
            'builderNames': builderNames,
            'properties': properties,
        }))
    self.assertEqual(job, expected)
def thd(conn):
    """Insert a buildset, its properties, and a request per builder."""
    submitted_at = datetime.now()
    submitted_at_epoch = time.mktime(submitted_at.timetuple())
    transaction = conn.begin()
    # Insert the buildset itself.
    r = conn.execute(self.db.model.buildsets.insert(),
                     dict(sourcestampid=ssid,
                          submitted_at=submitted_at_epoch,
                          reason=reason,
                          external_idstring=external_idstring))
    bsid = r.inserted_primary_key[0]
    # Add any properties (stored as JSON [value, source] pairs).
    if properties:
        conn.execute(self.db.model.buildset_properties.insert(), [
            dict(buildsetid=bsid, property_name=k,
                 property_value=json.dumps([v, s]))
            for (k, v, s) in properties.asList()])
    # And finish with a build request for each builder.
    conn.execute(self.db.model.buildrequests.insert(), [
        dict(buildsetid=bsid, buildername=buildername,
             submitted_at=submitted_at_epoch)
        for buildername in builderNames])
    transaction.commit()
    return bsid
def _txn_addChangeToDatabase(self, t, change):
    """Persist *change* plus its links, files and properties; notify."""
    q = self.quoteq("INSERT INTO changes"
                    " (author,"
                    " comments, is_dir,"
                    " branch, revision, revlink,"
                    " when_timestamp, category,"
                    " repository, project)"
                    " VALUES (?, ?,?, ?,?,?, ?,?, ?,?)")
    # TODO: map None to.. empty string?
    values = (change.who,
              change.comments, change.isdir,
              change.branch, change.revision, change.revlink,
              change.when, change.category,
              change.repository, change.project)
    t.execute(q, values)
    change.number = t.lastrowid
    for link in change.links:
        t.execute(self.quoteq("INSERT INTO change_links (changeid, link) "
                              "VALUES (?,?)"),
                  (change.number, link))
    for filename in change.files:
        t.execute(self.quoteq("INSERT INTO change_files (changeid,filename)"
                              " VALUES (?,?)"),
                  (change.number, filename))
    for propname, propvalue in change.properties.properties.items():
        encoded_value = json.dumps(propvalue)
        t.execute(self.quoteq("INSERT INTO change_properties"
                              " (changeid, property_name, property_value)"
                              " VALUES (?,?,?)"),
                  (change.number, propname, encoded_value))
    self.notify("add-change", change.number)
def thd(conn):
    """Verify the buildset, its property, and both buildrequest rows."""
    # Should see one buildset row.
    r = conn.execute(self.db.model.buildsets.select())
    rows = [(row.id, row.external_idstring, row.reason,
             row.sourcestampid, row.complete, row.complete_at,
             row.results)
            for row in r.fetchall()]
    self.assertEqual(rows,
                     [(bsid, None, u"because", 234, 0, None, None)])
    # One property row.
    r = conn.execute(self.db.model.buildset_properties.select())
    rows = [(row.buildsetid, row.property_name, row.property_value)
            for row in r.fetchall()]
    self.assertEqual(rows,
                     [(bsid, "prop", json.dumps([["list"], "test"]))])
    # And two buildrequests rows (and don't re-check the default
    # columns).
    r = conn.execute(self.db.model.buildrequests.select())
    rows = [(row.buildsetid, row.buildername) for row in r.fetchall()]
    self.assertEqual(sorted(rows), [(bsid, "a"), (bsid, "b")])
def parseCustomTemplateDir(self, template_dir):
    """Walk *template_dir*, mapping template URLs to JSON-encoded HTML.

    .html files are used as-is; .jade files are compiled with pyjade
    when it is installed, and ignored otherwise.
    """
    res = {}
    allowed_ext = [".html"]
    try:
        import pyjade
        allowed_ext.append(".jade")
    except ImportError:  # pragma: no cover
        log.msg("pyjade not installed. Ignoring .jade files from %s"
                % (template_dir,))
        pyjade = None
    for root, dirs, files in os.walk(template_dir):
        if root == template_dir:
            template_name = posixpath.join("views", "%s.html")
        else:
            # template_name is a url, so we really want '/'
            # root is a os.path, though
            template_name = posixpath.join(
                os.path.basename(root), "views", "%s.html")
        for f in files:
            fn = os.path.join(root, f)
            basename, ext = os.path.splitext(f)
            if ext not in allowed_ext:
                continue
            if ext == ".html":
                with open(fn) as fp:
                    html = fp.read().strip()
            elif ext == ".jade":
                with open(fn) as fp:
                    jade = fp.read()
                parser = pyjade.parser.Parser(jade)
                block = parser.parse()
                compiler = pyjade.ext.html.Compiler(block, pretty=False)
                html = compiler.compile()
            res[template_name % (basename,)] = json.dumps(html)
    return res
def test_parseJob_v5(self):
    """A v5 netstring job parses back into the expected dictionary."""
    sched = trysched.Try_Jobdir(
        name='tsched', builderNames=['buildera', 'builderb'],
        jobdir='foo')
    jobstr = self.makeNetstring(
        '5',
        json.dumps({
            'jobid': 'extid',
            'branch': 'trunk',
            'baserev': '1234',
            'patch_level': 1,
            'patch_body': 'this is my diff, -- ++, etc.',
            'repository': 'repo',
            'project': 'proj',
            'who': 'who',
            'comment': 'comment',
            'builderNames': ['buildera', 'builderc'],
            'properties': {'foo': 'bar'},
        }))
    parsedjob = sched.parseJob(StringIO.StringIO(jobstr))
    self.assertEqual(parsedjob, {
        'baserev': '1234',
        'branch': 'trunk',
        'builderNames': ['buildera', 'builderc'],
        'jobid': 'extid',
        'patch_body': 'this is my diff, -- ++, etc.',
        'patch_level': 1,
        'project': 'proj',
        'who': 'who',
        'comment': 'comment',
        'repository': 'repo',
        'properties': {'foo': 'bar'},
    })
def testBasic(self):
    """JSONPropertiesDownload sends the props/sourcestamps JSON blob."""
    s = transfer.JSONPropertiesDownload("props.json")
    s.build = Mock()
    props = Properties()
    props.setProperty("key1", "value1", "test")
    s.build.getProperties.return_value = props
    s.build.getWorkerCommandVersion.return_value = 1
    ss = Mock()
    ss.asDict.return_value = dict(revision="12345")
    s.build.getAllSourceStamps.return_value = [ss]
    s.worker = Mock()
    s.remote = Mock()
    s.start()
    # Find the downloadFile command and verify the payload it carries.
    for c in s.remote.method_calls:
        name, command, args = c
        commandName = command[3]
        kwargs = command[-1]
        if commandName == "downloadFile":
            self.assertEquals(kwargs["slavedest"], "props.json")
            reader = kwargs["reader"]
            data = reader.remote_read(100)
            self.assertEquals(
                data,
                json.dumps(dict(sourcestamps=[ss.asDict()],
                                properties={"key1": "value1"})))
            break
    else:
        raise ValueError("No downloadFile command found")
def test_handled_events_filter_false(self):
    """Unhandled event types must not produce any changes."""
    s = self.newChangeSource('somehost', 'some_choosy_user')
    d = s.lineReceived(json.dumps(self.change_merged_event))

    def check(_):
        self.failUnlessEqual(len(self.changes_added), 0)

    d.addCallback(check)
    return d
def addBuild(self, builderid, buildrequestid, buildslaveid, masterid,
             state_strings, _reactor=reactor, _race_hook=None):
    """Insert a new build row, retrying on build-number collisions.

    Returns a Deferred firing with (buildid, number).
    """
    started_at = _reactor.seconds()
    state_strings_json = json.dumps(state_strings)

    def thd(conn):
        tbl = self.db.model.builds
        # get the highest current number
        r = conn.execute(sa.select(
            [sa.func.max(tbl.c.number)],
            whereclause=(tbl.c.builderid == builderid)))
        number = r.scalar()
        new_number = 1 if number is None else number + 1
        # insert until we are succesful..
        while True:
            if _race_hook:
                _race_hook(conn)
            try:
                r = conn.execute(
                    self.db.model.builds.insert(),
                    dict(number=new_number,
                         builderid=builderid,
                         buildrequestid=buildrequestid,
                         buildslaveid=buildslaveid,
                         masterid=masterid,
                         started_at=started_at,
                         complete_at=None,
                         state_strings_json=state_strings_json))
            except (sa.exc.IntegrityError, sa.exc.ProgrammingError):
                # Another build raced us to this number; try the next.
                new_number += 1
                continue
            return r.inserted_primary_key[0], new_number
    return self.db.pool.do(thd)
def thd(conn):
    """Insert *change* plus its links, files, and JSON properties."""
    assert change.number is None
    ins = self.db.model.changes.insert()
    r = conn.execute(ins, dict(
        author=change.who,
        comments=change.comments,
        is_dir=change.isdir,
        branch=change.branch,
        revision=change.revision,
        revlink=change.revlink,
        when_timestamp=change.when,
        category=change.category,
        repository=change.repository,
        project=change.project))
    change.number = r.inserted_primary_key[0]
    if change.links:
        ins = self.db.model.change_links.insert()
        conn.execute(ins, [dict(changeid=change.number, link=l)
                           for l in change.links])
    if change.files:
        ins = self.db.model.change_files.insert()
        conn.execute(ins, [dict(changeid=change.number, filename=f)
                           for f in change.files])
    if change.properties:
        ins = self.db.model.change_properties.insert()
        conn.execute(ins, [dict(changeid=change.number,
                                property_name=k,
                                property_value=json.dumps(v))
                           for k, v, s in change.properties.asList()])
    return change
def renderJsonRpc(self, request):
    """Handle a JSON-RPC 2.0 control request and write the reply."""
    jsonRpcReply = {'jsonrpc': "2.0"}

    def writeError(msg, errcode=399,
                   jsonrpccode=JSONRPC_CODES["internal_error"]):
        if self.debug:
            log.msg("JSONRPC error: %s" % (msg,))
        request.setResponseCode(errcode)
        request.setHeader('content-type', JSON_ENCODED)
        if "error" not in jsonRpcReply:  # already filled in by caller
            jsonRpcReply['error'] = dict(code=jsonrpccode, message=msg)
        request.write(json.dumps(jsonRpcReply))

    with self.handleErrors(writeError):
        method, id, params = self.decodeJsonRPC2(request)
        jsonRpcReply['id'] = id
        ep, kwargs = self.getEndpoint(request)
        result = yield ep.control(method, params, kwargs)
        jsonRpcReply['result'] = result
        data = json.dumps(jsonRpcReply, default=self._toJson,
                          sort_keys=True, separators=(',', ':'))
        request.setHeader('content-type', JSON_ENCODED)
        if request.method == "HEAD":
            # HEAD gets headers (incl. length) but no body.
            request.setHeader("content-length", len(data))
            request.write('')
        else:
            request.write(data)
def test_lineReceived_patchset_created(self):
    """A patchset-created event yields one fully populated change."""
    s = self.newChangeSource('somehost', 'someuser')
    event = dict(
        type="patchset-created",
        change=dict(
            branch="br",
            project="pr",
            owner=dict(name="Dustin", email="*****@*****.**"),
            url="http://buildbot.net",
            subject="fix 1234"
        ),
        patchSet=dict(revision="abcdef"))
    d = s.lineReceived(json.dumps(event))

    def check(_):
        self.failUnlessEqual(len(self.changes_added), 1)
        c = self.changes_added[0]
        self.assertEqual(c['author'], "Dustin <*****@*****.**>")
        self.assertEqual(c['project'], "pr")
        self.assertEqual(c['branch'], "br")
        self.assertEqual(c['revision'], "abcdef")
        self.assertEqual(c['revlink'], "http://buildbot.net")
        self.assertEqual(c['comments'], "fix 1234")
        self.assertEqual(c['files'], ['unknown'])
        self.assertEqual(c['properties']['event.change.subject'],
                         'fix 1234')

    d.addCallback(check)
    return d
def testBasic(self):
    """JSONStringDownload sends the message dict as a JSON payload."""
    msg = dict(message="Hello World")
    s = transfer.JSONStringDownload(msg, "hello.json")
    s.build = Mock()
    s.build.getProperties.return_value = Properties()
    s.build.getSlaveCommandVersion.return_value = 1
    s.step_status = Mock()
    s.buildslave = Mock()
    s.remote = Mock()
    s.start()
    # Find the downloadFile command and verify the payload it carries.
    for c in s.remote.method_calls:
        name, command, args = c
        commandName = command[3]
        kwargs = command[-1]
        if commandName == 'downloadFile':
            self.assertEquals(kwargs['slavedest'], 'hello.json')
            reader = kwargs['reader']
            data = reader.remote_read(100)
            self.assertEquals(data, json.dumps(msg))
            break
    else:
        self.assert_(False, "No downloadFile command found")
def testBasic(self):
    """JSONPropertiesDownload sends the props/sourcestamp JSON blob."""
    s = transfer.JSONPropertiesDownload("props.json")
    s.build = Mock()
    props = Properties()
    props.setProperty('key1', 'value1', 'test')
    s.build.getProperties.return_value = props
    s.build.getSlaveCommandVersion.return_value = 1
    ss = Mock()
    ss.asDict.return_value = dict(revision="12345")
    s.build.getSourceStamp.return_value = ss
    s.step_status = Mock()
    s.buildslave = Mock()
    s.remote = Mock()
    s.start()
    # Find the downloadFile command and verify the payload it carries.
    for c in s.remote.method_calls:
        name, command, args = c
        commandName = command[3]
        kwargs = command[-1]
        if commandName == 'downloadFile':
            self.assertEquals(kwargs['slavedest'], 'props.json')
            reader = kwargs['reader']
            data = reader.remote_read(100)
            self.assertEquals(
                data,
                json.dumps(dict(sourcestamp=ss.asDict(),
                                properties={'key1': 'value1'})))
            break
    else:
        self.assert_(False, "No downloadFile command found")
def thd(conn):
    """Insert the change row plus files, properties, and user link."""
    # note that in a read-uncommitted database like SQLite this
    # transaction does not buy atomicity - other database users may
    # still come across a change without its files, properties,
    # etc. That's OK, since we don't announce the change until it's
    # all in the database, but beware.
    transaction = conn.begin()
    r = conn.execute(ch_tbl.insert(), dict(
        author=author,
        comments=comments,
        branch=branch,
        revision=revision,
        revlink=revlink,
        when_timestamp=datetime2epoch(when_timestamp),
        category=category,
        repository=repository,
        codebase=codebase,
        project=project,
        sourcestampid=ssid,
        parent_changeids=parent_changeid))
    changeid = r.inserted_primary_key[0]
    if files:
        tbl = self.db.model.change_files
        for f in files:
            self.checkLength(tbl.c.filename, f)
        conn.execute(tbl.insert(),
                     [dict(changeid=changeid, filename=f) for f in files])
    if properties:
        tbl = self.db.model.change_properties
        inserts = [dict(changeid=changeid,
                        property_name=k,
                        property_value=json.dumps(v))
                   for k, v in properties.iteritems()]
        for i in inserts:
            # Enforce column-length limits before hitting the DB.
            self.checkLength(tbl.c.property_name, i['property_name'])
            self.checkLength(tbl.c.property_value, i['property_value'])
        conn.execute(tbl.insert(), inserts)
    if uid:
        ins = self.db.model.change_users.insert()
        conn.execute(ins, dict(changeid=changeid, uid=uid))
    transaction.commit()
    return changeid
def assertReceivesChangeNewMessage(self, request):
    """Publish a change event and assert it arrives on *request*."""
    self.master.mq.callConsumer(("changes", "500", "new"),
                                test_data_changes.Change.changeEvent)
    kw = self.readEvent(request)
    self.assertEqual(kw["event"], "event")
    msg = json.loads(kw["data"])
    self.assertEqual(msg["key"], [u'changes', u'500', u'new'])
    # Round-trip the expected event through JSON so types match.
    expected = json.loads(
        json.dumps(test_data_changes.Change.changeEvent,
                   default=self._toJson))
    self.assertEqual(msg["message"], expected)
def start(self):
    """Build the JSON payload of properties + sourcestamp, then start."""
    properties = self.build.getProperties()
    # Flatten (name, value, source) triples to a name -> value map.
    props = dict((key, value)
                 for key, value, source in properties.asList())
    self.s = json.dumps(
        dict(
            properties=props,
            sourcestamp=self.build.getSourceStamp().asDict(),
        ),
    )
    return self.super_class.start(self)
def test_handled_events_filter_true(self):
    """A handled event type must be turned into exactly one change."""
    s = self.newChangeSource(
        'somehost', 'some_choosy_user',
        handled_events=["change-merged"])
    d = s.lineReceived(json.dumps(self.change_merged_event))

    @d.addCallback
    def check(_):
        self.failUnlessEqual(len(self.master.data.updates.changesAdded), 1)
        c = self.master.data.updates.changesAdded[0]
        self.failUnlessEqual(c["category"], "change-merged")
        self.assertEqual(c["branch"], "br")

    return d
def content(self, request):
    """Renders the json dictionaries, honoring the request's flags."""
    # Implement filtering at global level and every child.
    select = request.args.get('select')
    if select is not None:
        del request.args['select']
        # Do not render self.asDict()!
        data = {}
        # Remove superfluous '/' and handle the deepest paths first.
        select = [s.strip('/') for s in select]
        select.sort(cmp=lambda x, y: cmp(x.count('/'), y.count('/')),
                    reverse=True)
        for item in select:
            # Start back at root.
            node = data
            # Implementation similar to
            # twisted.web.resource.getChildForRequest, but with a
            # hacked up request.
            child = self
            prepath = request.prepath[:]
            postpath = request.postpath[:]
            request.postpath = filter(None, item.split('/'))
            while request.postpath and not child.isLeaf:
                pathElement = request.postpath.pop(0)
                node[pathElement] = {}
                node = node[pathElement]
                request.prepath.append(pathElement)
                child = child.getChildWithDefault(pathElement, request)
            node.update(child.asDict(request))
            request.prepath = prepath
            request.postpath = postpath
    else:
        data = self.asDict(request)
    as_text = RequestArgToBool(request, 'as_text', False)
    filter_out = RequestArgToBool(request, 'filter', as_text)
    if filter_out:
        data = FilterOut(data)
    if RequestArgToBool(request, 'compact', not as_text):
        return json.dumps(data, sort_keys=True, separators=(',', ':'))
    else:
        return json.dumps(data, sort_keys=True, indent=2)
def test_handled_events_filter_true(self):
    """A handled event type must be turned into exactly one change."""
    s = self.newChangeSource('somehost', 'some_choosy_user',
                             handled_events=["change-merged"])
    d = s.lineReceived(json.dumps(self.change_merged_event))

    def check(_):
        self.failUnlessEqual(len(self.changes_added), 1)
        c = self.changes_added[0]
        self.failUnlessEqual(c["category"], "change-merged")

    d.addCallback(check)
    return d
def setUp(self):
    """Prepare a mock request with a full change dict and a base hook."""
    changeDict = {
        "category": ["mycat"],
        "files": [json.dumps(['file1', 'file2'])],
        "repository": ["myrepo"],
        "when": [1234],
        "who": ["Santa Claus"],
        "number": [2],
        "comments": ["a comment"],
        "project": ["a project"],
        "at": ["sometime"],
        "branch": ["a branch"],
        "revlink": ["a revlink"],
        "properties": [json.dumps({"prop1": "val1", "prop2": "val2"})],
        "revision": [99],
    }
    self.request = MockRequest(changeDict)
    self.changeHook = change_hook.ChangeHookResource(
        dialects={'base': True})
def addStep(self, buildid, name, state_strings):
    """Insert a step row for *buildid*, de-duplicating its name.

    Returns a Deferred firing with (stepid, number, name); the name may
    be suffixed with '_N' if the requested name was already taken.
    """
    state_strings_json = json.dumps(state_strings)

    def thd(conn):
        tbl = self.db.model.steps
        # get the highest current number
        r = conn.execute(sa.select(
            [sa.func.max(tbl.c.number)],
            whereclause=(tbl.c.buildid == buildid)))
        number = r.scalar()
        number = 0 if number is None else number + 1
        # note that there is no chance for a race condition here,
        # since only one master is inserting steps. If there is a
        # conflict, then the name is likely already taken.
        insert_row = dict(buildid=buildid, number=number,
                          started_at=None, complete_at=None,
                          state_strings_json=state_strings_json,
                          urls_json='[]', name=name)
        try:
            r = conn.execute(self.db.model.steps.insert(), insert_row)
            got_id = r.inserted_primary_key[0]
        except (sa.exc.IntegrityError, sa.exc.ProgrammingError):
            got_id = None
        if got_id:
            return (got_id, number, name)
        # we didn't get an id, so calculate a unique name and use that
        # instead. Because names are truncated at the right to fit in a
        # 50-character identifier, this isn't a simple query.
        res = conn.execute(sa.select(
            [tbl.c.name],
            whereclause=((tbl.c.buildid == buildid))))
        names = set([row[0] for row in res])
        num = 1
        while True:
            numstr = '_%d' % num
            newname = name[:50 - len(numstr)] + numstr
            if newname not in names:
                break
            num += 1
        insert_row['name'] = newname
        r = conn.execute(self.db.model.steps.insert(), insert_row)
        got_id = r.inserted_primary_key[0]
        return (got_id, number, newname)
    return self.db.pool.do(thd)
def thd(conn):
    """Insert the change, trimming very long comments, plus sub-rows."""
    # note that in a read-uncommitted database like SQLite this
    # transaction does not buy atomicitiy - other database users may
    # still come across a change without its links, files, properties,
    # etc. That's OK, since we don't announce the change until it's
    # all in the database, but beware.
    transaction = conn.begin()
    # Trim long comment fields to 1024 characters, but preserve header
    # and footer with important tags such as Cr-Commit-Position.
    trimmed_comments = comments
    if len(trimmed_comments) > 1024:
        header, footer = trimmed_comments[:506], trimmed_comments[-506:]
        trimmed_comments = '%s\n...skip...\n%s' % (header, footer)
    ins = self.db.model.changes.insert()
    r = conn.execute(ins, dict(
        author=author,
        comments=trimmed_comments,
        is_dir=is_dir,
        branch=branch,
        revision=revision,
        revlink=revlink,
        when_timestamp=datetime2epoch(when_timestamp),
        category=category,
        repository=repository,
        project=project))
    changeid = r.inserted_primary_key[0]
    if links:
        ins = self.db.model.change_links.insert()
        conn.execute(ins, [dict(changeid=changeid, link=l)
                           for l in links])
    if files:
        ins = self.db.model.change_files.insert()
        conn.execute(ins, [dict(changeid=changeid, filename=f)
                           for f in files])
    if properties:
        ins = self.db.model.change_properties.insert()
        conn.execute(ins, [dict(changeid=changeid,
                                property_name=k,
                                property_value=json.dumps(v))
                           for k, v in properties.iteritems()])
    transaction.commit()
    return changeid
def thd(conn):
    """Insert a buildset, its properties, and one request per builder.

    Returns (bsid, brids) where brids maps buildername -> request id.
    """
    submitted_at = _reactor.seconds()
    transaction = conn.begin()
    # Insert the buildset itself.
    r = conn.execute(
        self.db.model.buildsets.insert(),
        dict(sourcestampid=ssid, submitted_at=submitted_at,
             reason=reason, complete=0, complete_at=None, results=-1,
             external_idstring=external_idstring))
    bsid = r.inserted_primary_key[0]
    # Add any properties (stored as JSON [value, source] pairs).
    if properties:
        conn.execute(self.db.model.buildset_properties.insert(), [
            dict(buildsetid=bsid, property_name=k,
                 property_value=json.dumps([v, s]))
            for k, (v, s) in properties.iteritems()])
    # and finish with a build request for each builder. Note that
    # sqlalchemy and the Python DBAPI do not provide a way to recover
    # inserted IDs from a multi-row insert, so this is done one row at
    # a time.
    brids = {}
    ins = self.db.model.buildrequests.insert()
    for buildername in builderNames:
        r = conn.execute(
            ins,
            dict(buildsetid=bsid, buildername=buildername, priority=0,
                 claimed_at=0, claimed_by_name=None,
                 claimed_by_incarnation=None, complete=0, results=-1,
                 submitted_at=submitted_at, complete_at=None))
        brids[buildername] = r.inserted_primary_key[0]
    transaction.commit()
    return (bsid, brids)
def test_parseJob_v5_no_properties(self):
    """A v5 job with an empty properties dict parses to empty props."""
    sched = trysched.Try_Jobdir(
        name='tsched', builderNames=['buildera', 'builderb'],
        jobdir='foo')
    jobstr = self.makeNetstring(
        '5',
        json.dumps({
            'jobid': 'extid',
            'branch': 'trunk',
            'baserev': '1234',
            'patch_level': '1',
            'diff': 'this is my diff, -- ++, etc.',
            'repository': 'repo',
            'project': 'proj',
            'who': 'who',
            'comment': 'comment',
            'builderNames': ['buildera', 'builderb'],
            'properties': {},
        }))
    parsedjob = sched.parseJob(StringIO.StringIO(jobstr))
    self.assertEqual(parsedjob['properties'], {})
def thd(conn):
    """Append a (name, url) pair to the step's JSON url list."""
    tbl = self.db.model.steps
    wc = (tbl.c.id == stepid)
    q = sa.select([tbl.c.urls_json], whereclause=wc)
    res = conn.execute(q)
    row = res.fetchone()
    if _racehook is not None:
        _racehook()
    # Decode, append, and write back the url list.
    urls = json.loads(row.urls_json)
    urls.append(dict(name=name, url=url))
    q = tbl.update(whereclause=wc)
    conn.execute(q, urls_json=json.dumps(urls))
def dataspec(config): master = yield fakemaster.make_master() data = connector.DataConnector(master) dirs = os.path.dirname(config['out']) if dirs and not os.path.exists(dirs): os.makedirs(dirs) with open(config['out'], "w") as f: if config['global'] is not None: f.write("window." + config['global'] + '=') f.write(json.dumps(data.allEndpoints(), indent=2)) print "written", config['out'] defer.returnValue(0)
def thd(conn):
    """Insert the change plus links, files, properties, and user link."""
    # note that in a read-uncommitted database like SQLite this
    # transaction does not buy atomicitiy - other database users may
    # still come across a change without its links, files, properties,
    # etc. That's OK, since we don't announce the change until it's
    # all in the database, but beware.
    transaction = conn.begin()
    ins = self.db.model.changes.insert()
    r = conn.execute(ins, dict(
        author=author,
        comments=comments,
        is_dir=is_dir,
        branch=branch,
        revision=revision,
        revlink=revlink,
        when_timestamp=datetime2epoch(when_timestamp),
        category=category,
        repository=repository,
        project=project))
    changeid = r.inserted_primary_key[0]
    if links:
        ins = self.db.model.change_links.insert()
        conn.execute(ins, [dict(changeid=changeid, link=l)
                           for l in links])
    if files:
        ins = self.db.model.change_files.insert()
        conn.execute(ins, [dict(changeid=changeid, filename=f)
                           for f in files])
    if properties:
        ins = self.db.model.change_properties.insert()
        conn.execute(ins, [dict(changeid=changeid,
                                property_name=k,
                                property_value=json.dumps(v))
                           for k, v in properties.iteritems()])
    if uid:
        ins = self.db.model.change_users.insert()
        conn.execute(ins, dict(changeid=changeid, uid=uid))
    transaction.commit()
    return changeid
def dataspec(config):
    # Dump the data API endpoint specification as JSON to config['out'],
    # or to stdout when config['out'] == '--'.  If config['global'] is
    # set, the JSON is prefixed with "window.<name>=" so the file can be
    # loaded directly as a JavaScript global.
    master = yield fakemaster.make_master()
    data = connector.DataConnector(master)
    if config['out'] != '--':
        dirs = os.path.dirname(config['out'])
        if dirs and not os.path.exists(dirs):
            os.makedirs(dirs)
        f = open(config['out'], "w")
    else:
        f = sys.stdout
    try:
        if config['global'] is not None:
            f.write("window." + config['global'] + '=')
        f.write(json.dumps(data.allEndpoints(), indent=2))
    finally:
        # Bug fix: the original called f.close() unconditionally, which
        # closed sys.stdout in the '--' case and leaked the file handle
        # on an exception.  Only close the file we opened ourselves.
        if f is not sys.stdout:
            f.close()
    defer.returnValue(0)
class IndexResource(resource.Resource):
    """Serve the single-page-app index.html template, rendered with the
    www configuration (user info, URLs, titles) embedded as JSON."""

    # enable reconfigResource calls
    needsReconfig = True

    def __init__(self, master, staticdir):
        resource.Resource.__init__(self, master)
        loader = jinja2.FileSystemLoader(staticdir)
        self.jinja = jinja2.Environment(loader=loader,
                                        undefined=jinja2.StrictUndefined)

    def reconfigResource(self, new_config):
        # keep only the www section of the new master config
        self.config = new_config.www

    def render_GET(self, request):
        return self.asyncRenderHelper(request, self.renderIndex)

    @defer.inlineCallbacks
    def renderIndex(self, request):
        config = {}
        request.setHeader("content-type", 'text/html')
        request.setHeader("Cache-Control", "public;max-age=0")
        session = request.getSession()
        try:
            # may log the user in transparently; a failure is surfaced to
            # the UI as a load-time warning instead of an error page
            yield self.config['auth'].maybeAutoLogin(request)
        except Error, e:
            config["on_load_warning"] = e.message
        if hasattr(session, "user_info"):
            config.update({"user": session.user_info})
        else:
            config.update({"user": {"anonymous": True}})
        config.update(self.config)
        config['buildbotURL'] = self.master.config.buildbotURL
        config['title'] = self.master.config.title
        config['titleURL'] = self.master.config.titleURL
        config['multiMaster'] = self.master.config.multiMaster

        def toJson(obj):
            # json.dumps fallback serializer: use the IConfigured config
            # dict when available, otherwise a repr placeholder
            obj = IConfigured(obj).getConfigDict()
            if isinstance(obj, dict):
                return obj
            return repr(obj) + " not yet IConfigured"
        tpl = self.jinja.get_template('index.html')
        tpl = tpl.render(configjson=json.dumps(config, default=toJson),
                         config=self.config)
        defer.returnValue(tpl.encode("ascii"))
def thd(conn): assert change.number is None # note that in a read-uncommitted database like SQLite this # transaction does not buy atomicitiy - other database users may # still come across a change without its links, files, properties, # etc. That's OK, since we don't announce the change until it's # all in the database, but beware. transaction = conn.begin() ins = self.db.model.changes.insert() r = conn.execute( ins, dict(author=change.who, comments=change.comments, is_dir=change.isdir, branch=change.branch, revision=change.revision, revlink=change.revlink, when_timestamp=change.when, category=change.category, repository=change.repository, project=change.project)) change.number = r.inserted_primary_key[0] if change.links: ins = self.db.model.change_links.insert() conn.execute(ins, [ dict(changeid=change.number, link=l) for l in change.links ]) if change.files: ins = self.db.model.change_files.insert() conn.execute(ins, [ dict(changeid=change.number, filename=f) for f in change.files ]) if change.properties: ins = self.db.model.change_properties.insert() conn.execute(ins, [ dict(changeid=change.number, property_name=k, property_value=json.dumps(v)) for k, v, s in change.properties.asList() ]) transaction.commit() return change
def do_ParameterTest(
        self, expect, klass,
        # None=one prop, Exception=exception, dict=many props
        expectKind=None,
        owner='user',
        value=None, req=None,
        expectJson=None,
        **kwargs):
    # Shared driver for forced-build parameter tests: build (or accept) a
    # parameter instance, optionally check its JSON spec, then force a
    # build through a scheduler configured with it and check the outcome.
    name = kwargs.setdefault('name', 'p1')

    # construct one if needed
    if isinstance(klass, type):
        prop = klass(**kwargs)
    else:
        prop = klass

    self.assertEqual(prop.name, name)
    self.assertEqual(prop.label, kwargs.get('label', prop.name))
    if expectJson is not None:
        gotJson = json.dumps(prop.getSpec())
        if gotJson != expectJson:
            try:
                # developer convenience: if the optional 'xerox' package
                # is installed, copy the new expected JSON to the
                # clipboard and pause so it can be pasted into the test
                import xerox
                formated = self.formatJsonForTest(gotJson)
                print "You may update the test with (copied to clipboard):\n" + formated
                xerox.copy(formated)
                input()
            except ImportError:
                print "Note: for quick fix, pip install xerox"
        self.assertEqual(gotJson, expectJson)

    sched = self.makeScheduler(properties=[prop])

    if not req:
        req = {name: value, 'reason': 'because'}
    try:
        bsid, brids = yield sched.force(owner, builderNames=['a'], **req)
    except Exception, e:
        if expectKind is not Exception:
            # an exception is not expected
            raise
        if not isinstance(e, expect):
            # the exception is the wrong kind
            raise
        defer.returnValue(None)  # success
class ChangeHookResource(resource.Resource): # this is a cheap sort of template thingy contentType = "text/html; charset=utf-8" children = {} def __init__(self, dialects={}): """ The keys of 'dialects' select a modules to load under master/buildbot/status/web/hooks/ The value is passed to the module's getChanges function, providing configuration options to the dialect. """ self.dialects = dialects def getChild(self, name, request): return self def render_GET(self, request): """ Reponds to events and starts the build process different implementations can decide on what methods they will accept """ return self.render_POST(request) def render_POST(self, request): """ Reponds to events and starts the build process different implementations can decide on what methods they will accept :arguments: request the http request object """ try: changes = self.getChanges(request) except ValueError, err: request.setResponseCode(400, err.args[0]) return err.args[0] msg("Payload: " + str(request.args)) if not changes: msg("No changes found") return "no changes found" submitted = self.submitChanges(changes, request) return json.dumps(submitted)
def _txn_addChangeToDatabase(self, t, change):
    # Submit a Change over the 'software_dev.commit' RPC proxy, record its
    # properties (JSON-encoded values), and fire the add-change
    # notification.  Sets change.number to the id assigned by the server.
    change_obj = rpc.RpcProxy('software_dev.commit')
    cdict = change.asDict()
    cleanupDict(cdict)
    for f in cdict['files']:
        cleanupDict(f)
    try:
        change.number = change_obj.submit_change(cdict)
        prop_arr = []
        for propname, propvalue in change.properties.properties.items():
            prop_arr.append((propname, json.dumps(propvalue)))
        if prop_arr:
            change_obj.setProperties(change.number, prop_arr)
        self.notify("add-change", change.number)
    except Exception, e:
        # best-effort: log the failure and continue rather than propagate
        log.err("Cannot add change: %s" % e)
def _addSchedulers(self, t, added):
    # Ensure each newly-added scheduler has a row in the schedulers table
    # and attach its schedulerid to the scheduler object.  Runs inside an
    # existing database transaction (cursor t).
    for scheduler in added:
        name = scheduler.name
        assert name
        class_name = "%s.%s" % (scheduler.__class__.__module__,
                                scheduler.__class__.__name__)
        q = self.quoteq("""
            SELECT schedulerid, class_name FROM schedulers WHERE
                name=? AND
                (class_name=? OR class_name='')
        """)
        t.execute(q, (name, class_name))
        row = t.fetchone()
        if row:
            sid, db_class_name = row
            if db_class_name == '':
                # We're updating from an old schema where the class name
                # wasn't stored.
                # Update this row's class name and move on
                q = self.quoteq("""UPDATE schedulers SET class_name=?
                                   WHERE schedulerid=?""")
                t.execute(q, (class_name, sid))
            elif db_class_name != class_name:
                # A different scheduler is being used with this name.
                # Ignore the old scheduler and create a new one
                sid = None
        else:
            # no row at all for this name: create one below
            sid = None

        if sid is None:
            # create a new row, with the latest changeid (so it won't try
            # to process all of the old changes) new Schedulers are
            # supposed to ignore pre-existing Changes
            q = ("SELECT changeid FROM changes"
                 " ORDER BY changeid DESC LIMIT 1")
            t.execute(q)
            max_changeid = _one_or_else(t.fetchall(), 0)
            state = scheduler.get_initial_state(max_changeid)
            state_json = json.dumps(state)
            q = self.quoteq("INSERT INTO schedulers"
                            " (name, class_name, state)"
                            " VALUES (?,?,?)")
            t.execute(q, (name, class_name, state_json))
            sid = t.lastrowid
        log.msg("scheduler '%s' got id %d" % (scheduler.name, sid))
        scheduler.schedulerid = sid
def thd(conn):
    # Insert or update a single build property (value stored
    # JSON-encoded), skipping the write when both value and source are
    # unchanged.
    bp_tbl = self.db.model.build_properties
    self.checkLength(bp_tbl.c.name, name)
    self.checkLength(bp_tbl.c.source, source)
    whereclause = sa.and_(bp_tbl.c.buildid == bid,
                          bp_tbl.c.name == name)
    q = sa.select(
        [bp_tbl.c.value, bp_tbl.c.source],
        whereclause=whereclause)
    prop = conn.execute(q).fetchone()
    value_js = json.dumps(value)
    if prop is None:
        # no existing row for this (buildid, name): insert
        conn.execute(bp_tbl.insert(),
                     dict(buildid=bid, name=name,
                          value=value_js, source=source))
    elif (prop.value != value_js) or (prop.source != source):
        # row exists but differs: update in place
        conn.execute(bp_tbl.update(whereclause=whereclause),
                     dict(value=value_js, source=source))
def test_custom_handler(self):
    """A user-supplied handler bound for 'change-merged' events can
    rewrite the event before the change is added."""
    src = self.newChangeSource(
        'somehost', 'some_choosy_user',
        handled_events=["change-merged"])

    def custom_handler(self, properties, event):
        event['change']['project'] = "world"
        return self.addChangeFromEvent(properties, event)
    # Patches class to not bother with the inheritance
    src.eventReceived_change_merged = types.MethodType(custom_handler, src)

    d = src.lineReceived(json.dumps(self.change_merged_event))

    def check(_):
        self.failUnlessEqual(len(self.master.data.updates.changesAdded), 1)
        change = self.master.data.updates.changesAdded[0]
        self.failUnlessEqual(change['project'], "world")
    d.addCallback(check)
    return d
def buildFinished(self, builderName, build, results):
    """Render the configured command for a finished build and push it to
    the queue directory, unless the build is filtered out by builder
    name or category."""
    builder = build.getBuilder()
    if self.builders is not None and builderName not in self.builders:
        return  # ignore this build
    if self.categories is not None and \
            builder.category not in self.categories:
        return  # ignore this build
    # copy the configured command so property rendering cannot mutate it
    cmd = [self.command] if isinstance(self.command, str) else self.command[:]
    cmd = build.getProperties().render(cmd)
    cmd.extend([
        os.path.join(self.master_status.basedir, builder.basedir),
        str(build.number),
    ])
    self.queuedir.add(json.dumps(cmd))
def createJobfile(jobid, branch, baserev, patch_level, patch_body,
                  repository, project, who, comment, builderNames,
                  properties):
    """Serialize a try-job description into the netstring-based job file
    format, choosing the lowest format version able to carry the data."""
    # Determine job file version from provided arguments
    if properties:
        version = 5
    elif comment:
        version = 4
    elif who:
        version = 3
    else:
        version = 2

    parts = [ns(str(version))]
    if version < 5:
        # versions 2-4: a flat sequence of netstrings
        parts.append(ns(jobid))
        parts.append(ns(branch))
        parts.append(ns(str(baserev)))
        parts.append(ns("%d" % patch_level))
        parts.append(ns(patch_body))
        parts.append(ns(repository))
        parts.append(ns(project))
        if version >= 3:
            parts.append(ns(who))
        if version >= 4:
            parts.append(ns(comment))
        for builderName in builderNames:
            parts.append(ns(builderName))
    else:
        # version 5: everything in a single JSON-encoded netstring
        parts.append(ns(json.dumps({
            'jobid': jobid,
            'branch': branch,
            'baserev': str(baserev),
            'patch_level': patch_level,
            'patch_body': patch_body,
            'repository': repository,
            'project': project,
            'who': who,
            'comment': comment,
            'builderNames': builderNames,
            'properties': properties,
        })))
    return "".join(parts)
def thd(conn):
    # Insert a buildset, its properties, and one build request per
    # builder, all within a single transaction; returns the new buildset
    # id.
    submitted_at = _reactor.seconds()
    transaction = conn.begin()

    # insert the buildset itself
    r = conn.execute(
        self.db.model.buildsets.insert(),
        dict(sourcestampid=ssid, submitted_at=submitted_at,
             reason=reason, complete=0, complete_at=None,
             results=-1, external_idstring=external_idstring))
    bsid = r.inserted_primary_key[0]

    # add any properties
    if properties:
        conn.execute(self.db.model.buildset_properties.insert(), [
            # each property is stored as a JSON-encoded [value, source]
            # pair
            dict(buildsetid=bsid, property_name=k,
                 property_value=json.dumps([v, s]))
            for k, (v, s) in properties.iteritems()
        ])

    # and finish with a build request for each builder
    conn.execute(self.db.model.buildrequests.insert(), [
        dict(buildsetid=bsid, buildername=buildername,
             priority=0, claimed_at=0, claimed_by_name=None,
             claimed_by_incarnation=None, complete=0,
             results=-1, submitted_at=submitted_at,
             complete_at=None)
        for buildername in builderNames
    ])
    transaction.commit()
    return bsid
def __init__(self, o,
             workerdest=None,
             slavedest=None,  # deprecated, use `workerdest` instead
             **buildstep_kwargs):
    """Download the JSON encoding of ``o`` to ``workerdest`` on the
    worker."""
    # Deprecated API support.
    if slavedest is not None:
        reportDeprecatedWorkerNameUsage(
            "'slavedest' keyword argument is deprecated, "
            "use 'workerdest' instead")
        assert workerdest is None
        workerdest = slavedest

    # Emulate that first two arguments are positional.
    if workerdest is None:
        raise TypeError("__init__() takes at least 3 arguments")

    # discard any caller-supplied 's'; we always pass our own below
    buildstep_kwargs.pop('s', None)

    StringDownload.__init__(self, s=json.dumps(o),
                            workerdest=workerdest,
                            **buildstep_kwargs)
def renderJsonRpc(self, request):
    # Handle a JSON-RPC 2.0 control request: decode, authorize the user,
    # dispatch to the endpoint's control() method, and write the reply.
    jsonRpcReply = {'jsonrpc': "2.0"}

    def writeError(msg, errcode=399,
                   jsonrpccode=JSONRPC_CODES["internal_error"]):
        # write a JSON-RPC error object with the given HTTP status
        if self.debug:
            log.msg("JSONRPC error: %s" % (msg, ))
        request.setResponseCode(errcode)
        request.setHeader('content-type', JSON_ENCODED)
        if "error" not in jsonRpcReply:  # already filled in by caller
            jsonRpcReply['error'] = dict(code=jsonrpccode, message=msg)
        request.write(json.dumps(jsonRpcReply))

    with self.handleErrors(writeError):
        method, id, params = self.decodeJsonRPC2(request)
        jsonRpcReply['id'] = id
        # raises (caught by handleErrors) if the user may not call this
        yield self.master.www.assertUserAllowed(request,
                                                tuple(request.postpath),
                                                method, params)
        userinfos = self.master.www.getUserInfos(request)
        if 'anonymous' in userinfos and userinfos['anonymous']:
            owner = "anonymous"
        else:
            owner = userinfos['email']
        ep, kwargs = self.getEndpoint(request)
        params['owner'] = owner
        result = yield ep.control(method, params, kwargs)
        jsonRpcReply['result'] = result
        # compact, deterministic encoding: sorted keys, no extra spaces
        data = json.dumps(jsonRpcReply, default=toJson,
                          sort_keys=True, separators=(',', ':'))
        request.setHeader('content-type', JSON_ENCODED)
        if request.method == "HEAD":
            # HEAD: advertise the length but send no body
            request.setHeader("content-length", len(data))
            request.write('')
        else:
            request.write(data)
def processwwwindex(config):
    # Pre-render the www index.html template in place with a fake
    # configuration, symlinking each plugin's static directory next to it
    # so the result can be served statically.  Returns an exit code via
    # defer.returnValue.
    master = yield fakemaster.make_master()
    master_service = WWWService()
    master_service.setServiceParent(master)
    if not config.get('index-file'):
        print(
            "Path to the index.html file is required with option --index-file or -i"
        )
        defer.returnValue(1)
    path = config.get('index-file')
    if not os.path.isfile(path):
        print("Invalid path to index.html")
        defer.returnValue(2)

    main_dir = os.path.dirname(path)
    for name in master_service.apps.names:
        if name != 'base':
            pluginapp = master_service.apps.get(name)
            try:
                os.symlink(pluginapp.static_dir,
                           os.path.join(main_dir, name))
            except OSError:
                # best-effort: the symlink probably already exists
                pass

    plugins = dict((k, {}) for k in master_service.apps.names
                   if k != "base")
    fakeconfig = {"user": {"anonymous": True}}
    fakeconfig['buildbotURL'] = master.config.buildbotURL
    fakeconfig['title'] = master.config.title
    fakeconfig['titleURL'] = master.config.titleURL
    fakeconfig['multiMaster'] = master.config.multiMaster
    fakeconfig['versions'] = IndexResource.getEnvironmentVersions()
    fakeconfig['plugins'] = plugins
    fakeconfig['auth'] = auth.NoAuth().getConfigDict()
    outputstr = ''
    with open(path) as indexfile:
        template = jinja2.Template(indexfile.read())
        outputstr = template.render(configjson=json.dumps(fakeconfig),
                                    config=fakeconfig)
    # overwrite the template with its rendered output
    with open(path, 'w') as indexfile:
        indexfile.write(outputstr)
    defer.returnValue(0)
def addBuild(self, builderid, buildrequestid, buildslaveid, masterid,
             state_strings, _reactor=reactor, _race_hook=None):
    # Insert a new build with the next per-builder build number, retrying
    # on unique-constraint collisions (another writer may take the same
    # number concurrently).  Returns a deferred (buildid, number) pair.
    started_at = _reactor.seconds()
    state_strings_json = json.dumps(state_strings)

    def thd(conn):
        tbl = self.db.model.builds
        # get the highest current number
        r = conn.execute(
            sa.select([sa.func.max(tbl.c.number)],
                      whereclause=(tbl.c.builderid == builderid)))
        number = r.scalar()
        new_number = 1 if number is None else number + 1
        # insert until we are succesful..
        while True:
            if _race_hook:
                # test-only seam to provoke the number collision
                _race_hook(conn)
            try:
                r = conn.execute(
                    self.db.model.builds.insert(),
                    dict(number=new_number, builderid=builderid,
                         buildrequestid=buildrequestid,
                         buildslaveid=buildslaveid, masterid=masterid,
                         started_at=started_at, complete_at=None,
                         state_strings_json=state_strings_json))
            except (sa.exc.IntegrityError, sa.exc.ProgrammingError):
                # duplicate number: bump and retry
                new_number += 1
                continue
            return r.inserted_primary_key[0], new_number
    return self.db.pool.do(thd)