def test_setState_badjson(self):
    """setState must reject a value that cannot be JSON-encoded."""
    deferred = self.insertTestData([
        fakedb.Object(id=10, name='x', class_name='y'),
    ])
    # a TestCase instance is not JSON-serializable, so setState should fail
    deferred.addCallback(
        lambda _: self.db.state.setState(10, 'x', self))
    return self.assertFailure(deferred, TypeError)
def test_getState_badjson(self):
    """getState must fail with TypeError when the stored JSON is corrupt."""
    deferred = self.insertTestData([
        fakedb.Object(id=10, name='x', class_name='y'),
        # deliberately malformed JSON payload in the state row
        fakedb.ObjectState(objectid=10, name='x', value_json='ff[1'),
    ])
    deferred.addCallback(lambda _: self.db.state.getState(10, 'x'))
    return self.assertFailure(deferred, TypeError)
def test_atomicCreateState_nojsonable(self):
    """atomicCreateState with a non-JSON-able value fails with TypeError."""
    yield self.insertTestData([
        fakedb.Object(id=10, name='-', class_name='-'),
    ])
    # the thunk returns the 'object' type itself, which json cannot encode
    failing = self.db.state.atomicCreateState(10, 'x', object)
    yield self.assertFailure(failing, TypeError)
def test_savedProperties(self):
    """A lastTrigger state persisted by a prior run is replayed on activation,
    including its saved properties."""
    sched = self.makeScheduler(
        name='test', builderNames=['test'], minute=[5],
        codebases={'cb': {'repository': 'annoying'}})
    # state exactly as a previous trigger() call would have written it
    value_json = (
        '[ [ {"codebase": "cb", "project": "p", "repository": "r", '
        '"branch": "br", "revision": "myrev"} ], '
        '{"testprop": ["test", "TEST"]}, null, null ]')
    self.db.insertTestData([
        fakedb.Object(id=self.SCHEDULERID, name='test',
                      class_name='NightlyTriggerable'),
        fakedb.ObjectState(objectid=self.SCHEDULERID, name='lastTrigger',
                           value_json=value_json),
    ])
    sched.activate()

    self.clock.advance(60 * 60)  # run for one hour

    self.assertBuildsetAdded(
        properties={'testprop': ('test', 'TEST')},
        sourcestamps=[
            dict(codebase='cb', branch='br', project='p',
                 repository='r', revision='myrev'),
        ])
def test_saveTrigger_noTrigger(self):
    """Once the triggered build has fired, lastTrigger is cleared."""
    sched = self.makeScheduler(
        name='test', builderNames=['test'], minute=[5],
        codebases={'cb': {'repository': 'annoying'}})
    self.db.insertTestData([
        fakedb.Object(id=self.SCHEDULERID, name='test',
                      class_name='NightlyTriggerable'),
    ])
    sched.activate()

    _, d = sched.trigger(False, [
        dict(codebase='cb', revision='myrev', branch='br',
             project='p', repository='r'),
    ], set_props=None)

    self.clock.advance(60 * 60)  # run for one hour

    def check(_):
        # the persisted trigger must be gone after the build fired
        self.db.state.assertState(self.SCHEDULERID, lastTrigger=None)
    d.addCallback(check)
    return d
def test_triggerProperties(self):
    """Properties given to trigger() are persisted in lastTrigger and end
    up on the resulting buildset."""
    sched = self.makeScheduler(
        name='test', builderNames=['test'], minute=[5],
        codebases={'cb': {'repository': 'annoying'}})
    self.db.insertTestData([
        fakedb.Object(id=self.SCHEDULERID, name='test',
                      class_name='NightlyTriggerable'),
    ])
    sched.activate()

    sched.trigger(False, [
        dict(codebase='cb', revision='myrev', branch='br',
             project='p', repository='r'),
    ], properties.Properties(testprop='test'))

    # the trigger, with its properties, is written to state immediately
    self.db.state.assertState(self.SCHEDULERID, lastTrigger=[
        [dict(codebase='cb', revision='myrev', branch='br',
              project='p', repository='r')],
        {'testprop': ['test', 'TEST']},
        None, None])

    self.clock.advance(60 * 60)  # run for one hour

    self.assertBuildsetAdded(
        properties=dict(testprop=('test', 'TEST')),
        sourcestamps=[
            dict(codebase='cb', branch='br', project='p',
                 repository='r', revision='myrev'),
        ])
def test_gotChange_createAbsoluteSourceStamps_older_change(self):
    """A change older than the most recently recorded one must not
    overwrite the stored codebase state."""
    sched = self.makeFullScheduler(
        name='test', builderNames=['test'], treeStableTimer=None,
        branch='master', codebases=self.codebases,
        createAbsoluteSourceStamps=True)
    self.db.insertTestData([
        fakedb.Object(id=self.OBJECTID, name='test',
                      class_name='SingleBranchScheduler'),
        fakedb.ObjectState(
            objectid=self.OBJECTID, name='lastCodebases',
            value_json='{"a": {"branch": "master", "repository": "A", '
                       '"revision": "5555:def", "lastChange": 20}}')])
    yield sched.activate()

    # change number 10 predates the stored change 20, so it is ignored
    yield sched.gotChange(
        self.mkch(codebase='a', revision='1234:abc',
                  repository='A', number=10),
        True)

    self.db.state.assertState(self.OBJECTID, lastCodebases={
        'a': dict(branch='master', repository='A',
                  revision='5555:def', lastChange=20)})

    yield sched.deactivate()
def test_getState_present(self):
    """getState returns the decoded JSON value of an existing state row."""
    yield self.insertTestData([
        fakedb.Object(id=10, name='x', class_name='y'),
        fakedb.ObjectState(objectid=10, name='x', value_json='[1,2]'),
    ])
    value = yield self.db.state.getState(10, 'x')
    self.assertEqual(value, [1, 2])
def test_atomicCreateState(self):
    """atomicCreateState stores the thunk's value and returns it."""
    yield self.insertTestData([
        fakedb.Object(id=10, name='-', class_name='-'),
    ])
    created = yield self.db.state.atomicCreateState(10, 'x', lambda: [1, 2])
    self.assertEqual(created, [1, 2])
    # the value must also have been persisted to the database
    stored = yield self.db.state.getState(10, 'x')
    self.assertEqual(stored, [1, 2])
def test_setState(self):
    """setState writes the JSON-encoded value into the object_state table."""
    yield self.insertTestData([
        fakedb.Object(id=10, name='-', class_name='-'),
    ])
    yield self.db.state.setState(10, 'x', [1, 2])

    def check(conn):
        # verify the raw row written to the database
        rows = conn.execute(self.db.model.object_state.select()).fetchall()
        self.assertEqual(
            [(row.objectid, row.name, row.value_json) for row in rows],
            [(10, 'x', '[1, 2]')])
    yield self.db.pool.do(check)
def test_atomicCreateState_conflict(self):
    """If another writer creates the row first, atomicCreateState returns
    the concurrently-written value instead of its own."""
    yield self.insertTestData([
        fakedb.Object(id=10, name='-', class_name='-'),
    ])

    def insert_concurrently(conn):
        # simulate a concurrent writer sneaking in mid-operation
        conn.execute(self.db.model.object_state.insert(),
                     objectid=10, name='x', value_json='22')
    self.db.state._test_timing_hook = insert_concurrently

    value = yield self.db.state.atomicCreateState(10, 'x', lambda: [1, 2])
    self.assertEqual(value, 22)
    # the concurrently-written value is what remains in the database
    value = yield self.db.state.getState(10, 'x')
    self.assertEqual(value, 22)
def test_gotChange_createAbsoluteSourceStamps_saveCodebase(self):
    """Each received change is recorded per-codebase in lastCodebases."""
    sched = self.makeFullScheduler(
        name='test', builderNames=['test'], treeStableTimer=None,
        branch='master', codebases=self.codebases,
        createAbsoluteSourceStamps=True)
    self.db.insertTestData([
        fakedb.Object(id=self.OBJECTID, name='test',
                      class_name='SingleBranchScheduler')
    ])
    yield sched.activate()

    yield sched.gotChange(
        self.mkch(codebase='a', revision='1234:abc',
                  repository='A', number=0),
        True)
    yield sched.gotChange(
        self.mkch(codebase='b', revision='2345:bcd',
                  repository='B', number=1),
        True)

    # both codebases are now present in the persisted state
    self.db.state.assertState(self.OBJECTID, lastCodebases={
        'a': dict(branch='master', repository='A',
                  revision='1234:abc', lastChange=0),
        'b': dict(branch='master', repository='B',
                  revision='2345:bcd', lastChange=1),
    })

    yield sched.deactivate()
def test_getUpstreamBuildsets_missing(self):
    """Stale bsids are pruned from state when fetching upstream buildsets."""
    sched = self.makeScheduler()
    # the state names bsid 12, but no Buildset row with id=12 exists
    self.db.insertTestData([
        fakedb.SourceStamp(id=1234),
        fakedb.Buildset(id=11),
        fakedb.Buildset(id=13),
        fakedb.BuildsetSourceStamp(buildsetid=13, sourcestampid=1234),
        fakedb.Object(id=OBJECTID),
        fakedb.ObjectState(objectid=OBJECTID, name='upstream_bsids',
                           value_json='[11,12,13]'),
    ])

    # the missing bsid 12 is silently filtered out of the result
    self.assertEqual((yield sched._getUpstreamBuildsets()),
                     [(11, [], False, -1),
                      (13, [1234], False, -1)])

    # and the pruned list is written back to the state table
    self.db.state.assertState(OBJECTID, upstream_bsids=[11, 13])
def test_setState_conflict(self):
    """A concurrent insert wins: setState must not clobber it."""
    def insert_concurrently(conn):
        # simulate another writer creating the row mid-operation
        conn.execute(self.db.model.object_state.insert(),
                     objectid=10, name='x', value_json='22')
    self.db.state._test_timing_hook = insert_concurrently

    yield self.insertTestData([
        fakedb.Object(id=10, name='-', class_name='-'),
    ])
    yield self.db.state.setState(10, 'x', [1, 2])

    def check(conn):
        rows = conn.execute(self.db.model.object_state.select()).fetchall()
        # the concurrently-written value is left intact
        self.assertEqual(
            [(row.objectid, row.name, row.value_json) for row in rows],
            [(10, 'x', '22')])
    yield self.db.pool.do(check)
def test_iterations_onlyIfChanged_createAbsoluteSourceStamps_oneChanged_loadOther(self):
    """With createAbsoluteSourceStamps=True, a new change in one codebase is
    merged with the previously persisted state of the other codebase."""
    fII = mock.Mock(name='fII')
    self.makeScheduler(
        name='test', builderNames=['test'], branch=None,
        minute=[5, 25, 45], onlyIfChanged=True, fileIsImportant=fII,
        codebases={'a': {'repository': "", 'branch': 'master'},
                   'b': {'repository': "", 'branch': 'master'}},
        createAbsoluteSourceStamps=True)
    self.db.insertTestData([
        fakedb.Object(id=self.OBJECTID, name='test', class_name='Nightly'),
        # codebase 'b' was seen in an earlier run and persisted
        fakedb.ObjectState(
            objectid=self.OBJECTID, name='lastCodebases',
            value_json='{"b": {"branch": "master", "repository": "B", '
                       '"revision": "1234:abc", "lastChange": 2}}')])
    change = self.makeFakeChange(number=3, codebase='a', revision='2345:bcd')

    yield self.do_test_iterations_onlyIfChanged_test(fII, (120, change, True))

    self.db.state.assertStateByClass(
        'test', 'Nightly', last_build=1500 + self.localtime_offset)
    # addBuildsetForChanges calls getCodebase, so this isn't too interesting
    self.assertEqual(self.addBuildsetCallTimes, [300])
    self.assertEqual(self.addBuildsetCalls, [
        ('addBuildsetForChanges', {
            'builderNames': None,
            'changeids': [3],
            'external_idstring': None,
            'properties': None,
            'reason': "The Nightly scheduler named 'test' triggered this build",
            'waited_for': False})])
    # 'a' comes from the new change, 'b' from the previously stored state
    self.db.state.assertStateByClass('test', 'Nightly', lastCodebases={
        'a': dict(revision='2345:bcd', branch=None, repository='',
                  lastChange=3),
        'b': dict(revision='1234:abc', branch="master", repository='B',
                  lastChange=2)})

    yield self.sched.deactivate()
def attachScheduler(self, scheduler, objectid, schedulerid,
                    overrideBuildsetMethods=False,
                    createBuilderDB=False):
    """Set up a scheduler with a fake master and db; sets self.sched, and
    sets the master's basedir to the absolute path of 'basedir' in the test
    directory.

    If C{overrideBuildsetMethods} is true, then all of the
    addBuildsetForXxx methods are overridden to simply append the method
    name and arguments to self.addBuildsetCalls.  These overridden methods
    return buildsets starting with 500 and buildrequest IDs starting
    with 100.

    For C{addBuildsetForSourceStamp}, this also overrides DB API methods
    C{addSourceStamp} and C{addSourceStampSet}, and uses that information
    to generate C{addBuildsetForSourceStamp} results.

    @returns: scheduler
    """
    scheduler.objectid = objectid

    # set up a fake master
    db = self.db = self.master.db
    self.mq = self.master.mq
    scheduler.setServiceParent(self.master)

    rows = [
        fakedb.Object(id=objectid, name=scheduler.name,
                      class_name='SomeScheduler'),
        fakedb.Scheduler(id=schedulerid, name=scheduler.name),
    ]
    if createBuilderDB is True:
        # also create a Builder row per builder the scheduler targets
        rows.extend([fakedb.Builder(name=bname)
                     for bname in scheduler.builderNames])

    db.insertTestData(rows)

    if overrideBuildsetMethods:
        for method in (
                'addBuildsetForSourceStampsWithDefaults',
                'addBuildsetForChanges',
                'addBuildsetForSourceStamps'):
            actual = getattr(scheduler, method)
            fake = getattr(self, 'fake_{}'.format(method))
            # the fake must accept exactly the same arguments as the real
            # method, otherwise the test would diverge from production
            self.assertArgSpecMatches(actual, fake)
            setattr(scheduler, method, fake)
        self.addBuildsetCalls = []
        # overridden methods hand out ids from these ranges
        self._bsidGenerator = iter(range(500, 999))
        self._bridGenerator = iter(range(100, 999))

        # temporarily override the sourcestamp and sourcestampset methods
        self.addedSourceStamps = []
        self.addedSourceStampSets = []

        def fake_addSourceStamp(**kwargs):
            # each stamp must reference the most recently added set
            self.assertEqual(kwargs['sourcestampsetid'],
                             400 + len(self.addedSourceStampSets) - 1)
            self.addedSourceStamps.append(kwargs)
            return defer.succeed(300 + len(self.addedSourceStamps) - 1)
        self.db.sourcestamps.addSourceStamp = fake_addSourceStamp

        def fake_addSourceStampSet():
            self.addedSourceStampSets.append([])
            return defer.succeed(400 + len(self.addedSourceStampSets) - 1)
        self.db.sourcestamps.addSourceStampSet = fake_addSourceStampSet

    # patch methods to detect a failure to upcall the activate and
    # deactivate methods .. unless we're testing BaseScheduler
    def patch(meth):
        oldMethod = getattr(scheduler, meth)

        @defer.inlineCallbacks
        def newMethod():
            # flag is set by the patched parent method below; if it is
            # still False afterwards, the subclass forgot to upcall
            self._parentMethodCalled = False
            rv = yield oldMethod()

            self.assertTrue(self._parentMethodCalled,
                            "'{}' did not call its parent".format(meth))
            return rv
        setattr(scheduler, meth, newMethod)

        oldParent = getattr(base.BaseScheduler, meth)

        def newParent(self_):
            self._parentMethodCalled = True
            return oldParent(self_)
        self.patch(base.BaseScheduler, meth, newParent)
    # only patch when the subclass actually overrides the method
    if scheduler.__class__.activate != base.BaseScheduler.activate:
        patch('activate')
    if scheduler.__class__.deactivate != base.BaseScheduler.deactivate:
        patch('deactivate')

    self.sched = scheduler
    return scheduler
def test_lineReceived_patchset_created(self):
    """A patchset-created event yields one change (with files), records
    last_event_ts, and the next poll resumes from that timestamp."""
    self.master.db.insertTestData([
        fakedb.Object(id=self.OBJECTID,
                      name='GerritEventLogPoller:gerrit',
                      class_name='GerritEventLogPoller')])
    yield self.newChangeSource(get_files=True)
    self.changesource.now = lambda: datetime.datetime.utcfromtimestamp(
        self.NOW_TIMESTAMP)
    # with no stored last_event_ts, the poller looks back 30 days
    thirty_days_ago = (
        datetime.datetime.utcfromtimestamp(self.NOW_TIMESTAMP)
        - datetime.timedelta(days=30))
    self._http.expect(
        method='get',
        ep='/plugins/events-log/events/',
        params={'t1': thirty_days_ago.strftime("%Y-%m-%d %H:%M:%S")},
        content_json=dict(
            type="patchset-created",
            change=dict(
                branch="br",
                project="pr",
                number="4321",
                owner=dict(name="Dustin", email="*****@*****.**"),
                url="http://buildbot.net",
                subject="fix 1234"),
            eventCreatedOn=self.EVENT_TIMESTAMP,
            patchSet=dict(revision="abcdef", number="12")))
    # get_files=True triggers a second request for the revision's files
    self._http.expect(
        method='get',
        ep='/changes/4321/revisions/12/files/',
        content=self.change_revision_resp,
    )
    yield self.startChangeSource()
    yield self.changesource.poll()

    self.assertEqual(len(self.master.data.updates.changesAdded), 1)
    c = self.master.data.updates.changesAdded[0]
    expected_change = dict(TestGerritChangeSource.expected_change)
    expected_change['properties'] = dict(expected_change['properties'])
    expected_change['properties']['event.source'] = 'GerritEventLogPoller'
    # 'files' is order-insensitive, so it is checked separately below
    for k, v in c.items():
        if k == 'files':
            continue
        self.assertEqual(expected_change[k], v)
    self.master.db.state.assertState(
        self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP)

    self.assertEqual(set(c['files']), {'/COMMIT_MSG', 'file1'})

    # do a second poll, it should ask for the next events
    self._http.expect(
        method='get',
        ep='/plugins/events-log/events/',
        params={'t1': self.EVENT_FORMATTED},
        content_json=dict(
            type="patchset-created",
            change=dict(
                branch="br",
                project="pr",
                number="4321",
                owner=dict(name="Dustin", email="*****@*****.**"),
                url="http://buildbot.net",
                subject="fix 1234"),
            eventCreatedOn=self.EVENT_TIMESTAMP + 1,
            patchSet=dict(revision="abcdef", number="12")))
    self._http.expect(
        method='get',
        ep='/changes/4321/revisions/12/files/',
        content=self.change_revision_resp,
    )
    yield self.changesource.poll()
    # the stored timestamp advances to the newest event seen
    self.master.db.state.assertState(
        self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP + 1)
def test_getObjectId_existing(self):
    """getObjectId returns the id of a matching pre-existing row."""
    yield self.insertTestData(
        [fakedb.Object(id=19, name='someobj', class_name='someclass')])
    object_id = yield self.db.state.getObjectId('someobj', 'someclass')
    self.assertEqual(object_id, 19)