def test_fetch_chrome(self):
  """Serves a fake chrome zip through a mocked requests.get and verifies
  fetch_chrome downloads and unpacks it into the cache directory.
  """
  fake_version = '43.0.2357.134'
  with temporary_directory() as tmp:
    cache_dir = os.path.join(tmp, 'cache_dir')
    # Build a zip laid out like the real archive: chrome-precise64/chrome.
    fake_zip = os.path.join(tmp, 'fake.zip')
    with zipfile.ZipFile(fake_zip, 'w') as zf:
      fake_chrome = os.path.join(tmp, 'fake_chrome')
      with open(fake_chrome, 'w') as f:
        f.write('foobar')
      zf.write(fake_chrome, 'chrome-precise64/chrome')
    with open(fake_zip, 'rb') as f:
      with mock.patch.object(
          requests, 'get', return_value=FakeRequest(f.read())) as mock_get:
        chrome_path, version = chrome.fetch_chrome(
            cache_dir, fake_version, 'linux2')
    mock_get.assert_called_once_with(
        'https://storage.googleapis.com/chrome-unsigned/desktop-W15K3Y/'
        '%s/precise64/chrome-precise64.zip' % fake_version)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(
        chrome_path,
        os.path.join(cache_dir, 'chrome-linux-%s' % fake_version,
                     'chrome-precise64', 'chrome'))
    self.assertEqual(version, fake_version)
    self.assertTrue(os.path.isdir(cache_dir))
    self.assertTrue(os.path.isfile(chrome_path))
    with open(chrome_path, 'r') as f:
      self.assertEqual(f.read(), 'foobar')
def testInvalidState(self):
  """A non-numeric transition_time_utc ('toast') makes write_master_state
  raise InvalidDesiredMasterState.
  """
  transitions = [
      {'desired_state': 'running', 'transition_time_utc': 'toast'},
      {'desired_state': 'running',
       'transition_time_utc': UNIX_TIMESTAMP_4000},
      {'desired_state': 'offline',
       'transition_time_utc': UNIX_TIMESTAMP_6000},
      {'desired_state': 'offline',
       'transition_time_utc': UNIX_TIMESTAMP_7000},
  ]
  state = {
      'master_states': {'master.chromium.fyi': transitions},
      'master_params': {},
      'version': desired_state_parser.VERSION,
  }
  with self.assertRaises(desired_state_parser.InvalidDesiredMasterState):
    with temporary_directory() as dirname:
      filename = os.path.join(dirname, 'desired_state.json')
      desired_state_parser.write_master_state(state, filename)
def luci_ctx(self, body):
  """Resets the luci_ctx module, writes |body| as a LUCI_CONTEXT JSON file
  in a temporary directory, and yields the environ mapping pointing at it.
  """
  luci_ctx._reset()
  with utils.temporary_directory() as tempdir:
    ctx_path = os.path.join(tempdir, 'ctx.json')
    with open(ctx_path, 'w') as fh:
      json.dump(body, fh)
    yield {'LUCI_CONTEXT': ctx_path}
def test_run2(self):
  """run() issues one subprocess call per project from get_project_list."""
  with mock.patch.object(
      git_cache_updater, 'get_project_list', return_value=['a', 'b']):
    with utils.temporary_directory() as tempdir:
      with mock.patch.object(subprocess, 'call') as sub_m:
        git_cache_updater.run('aproj', tempdir)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(sub_m.call_count, 2)
def test_get_cookies(self):
  """get_cookies() parses the configured gitcookies file into cookie
  objects with the expected attribute values.
  """
  with utils.temporary_directory() as tempdir:
    fake_cookie_file = os.path.join(tempdir, 'gitcookie')
    with open(fake_cookie_file, 'wb') as f:
      f.write(fake_cookie)
    # git config lookup (via check_output) points at our fake file.
    with mock.patch.object(
        subprocess, 'check_output', return_value=fake_cookie_file):
      cookies = list(git_cache_updater.get_cookies())
  # assertEquals is a deprecated alias (removed in Python 3.12);
  # use assertEqual throughout.
  self.assertEqual(1, len(cookies))
  cookie = cookies[0]
  self.assertEqual('o', cookie.name)
  self.assertEqual('foo=barbaz', cookie.value)
  self.assertIsNone(cookie.port)
  self.assertEqual(False, cookie.port_specified)
  self.assertEqual('.googlesource.com', cookie.domain)
  self.assertEqual(True, cookie.domain_specified)
  self.assertEqual(True, cookie.domain_initial_dot)
  self.assertEqual('/', cookie.path)
  self.assertEqual(False, cookie.path_specified)
  self.assertEqual(True, cookie.secure)
  self.assertEqual(2147483647, cookie.expires)
  self.assertEqual(False, cookie.discard)
  self.assertIsNone(cookie.comment)
  self.assertIsNone(cookie.comment_url)
  self.assertEqual(False, cookie.rfc2109)
def test_run(self):
  """run() creates a missing workdir and issues one subprocess call per
  project returned by get_project_list.
  """
  with mock.patch.object(
      git_cache_updater, 'get_project_list', return_value=['a', 'b']):
    with utils.temporary_directory() as tempdir:
      workdir = os.path.join(tempdir, 'workdir')
      with mock.patch.object(subprocess, 'call') as sub_m:
        git_cache_updater.run('aproj', workdir)
        self.assertTrue(os.path.isdir(workdir))
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(sub_m.call_count, 2)
def test_get_no_cookies(self):
  """get_cookies() produces no cookies when git reports no cookie file."""
  with utils.temporary_directory() as tempdir:
    fake_cookie_file = os.path.join(tempdir, 'gitcookie')
    with open(fake_cookie_file, 'wb') as f:
      f.write(fake_cookie)
    # check_output returning None simulates git having no
    # http.cookiefile configured.
    with mock.patch.object(subprocess, 'check_output', return_value=None):
      # The original test *returned* the generator, asserting nothing
      # (unittest also deprecates non-None test returns).  Assert the
      # expected empty result instead.
      self.assertEqual([], list(git_cache_updater.get_cookies()))
def test_garbage_collect(self):
  """garbage_collect() removes only entries older than roughly two
  months, and is a no-op on an already-clean tree.
  """
  two_months_secs = 60 * 60 * 24 * 30 * 2
  with temporary_directory() as tmp:
    victim = os.path.join(tmp, 'somedir')
    os.mkdir(victim)
    # A freshly-created directory must survive a collection pass.
    fetch_browser.garbage_collect(tmp)
    self.assertTrue(os.path.isdir(victim))
    # Backdate atime/mtime by two months; now it should be removed.
    stale = time.time() - two_months_secs
    os.utime(victim, (stale, stale))
    fetch_browser.garbage_collect(tmp)
    self.assertFalse(os.path.isdir(victim))
    # Collecting an empty tree must not raise.
    fetch_browser.garbage_collect(tmp)
def testNothing(self):
  """Writing an empty state yields the default PREV_VERSION document."""
  with temporary_directory() as dirname:
    filename = os.path.join(dirname, 'desired_state.json')
    desired_state_parser.write_master_state({}, filename)
    with open(filename) as f:
      written = json.load(f)
    expected = {
        'master_states': {},
        'master_params': {},
        'version': desired_state_parser.PREV_VERSION,
    }
    self.assertEqual(written, expected)
def testNothing(self):
  """Writing an empty state yields the default VERSION document."""
  with temporary_directory() as dirname:
    filename = os.path.join(dirname, 'desired_state.json')
    desired_state_parser.write_master_state({}, filename)
    with open(filename) as f:
      written = json.load(f)
    expected = {
        'master_states': {},
        'master_params': {},
        'version': desired_state_parser.VERSION,
    }
    self.assertEqual(written, expected)
def testNothingInPast(self):
  """write_master_state rejects a master whose transitions all lie in
  the future (nothing already in the past).
  """
  transitions = [
      {'desired_state': 'offline',
       'transition_time_utc': UNIX_TIMESTAMP_6000},
      {'desired_state': 'offline',
       'transition_time_utc': UNIX_TIMESTAMP_7000},
  ]
  with self.assertRaises(desired_state_parser.InvalidDesiredMasterState):
    with temporary_directory() as dirname:
      filename = os.path.join(dirname, 'desired_state.json')
      desired_state_parser.write_master_state(
          {
              'master_states': {'master.chromium.fyi': transitions},
              'master_params': {},
              'version': desired_state_parser.PREV_VERSION,
          }, filename)
def test_get_display(self, choice, exists, popen):
  """get_display() yields ':102' when display 102 is chosen and its
  lockfile names a process whose poll() reports it still running.
  """
  choice.return_value = 102
  exists.side_effect = [False, True]
  fake_popen = mock.MagicMock()
  fake_popen.poll.return_value = None  # process appears alive
  fake_popen.pid = 1234
  popen.return_value = fake_popen
  with utils.temporary_directory() as tempdir:
    # Named pidfile_base (not 'tempfile') so the stdlib tempfile module
    # is not shadowed.
    pidfile_base = os.path.join(tempdir, 'pidfile')
    real_pidfile = '%s102' % pidfile_base
    with open(real_pidfile, 'w') as f:
      f.write('1234')
    testjs.LOCK_LOCATION = '%s%%d' % pidfile_base
    with testjs.get_display() as display:
      # assertEquals is a deprecated alias (removed in Python 3.12).
      self.assertEqual(display, ':102')
def testPruneOldEntriesFilter(self):
  """prune_only_masters restricts pruning to the masters it names."""
  def entry(state, when):
    # One transition record in the desired-state schema.
    return {'desired_state': state, 'transition_time_utc': when}

  master_state = [
      entry('running', UNIX_TIMESTAMP_0500),
      entry('running', UNIX_TIMESTAMP_1000),
      entry('running', UNIX_TIMESTAMP_4000),
      entry('offline', UNIX_TIMESTAMP_6000),
      entry('offline', UNIX_TIMESTAMP_7000),
  ]
  with temporary_directory() as dirname:
    filename = os.path.join(dirname, 'desired_state.json')
    desired_state_parser.write_master_state(
        {
            'master_states': {
                'master.leave.as.is': master_state[:],
                'master.pruned': master_state[:],
            },
            'master_params': {},
            'version': desired_state_parser.VERSION,
        },
        filename,
        prune_only_masters=set(['master.pruned']))
    with open(filename) as f:
      parsed_data = json.load(f)
    # The unlisted master is untouched; only 'master.pruned' loses its
    # oldest entry.
    self.assertEqual(
        parsed_data['master_states']['master.leave.as.is'], master_state)
    self.assertEqual(
        parsed_data['master_states']['master.pruned'], master_state[1:])
def testPruneOldEntries(self):
  """Writing then re-reading the state drops the oldest stale entry
  (UNIX_TIMESTAMP_0500) while keeping the rest, PREV_VERSION schema.
  """
  with temporary_directory() as dirname:
    filename = os.path.join(dirname, 'desired_state.json')
    desired_state_parser.write_master_state({
        'master_states': {
            'master.chromium.fyi': [
                {'desired_state': 'running',
                 'transition_time_utc': UNIX_TIMESTAMP_0500},
                {'desired_state': 'running',
                 'transition_time_utc': UNIX_TIMESTAMP_1000},
                {'desired_state': 'running',
                 'transition_time_utc': UNIX_TIMESTAMP_4000},
                {'desired_state': 'offline',
                 'transition_time_utc': UNIX_TIMESTAMP_6000},
                {'desired_state': 'offline',
                 'transition_time_utc': UNIX_TIMESTAMP_7000},
            ]},
        'master_params': {},
        'version': desired_state_parser.PREV_VERSION,
    }, filename)
    with open(filename) as f:
      parsed_data = json.load(f)
    # Bug fix: the original passed 'filename' as assertEqual's msg
    # argument — an accidental copy of the write_master_state call shape.
    self.assertEqual(parsed_data, {
        'master_states': {
            'master.chromium.fyi': [
                {'desired_state': 'running',
                 'transition_time_utc': UNIX_TIMESTAMP_1000},
                {'desired_state': 'running',
                 'transition_time_utc': UNIX_TIMESTAMP_4000},
                {'desired_state': 'offline',
                 'transition_time_utc': UNIX_TIMESTAMP_6000},
                {'desired_state': 'offline',
                 'transition_time_utc': UNIX_TIMESTAMP_7000},
            ]},
        'master_params': {},
        'version': desired_state_parser.PREV_VERSION,
    })
def testPruneOldEntries(self):
  """Writing then re-reading the state drops the oldest stale entry
  (UNIX_TIMESTAMP_0500) while keeping the rest, VERSION schema.
  """
  with temporary_directory() as dirname:
    filename = os.path.join(dirname, 'desired_state.json')
    desired_state_parser.write_master_state(
        {
            'master_states': {
                'master.chromium.fyi': [
                    {'desired_state': 'running',
                     'transition_time_utc': UNIX_TIMESTAMP_0500},
                    {'desired_state': 'running',
                     'transition_time_utc': UNIX_TIMESTAMP_1000},
                    {'desired_state': 'running',
                     'transition_time_utc': UNIX_TIMESTAMP_4000},
                    {'desired_state': 'offline',
                     'transition_time_utc': UNIX_TIMESTAMP_6000},
                    {'desired_state': 'offline',
                     'transition_time_utc': UNIX_TIMESTAMP_7000},
                ]
            },
            'master_params': {},
            'version': desired_state_parser.VERSION,
        }, filename)
    with open(filename) as f:
      parsed_data = json.load(f)
    # Bug fix: the original passed 'filename' as assertEqual's msg
    # argument — an accidental copy of the write_master_state call shape.
    self.assertEqual(
        parsed_data,
        {
            'master_states': {
                'master.chromium.fyi': [
                    {'desired_state': 'running',
                     'transition_time_utc': UNIX_TIMESTAMP_1000},
                    {'desired_state': 'running',
                     'transition_time_utc': UNIX_TIMESTAMP_4000},
                    {'desired_state': 'offline',
                     'transition_time_utc': UNIX_TIMESTAMP_6000},
                    {'desired_state': 'offline',
                     'transition_time_utc': UNIX_TIMESTAMP_7000},
                ]
            },
            'master_params': {},
            'version': desired_state_parser.VERSION,
        })
def read(self, ctx_body, section):
  """Writes |ctx_body| verbatim to a temp LUCI_CONTEXT file and returns
  luci_ctx.read(section) against it.
  """
  with utils.temporary_directory() as tempdir:
    ctx_path = os.path.join(tempdir, 'ctx.json')
    with open(ctx_path, 'w') as fh:
      fh.write(ctx_body)
    return luci_ctx.read(section, environ={'LUCI_CONTEXT': ctx_path})