def testStructured(self):
  """Drives a stream and a standalone step manually (no context manager).

  Verifies that cursor moves and step transitions are emitted strictly in
  call order, with no SEED_STEP lines (those come only from stream.step()).
  """
  stream = annotator.StructuredAnnotationStream(stream=self.buf,
                                                flush_before=None)
  step = annotator.StructuredAnnotationStep(annotation_stream=stream,
                                            stream=self.buf,
                                            flush_before=None)
  stream.step_cursor('one')
  step.step_started()
  stream.step_cursor('two')
  step.step_started()
  stream.step_cursor('one')
  step.step_closed()
  stream.step_cursor('two')
  step.step_closed()
  result = [
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_CURSOR two@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_CLOSED@@@',
      '@@@STEP_CURSOR two@@@',
      '@@@STEP_CLOSED@@@',
  ]
  # assertEqual: assertEquals is a deprecated alias; matches the other
  # tests in this file that already use assertEqual.
  self.assertEqual(result, self._getLines())
def testSeedStepSkip(self):
  """Runs seeded steps out of order ('one', 'three', then 'two').

  Expects all seeds to be emitted up front, and a fresh SEED_STEP for 'two'
  when it is revisited after being skipped over.
  """
  steps = ['one', 'two', 'three']
  stream = annotator.StructuredAnnotationStream(seed_steps=steps,
                                                stream=self.buf)
  with stream.step('one'):
    pass
  with stream.step('three'):
    pass
  with stream.step('two'):
    pass
  result = [
      '@@@SEED_STEP one@@@',
      '@@@SEED_STEP two@@@',
      '@@@SEED_STEP three@@@',
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_CLOSED@@@',
      '@@@STEP_CURSOR three@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_CURSOR three@@@',
      '@@@STEP_CLOSED@@@',
      '@@@SEED_STEP two@@@',
      '@@@STEP_CURSOR two@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_CURSOR two@@@',
      '@@@STEP_CLOSED@@@',
  ]
  # assertEqual: assertEquals is a deprecated alias; matches the other
  # tests in this file that already use assertEqual.
  self.assertEqual(result, self._getLines())
def test_bootstrap_annotations(self):
  """Checks the annotations BootstrapState.annotate() writes to a stream.

  Expects one 'LogDog Bootstrap' step that publishes the logdog project,
  prefix, and annotation URL as build properties.
  """
  sio = StringIO.StringIO()
  stream = annotator.StructuredAnnotationStream(stream=sio)
  cfg = ldbs.Config(
      params=self.base,  # assumes self.base supplies 'alpha' as the project
      plat=None,
      host='example.com',
      prefix='foo/bar',
      tags=None,
      logdog_only=False,
      service_account_path=None,
  )
  bs = ldbs.BootstrapState(cfg, [], '/foo/bar')
  bs.annotate(stream)
  # Annotations are newline-delimited; drop blank lines before comparing.
  lines = [l for l in sio.getvalue().splitlines() if l]
  self.assertEqual(lines, [
      '@@@SEED_STEP LogDog Bootstrap@@@',
      '@@@STEP_CURSOR LogDog Bootstrap@@@',
      '@@@STEP_STARTED@@@',
      '@@@SET_BUILD_PROPERTY@logdog_project@"alpha"@@@',
      '@@@SET_BUILD_PROPERTY@logdog_prefix@"foo/bar"@@@',
      ('@@@SET_BUILD_PROPERTY@logdog_annotation_url@'
       '"logdog://example.com/alpha/foo/bar/+/recipes/annotations"@@@'),
      '@@@STEP_CURSOR LogDog Bootstrap@@@',
      '@@@STEP_CLOSED@@@',
  ])
def main():
  """Runs the chromium and blink gatekeepers as two annotated steps.

  Returns:
    0 on success; 2 as soon as either gatekeeper invocation fails.
  """
  stream = annotator.StructuredAnnotationStream(
      seed_steps=['gatekeeper_ng', 'blink_gatekeeper'])

  with stream.step('gatekeeper_ng') as step:
    chromium_masters = [
        'http://build.chromium.org/p/chromium',
        'http://build.chromium.org/p/chromium.lkgr',
        'http://build.chromium.org/p/chromium.perf',
        'http://build.chromium.org/p/client.libvpx',
    ]
    if run_gatekeeper(chromium_masters) != 0:
      step.step_failure()
      return 2

  with stream.step('blink_gatekeeper') as step:
    status_url = 'https://blink-status.appspot.com'
    blink_masters = ['http://build.chromium.org/p/chromium.webkit']
    blink_extra_args = [
        '--build-db=blink_build_db.json',
        '-s',
        '--status-url=%s' % status_url,
        '--password-file=.blink_status_password',
    ]
    if run_gatekeeper(blink_masters, extra_args=blink_extra_args) != 0:
      step.step_failure()
      return 2

  return 0
def main(argv=None):
  """Parses arguments, seeds the 'setup_build' step, and runs the recipe.

  Returns:
    The status code from the recipe run.
  """
  options, _ = get_args(argv)
  annotation_stream = annotator.StructuredAnnotationStream(
      seed_steps=['setup_build'])
  result = run_steps(annotation_stream, options.build_properties,
                     options.factory_properties)
  return result.status_code
def RunRecipe(test_data):
  """Runs a recipe under expect_tests, discarding annotation output.

  Annotations go to /dev/null; the result is the list of steps the recipe
  ran, wrapped for the expect_tests framework.
  """
  stream = annotator.StructuredAnnotationStream(stream=open(os.devnull, 'w'))
  # Reset cached tostring functions so each test run is hermetic.
  recipe_config_types.ResetTostringFns()
  # TODO(iannucci): Only pass test_data once.
  result = annotated_run.run_steps(stream, test_data.properties,
                                   test_data.properties, test_data)
  return expect_tests.Result([s.step for s in result.steps_ran.itervalues()])
def testDupLogs(self):
  """Writing the same named log twice in one step must raise ValueError."""
  stream = annotator.StructuredAnnotationStream(stream=self.buf)
  with stream.step('one') as step:
    log_lines = ['one', 'two']
    step.write_log_lines('mylog', log_lines)
    # Second write to an already-finalized log name is rejected.
    self.assertRaises(ValueError, step.write_log_lines, 'mylog', log_lines)
def update_scripts():
  """Syncs the slave's build scripts via 'gclient sync', once per chain.

  Returns:
    True if a sync was performed (caller should re-exec with fresh scripts);
    False if the RUN_SLAVE_UPDATED_SCRIPTS flag shows this already happened.
  """
  if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
    # A previous invocation already synced; consume the flag and stop.
    os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
    return False

  stream = annotator.StructuredAnnotationStream()
  with stream.step('update_scripts') as s:
    build_root = os.path.join(SCRIPT_PATH, '..', '..')
    gclient_name = 'gclient'
    if sys.platform.startswith('win'):
      gclient_name += '.bat'
    gclient_path = os.path.join(build_root, '..', 'depot_tools', gclient_name)
    gclient_cmd = [gclient_path, 'sync', '--force', '--verbose']
    cmd_dict = {
        'name': 'update_scripts',
        'cmd': gclient_cmd,
        'cwd': build_root,
    }
    annotator.print_step(cmd_dict, os.environ, stream)
    if subprocess.call(gclient_cmd, cwd=build_root) != 0:
      # A failed sync is only a warning: continue with the old scripts.
      s.step_text('gclient sync failed!')
      s.step_warnings()
    os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'

  # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real
  # annotated_run.
  os.environ['PYTHONIOENCODING'] = 'UTF-8'

  return True
def testNoNesting(self):
  """Opening a step while another step is still active must raise."""
  stream = annotator.StructuredAnnotationStream(stream=self.buf)

  def open_nested_steps():
    with stream.step('one'):
      with stream.step('two'):
        pass

  self.assertRaises(Exception, open_nested_steps)
def testStepAnnotationsWrongParams(self):
  """Step helpers called with wrong arity raise TypeError.

  The regexps pin the interpreter's argument-count wording, so this test
  is coupled to CPython's TypeError message format.
  """
  stream = annotator.StructuredAnnotationStream(stream=self.buf)
  with stream.step('one') as s:
    # step_warnings() takes no extra arguments.
    with self.assertRaisesRegexp(TypeError, r'1 argument \(2 given\)'):
      s.step_warnings('bar')
    # step_summary_text() takes exactly one extra argument.
    with self.assertRaisesRegexp(TypeError, r'2 arguments \(3 given\)'):
      s.step_summary_text('hello!', 'bar')
    # step_log_line() requires two extra arguments.
    with self.assertRaisesRegexp(TypeError, r'3 arguments \(1 given\)'):
      s.step_log_line()
def testException(self):
  """An exception inside a step is logged to an 'exception' step log and
  still propagates out of the context manager."""
  stream = annotator.StructuredAnnotationStream(stream=self.buf)

  def raise_inside_step():
    with stream.step('one'):
      raise Exception('oh no!')

  self.assertRaises(Exception, raise_inside_step)
  prefix = '@@@STEP_LOG_LINE@exception'
  self.assertTrue(any(l.startswith(prefix) for l in self._getLines()))
def shell_main(argv):
  """Outer entry point: self-update then run main() under an annotated step.

  If update_scripts() synced new scripts, re-exec this same command line so
  the updated remote_run.py is used; otherwise run main() inside a
  'remote_run_result' step.
  """
  logging.basicConfig(
      level=(logging.DEBUG if '--verbose' in argv else logging.INFO))

  if update_scripts.update_scripts():
    # Re-execute with the updated remote_run.py.
    return _call([sys.executable] + argv)

  stream = annotator.StructuredAnnotationStream()
  with stream.step('remote_run_result'):
    return main(argv)
def testProtectedStartStop(self):
  """step_started/step_closed belong to the context manager; calling them
  directly on a managed step must raise AttributeError."""
  stream = annotator.StructuredAnnotationStream(stream=self.buf)

  def call_step_started():
    with stream.step('one') as s:
      s.step_started()

  self.assertRaises(AttributeError, call_step_started)

  def call_step_closed():
    with stream.step('two') as s:
      s.step_closed()

  self.assertRaises(AttributeError, call_step_closed)
def testStepAnnotations(self):
  """Exercises every step annotation helper once and pins the exact
  annotation text each one emits, in order."""
  stream = annotator.StructuredAnnotationStream(stream=self.buf)
  with stream.step('one') as s:
    s.step_warnings()
    s.step_failure()
    s.step_exception()
    s.step_clear()
    s.step_summary_clear()
    s.step_text('hello')
    s.step_summary_text('hello!')
    s.step_log_line('mylog', 'test')
    s.step_log_end('mylog')
    s.step_log_line('myperflog', 'perf data')
    s.step_log_end_perf('myperflog', 'dashboardname')
    s.step_link('cool_link', 'https://cool.example.com/beano_gnarly')
    s.write_log_lines('full_log', ['line one', 'line two'])
    s.write_log_lines('full_perf_log', ['perf line one', 'perf line two'],
                      perf='full_perf')
  result = [
      '@@@SEED_STEP one@@@',
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_WARNINGS@@@',
      '@@@STEP_FAILURE@@@',
      '@@@STEP_EXCEPTION@@@',
      '@@@STEP_CLEAR@@@',
      '@@@STEP_SUMMARY_CLEAR@@@',
      '@@@STEP_TEXT@hello@@@',
      '@@@STEP_SUMMARY_TEXT@hello!@@@',
      '@@@STEP_LOG_LINE@mylog@test@@@',
      '@@@STEP_LOG_END@mylog@@@',
      '@@@STEP_LOG_LINE@myperflog@perf data@@@',
      '@@@STEP_LOG_END_PERF@myperflog@dashboardname@@@',
      '@@@STEP_LINK@cool_link@https://cool.example.com/beano_gnarly@@@',
      '@@@STEP_LOG_LINE@full_log@line one@@@',
      '@@@STEP_LOG_LINE@full_log@line two@@@',
      '@@@STEP_LOG_END@full_log@@@',
      '@@@STEP_LOG_LINE@full_perf_log@perf line one@@@',
      '@@@STEP_LOG_LINE@full_perf_log@perf line two@@@',
      '@@@STEP_LOG_END_PERF@full_perf_log@full_perf@@@',
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_CLOSED@@@',
  ]
  # assertEqual: assertEquals is a deprecated alias; matches the other
  # tests in this file that already use assertEqual.
  self.assertEqual(result, self._getLines())
def setUp(self):
  """Builds a mocked annotated_run fixture: patched helpers, temp dirs,
  canned build properties, and the expected recipes.py command line."""
  # ERROR + 1 silences all standard logging levels during the test.
  logging.basicConfig(level=logging.ERROR + 1)
  self._orig_env = os.environ.copy()
  self.maxDiff = None
  self._patchers = []
  # Python 2 'map' is eager, so each patch is applied immediately.
  map(self._patch, (
      mock.patch('slave.annotated_run._run_command'),
      mock.patch('slave.annotated_run._build_dir'),
      mock.patch('slave.annotated_run._builder_dir'),
      mock.patch('slave.annotated_run._get_engine_flags'),
      mock.patch('os.path.exists'),
      ))

  # Mock build and builder directories.
  annotated_run._build_dir.return_value = '/home/user/builder/build'
  annotated_run._builder_dir.return_value = '/home/user/builder'

  self.rt = robust_tempdir.RobustTempdir(prefix='annotated_run_test')
  self.stream_output = StringIO.StringIO()
  self.stream = annotator.StructuredAnnotationStream(
      stream=self.stream_output)
  self.basedir = self.rt.tempdir()
  self.tdir = self.rt.tempdir()
  self.opts = MockOptions(dry_run=False, logdog_disable=False)
  self.properties = {
      'slavename': 'bot42-m1',
      'recipe': 'example/recipe',
      'mastername': 'tryserver.chromium.linux',
      'buildername': 'builder',
  }
  self.rpy_path = os.path.join(env.Build, 'scripts', 'slave', 'recipes.py')
  # Expected recipe engine invocation, compared against in the tests.
  self.recipe_args = [
      sys.executable, '-u', self.rpy_path, '--verbose', 'run',
      '--workdir=/home/user/builder/build',
      '--properties-file=%s' % (self._tp('recipe_properties.json'), ),
      '--output-result-json', self._tp('recipe_result.json'),
      'example/recipe']

  # Use public recipes.py path.
  os.path.exists.return_value = False

  # Swap out testing _ENGINE_FLAGS.
  annotated_run._get_engine_flags.return_value = {}
def UpdateScripts():
  """Runs 'gclient sync' to update the slave scripts, once per chain.

  Returns:
    True if a sync was attempted (caller should re-exec); False if the
    RUN_SLAVE_UPDATED_SCRIPTS flag shows this already happened.
  """
  if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
    # Already updated on a previous pass; consume the flag and stop.
    os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
    return False

  stream = annotator.StructuredAnnotationStream(
      seed_steps=['update_scripts'])
  with stream.step('update_scripts') as s:
    build_root = os.path.join(SCRIPT_PATH, '..', '..')
    gclient_name = 'gclient'
    if sys.platform.startswith('win'):
      gclient_name += '.bat'
    gclient_path = os.path.join(build_root, '..', 'depot_tools', gclient_name)
    if subprocess.call([gclient_path, 'sync', '--force'],
                       cwd=build_root) != 0:
      # Sync failure is a warning only; proceed with the old scripts.
      s.step_text('gclient sync failed!')
      s.step_warnings()
    os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'

  return True
def shell_main(argv):
  """Outer entry point that self-updates, runs main(), and reports result.

  If update_scripts() synced new scripts, re-exec the same command line.
  Otherwise run main(); any exception is captured and re-raised inside the
  'remote_run_result' step so the build/step turn purple (infra failure).
  """
  logging.basicConfig(
      level=(logging.DEBUG if '--verbose' in argv else logging.INFO))

  if update_scripts.update_scripts():
    # Re-execute with the updated remote_run.py.
    return _call([sys.executable] + argv)

  stream = annotator.StructuredAnnotationStream()
  exc_info = None
  try:
    return main(argv, stream)
  except Exception:
    exc_info = sys.exc_info()

  # Report on the "remote_run" execution. If an exception (infra failure)
  # occurred, raise it so that the build and the step turn purple.
  with stream.step('remote_run_result'):
    if exc_info is not None:
      # Python 2 three-argument raise: preserves the original traceback.
      raise exc_info[0], exc_info[1], exc_info[2]
def testBasicUsage(self):
  """Two sequential context-managed steps: each one seeds, starts, and
  closes in order."""
  stream = annotator.StructuredAnnotationStream(stream=self.buf)
  with stream.step('one') as _:
    pass
  with stream.step('two') as _:
    pass
  result = [
      '@@@SEED_STEP one@@@',
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_CURSOR one@@@',
      '@@@STEP_CLOSED@@@',
      '@@@SEED_STEP two@@@',
      '@@@STEP_CURSOR two@@@',
      '@@@STEP_STARTED@@@',
      '@@@STEP_CURSOR two@@@',
      '@@@STEP_CLOSED@@@',
  ]
  # assertEqual: assertEquals is a deprecated alias; matches the other
  # tests in this file that already use assertEqual.
  self.assertEqual(result, self._getLines())
def update_scripts():
  """Updates the checkout via git fetch + hard reset, once per chain.

  Returns:
    True if an update was attempted (caller should re-exec); False if the
    RUN_SLAVE_UPDATED_SCRIPTS flag shows this already happened.
  """
  if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
    # Already updated on a previous pass; consume the flag and stop.
    os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
    return False

  stream = annotator.StructuredAnnotationStream()
  git_cmd = 'git.bat' if os.name == "nt" else 'git'
  with stream.step('update_scripts') as s:
    fetch_cmd = [git_cmd, 'fetch', '--all']
    reset_cmd = [git_cmd, 'reset', '--hard', 'origin/emu-master-dev']
    if subprocess.call(fetch_cmd) != 0 or subprocess.call(reset_cmd) != 0:
      # Update failure is a warning only; continue with the old checkout.
      s.step_text('git update source failed!')
      s.step_warnings()
    # NOTE(review): this text is emitted even when the update failed above.
    s.step_text('git pull')
    os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'

  # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real
  # annotated_run.
  os.environ['PYTHONIOENCODING'] = 'UTF-8'

  return True
def main():
  """Runs gatekeeper_ng.py against the chromium master as one annotated step.

  Returns:
    0 on success, 2 if gatekeeper_ng exits non-zero.
  """
  master_urls = ['http://build.chromium.org/p/chromium']
  json_path = os.path.join(SLAVE_DIR, 'gatekeeper.json')
  script = os.path.join(SLAVE_DIR, 'gatekeeper_ng.py')
  cmd = [sys.executable, script]
  cmd.extend(['-v', '--no-email-app', '--json=%s' % json_path])
  cmd.extend(master_urls)

  stream = annotator.StructuredAnnotationStream(seed_steps=['gatekeeper_ng'])
  with stream.step('gatekeeper_ng') as step:
    # Propagate our module search path to the child process.
    run_env = {'PYTHONPATH': os.pathsep.join(sys.path)}
    if chromium_utils.RunCommand(cmd, env=run_env) != 0:
      step.step_failure()
      return 2
  return 0
def setUp(self):
  """Builds a fully mocked remote_run fixture: a replaced environment,
  patched module functions, temp dirs, canned options, and the expected
  recipes.py / kitchen command lines the tests compare against."""
  # ERROR + 1 silences all standard logging levels during the test.
  logging.basicConfig(level=logging.ERROR + 1)
  # Snapshot the real environment, then replace it with a fixed one so
  # tests are hermetic (restored elsewhere via _orig_env).
  self._orig_env = os.environ.copy()
  os.environ = {'FOO': 'BAR', 'PYTHONPATH': '/pants'}
  self.maxDiff = None
  # Python 2 'map' is eager, so every patcher starts immediately.
  map(lambda x: x.start(), (
      mock.patch('slave.remote_run._call'),
      mock.patch('slave.remote_run._get_is_canary'),
      mock.patch('slave.remote_run._get_is_kitchen'),
      mock.patch(
          'slave.cipd_bootstrap_v2.high_level_ensure_cipd_client'),
      mock.patch('slave.monitoring_utils.write_build_monitoring_event'),
      mock.patch('os.path.exists'),
      mock.patch('common.chromium_utils.RemoveDirectory'),
      mock.patch('common.chromium_utils.MoveFile'),
      mock.patch('common.chromium_utils.GetActiveSubdir'),
      ))
  self.addCleanup(mock.patch.stopall)

  self.rt = robust_tempdir.RobustTempdir(prefix='remote_run_test')
  self.addCleanup(self.rt.close)

  self.stream_output = StringIO.StringIO()
  self.stream = annotator.StructuredAnnotationStream(
      stream=self.stream_output)

  self.basedir = self.rt.tempdir()
  self.buildbot_build_dir = self.rt.tempdir()
  self.tempdir = self.rt.tempdir()
  self.build_data_dir = self.rt.tempdir()
  self.opts = MockOptions(
      dry_run=False,
      logdog_disable=False,
      factory_properties={},
      build_properties={
          'slavename': 'bot42-m1',
          'mastername': 'tryserver.chromium.linux',
          'buildername': 'builder',
      },
      kitchen=None,
      repository='https://example.com/repo.git',
      revision=None,
      use_gitiles=True,
      recipe='example/recipe',
      logdog_debug_out_file=None,
      canary=None,
  )
  self.rpy_path = os.path.join(env.Build, 'scripts', 'slave', 'recipes.py')

  # Expected "recipes.py remote" invocation.
  self.recipe_remote_args = [
      sys.executable, self._bp('.remote_run_cipd', 'recipes.py'),
      '--operational-args-path', self._tp('engine_flags.json'),
      '--verbose', 'remote',
      '--repository', self.opts.repository,
      '--workdir', self._tp('rw'),
      '--use-gitiles',
  ]

  # Expected "kitchen cook" invocation.
  self.kitchen_args = [
      self._bp('.remote_run_cipd', 'kitchen'),
      '-log-level', 'info',
      'cook',
      '-mode', 'buildbot',
      '-output-result-json', self._tp('kitchen_result.json'),
      '-properties-file', self._tp('remote_run_properties.json'),
      '-recipe', self.opts.recipe,
      '-repository', self.opts.repository,
      '-cache-dir', os.path.join(remote_run.BUILDBOT_ROOT, 'c'),
      '-temp-dir', self._tp('t'),
      '-checkout-dir', self._tp('rw'),
      '-workdir', self._tp('w'),
      '-python-path', os.path.join(remote_run.BUILD_ROOT, 'scripts'),
      '-python-path', os.path.join(remote_run.BUILD_ROOT, 'site_config'),
  ]

  # Expected recipe engine "run" arguments.
  self.recipe_args = [
      '--operational-args-path', self._tp('engine_flags.json'),
      '--verbose', 'run',
      '--properties-file', self._tp('remote_run_properties.json'),
      '--workdir', self._tp('w'),
      '--output-result-json', self._tp('recipe_result.json'),
      self.opts.recipe,
  ]

  # No active subdir by default.
  chromium_utils.GetActiveSubdir.return_value = None

  # Easily-configurable CIPD pins.
  self.is_canary = False
  self.is_kitchen = False
  remote_run._get_is_canary.side_effect = lambda *_a: self.is_canary
  remote_run._get_is_kitchen.side_effect = lambda *_a: self.is_kitchen

  # Written via '_write_recipe_result'.
  self.recipe_result = None

  # Written via '_write_kitchen_result'.
  self.kitchen_result = None
def update_scripts():
  """Runs 'gclient sync' on the build checkout and reports SCM data.

  Records pre-sync HEADs of IMPORTANT_REPOS so revision links can be added
  after the sync, publishes the gclient JSON output as a step log, and sets
  build_scm/build_revision properties when no per-repo hashes are available.

  Returns:
    True if a sync was attempted (caller should re-exec); False if the
    RUN_SLAVE_UPDATED_SCRIPTS flag shows this already happened.
  """
  if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
    # Already updated on a previous pass; consume the flag and stop.
    os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
    return False

  # For testing, we don't actually want to run "gclient sync" against its native
  # root. However, we don't want to mock/disable it either, since we want to
  # exercise this code path.
  build_dir = os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS_TEST_BUILD_DIR',
                             env.Build)

  stream = annotator.StructuredAnnotationStream()
  with stream.step('update_scripts') as s:
    if ensure_managed(os.path.join(build_dir, os.pardir, '.gclient')):
      s.step_text('Top-level gclient solution was unmanaged, '
                  'changed to managed')

    # Get our "gclient" file. We will use the "gclient" relative to this
    # script's checkout, regardless of "build_dir".
    gclient_name = 'gclient'
    if sys.platform.startswith('win'):
      gclient_name += '.bat'
    gclient_path = os.path.join(env.Build, os.pardir, 'depot_tools',
                                gclient_name)
    gclient_cmd = [
        gclient_path, 'sync',
        # these two need to both be here to actually get
        # `git checkout --force` to happen.
        '--force', '--delete_unversioned_trees',
        '--break_repo_locks', '--verbose', '--jobs=2']
    try:
      fd, output_json = tempfile.mkstemp()
      os.close(fd)
      gclient_cmd += ['--output-json', output_json]
    except Exception:
      # Super paranoia try block.
      output_json = None

    # Record pre-sync HEADs so we can link old->new revisions afterwards.
    repos_to_old_hashes = {}
    try:
      for repo in sorted(IMPORTANT_REPOS):
        path = os.path.join(env.Build, os.pardir, repo)
        rv, out = get_repo_head(path)
        if rv:
          # NOTE(review): 'break' leaves the dict partially populated when
          # one repo fails; later code falls back to build_checkout data.
          break
        proj_sha = out.strip()
        repos_to_old_hashes[repo] = proj_sha
    except Exception:
      # Best effort: revision links are optional.
      traceback.print_exc()

    cmd_dict = {
        'name': 'update_scripts',
        'cmd': gclient_cmd,
        'cwd': build_dir,
    }
    annotator.print_step(cmd_dict, os.environ, stream)
    rv, _ = _run_command(gclient_cmd, cwd=build_dir)
    if rv != 0:
      s.step_text('gclient sync failed!')
      s.step_exception()
    elif output_json:
      try:
        with open(output_json, 'r') as f:
          gclient_json = json.load(f)
        # Publish the full gclient output as a step log.
        for line in json.dumps(
            gclient_json, sort_keys=True,
            indent=4, separators=(',', ': ')).splitlines():
          s.step_log_line('gclient_json', line)
        s.step_log_end('gclient_json')

        if repos_to_old_hashes:
          add_revision_links(s, repos_to_old_hashes)
        else:
          # Fall back to reporting the 'build/' solution's SCM data.
          build_checkout = gclient_json['solutions'].get('build/')
          if build_checkout:
            s.step_text('%(scm)s - %(revision)s' % build_checkout)
            s.set_build_property('build_scm',
                                 json.dumps(build_checkout['scm']))
            s.set_build_property(
                'build_revision', json.dumps(build_checkout['revision']))
      except Exception as e:
        s.step_text('Unable to process gclient JSON %s' % repr(e))
        s.step_exception()
      finally:
        try:
          os.remove(output_json)
        except Exception as e:
          LOGGER.warning("LEAKED: %s", output_json, exc_info=True)
    else:
      s.step_text('Unable to get SCM data')
      s.step_exception()

    os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'

  # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real
  # annotated_run.
  os.environ['PYTHONIOENCODING'] = 'UTF-8'

  return True
def get_recipe_properties(workdir, build_properties,
                          use_factory_properties_from_disk):
  """Constructs the recipe's properties from buildbot's properties.

  This retrieves the current factory properties from the master_config
  in the slave's checkout (no factory properties are handed to us from the
  master), and merges in the build properties.

  Using the values from the checkout allows us to do things like change
  the recipe and other factory properties for a builder without needing
  a master restart.

  As the build properties doesn't include the factory properties, we would:
  1. Load factory properties from checkout on the slave.
  2. Override the factory properties with the build properties.
  3. Set the factory-only properties as build properties using annotation so
     that they will show up on the build page.
  """
  if not use_factory_properties_from_disk:
    return build_properties

  stream = annotator.StructuredAnnotationStream()
  with stream.step('setup_properties') as s:
    factory_properties = {}

    mastername = build_properties.get('mastername')
    buildername = build_properties.get('buildername')
    if mastername and buildername:
      # Load factory properties from tip-of-tree checkout on the slave builder.
      factory_properties = get_factory_properties_from_disk(
          workdir, mastername, buildername)

    # Check conflicts between factory properties and build properties.
    conflicting_properties = {}
    for name, value in factory_properties.items():
      # 'name not in build_properties' replaces the deprecated has_key().
      if name not in build_properties or build_properties[name] == value:
        continue
      conflicting_properties[name] = (value, build_properties[name])
    if conflicting_properties:
      s.step_text(
          '<br/>detected %d conflict[s] between factory and build properties'
          % len(conflicting_properties))
      conflicts = [' "%s": factory: "%s", build: "%s"' % (
          name,
          '<unset>' if (fv is None) else fv,
          '<unset>' if (bv is None) else bv)
          for name, (fv, bv) in conflicting_properties.items()]
      LOGGER.warning('Conflicting factory and build properties:\n%s',
                     '\n'.join(conflicts))
      LOGGER.warning("Will use the values from build properties.")

    # Figure out the factory-only properties and set them as build properties
    # so that they will show up on the build page.
    for name, value in factory_properties.items():
      if name not in build_properties:
        s.set_build_property(name, json.dumps(value))

  # Build properties override factory properties.
  properties = factory_properties.copy()
  properties.update(build_properties)

  # Unhack buildbot-hacked blamelist (iannucci).
  if 'blamelist_real' in properties and 'blamelist' in properties:
    properties['blamelist'] = properties['blamelist_real']
    del properties['blamelist_real']

  return properties
def update_scripts():
  """Runs 'gclient sync' on env.Build and reports SCM data, once per chain.

  Publishes gclient's JSON output as a step log and sets build_scm /
  build_revision properties from the 'build/' solution when available.

  Returns:
    True if a sync was attempted (caller should re-exec); False if the
    RUN_SLAVE_UPDATED_SCRIPTS flag shows this already happened.
  """
  if os.environ.get('RUN_SLAVE_UPDATED_SCRIPTS'):
    # Already updated on a previous pass; consume the flag and stop.
    os.environ.pop('RUN_SLAVE_UPDATED_SCRIPTS')
    return False

  stream = annotator.StructuredAnnotationStream()
  with stream.step('update_scripts') as s:
    gclient_name = 'gclient'
    if sys.platform.startswith('win'):
      gclient_name += '.bat'
    gclient_path = os.path.join(env.Build, os.pardir, 'depot_tools',
                                gclient_name)
    gclient_cmd = [gclient_path, 'sync', '--force', '--verbose', '--jobs=2',
                   '--break_repo_locks']
    try:
      fd, output_json = tempfile.mkstemp()
      os.close(fd)
      gclient_cmd += ['--output-json', output_json]
    except Exception:
      # Super paranoia try block.
      output_json = None
    cmd_dict = {
        'name': 'update_scripts',
        'cmd': gclient_cmd,
        'cwd': env.Build,
    }
    annotator.print_step(cmd_dict, os.environ, stream)
    rv, _ = _run_command(gclient_cmd, cwd=env.Build)
    if rv != 0:
      s.step_text('gclient sync failed!')
      s.step_exception()
    elif output_json:
      try:
        with open(output_json, 'r') as f:
          gclient_json = json.load(f)
        # Publish the full gclient output as a step log.
        for line in json.dumps(
            gclient_json, sort_keys=True,
            indent=4, separators=(',', ': ')).splitlines():
          s.step_log_line('gclient_json', line)
        s.step_log_end('gclient_json')
        build_checkout = gclient_json['solutions'].get('build/')
        if build_checkout:
          s.step_text('%(scm)s - %(revision)s' % build_checkout)
          s.set_build_property('build_scm', json.dumps(build_checkout['scm']))
          s.set_build_property('build_revision',
                               json.dumps(build_checkout['revision']))
      except Exception as e:
        s.step_text('Unable to process gclient JSON %s' % repr(e))
        s.step_exception()
      finally:
        try:
          os.remove(output_json)
        except Exception as e:
          LOGGER.warning("LEAKED: %s", output_json, exc_info=True)
    else:
      s.step_text('Unable to get SCM data')
      s.step_exception()

    os.environ['RUN_SLAVE_UPDATED_SCRIPTS'] = '1'

  # After running update scripts, set PYTHONIOENCODING=UTF-8 for the real
  # annotated_run.
  os.environ['PYTHONIOENCODING'] = 'UTF-8'

  return True