def _get_co(self, read_only):
  """Builds the SvnCheckout under test, optionally wrapped read-only.

  Read-only checkouts get no credentials; read/write ones use the
  test's user/password pair.
  """
  if not read_only:
    return checkout.SvnCheckout(
        self.root_dir, self.name, self.usr, self.pwd, self.svn_url)
  anonymous = checkout.SvnCheckout(
      self.root_dir, self.name, None, None, self.svn_url)
  return checkout.ReadOnlyCheckout(anonymous)
def testProcess(self):
  """Runs the shared process test against a fresh SvnCheckout factory."""
  def make_checkout(post_processors):
    # The harness supplies the post-processors; everything else is fixed.
    return checkout.SvnCheckout(
        self.root_dir, self.name, None, None, self.svn_url, post_processors)
  self._test_process(make_checkout)
def _internal_simple(path, project_bases, user, root_dir, rietveld_obj):
  """Generates a PendingManager commit queue for chrome/trunk/tools/build.

  Args:
    path: svn sub-path under chrome/trunk/; its basename names the checkout.
    project_bases: URL patterns handed to ProjectBaseUrlVerifier.
    user: svn user for the checkout; also (escaped) fed to the LGTM verifier.
    root_dir: directory holding the local checkout and credential files.
    rietveld_obj: Rietveld instance the queue polls.

  Returns:
    A configured pending_manager.PendingManager.
  """
  # chromium_copyright.process is applied to each patch before commit.
  local_checkout = checkout.SvnCheckout(
      root_dir,
      os.path.basename(path),
      user,
      None,
      'svn://svn.chromium.org/chrome/trunk/' + path,
      [chromium_copyright.process])
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush(
          'https://chromium-status.appspot.com/cq',
          _chromium_status_pwd(root_dir)))
  # Checks run before the patch is applied.
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(
          _get_chromium_committers(),
          [re.escape(user)]),
  ]
  # Checks run with the patch applied; presubmit gets a 15-minute budget.
  verifiers = [
      presubmit_check.PresubmitCheckVerifier(context_obj, timeout=900),
  ]
  return pending_manager.PendingManager(
      context_obj,
      verifiers_no_patch,
      verifiers)
def main():
  """Fetches a Rietveld patchset and applies it to the local checkout.

  Uses the issue's latest patchset when --patchset is not given, detects the
  local scm flavor, and applies the patch.

  Returns:
    0 on success; exits via parser.error() on bad usage.
  """
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Prints debugging infos')
  parser.add_option(
      '-i', '--issue', type='int', help='Rietveld issue number')
  parser.add_option(
      '-p', '--patchset', type='int',
      help='Rietveld issue\'s patchset number')
  parser.add_option(
      '-r', '--root_dir', default=os.getcwd(),
      help='Root directory to apply the patch')
  parser.add_option(
      '-s', '--server', default='http://codereview.chromium.org',
      help='Rietveld server')
  options, args = parser.parse_args()
  logging.basicConfig(
      format='%(levelname)5s %(module)11s(%(lineno)4d): %(message)s',
      level=[logging.WARNING, logging.INFO, logging.DEBUG][
          min(2, options.verbose)])
  if args:
    parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))
  if not options.issue:
    parser.error('Require --issue')
  obj = rietveld.Rietveld(options.server, None, None)
  if not options.patchset:
    # Default to the most recent patchset on the issue.
    options.patchset = obj.get_issue_properties(
        options.issue, False)['patchsets'][-1]
    # Lazy %-args: the string is only built when INFO logging is enabled.
    logging.info('Using patchset %d', options.patchset)
  # Download the patch.
  patchset = obj.get_patch(options.issue, options.patchset)
  for patch in patchset.patches:
    logging.info(patch)
  scm_type = scm.determine_scm(options.root_dir)
  if scm_type == 'svn':
    scm_obj = checkout.SvnCheckout(options.root_dir, None, None, None, None)
  elif scm_type == 'git':
    scm_obj = checkout.GitCheckoutBase(options.root_dir, None, None)
  elif scm_type is None:
    # No scm detected: fall back to raw file application.
    scm_obj = checkout.RawCheckout(options.root_dir, None)
  else:
    parser.error('Couldn\'t determine the scm')
  # Apply the patch.
  scm_obj.apply_patch(patchset)
  return 0
def testWithRevPropsSupportNotCommitBot(self): # Add the hook that will commit in a way that removes the race condition. hook = os.path.join(self.FAKE_REPOS.svn_repo, 'hooks', 'pre-commit') shutil.copyfile(os.path.join(ROOT_DIR, 'sample_pre_commit_hook'), hook) os.chmod(hook, 0755) co = checkout.SvnCheckout(self.root_dir, self.name, self.FAKE_REPOS.USERS[1][0], self.FAKE_REPOS.USERS[1][1], self.svn_url) root = os.path.join(self.root_dir, self.name) expected = { 'author': self.FAKE_REPOS.USERS[1][0], } self._check_base(co, root, False, expected)
def _gen_nacl(user, root_dir, rietveld_obj, no_try):
  """Generates a PendingManager commit queue for Native Client.

  Args:
    user: svn user for the checkout; looked up in the creds store and
        (escaped) fed to the LGTM verifier.
    root_dir: directory holding the local checkout and credential files.
    rietveld_obj: Rietveld instance the queue polls.
    no_try: when True, do not register try-job verifiers.

  Returns:
    A configured pending_manager.PendingManager.
  """
  svn_creds = creds.Credentials(os.path.join(root_dir, '.svn_pwd'))
  offset = 'trunk/src/native_client'
  local_checkout = checkout.SvnCheckout(
      root_dir,
      'nacl',
      user,
      svn_creds.get(user),
      'svn://svn.chromium.org/native_client/' + offset)
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush('https://nativeclient-status.appspot.com/cq',
                           _chromium_status_pwd(root_dir)))
  # Accept the project both via its dedicated svn hosts and the shared
  # src.chromium.org mirrors.
  host_aliases = SVN_HOST_ALIASES + [
      'http://src.chromium.org', 'https://src.chromium.org']
  svn_bases = [i + '/native_client' for i in host_aliases]
  project_bases = [
      '^%s/%s(|/.*)$' % (re.escape(base), offset) for base in svn_bases
  ]
  git_url = ('http://git.chromium.org/native_client/src/native_client.git@')
  # Also accept the git mirror with a branch suffix after '@'.
  project_bases.append('^%s[a-z\\-_]+$' % re.escape(git_url))
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(_get_nacl_committers(),
                                         [re.escape(user)]),
  ]
  verifiers = [
      # Disable Native Client presubmit check until it works in a stand-alone
      # checkout and doesn't have side-effects.
      #presubmit_check.PresubmitCheckVerifier(context_obj),
  ]
  if not no_try:
    # Grab the list of all the builders here. The commit queue needs to know
    # which builders were triggered. TODO: makes this more automatic.
    url = 'http://build.chromium.org/p/tryserver.nacl/json/builders'
    builders_and_tests = dict(
        (key, [])
        for key in json.load(urllib2.urlopen(url))
        if (key.startswith('nacl-') and
            'toolchain' not in key and
            'valgrind' not in key and
            'perf_panda' not in key and
            'arm_hw' not in key and
            'shared' not in key and
            'coverage' not in key))
    verifiers.append(
        try_server.TryRunner(
            context_obj,
            'http://build.chromium.org/p/tryserver.nacl/',
            user,
            builders_and_tests,
            IGNORED_STEPS,
            ['--root', 'native_client'],
            _nacl_lkgr,
            'native_client'))
  # Tree status is checked regardless of try jobs.
  verifiers.append(
      tree_status.TreeStatusVerifier(
          'http://nativeclient-status.appspot.com'))
  return pending_manager.PendingManager(
      context_obj, verifiers_no_patch, verifiers)
def _gen_gyp(user, root_dir, rietveld_obj, no_try):
  """Generates a PendingManager commit queue for GYP.

  Args:
    user: svn user for the checkout; also (escaped) fed to the LGTM verifier.
    root_dir: directory holding the local checkout and credential files.
    rietveld_obj: Rietveld instance the queue polls.
    no_try: when True, do not register try-job verifiers.

  Returns:
    A configured pending_manager.PendingManager.
  """
  naked_url = '://gyp.googlecode.com/svn/trunk'
  local_checkout = checkout.SvnCheckout(
      root_dir,
      'gyp',
      user,
      None,
      'https' + naked_url)
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush(
          'https://chromium-status.appspot.com/cq/receiver',
          _chromium_status_pwd(root_dir)))
  # Accept both the http and https forms of the project url.
  project_bases = [
      '^%s(|/.*)$' % re.escape(base + naked_url) for base in ('http', 'https')
  ]
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(
          _get_gyp_committers(),
          [re.escape(user)]),
  ]
  verifiers = []
  if not no_try:
    # Grab the list of all the builders here. The commit queue needs to know
    # which builders were triggered. TODO: makes this more automatic.
    # GYP is using the Nacl try server.
    url = 'http://build.chromium.org/p/tryserver.nacl/json/builders'
    builders_and_tests = dict(
        (key, []) for key in json.load(urllib2.urlopen(url))
        if key.startswith('gyp-')
    )
    verifiers.append(try_server.TryRunnerSvn(
        context_obj,
        'http://build.chromium.org/p/tryserver.nacl/',
        user,
        builders_and_tests,
        IGNORED_STEPS,
        'gyp',
        ['--root', 'gyp'],
        lambda: None))
  # Tree status is checked regardless of try jobs.
  verifiers.append(tree_status.TreeStatusVerifier(
      'https://gyp-status.appspot.com/status'))
  return pending_manager.PendingManager(
      context_obj, verifiers_no_patch, verifiers)
def _gen_blink(user, root_dir, rietveld_obj, no_try):
  """Generates a PendingManager commit queue for blink/trunk.

  Args:
    user: svn user for the checkout; also (escaped) fed to the LGTM verifier.
    root_dir: directory holding the local checkout and credential files.
    rietveld_obj: Rietveld instance the queue polls.
    no_try: when True, only the presubmit step verifier is registered.

  Returns:
    A configured pending_manager.PendingManager.
  """
  # Empty post-processors list; no copyright rewriting for blink.
  local_checkout = checkout.SvnCheckout(
      root_dir,
      'blink',
      user,
      None,
      'svn://svn.chromium.org/blink/trunk',
      [])
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush(
          'https://chromium-status.appspot.com/cq',
          _chromium_status_pwd(root_dir)))
  project_bases = [
      '^%s/trunk(|/.*)$' % re.escape(base) for base in BLINK_SVN_BASES]
  # Also accept the googlesource git mirror, with optional .git and branch.
  project_bases.append(
      r'^https?\:\/\/chromium.googlesource.com\/chromium\/blink(?:\.git)?%s$' %
      BRANCH_MATCH)
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(
          _get_chromium_committers(),
          [re.escape(user)]),
  ]
  verifiers = []
  # Every try builder below requires the presubmit builder to pass first.
  prereq_builder = 'blink_presubmit'
  prereq_tests = ['presubmit']
  step_verifiers = [
      try_job_steps.TryJobSteps(builder_name=prereq_builder,
                                steps=prereq_tests)]
  if not no_try:
    blink_tests = [
        'webkit_lint',
        'webkit_python_tests',
        'webkit_tests',
        'webkit_unit_tests',
        'weborigin_unittests',
        'wtf_unittests',
    ]
    # A "compile-only" bot runs the webkit_lint tests (which are fast)
    # in order to pick up the default build targets. We don't use the
    # "compile" step because that will build all the chromium targets, not
    # just the blink-specific ones.
    compile_only = ['webkit_lint']
    builders_and_tests = {
        'linux_layout': compile_only,
        'mac_layout': compile_only,
        'win_layout': compile_only,
        'linux_blink_rel': blink_tests,
        'mac_blink_rel': blink_tests,
        'win_blink_rel': blink_tests,
    }
    step_verifiers += [
        try_job_steps.TryJobSteps(builder_name=b,
                                  prereq_builder=prereq_builder,
                                  prereq_tests=prereq_tests,
                                  steps=s)
        for b, s in builders_and_tests.iteritems()
    ]
    verifiers.append(try_job_on_rietveld.TryRunnerRietveld(
        context_obj,
        'http://build.chromium.org/p/tryserver.chromium/',
        user,
        step_verifiers,
        IGNORED_STEPS,
        'src'))
  # Tree status is checked regardless of try jobs.
  verifiers.append(tree_status.TreeStatusVerifier(
      'https://blink-status.appspot.com'))
  return pending_manager.PendingManager(
      context_obj, verifiers_no_patch, verifiers)
def _get_co(self, post_processors):
  """Builds the read/write SvnCheckout under test."""
  # The shared harness must pass an explicit value (None or a list),
  # never the False placeholder.
  self.assertNotEqual(False, post_processors)
  svn_co = checkout.SvnCheckout(
      self.root_dir, self.name, self.usr, self.pwd, self.svn_url,
      post_processors)
  return svn_co
'Try visiting %s/%d') % ( options.issue, options.patchset, options.server, options.issue) return 1 if options.whitelist: patchset.patches = [patch for patch in patchset.patches if patch.filename in options.whitelist] if options.blacklist: patchset.patches = [patch for patch in patchset.patches if patch.filename not in options.blacklist] for patch in patchset.patches: print(patch) full_dir = os.path.abspath(options.root_dir) scm_type = scm.determine_scm(full_dir) if scm_type == 'svn': scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None) elif scm_type == 'git': scm_obj = checkout.GitCheckout(full_dir, None, None, None, None) elif scm_type == None: scm_obj = checkout.RawCheckout(full_dir, None, None) else: parser.error('Couldn\'t determine the scm') # TODO(maruel): HACK, remove me. # When run a build slave, make sure buildbot knows that the checkout was # modified. if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot': # See sourcedirIsPatched() in: # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/ # chromium_commands.py?view=markup open('.buildbot-patched', 'w').close()
def testPrepare(self):
  """Exercises prepare() through an anonymous (credential-less) checkout."""
  anonymous_co = checkout.SvnCheckout(
      self.root_dir, self.name, None, None, self.svn_url)
  self._test_prepare(anonymous_co)
def _gen_nacl(user, root_dir, rietveld_obj, no_try):
  """Generates a PendingManager commit queue for Native Client.

  Args:
    user: svn user for the checkout; also (escaped) fed to the LGTM verifier.
    root_dir: directory holding the local checkout and credential files.
    rietveld_obj: Rietveld instance the queue polls.
    no_try: when True, do not register try-job verifiers.

  Returns:
    A configured pending_manager.PendingManager.
  """
  offset = 'trunk/src/native_client'
  local_checkout = checkout.SvnCheckout(
      root_dir,
      'nacl',
      user,
      None,
      'svn://svn.chromium.org/native_client/' + offset)
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush(
          'https://nativeclient-status.appspot.com/cq',
          _chromium_status_pwd(root_dir)))
  # Accept the project both via its dedicated svn hosts and the shared
  # src.chromium.org mirrors.
  host_aliases = SVN_HOST_ALIASES + [
      'http://src.chromium.org', 'https://src.chromium.org']
  svn_bases = [i + '/native_client' for i in host_aliases]
  project_bases = [
      '^%s/%s(|/.*)$' % (re.escape(base), offset) for base in svn_bases
  ]
  # Also accept the git mirrors, with optional .git and a branch suffix.
  aliases = (
      'git.chromium.org/native_client/src/native_client',
      'chromium.googlesource.com/native_client/src/native_client',
  )
  project_bases.extend(
      r'^https?\:\/\/%s(?:\.git)?%s$' % (re.escape(i), BRANCH_MATCH)
      for i in aliases)
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(
          _get_nacl_committers(),
          [re.escape(user)]),
  ]
  verifiers = [
      presubmit_check.PresubmitCheckVerifier(context_obj),
  ]
  if not no_try:
    # Grab the list of all the builders here. The commit queue needs to know
    # which builders were triggered. TODO: makes this more automatic.
    url = 'http://build.chromium.org/p/tryserver.nacl/json/builders'
    builders_and_tests = dict(
        (key, []) for key in json.load(urllib2.urlopen(url))
        if (key.startswith('nacl-') and
            'toolchain' not in key and
            'valgrind' not in key and
            'perf_panda' not in key and
            'arm_hw' not in key and
            'shared' not in key and
            'coverage' not in key)
    )
    verifiers.append(try_server.TryRunnerSvn(
        context_obj,
        'http://build.chromium.org/p/tryserver.nacl/',
        user,
        builders_and_tests,
        IGNORED_STEPS,
        'native_client',
        ['--root', 'native_client'],
        _nacl_lkgr))
  # Tree status is checked regardless of try jobs.
  verifiers.append(tree_status.TreeStatusVerifier(
      'https://nativeclient-status.appspot.com'))
  return pending_manager.PendingManager(
      context_obj, verifiers_no_patch, verifiers)
def _gen_skia(user, root_dir, rietveld_obj, no_try):
  """Generates a PendingManager commit queue for Skia.

  Adds the following verifiers to the PendingManager:
  * ProjectBaseUrlVerifier
  * ReviewerLgtmVerifier
  * PresubmitCheckVerifier
  * TreeStatusVerifier
  * TryRunnerRietveld (runs compile trybots)

  Args:
    user: svn user for the checkout; also (escaped) fed to the LGTM verifier.
    root_dir: directory holding the local checkout and credential files.
    rietveld_obj: Rietveld instance the queue polls.
    no_try: when True, do not register the TryRunnerRietveld verifier.

  Returns:
    A configured pending_manager.PendingManager.
  """
  naked_url = '://skia.googlecode.com/svn/trunk'
  local_checkout = checkout.SvnCheckout(
      root_dir,
      'skia',
      user,
      None,
      'https' + naked_url)
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush(
          'https://skia-tree-status.appspot.com/cq',
          _skia_status_pwd(root_dir)),
      # Skia's svn server lacks the server-side hooks the queue normally
      # relies on.
      server_hooks_missing=True)
  # Accept both the http and https forms of the project url.
  project_bases = [
      '^%s(|/.*)$' % re.escape(base + naked_url) for base in ('http', 'https')
  ]
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(
          _get_skia_committers(),
          [re.escape(user)]),
  ]
  verifiers = [
      presubmit_check.PresubmitCheckVerifier(context_obj),
      tree_status.TreeStatusVerifier(
          'https://skia-tree-status.appspot.com')
  ]
  if not no_try:
    # TODO(rmistry): This should instead be a URL that does not change.
    try_server_url = 'http://108.170.217.252:10117/'
    compile_required_build_steps = [
        'BuildBench',
        'BuildGm',
        'BuildMost',
        'BuildSkiaLib',
        'BuildTests',
        'BuildTools',
    ]
    # The try server advertises which builders the commit queue should use.
    builder_names = list(
        json.load(urllib2.urlopen(try_server_url + 'json/cqtrybots')))
    step_verifiers = []
    for builder_name in builder_names:
      step_verifiers.append(
          try_job_steps.TryJobSteps(
              builder_name=builder_name,
              steps=compile_required_build_steps))
    verifiers.append(try_job_on_rietveld.TryRunnerRietveld(
        context_obj=context_obj,
        try_server_url=try_server_url,
        commit_user=user,
        step_verifiers=step_verifiers,
        ignored_steps=[],
        solution='src'))
  return pending_manager.PendingManager(
      context_obj, verifiers_no_patch, verifiers)
def _gen_chromium(user, root_dir, rietveld_obj, no_try):
  """Generates a PendingManager commit queue for chrome/trunk/src.

  Args:
    user: svn user for the checkout; looked up in the creds store and
        (escaped) fed to the LGTM verifier.
    root_dir: directory holding the local checkout and credential files.
    rietveld_obj: Rietveld instance the queue polls.
    no_try: when True, do not register try-job verifiers.

  Returns:
    A configured pending_manager.PendingManager.
  """
  svn_creds = creds.Credentials(os.path.join(root_dir, '.svn_pwd'))
  # chromium_copyright.process is applied to each patch before commit.
  local_checkout = checkout.SvnCheckout(
      root_dir,
      'chromium',
      user,
      svn_creds.get(user),
      'svn://svn.chromium.org/chrome/trunk/src',
      [chromium_copyright.process])
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush('https://chromium-status.appspot.com/cq',
                           _chromium_status_pwd(root_dir)))
  project_bases = [
      '^%s/trunk/src(|/.*)$' % re.escape(base) for base in CHROME_SVN_BASES
  ]
  aliases = (
      # Old path.
      'git.chromium.org/git/chromium.git',
      # New path.
      'git.chromium.org/chromium/src.git',
      'git.chromium.org/git/chromium/src',
      'git.chromium.org/git/chromium/src.git',
  )
  # Accept each git alias over both https and http, with a branch after '@'.
  aliases_urls = sum((['https://' + i, 'http://' + i] for i in aliases), [])
  project_bases.extend(r'^%s\@[a-z\-_]+$' % re.escape(i) for i in aliases_urls)
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(_get_chromium_committers(),
                                         [re.escape(user)]),
  ]
  verifiers = [
      presubmit_check.PresubmitCheckVerifier(context_obj),
  ]
  if not no_try:
    # To add tests to this list, they MUST be in
    # /chrome/trunk/tools/build/masters/master.chromium/master_gatekeeper_cfg.py
    # or somehow close the tree whenever they break.
    standard_tests = (
        'base_unittests',
        'browser_tests',
        'check_deps',
        'cacheinvalidation_unittests',
        'chrome_frame_net_tests',
        'chrome_frame_unittests',
        'content_unittests',
        'crypto_unittests',
        'gfx_unittests',
        # Broken in release.
        #'googleurl_unittests',
        'gpu_unittests',
        'ipc_tests',
        'interactive_ui_tests',
        'installer_util_unittests',
        'jingle_unittests',
        'media_unittests',
        'mini_installer_test',
        #'nacl_integration',
        'net_unittests',
        'printing_unittests',
        # Too flaky.
        #'pyauto_functional_tests',
        'remoting_unittests',
        'safe_browsing_tests',
        'sql_unittests',
        'sync_unit_tests',
        # Tends to be broken by webkit roll and not fixed fast enough.
        #'test_shell_tests',
        'unit_tests',
        'ui_tests',
        'views_unittests',
        #'webkit_unit_tests',
    )
    builders_and_tests = {
        # TODO(maruel): Figure out a way to run 'sizes' where people can
        # effectively update the perf expectation correctly. This requires a
        # clobber=True build running 'sizes'. 'sizes' is not accurate with
        # incremental build. Reference:
        # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
        # TODO(maruel): An option would be to run 'sizes' but not count a
        # failure of this step as a try job failure.
        'linux_rel': standard_tests,
        'mac_rel': standard_tests,
        'win_rel': standard_tests,
        'win': ['compile'],
        'linux_clang': ['compile'],
        'linux_chromeos': ['compile'],
        # tests are ignored for the annotated build of android trybots at this
        # time.
        'android': [],
    }
    verifiers.append(
        try_server.TryRunner(
            context_obj,
            'http://build.chromium.org/p/tryserver.chromium/',
            user,
            builders_and_tests,
            IGNORED_STEPS,
            ['--root', 'src'],
            _chromium_lkgr,
            'src'))
  # Tree status is checked regardless of try jobs.
  verifiers.append(
      tree_status.TreeStatusVerifier('http://chromium-status.appspot.com'))
  return pending_manager.PendingManager(
      context_obj, verifiers_no_patch, verifiers)
def setUp(self):
  """Prepares an svn checkout that the raw-checkout tests operate on."""
  super(RawCheckout, self).setUp()
  # Use a svn checkout as the base.
  base = checkout.SvnCheckout(
      self.root_dir, self.name, None, None, self.svn_url)
  base.prepare(None)
  self.base_co = base
def main():
  """Applies a Rietveld issue (and its dependency chain) to a local checkout.

  Returns a process exit code: 0 on success, non-zero on failure.
  """
  # TODO(pgervais): This function is way too long. Split.
  sys.stdout = Unbuffered(sys.stdout)
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  parser.add_option(
      '-v', '--verbose', action='count', default=0,
      help='Prints debugging infos')
  parser.add_option(
      '-e', '--email',
      help='Email address to access rietveld. If not specified, anonymous '
           'access will be used.')
  parser.add_option(
      '-E', '--email-file',
      help='File containing the email address to access rietveld. '
           'If not specified, anonymous access will be used.')
  parser.add_option(
      '-k', '--private-key-file',
      help='Path to file containing a private key in p12 format for OAuth2 '
           'authentication with "notasecret" password (as generated by Google '
           'Cloud Console).')
  parser.add_option('-i', '--issue', type='int', help='Rietveld issue number')
  parser.add_option('-p', '--patchset', type='int',
                    help='Rietveld issue\'s patchset number')
  parser.add_option('-r', '--root_dir', default=os.getcwd(),
                    help='Root directory to apply the patch')
  parser.add_option('-s', '--server',
                    default='http://codereview.chromium.org',
                    help='Rietveld server')
  parser.add_option('--no-auth', action='store_true',
                    help='Do not attempt authenticated requests.')
  parser.add_option('--revision-mapping', default='{}',
                    help='When running gclient, annotate the got_revisions '
                         'using the revision-mapping.')
  parser.add_option('-f', '--force', action='store_true',
                    help='Really run apply_issue, even if .update.flag '
                         'is detected.')
  parser.add_option('-b', '--base_ref', help='DEPRECATED do not use.')
  parser.add_option('--whitelist', action='append', default=[],
                    help='Patch only specified file(s).')
  parser.add_option('--blacklist', action='append', default=[],
                    help='Don\'t patch specified file(s).')
  parser.add_option('-d', '--ignore_deps', action='store_true',
                    help='Don\'t run gclient sync on DEPS changes.')
  auth.add_auth_options(parser)
  options, args = parser.parse_args()
  auth_config = auth.extract_auth_config_from_options(options)

  if options.whitelist and options.blacklist:
    parser.error('Cannot specify both --whitelist and --blacklist')

  if options.email and options.email_file:
    parser.error('-e and -E options are incompatible')

  # bot_update leaves update.flag behind; honor it unless --force is given.
  if (os.path.isfile(os.path.join(os.getcwd(), 'update.flag'))
      and not options.force):
    print 'update.flag file found: bot_update has run and checkout is already '
    print 'in a consistent state. No actions will be performed in this step.'
    return 0
  logging.basicConfig(
      format='%(levelname)5s %(module)11s(%(lineno)4d): %(message)s',
      level=[logging.WARNING, logging.INFO, logging.DEBUG][
          min(2, options.verbose)])
  if args:
    parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))
  if not options.issue:
    parser.error('Require --issue')
  options.server = options.server.rstrip('/')
  if not options.server:
    parser.error('Require a valid server')

  options.revision_mapping = json.loads(options.revision_mapping)

  # read email if needed
  if options.email_file:
    if not os.path.exists(options.email_file):
      parser.error('file does not exist: %s' % options.email_file)
    with open(options.email_file, 'rb') as f:
      options.email = f.read().strip()

  print('Connecting to %s' % options.server)
  # Always try un-authenticated first, except for OAuth2
  if options.private_key_file:
    # OAuth2 authentication
    obj = rietveld.JwtOAuth2Rietveld(options.server,
                                     options.email,
                                     options.private_key_file)
    properties = obj.get_issue_properties(options.issue, False)
  else:
    # Passing None as auth_config disables authentication.
    obj = rietveld.Rietveld(options.server, None)
    properties = None
    # Bad except clauses order (HTTPError is an ancestor class of
    # ClientLoginError)
    # pylint: disable=E0701
    try:
      properties = obj.get_issue_properties(options.issue, False)
    except urllib2.HTTPError as e:
      if e.getcode() != 302:
        raise
      if options.no_auth:
        exit('FAIL: Login detected -- is issue private?')
      # TODO(maruel): A few 'Invalid username or password.' are printed
      # first, we should get rid of those.
    except rietveld.upload.ClientLoginError as e:
      # Fine, we'll do proper authentication.
      pass
    if properties is None:
      # Anonymous access failed (private issue?): retry authenticated.
      obj = rietveld.Rietveld(options.server, auth_config, options.email)
      try:
        properties = obj.get_issue_properties(options.issue, False)
      except rietveld.upload.ClientLoginError as e:
        print('Accessing the issue requires proper credentials.')
        return 1

  if not options.patchset:
    # Default to the most recent patchset on the issue.
    options.patchset = properties['patchsets'][-1]
    print('No patchset specified. Using patchset %d' % options.patchset)

  # Walk the dependency chain; dependencies are applied first (inserted at
  # the front of the list).
  issues_patchsets_to_apply = [(options.issue, options.patchset)]
  depends_on_info = obj.get_depends_on_patchset(options.issue,
                                               options.patchset)
  while depends_on_info:
    depends_on_issue = int(depends_on_info['issue'])
    depends_on_patchset = int(depends_on_info['patchset'])
    try:
      depends_on_info = obj.get_depends_on_patchset(depends_on_issue,
                                                    depends_on_patchset)
      issues_patchsets_to_apply.insert(0, (depends_on_issue,
                                           depends_on_patchset))
    except urllib2.HTTPError:
      print('The patchset that was marked as a dependency no longer '
            'exists: %s/%d/#ps%d' % (options.server, depends_on_issue,
                                     depends_on_patchset))
      print 'Therefore it is likely that this patch will not apply cleanly.'
      print
      depends_on_info = None

  num_issues_patchsets_to_apply = len(issues_patchsets_to_apply)
  if num_issues_patchsets_to_apply > 1:
    print
    print 'apply_issue.py found %d dependent CLs.' % (
        num_issues_patchsets_to_apply - 1)
    print 'They will be applied in the following order:'
    num = 1
    for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
      print '  #%d %s/%d/#ps%d' % (num, options.server, issue_to_apply,
                                   patchset_to_apply)
      num += 1
    print

  for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
    issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply,
                                 patchset_to_apply)
    print('Downloading patch from %s' % issue_url)
    try:
      patchset = obj.get_patch(issue_to_apply, patchset_to_apply)
    except urllib2.HTTPError as e:
      print(
          'Failed to fetch the patch for issue %d, patchset %d.\n'
          'Try visiting %s/%d') % (issue_to_apply, patchset_to_apply,
                                   options.server, issue_to_apply)
      return 1
    if options.whitelist:
      patchset.patches = [patch for patch in patchset.patches
                          if patch.filename in options.whitelist]
    if options.blacklist:
      patchset.patches = [patch for patch in patchset.patches
                          if patch.filename not in options.blacklist]
    for patch in patchset.patches:
      print(patch)
    full_dir = os.path.abspath(options.root_dir)
    scm_type = scm.determine_scm(full_dir)
    if scm_type == 'svn':
      scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
    elif scm_type == 'git':
      scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
    elif scm_type == None:
      scm_obj = checkout.RawCheckout(full_dir, None, None)
    else:
      parser.error('Couldn\'t determine the scm')

    # TODO(maruel): HACK, remove me.
    # When run a build slave, make sure buildbot knows that the checkout was
    # modified.
    if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
      # See sourcedirIsPatched() in:
      # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
      # chromium_commands.py?view=markup
      open('.buildbot-patched', 'w').close()

    print('\nApplying the patch from %s' % issue_url)
    try:
      scm_obj.apply_patch(patchset, verbose=True)
    except checkout.PatchApplicationFailed as e:
      print(str(e))
      print('CWD=%s' % os.getcwd())
      print('Checkout path=%s' % scm_obj.project_path)
      return 1

  # If the last applied patchset touched DEPS, sync dependencies too.
  if ('DEPS' in map(os.path.basename, patchset.filenames)
      and not options.ignore_deps):
    gclient_root = gclient_utils.FindGclientRoot(full_dir)
    if gclient_root and scm_type:
      print(
          'A DEPS file was updated inside a gclient checkout, running gclient '
          'sync.')
      gclient_path = os.path.join(BASE_DIR, 'gclient')
      if sys.platform == 'win32':
        gclient_path += '.bat'
      with annotated_gclient.temp_filename(suffix='gclient') as f:
        cmd = [
            gclient_path, 'sync',
            '--nohooks',
            '--delete_unversioned_trees',
        ]
        if scm_type == 'svn':
          cmd.extend(['--revision', 'BASE'])
        if options.revision_mapping:
          cmd.extend(['--output-json', f])
        retcode = subprocess.call(cmd, cwd=gclient_root)
        if retcode == 0 and options.revision_mapping:
          revisions = annotated_gclient.parse_got_revision(
              f, options.revision_mapping)
          annotated_gclient.emit_buildprops(revisions)
        return retcode
  return 0
def main():
  """Applies a Rietveld issue (and its dependency chain) to a local checkout.

  Returns one of the RETURN_CODE_* process exit codes; infra (network)
  failures are distinguished from ordinary failures.
  """
  # TODO(pgervais,tandrii): split this func, it's still too long.
  sys.stdout = Unbuffered(sys.stdout)
  parser = _get_arg_parser()
  options, args = parser.parse_args()
  auth_config = auth.extract_auth_config_from_options(options)

  if options.whitelist and options.blacklist:
    parser.error('Cannot specify both --whitelist and --blacklist')

  if options.email and options.email_file:
    parser.error('-e and -E options are incompatible')

  # bot_update leaves update.flag behind; honor it unless --force is given.
  if (os.path.isfile(os.path.join(os.getcwd(), 'update.flag'))
      and not options.force):
    print 'update.flag file found: bot_update has run and checkout is already '
    print 'in a consistent state. No actions will be performed in this step.'
    return 0

  logging.basicConfig(
      format='%(levelname)5s %(module)11s(%(lineno)4d): %(message)s',
      level=[logging.WARNING, logging.INFO, logging.DEBUG][
          min(2, options.verbose)])
  if args:
    parser.error('Extra argument(s) "%s" not understood' % ' '.join(args))
  if not options.issue:
    parser.error('Require --issue')
  options.server = options.server.rstrip('/')
  if not options.server:
    parser.error('Require a valid server')

  options.revision_mapping = json.loads(options.revision_mapping)

  # read email if needed
  if options.email_file:
    if not os.path.exists(options.email_file):
      parser.error('file does not exist: %s' % options.email_file)
    with open(options.email_file, 'rb') as f:
      options.email = f.read().strip()

  print('Connecting to %s' % options.server)
  # Always try un-authenticated first, except for OAuth2
  if options.private_key_file:
    # OAuth2 authentication
    rietveld_obj = rietveld.JwtOAuth2Rietveld(options.server,
                                              options.email,
                                              options.private_key_file)
    try:
      properties = rietveld_obj.get_issue_properties(
          options.issue, False)
    except urllib2.URLError:
      logging.exception('failed to fetch issue properties')
      sys.exit(RETURN_CODE_INFRA_FAILURE)
  else:
    # Passing None as auth_config disables authentication.
    rietveld_obj = rietveld.Rietveld(options.server, None)
    properties = None
    # Bad except clauses order (HTTPError is an ancestor class of
    # ClientLoginError)
    # pylint: disable=E0701
    try:
      properties = rietveld_obj.get_issue_properties(
          options.issue, False)
    except urllib2.HTTPError as e:
      if e.getcode() != 302:
        raise
      if options.no_auth:
        exit('FAIL: Login detected -- is issue private?')
      # TODO(maruel): A few 'Invalid username or password.' are printed
      # first, we should get rid of those.
    except urllib2.URLError:
      logging.exception('failed to fetch issue properties')
      return RETURN_CODE_INFRA_FAILURE
    except rietveld.upload.ClientLoginError as e:
      # Fine, we'll do proper authentication.
      pass
    if properties is None:
      # Anonymous access failed (private issue?): retry authenticated.
      rietveld_obj = rietveld.Rietveld(options.server, auth_config,
                                       options.email)
      try:
        properties = rietveld_obj.get_issue_properties(
            options.issue, False)
      except rietveld.upload.ClientLoginError as e:
        print('Accessing the issue requires proper credentials.')
        return RETURN_CODE_OTHER_FAILURE
      except urllib2.URLError:
        logging.exception('failed to fetch issue properties')
        return RETURN_CODE_INFRA_FAILURE

  if not options.patchset:
    # Default to the most recent patchset on the issue.
    options.patchset = properties['patchsets'][-1]
    print('No patchset specified. Using patchset %d' % options.patchset)

  # Walk the dependency chain; dependencies are applied first (inserted at
  # the front of the list).
  issues_patchsets_to_apply = [(options.issue, options.patchset)]
  try:
    depends_on_info = rietveld_obj.get_depends_on_patchset(
        options.issue, options.patchset)
  except urllib2.URLError:
    logging.exception('failed to fetch depends_on_patchset')
    return RETURN_CODE_INFRA_FAILURE
  while depends_on_info:
    depends_on_issue = int(depends_on_info['issue'])
    depends_on_patchset = int(depends_on_info['patchset'])
    try:
      depends_on_info = rietveld_obj.get_depends_on_patchset(
          depends_on_issue, depends_on_patchset)
      issues_patchsets_to_apply.insert(
          0, (depends_on_issue, depends_on_patchset))
    except urllib2.HTTPError:
      print('The patchset that was marked as a dependency no longer '
            'exists: %s/%d/#ps%d' % (options.server, depends_on_issue,
                                     depends_on_patchset))
      print 'Therefore it is likely that this patch will not apply cleanly.'
      print
      depends_on_info = None
    except urllib2.URLError:
      logging.exception('failed to fetch dependency issue')
      return RETURN_CODE_INFRA_FAILURE

  num_issues_patchsets_to_apply = len(issues_patchsets_to_apply)
  if num_issues_patchsets_to_apply > 1:
    print
    print 'apply_issue.py found %d dependent CLs.' % (
        num_issues_patchsets_to_apply - 1)
    print 'They will be applied in the following order:'
    num = 1
    for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
      print '  #%d %s/%d/#ps%d' % (num, options.server, issue_to_apply,
                                   patchset_to_apply)
      num += 1
    print

  for issue_to_apply, patchset_to_apply in issues_patchsets_to_apply:
    issue_url = '%s/%d/#ps%d' % (options.server, issue_to_apply,
                                 patchset_to_apply)
    print('Downloading patch from %s' % issue_url)
    try:
      patchset = rietveld_obj.get_patch(issue_to_apply, patchset_to_apply)
    except urllib2.HTTPError:
      print(
          'Failed to fetch the patch for issue %d, patchset %d.\n'
          'Try visiting %s/%d') % (issue_to_apply, patchset_to_apply,
                                   options.server, issue_to_apply)
      # If we got this far, then this is likely missing patchset.
      # Thus, it's not infra failure.
      return RETURN_CODE_OTHER_FAILURE
    except urllib2.URLError:
      logging.exception(
          'Failed to fetch the patch for issue %d, patchset %d',
          issue_to_apply, patchset_to_apply)
      return RETURN_CODE_INFRA_FAILURE
    if options.whitelist:
      patchset.patches = [patch for patch in patchset.patches
                          if patch.filename in options.whitelist]
    if options.blacklist:
      patchset.patches = [patch for patch in patchset.patches
                          if patch.filename not in options.blacklist]
    for patch in patchset.patches:
      print(patch)
    full_dir = os.path.abspath(options.root_dir)
    scm_type = scm.determine_scm(full_dir)
    if scm_type == 'svn':
      scm_obj = checkout.SvnCheckout(full_dir, None, None, None, None)
    elif scm_type == 'git':
      scm_obj = checkout.GitCheckout(full_dir, None, None, None, None)
    elif scm_type == None:
      scm_obj = checkout.RawCheckout(full_dir, None, None)
    else:
      parser.error('Couldn\'t determine the scm')

    # TODO(maruel): HACK, remove me.
    # When run a build slave, make sure buildbot knows that the checkout was
    # modified.
    if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot':
      # See sourcedirIsPatched() in:
      # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/
      # chromium_commands.py?view=markup
      open('.buildbot-patched', 'w').close()

    print('\nApplying the patch from %s' % issue_url)
    try:
      scm_obj.apply_patch(patchset, verbose=True)
    except checkout.PatchApplicationFailed as e:
      print(str(e))
      print('CWD=%s' % os.getcwd())
      print('Checkout path=%s' % scm_obj.project_path)
      return RETURN_CODE_OTHER_FAILURE

  # If the last applied patchset touched DEPS, sync dependencies too.
  if ('DEPS' in map(os.path.basename, patchset.filenames)
      and not options.ignore_deps):
    gclient_root = gclient_utils.FindGclientRoot(full_dir)
    if gclient_root and scm_type:
      print(
          'A DEPS file was updated inside a gclient checkout, running gclient '
          'sync.')
      gclient_path = os.path.join(BASE_DIR, 'gclient')
      if sys.platform == 'win32':
        gclient_path += '.bat'
      with annotated_gclient.temp_filename(suffix='gclient') as f:
        cmd = [
            gclient_path, 'sync',
            '--nohooks',
            '--delete_unversioned_trees',
        ]
        if scm_type == 'svn':
          cmd.extend(['--revision', 'BASE'])
        if options.revision_mapping:
          cmd.extend(['--output-json', f])
        retcode = subprocess.call(cmd, cwd=gclient_root)
        if retcode == 0 and options.revision_mapping:
          revisions = annotated_gclient.parse_got_revision(
              f, options.revision_mapping)
          annotated_gclient.emit_buildprops(revisions)
        return retcode
  return RETURN_CODE_OK
print('No patchset specified. Using patchset %d' % options.patchset) print('Downloading the patch.') try: patchset = obj.get_patch(options.issue, options.patchset) except urllib2.HTTPError, e: print >> sys.stderr, ( 'Failed to fetch the patch for issue %d, patchset %d.\n' 'Try visiting %s/%d') % (options.issue, options.patchset, options.server, options.issue) return 1 for patch in patchset.patches: print(patch) scm_type = scm.determine_scm(options.root_dir) if scm_type == 'svn': scm_obj = checkout.SvnCheckout(options.root_dir, None, None, None, None) elif scm_type == 'git': scm_obj = checkout.GitCheckoutBase(options.root_dir, None, None) elif scm_type == None: scm_obj = checkout.RawCheckout(options.root_dir, None, None) else: parser.error('Couldn\'t determine the scm') # TODO(maruel): HACK, remove me. # When run a build slave, make sure buildbot knows that the checkout was # modified. if options.root_dir == 'src' and getpass.getuser() == 'chrome-bot': # See sourcedirIsPatched() in: # http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/ # chromium_commands.py?view=markup open('.buildbot-patched', 'w').close()
def _get_co(self, post_processors):
  """Returns a read-only view over a fresh SvnCheckout for this test.

  The inner checkout is anonymous (no user/password) and carries no
  post processors of its own; they are attached to the read-only wrapper.
  """
  # Guard against a caller accidentally passing False instead of a list/None.
  self.assertNotEqual(False, post_processors)
  inner_checkout = checkout.SvnCheckout(
      self.root_dir, self.name, None, None, self.svn_url, None)
  return checkout.ReadOnlyCheckout(inner_checkout, post_processors)
def _gen_chromium(user, root_dir, rietveld_obj, no_try):
  """Generates a PendingManager commit queue for chrome/trunk/src.

  Args:
    user: committer account used both for the svn checkout and as the
        self-lgtm exception in the reviewer verifier.
    root_dir: directory where the 'chromium' checkout lives/is created.
    rietveld_obj: rietveld.Rietveld instance used to poll issues.
    no_try: when True, skips all try job verifiers (only the presubmit
        builder step verifier and tree status remain).

  Returns:
    A pending_manager.PendingManager wired with pre-patch and post-patch
    verifiers.
  """
  # Checkout of chrome/trunk/src with the copyright post-processor applied
  # to every patched file.
  local_checkout = checkout.SvnCheckout(
      root_dir,
      'chromium',
      user,
      None,
      'svn://svn.chromium.org/chrome/trunk/src',
      [chromium_copyright.process])
  context_obj = context.Context(
      rietveld_obj,
      local_checkout,
      async_push.AsyncPush(
          'https://chromium-status.appspot.com/cq',
          _chromium_status_pwd(root_dir)))

  # Accept issues whose base URL is any known svn mirror of trunk/src...
  project_bases = [
      '^%s/trunk/src(|/.*)$' % re.escape(base) for base in CHROME_SVN_BASES]
  aliases = (
      # Old path.
      'git.chromium.org/git/chromium',
      # New path.
      'git.chromium.org/chromium/src',
      'chromium.googlesource.com/chromium/src',
  )
  # ...or any of the git aliases above (http or https, optional .git suffix,
  # branch suffix per BRANCH_MATCH).
  project_bases.extend(
      r'^https?\:\/\/%s(?:\.git)?%s$' % (re.escape(i), BRANCH_MATCH)
      for i in aliases)
  # Verifiers run before the patch is applied.
  verifiers_no_patch = [
      project_base.ProjectBaseUrlVerifier(project_bases),
      reviewer_lgtm.ReviewerLgtmVerifier(
          _get_chromium_committers(),
          [re.escape(user)]),
  ]
  # Verifiers run after the patch is applied; filled in below.
  verifiers = []
  # Every try job below is gated on the presubmit builder passing first.
  prereq_builder = 'chromium_presubmit'
  prereq_tests = ['presubmit']
  step_verifiers = [
      try_job_steps.TryJobSteps(builder_name=prereq_builder,
                                steps=prereq_tests)]
  if not no_try:
    # To add tests to this list, they MUST be in
    # /chrome/trunk/tools/build/masters/master.chromium/master_gatekeeper_cfg.py
    # or somehow close the tree whenever they break.
    standard_tests = [
        'base_unittests',
        'browser_tests',
        'cacheinvalidation_unittests',
        'check_deps',
        'content_browsertests',
        'content_unittests',
        'crypto_unittests',
        #'gfx_unittests',  # Broken in release.
        #'url_unittests',
        'gpu_unittests',
        'ipc_tests',
        'interactive_ui_tests',
        'jingle_unittests',
        'media_unittests',
        'net_unittests',
        'ppapi_unittests',
        'printing_unittests',
        'sql_unittests',
        'sync_unit_tests',
        'unit_tests',
        #'webkit_unit_tests',
    ]
    # Use a smaller set of tests for *_aura, since there's a lot of overlap
    # with the corresponding *_rel builders.
    # Note: *_aura are Release builders even if their names convey otherwise.
    aura_tests = [
        'aura_unittests',
        'browser_tests',
        'compositor_unittests',
        'content_browsertests',
        'content_unittests',
        'interactive_ui_tests',
        'unit_tests',
        'views_unittests',
    ]
    # linux_aura runs everything win7_aura does except views_unittests.
    linux_aura_tests = aura_tests[:]
    linux_aura_tests.remove('views_unittests')
    # Maps try builder name -> list of test steps to run on it.
    builders_and_tests = {
        # TODO(maruel): Figure out a way to run 'sizes' where people can
        # effectively update the perf expectation correctly. This requires a
        # clobber=True build running 'sizes'. 'sizes' is not accurate with
        # incremental build. Reference:
        # http://chromium.org/developers/tree-sheriffs/perf-sheriffs.
        # TODO(maruel): An option would be to run 'sizes' but not count a
        # failure of this step as a try job failure.
        'android_dbg': ['slave_steps'],
        'android_clang_dbg': ['slave_steps'],
        'android_aosp': ['compile'],
        'ios_dbg_simulator': [
            'compile',
            'base_unittests',
            'content_unittests',
            'crypto_unittests',
            'url_unittests',
            'media_unittests',
            'net_unittests',
            'sql_unittests',
            'ui_unittests',
        ],
        'ios_rel_device': ['compile'],
        'linux_aura': linux_aura_tests,
        'linux_clang': ['compile'],
        'linux_chromeos_clang': ['compile'],
        # Note: It is a Release builder even if its name convey otherwise.
        'linux_chromeos': standard_tests + [
            'app_list_unittests',
            'aura_unittests',
            'ash_unittests',
            'chromeos_unittests',
            'components_unittests',
            'dbus_unittests',
            'device_unittests',
            'sandbox_linux_unittests',
        ],
        'linux_rel': standard_tests + [
            'cc_unittests',
            'chromedriver2_unittests',
            'components_unittests',
            'nacl_integration',
            'remoting_unittests',
            'sandbox_linux_unittests',
            'sync_integration_tests',
        ],
        'mac': ['compile'],
        'mac_rel': standard_tests + [
            'cc_unittests',
            'chromedriver2_unittests',
            'components_unittests',
            'nacl_integration',
            'remoting_unittests',
            'sync_integration_tests',
            'telemetry_unittests',
        ],
        'win': ['compile'],
        'win7_aura': aura_tests + [
            'ash_unittests',
        ],
        'win_rel': standard_tests + [
            'cc_unittests',
            'chrome_frame_net_tests',
            'chrome_frame_tests',
            'chrome_frame_unittests',
            'chromedriver2_unittests',
            'components_unittests',
            'installer_util_unittests',
            'mini_installer_test',
            'nacl_integration',
            'remoting_unittests',
            'sync_integration_tests',
            'telemetry_unittests',
        ],
        'win_x64_rel': [
            'compile',
        ],
    }
    # Tests that can run through swarming; their swarm step name is the test
    # name with a '_swarm' suffix.
    swarm_enabled_tests = (
        'base_unittests',
        'browser_tests',
        'interactive_ui_tests',
        'net_unittests',
        'unit_tests',
    )
    swarm_test_map = dict(
        (test, test + '_swarm') for test in swarm_enabled_tests)
    swarm_enabled_builders_and_tests = {
        'linux_rel': swarm_test_map,
        # TODO(csharp): Enable once isolate works on Mac again
        # 'mac_rel': swarm_test_map,
        'win_rel': swarm_test_map,
    }
    # Plain try builders (non-swarm), plus the android triggered-steps
    # special case.
    step_verifiers += [
        try_job_steps.TryJobSteps(
            builder_name=b,
            prereq_builder=prereq_builder,
            prereq_tests=prereq_tests,
            steps=s)
        for b, s in builders_and_tests.iteritems()
        if b not in swarm_enabled_builders_and_tests
    ] + [
        try_job_steps.TryJobTriggeredSteps(
            builder_name='android_dbg_triggered_tests',
            trigger_name='android_dbg',
            prereq_builder=prereq_builder,
            prereq_tests=prereq_tests,
            steps={'slave_steps': 'slave_steps'}),
    ]
    # Add the swarm enabled builders with swarm accepted tests.
    for builder, swarm_enabled_tests in (
        swarm_enabled_builders_and_tests.iteritems()):
      # Tests not handled by swarm still run on the triggering bot itself.
      regular_tests = list(
          set(builders_and_tests[builder]) - set(swarm_enabled_tests))
      step_verifiers.append(
          try_job_steps.TryJobTriggeredOrNormalSteps(
              builder_name='swarm_triggered',
              trigger_name=builder,
              prereq_builder=prereq_builder,
              prereq_tests=prereq_tests,
              steps=swarm_enabled_tests,
              trigger_bot_steps=regular_tests,
              use_triggered_bot=False))
    verifiers.append(try_job_on_rietveld.TryRunnerRietveld(
        context_obj,
        'http://build.chromium.org/p/tryserver.chromium/',
        user,
        step_verifiers,
        IGNORED_STEPS,
        'src'))
  # Tree status is always checked, with or without try jobs.
  verifiers.append(tree_status.TreeStatusVerifier(
      'https://chromium-status.appspot.com'))
  return pending_manager.PendingManager(
      context_obj,
      verifiers_no_patch,
      verifiers)