  def test_will_not_bootstrap_if_disabled(self, get_params):
    get_params.side_effect = Exception('Tried to bootstrap')
    opts = self.opts._replace(logdog_disable=True)

    with self.assertRaises(ldbs.NotBootstrapped):
      ldbs.bootstrap(self.rt, opts, self.basedir, self.tdir,
                     self.properties, [])

  def test_will_not_bootstrap_if_recursive(self, get_params):
    get_params.side_effect = Exception('Tried to bootstrap')
    os.environ['LOGDOG_STREAM_PREFIX'] = 'foo'

    with self.assertRaises(ldbs.NotBootstrapped):
      ldbs.bootstrap(self.rt, self.opts, self.basedir, self.tdir,
                     self.properties, [])

  def test_cipd_install_failure_raises_bootstrap_error(self, get_config):
    cipd_bootstrap_v2.install_cipd_packages.side_effect = (
        subprocess.CalledProcessError(0, [], 'PROCESS ERROR'))

    with self.assertRaises(ldbs.BootstrapError) as e:
      ldbs.bootstrap(self.rt, self.opts, self.basedir, self.tdir,
                     self.properties, [])

    self.assertEqual(e.exception.message, 'Failed to install CIPD packages.')
    cipd_bootstrap_v2.install_cipd_packages.assert_called_once()

  def test_registered_apis_work(self, tempdir, get_params, service_account,
                                isfile):
    tempdir.return_value = 'foo'
    isfile.return_value = True
    service_account.return_value = 'creds.json'

    for api in sorted(ldbs._CIPD_TAG_API_MAP.values()):
      get_params.return_value = self.base._replace(api=api)
      ldbs.bootstrap(self.rt, self.opts, self.basedir, self.tdir,
                     self.properties, [])

  def test_bootstrap_command_linux_stable(self, tempdir, get_params, isfile):
    gce.Authenticator.is_gce.return_value = True
    recipe_cmd = ['run_recipe.py', 'recipe_params...']

    tempdir.return_value = 'foo'
    get_params.return_value = ldbs.Params(
        project='myproject', cipd_tag='stable', api=self.stable_api,
        mastername='mastername', buildername='buildername', buildnumber=1337,
        logdog_only=False, generation=None)
    isfile.return_value = True

    streamserver_uri = 'unix:%s' % (os.path.join('foo', 'butler.sock'),)

    bs = ldbs.bootstrap(self.rt, self.opts, self.basedir, self.tdir,
                        self.properties, recipe_cmd)

    # Check CIPD installation.
    cipd_dir = os.path.join(self.basedir, '.recipe_cipd')
    cipd_bootstrap_v2.install_cipd_packages.assert_called_once_with(
        cipd_dir,
        cipd.CipdPackage(
            name='infra/tools/luci/logdog/butler/${platform}',
            version='stable'),
        cipd.CipdPackage(
            name='infra/tools/luci/logdog/annotee/${platform}',
            version='stable'),
    )

    # Check bootstrap command.
    self.assertEqual(
        bs.cmd,
        [os.path.join(cipd_dir, 'logdog_butler'),
            '-log-level', 'warning',
            '-project', 'myproject',
            '-prefix', 'bb/mastername/buildername/1337',
            '-coordinator-host', 'luci-logdog.appspot.com',
            '-output', 'logdog',
            '-tag', 'buildbot.master=mastername',
            '-tag', 'buildbot.builder=buildername',
            '-tag', 'buildbot.buildnumber=1337',
            '-tag', 'logdog.viewer_url=https://luci-milo.appspot.com/buildbot/'
                    'mastername/buildername/1337',
            '-service-account-json', ':gce',
            '-output-max-buffer-age', '30s',
            'run',
            '-stdout', 'tee=stdout',
            '-stderr', 'tee=stderr',
            '-streamserver-uri', streamserver_uri,
            '--',
            os.path.join(cipd_dir, 'logdog_annotee'),
                '-log-level', 'warning',
                '-name-base', 'recipes',
                '-print-summary',
                '-tee', 'annotations,text',
                '-json-args-path', self._tp('logdog_annotee_cmd.json'),
                '-result-path', self._tp('bootstrap_result.json'),
        ])

    self._assertAnnoteeCommand(recipe_cmd)
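
The expected butler command above is built from the Params fields: the stream prefix, the buildbot.* tags, and the Milo viewer URL all come from mastername, buildername, and buildnumber. A minimal illustrative sketch of that mapping (hypothetical helper, inferred from the asserted command line, not the production code in ldbs):

def _illustrative_prefix_and_tags(mastername, buildername, buildnumber):
    # Mirrors the values asserted in the test above; the formatting rules are
    # inferred from the expected command, not taken from ldbs itself.
    prefix = 'bb/%s/%s/%d' % (mastername, buildername, buildnumber)
    viewer_url = ('https://luci-milo.appspot.com/buildbot/%s/%s/%d'
                  % (mastername, buildername, buildnumber))
    tags = {
        'buildbot.master': mastername,
        'buildbot.builder': buildername,
        'buildbot.buildnumber': str(buildnumber),
        'logdog.viewer_url': viewer_url,
    }
    return prefix, tags

assert _illustrative_prefix_and_tags('mastername', 'buildername', 1337)[0] == \
    'bb/mastername/buildername/1337'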
Example #6
def _exec_recipe(rt, opts, basedir, tdir, properties):
    # Find out if the recipe we intend to run is in build_internal's recipes. If
    # so, use recipes.py from there, otherwise use the one from build.
    recipe_file = properties['recipe'].replace('/', os.path.sep) + '.py'

    # Use the standard recipe runner unless the recipes are explicitly in the
    # "build_limited" repository.
    recipe_runner = os.path.join(env.Build, 'scripts', 'slave', 'recipes.py')
    if env.BuildInternal:
        build_limited = os.path.join(env.BuildInternal, 'scripts', 'slave')
        if os.path.exists(os.path.join(build_limited, 'recipes', recipe_file)):
            recipe_runner = os.path.join(build_limited, 'recipes.py')

    # Dump properties to JSON and build recipe command.
    props_file = os.path.join(tdir, 'recipe_properties.json')
    with open(props_file, 'w') as fh:
        json.dump(properties, fh)

    recipe_cmd = [
        sys.executable,
        '-u',
        recipe_runner,
        '--verbose',
        'run',
        '--workdir=%s' % _build_dir(),
        '--properties-file=%s' % props_file,
        properties['recipe'],
    ]

    recipe_return_code = None
    try:
        bs = logdog_bootstrap.bootstrap(rt, opts, basedir, tdir, properties,
                                        recipe_cmd)

        LOGGER.info('Bootstrapping through LogDog: %s', bs.cmd)
        _, _ = _run_command(bs.cmd, dry_run=opts.dry_run)
        recipe_return_code = bs.get_result()
    except logdog_bootstrap.NotBootstrapped as e:
        LOGGER.info('Not bootstrapped: %s', e.message)
    except logdog_bootstrap.BootstrapError as e:
        LOGGER.warning('Could not bootstrap LogDog: %s', e.message)
    except Exception as e:
        LOGGER.exception('Exception while bootstrapping LogDog.')
    finally:
        if recipe_return_code is None:
            LOGGER.info('Not using LogDog. Invoking `recipes.py` directly.')
            recipe_return_code, _ = _run_command(recipe_cmd,
                                                 dry_run=opts.dry_run)
    return recipe_return_code
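
The try/except/finally in _exec_recipe is the core integration pattern: attempt the LogDog bootstrap, and fall back to invoking the recipe command directly on any failure. A stripped-down sketch of that control flow, with hypothetical stand-ins for the bootstrap call and the command runner:

def run_with_optional_logdog(bootstrap_fn, run_fn, recipe_cmd):
    # Hypothetical distillation of the pattern above: bootstrap_fn plays the
    # role of logdog_bootstrap.bootstrap and run_fn the role of _run_command.
    return_code = None
    try:
        bs = bootstrap_fn(recipe_cmd)  # may raise NotBootstrapped and friends
        run_fn(bs.cmd)
        return_code = bs.get_result()
    except Exception:
        pass  # the real code logs the reason before falling back
    finally:
        if return_code is None:
            return_code = run_fn(recipe_cmd)
    return return_code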

  def test_bootstrap_command_win_canary(self, tempdir, get_params,
                                        service_account, isfile):
    infra_platform.get.return_value = ('win', 'x86_64', 64)

    recipe_cmd = ['run_recipe.py', 'recipe_params...']

    tempdir.return_value = 'foo'
    get_params.return_value = ldbs.Params(
        project='myproject', cipd_tag='canary',
        api=self.latest_api, mastername='mastername',
        buildername='buildername', buildnumber=1337, logdog_only=True,
        generation=None)
    service_account.return_value = 'creds.json'
    isfile.return_value = True

    bs = ldbs.bootstrap(self.rt, self.opts, self.basedir, self.tdir,
                        self.properties, recipe_cmd)

    # Check CIPD installation.
    cipd_dir = os.path.join(self.basedir, '.recipe_cipd')
    cipd_bootstrap_v2.install_cipd_packages.assert_called_once_with(
        cipd_dir,
        cipd.CipdPackage(
            name='infra/tools/luci/logdog/butler/${platform}',
            version='canary'),
        cipd.CipdPackage(
            name='infra/tools/luci/logdog/annotee/${platform}',
            version='canary'),
    )

    # Check bootstrap command.
    self.assertEqual(
        bs.cmd,
        [os.path.join(cipd_dir, 'logdog_butler.exe'),
            '-log-level', 'warning',
            '-project', 'myproject',
            '-prefix', 'bb/mastername/buildername/1337',
            '-coordinator-host', 'luci-logdog.appspot.com',
            '-output', 'logdog',
            '-tag', 'buildbot.master=mastername',
            '-tag', 'buildbot.builder=buildername',
            '-tag', 'buildbot.buildnumber=1337',
            '-tag', 'logdog.viewer_url=https://luci-milo.appspot.com/buildbot/'
                    'mastername/buildername/1337',
            '-service-account-json', 'creds.json',
            '-output-max-buffer-age', '30s',
            '-io-keepalive-stderr', '5m',
            'run',
            '-stdout', 'tee=stdout',
            '-stderr', 'tee=stderr',
            '-streamserver-uri', 'net.pipe:LUCILogDogButler',
            '--',
            os.path.join(cipd_dir, 'logdog_annotee.exe'),
                '-log-level', 'warning',
                '-name-base', 'recipes',
                '-print-summary',
                '-tee', 'annotations',
                '-json-args-path', self._tp('logdog_annotee_cmd.json'),
                '-result-path', self._tp('bootstrap_result.json'),
        ])

    service_account.assert_called_once_with(
        self.opts, ldbs._PLATFORM_CONFIG[('win',)]['credential_path'])
    self._assertAnnoteeCommand(recipe_cmd)
Example #8
def _exec_recipe(args, rt, stream, basedir, buildbot_build_dir):
    tempdir = rt.tempdir(basedir)
    LOGGER.info('Using temporary directory: [%s].', tempdir)

    build_data_dir = rt.tempdir(basedir)
    LOGGER.info('Using build data directory: [%s].', build_data_dir)

    # Construct our properties.
    properties = copy.copy(args.factory_properties)
    properties.update(args.build_properties)

    # Determine our pins.
    mastername = properties.get('mastername')
    buildername = properties.get('buildername')

    # Determine if this build is an opt-in build.
    is_opt_in = get_is_opt_in(properties)

    # Determine our CIPD pins.
    #
    # If a property includes "remote_run_canary", we will explicitly use canary
    # pins. This can be done by manually submitting a build to the waterfall.
    is_canary = (_get_is_canary(mastername) or is_opt_in
                 or 'remote_run_canary' in properties or args.canary)
    pins = _STABLE_CIPD_PINS if not is_canary else _CANARY_CIPD_PINS

    # Determine if we're running Kitchen.
    #
    # If a property includes "remote_run_kitchen", we will explicitly use canary
    # pins. This can be done by manually submitting a build to the waterfall.
    is_kitchen = (_get_is_kitchen(mastername, buildername) or is_opt_in
                  or 'remote_run_kitchen' in properties)

    # Allow command-line "--kitchen" to override.
    if args.kitchen:
        pins = pins._replace(kitchen=args.kitchen)
        is_kitchen = True

    # Augment our input properties...
    properties['build_data_dir'] = build_data_dir
    properties['builder_id'] = 'master.%s:%s' % (mastername, buildername)

    if not is_kitchen:
        # path_config property defines what paths a build uses for checkout, git
        # cache, goma cache, etc.
        #
        # TODO(dnj or phajdan): Rename "kitchen" path config to "remote_run_legacy".
        # "kitchen" was never correct, and incorrectly implies that Kitchen is
        # somehow involved in this path config.
        properties['path_config'] = 'kitchen'
        properties['bot_id'] = properties['slavename']
    else:
        # If we're using Kitchen, our "path_config" must be empty or "kitchen".
        path_config = properties.pop('path_config', None)
        if path_config and path_config != 'kitchen':
            raise ValueError(
                "Users of 'remote_run.py' MUST specify either 'kitchen' "
                "or no 'path_config', not [%s]." % (path_config, ))

    LOGGER.info('Using properties: %r', properties)

    monitoring_utils.write_build_monitoring_event(build_data_dir, properties)

    # Ensure that the CIPD client is installed and available on PATH.
    from slave import cipd_bootstrap_v2
    cipd_bootstrap_v2.high_level_ensure_cipd_client(basedir, mastername)

    # "/b/c" as a cache directory.
    cache_dir = os.path.join(BUILDBOT_ROOT, 'c')

    # Cleanup data from old builds.
    _cleanup_old_layouts(is_kitchen, properties, buildbot_build_dir, cache_dir)

    # (Canary) Use Kitchen if configured.
    # TODO(dnj): Make this the only path once we move to Kitchen.
    if is_kitchen:
        return _remote_run_with_kitchen(args, stream, is_canary, pins.kitchen,
                                        properties, tempdir, basedir,
                                        cache_dir)

    ##
    # Classic Remote Run
    #
    # TODO(dnj): Delete this in favor of Kitchen.
    ##

    properties_file = os.path.join(tempdir, 'remote_run_properties.json')
    with open(properties_file, 'w') as f:
        json.dump(properties, f)

    cipd_path = os.path.join(basedir, '.remote_run_cipd')

    cipd_bootstrap_v2.install_cipd_packages(
        cipd_path, cipd.CipdPackage(_RECIPES_PY_CIPD_PACKAGE, pins.recipes))

    engine_flags = {
        'use_result_proto': True,
    }

    engine_args = []
    if engine_flags:
        engine_flags_path = os.path.join(tempdir, 'engine_flags.json')
        with open(engine_flags_path, 'w') as f:
            json.dump({'engine_flags': engine_flags}, f)

        engine_args = ['--operational-args-path', engine_flags_path]

    recipe_result_path = os.path.join(tempdir, 'recipe_result.json')
    recipe_cmd = [
        sys.executable,
        os.path.join(cipd_path, 'recipes.py'),
    ] + engine_args + [
        '--verbose',
        'remote',
        '--repository',
        args.repository,
        '--workdir',
        os.path.join(tempdir, 'rw'),
    ]
    if args.revision:
        recipe_cmd.extend(['--revision', args.revision])
    if args.use_gitiles:
        recipe_cmd.append('--use-gitiles')
    recipe_cmd.extend([
        '--',
    ] + engine_args + [
        '--verbose',
        'run',
        '--properties-file',
        properties_file,
        '--workdir',
        os.path.join(tempdir, 'w'),
        '--output-result-json',
        recipe_result_path,
        properties.get('recipe') or args.recipe,
    ])
    # If we bootstrap through logdog, the recipe command line gets written
    # to a temporary file and does not appear in the log.
    LOGGER.info('Recipe command line: %r', recipe_cmd)

    # Default to return code != 0 is for the benefit of buildbot, which uses
    # return code to decide if a step failed or not.
    recipe_return_code = 1
    try:
        # NOTE: this unconditional raise skips the LogDog bootstrap below and
        # always falls through to invoking `recipes.py` directly.
        raise logdog_bootstrap.NotBootstrapped()
        bs = logdog_bootstrap.bootstrap(rt, args, basedir, tempdir, properties,
                                        recipe_cmd)

        LOGGER.info('Bootstrapping through LogDog: %s', bs.cmd)
        bs.annotate(stream)
        _ = _call(bs.cmd)
        recipe_return_code = bs.get_result()
    except logdog_bootstrap.NotBootstrapped:
        LOGGER.info('Not using LogDog. Invoking `recipes.py` directly.')
        recipe_return_code = _call(recipe_cmd)

    # Try to open recipe result JSON. Any failure will result in an exception
    # and an infra failure.
    with open(recipe_result_path) as f:
        return_value = json.load(f)

    failure = return_value.get('failure')
    if failure is not None and not failure.get('step_failure'):
        # The recipe engine used to return -1, which got interpreted as 255
        # by os.exit in python, since process exit codes are a single byte.
        recipe_return_code = 255

    return recipe_return_code
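
The tail of this variant reads the JSON written via --output-result-json and promotes a non-step failure to exit code 255. A small sketch of that interpretation, assuming only the result shape the code above relies on (a top-level "failure" object with an optional "step_failure" flag):

import json

def interpret_recipe_result(result_path, recipe_return_code):
    # Assumed shapes, inferred from the checks above:
    #   {}                                   -> success, keep return code
    #   {"failure": {"step_failure": true}}  -> ordinary step failure
    #   {"failure": {}}                      -> infra failure, force 255
    with open(result_path) as f:
        result = json.load(f)
    failure = result.get('failure')
    if failure is not None and not failure.get('step_failure'):
        return 255
    return recipe_return_code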
Example #9
def test_(self):
    os.environ['LOGDOG_STREAM_PREFIX'] = 'foo'
    with self.assertRaises(ldbs.NotBootstrapped):
        ldbs.bootstrap(self.rt, self.opts, self.basedir, self.tdir,
                       self.properties, [])
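
This test passes because an already-exported LOGDOG_STREAM_PREFIX marks the process as running under a Butler, so bootstrapping again would be recursive. A minimal sketch of that kind of guard (hypothetical helper, not the actual ldbs internals):

import os

class NotBootstrapped(Exception):
    """Raised when LogDog bootstrapping must be skipped."""

def check_not_nested(environ=os.environ):
    # An existing stream prefix means a Butler already wraps this process, so
    # a second bootstrap would nest one Butler inside another.
    if environ.get('LOGDOG_STREAM_PREFIX'):
        raise NotBootstrapped('LOGDOG_STREAM_PREFIX is already set')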
Example #10
    def test_bootstrap_command_windows(self, tempdir, get_params,
                                       service_account, install_cipd, isfile):
        infra_platform.get.return_value = ('win', 64)

        recipe_cmd = ['run_recipe.py', 'recipe_params...']

        tempdir.return_value = 'foo'
        get_params.return_value = ldbs.Params(project='myproject',
                                              cipd_tag='stable',
                                              mastername='mastername',
                                              buildername='buildername',
                                              buildnumber=1337)
        install_cipd.return_value = ('logdog_butler.exe', 'logdog_annotee.exe')
        service_account.return_value = 'creds.json'
        isfile.return_value = True

        streamserver_uri = 'net.pipe:LUCILogDogButler'

        bs = ldbs.bootstrap(self.rt, self.opts, self.basedir, self.tdir,
                            self.properties, recipe_cmd)
        self.assertEqual(bs.cmd, [
            'logdog_butler.exe',
            '-log-level',
            'warning',
            '-project',
            'myproject',
            '-prefix',
            'bb/mastername/buildername/1337',
            '-output',
            'logdog,host="services-dot-luci-logdog.appspot.com"',
            '-service-account-json',
            'creds.json',
            '-output-max-buffer-age',
            '30s',
            'run',
            '-stdout',
            'tee=stdout',
            '-stderr',
            'tee=stderr',
            '-streamserver-uri',
            streamserver_uri,
            '--',
            'logdog_annotee.exe',
            '-log-level',
            'warning',
            '-project',
            'myproject',
            '-butler-stream-server',
            streamserver_uri,
            '-logdog-host',
            'luci-logdog.appspot.com',
            '-annotate',
            'tee',
            '-name-base',
            'recipes',
            '-print-summary',
            '-tee',
            '-json-args-path',
            self._tp('logdog_annotee_cmd.json'),
            '-result-path',
            self._tp('bootstrap_result.json'),
        ])

        service_account.assert_called_once_with(
            self.opts, ldbs._PLATFORM_CONFIG[('win', )]['credential_path'])
        self._assertAnnoteeCommand(recipe_cmd)

def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('--repository',
                        required=True,
                        help='URL of a git repository to fetch.')
    parser.add_argument('--revision', help='Git commit hash to check out.')
    parser.add_argument('--recipe',
                        required=True,
                        help='Name of the recipe to run')
    parser.add_argument('--build-properties-gz',
                        dest='build_properties',
                        type=chromium_utils.convert_gz_json_type,
                        default={},
                        help='Build properties in b64 gz JSON format')
    parser.add_argument('--factory-properties-gz',
                        dest='factory_properties',
                        type=chromium_utils.convert_gz_json_type,
                        default={},
                        help='factory properties in b64 gz JSON format')
    parser.add_argument('--leak',
                        action='store_true',
                        help='Refrain from cleaning up generated artifacts.')
    parser.add_argument('--verbose', action='store_true')

    group = parser.add_argument_group('LogDog Bootstrap')
    logdog_bootstrap.add_arguments(group)

    args = parser.parse_args(argv[1:])

    with robust_tempdir.RobustTempdir(prefix='rr', leak=args.leak) as rt:
        try:
            basedir = chromium_utils.FindUpward(os.getcwd(), 'b')
        except chromium_utils.PathNotFound as e:
            LOGGER.warn(e)
            # Use a base directory inside the system temporary directory; if we
            # used the slave directory (cwd), the paths would get too long.
            # Recipes that need different paths or persistent directories should
            # set them explicitly.
            basedir = tempfile.gettempdir()

        # Explicitly clean up possibly leaked temporary directories
        # from previous runs.
        rt.cleanup(basedir)

        tempdir = rt.tempdir(basedir)
        LOGGER.info('Using temporary directory: [%s].', tempdir)

        build_data_dir = rt.tempdir(basedir)
        LOGGER.info('Using build data directory: [%s].', build_data_dir)

        properties = copy.copy(args.factory_properties)
        properties.update(args.build_properties)
        properties['build_data_dir'] = build_data_dir
        LOGGER.info('Using properties: %r', properties)
        properties_file = os.path.join(tempdir, 'remote_run_properties.json')
        with open(properties_file, 'w') as f:
            json.dump(properties, f)

        monitoring_utils.write_build_monitoring_event(build_data_dir,
                                                      properties)

        # Make switching to remote_run easier: we do not use buildbot workdir,
        # and it takes disk space leading to out of disk errors.
        buildbot_workdir = properties.get('workdir')
        if buildbot_workdir:
            try:
                if os.path.exists(buildbot_workdir):
                    buildbot_workdir = os.path.realpath(buildbot_workdir)
                    cwd = os.path.realpath(os.getcwd())
                    if cwd.startswith(buildbot_workdir):
                        buildbot_workdir = cwd

                    LOGGER.info('Cleaning up buildbot workdir %r',
                                buildbot_workdir)

                    # Buildbot workdir is usually used as current working directory,
                    # so do not remove it, but delete all of the contents. Deleting
                    # current working directory of a running process may cause
                    # confusing errors.
                    for p in (os.path.join(buildbot_workdir, x)
                              for x in os.listdir(buildbot_workdir)):
                        LOGGER.info('Deleting %r', p)
                        chromium_utils.RemovePath(p)
            except Exception as e:
                # It's preferred that we keep going rather than fail the build
                # on optional cleanup.
                LOGGER.exception('Buildbot workdir cleanup failed: %s', e)

        # Should we use a CIPD pin?
        mastername = properties.get('mastername')
        cipd_pin = None
        if mastername:
            cipd_pin = _CIPD_PINS.get(mastername)
        if not cipd_pin:
            cipd_pin = _CIPD_PINS[None]

        cipd_path = os.path.join(basedir, '.remote_run_cipd')
        _install_cipd_packages(cipd_path,
                               cipd.CipdPackage('infra/recipes-py', cipd_pin))

        recipe_result_path = os.path.join(tempdir, 'recipe_result.json')
        recipe_cmd = [
            sys.executable,
            os.path.join(cipd_path, 'recipes.py'),
            '--verbose',
            'remote',
            '--repository',
            args.repository,
            '--revision',
            args.revision,
            '--workdir',
            os.path.join(tempdir, 'rw'),
            '--',
            '--verbose',
            'run',
            '--properties-file',
            properties_file,
            '--workdir',
            os.path.join(tempdir, 'w'),
            '--output-result-json',
            recipe_result_path,
            args.recipe,
        ]
        # If we bootstrap through logdog, the recipe command line gets written
        # to a temporary file and does not appear in the log.
        LOGGER.info('Recipe command line: %r', recipe_cmd)
        recipe_return_code = None
        try:
            bs = logdog_bootstrap.bootstrap(rt, args, basedir, tempdir,
                                            properties, recipe_cmd)

            LOGGER.info('Bootstrapping through LogDog: %s', bs.cmd)
            _ = _call(bs.cmd)
            recipe_return_code = bs.get_result()
        except logdog_bootstrap.NotBootstrapped as e:
            LOGGER.info('Not bootstrapped: %s', e.message)
        except logdog_bootstrap.BootstrapError as e:
            LOGGER.warning('Could not bootstrap LogDog: %s', e.message)
        except Exception as e:
            LOGGER.exception('Exception while bootstrapping LogDog.')
        finally:
            if recipe_return_code is None:
                LOGGER.info(
                    'Not using LogDog. Invoking `recipes.py` directly.')
                recipe_return_code = _call(recipe_cmd)

            # Try to open recipe result JSON. Any failure will result in an exception
            # and an infra failure.
            with open(recipe_result_path) as f:
                json.load(f)
        return recipe_return_code
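
Per the argparse definition in main, only --repository and --recipe are required; the property flags default to empty dicts. An illustrative invocation (all values are placeholders, not taken from the source):

import sys

if __name__ == '__main__':
    sys.exit(main([
        'remote_run.py',  # argv[0]; parse_args only sees argv[1:]
        '--repository', 'https://example.googlesource.com/build.git',
        '--recipe', 'some_recipe',
        '--verbose',
    ]))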