def _pull_and_execute_tasks(task_client, queue_name):
    pull_task_response = _pull_task(task_client, queue_name)
    tasks = pull_task_response.get('tasks', [])
    if not tasks:
        # Sleep for 10 seconds if no tasks were returned.
        logger.debug('No pending tasks; sleeping for 10 seconds.')
        time.sleep(10)
    for task in tasks:
        task_id, tmp_root, artman_user_config, log_file_path = _prepare_dir()
        log_file_handler = None
        try:
            log_file_handler = _setup_logger(log_file_path)
            logger.info('Starting to execute task %s' % task)
            if int(task['taskStatus']['attemptDispatchCount']) > MAX_ATTEMPTS:
                logger.info('Deleting task that exceeds max attempts.')
                _delete_task(task_client, task)
                continue
            _execute_task(artman_user_config, task)
            _ack_task(task_client, task)
            logger.info('Task execution finished')
        except Exception:
            logger.error('\n'.join(traceback.format_tb(sys.exc_info()[2])))
            _cancel_task_lease(task_client, task)
        finally:
            logger.info('Cleanup tmp directory %s' % tmp_root)
            # Use task id as log name
            _write_to_cloud_logging(task_id, log_file_path)
            _cleanup(tmp_root, log_file_handler)
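For orientation, a minimal driver for the helper above might look like the
following sketch; `make_task_client` is a hypothetical factory standing in
for however the surrounding module builds its task client.

def _poll_forever(queue_name):
    # Hypothetical worker loop; _pull_and_execute_tasks sleeps internally
    # when the queue is empty, so this loop does not busy-wait.
    task_client = make_task_client()  # assumed helper, not from the source
    while True:
        _pull_and_execute_tasks(task_client, queue_name)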
Example #2
 def execute(self,
             src_proto_path,
             import_proto_path,
             output_dir,
             api_name,
             api_version,
             organization_name,
             toolkit_path,
             desc_proto_path=None,
             excluded_proto_path=[]):
     desc_proto_path = desc_proto_path or []
     desc_protos = list(
         protoc_utils.find_protos(src_proto_path + desc_proto_path,
                                  excluded_proto_path))
     header_proto_path = import_proto_path + desc_proto_path
     header_proto_path.extend(src_proto_path)
     desc_out_file = task_utils.api_full_name(api_name, api_version,
                                              organization_name) + '.desc'
     logger.debug('Compiling descriptors for {0}'.format(desc_protos))
     self.exec_command(['mkdir', '-p', output_dir])
     # DescGen doesn't use _group_by_dirname right now because:
     #   - it doesn't have to
     #   - multiple invocations would overwrite the desc_out_file
     self.exec_command(
         ['protoc'] + protoc_utils.protoc_header_params(
             header_proto_path, toolkit_path) +
         protoc_utils.protoc_desc_params(output_dir, desc_out_file) +
         desc_protos)
     return os.path.join(output_dir, desc_out_file)
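An illustrative invocation of this task follows; the class name DescGenTask
and every path and name below are assumptions for the sketch, not values
taken from the artman source.

# Hypothetical call; all arguments are illustrative.
desc_file = DescGenTask().execute(
    src_proto_path=['google/example/library/v1'],
    import_proto_path=['google/api'],
    output_dir='/tmp/artman-genfiles',
    api_name='library',
    api_version='v1',
    organization_name='google-cloud',
    toolkit_path='/toolkit')
# desc_file is the absolute path of the generated descriptor set.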
Example #4
def _find_protobuf_path(toolkit_path):
    """Fetch and locate protobuf source"""
    global _protobuf_path
    if not _protobuf_path:
        logger.debug('Searching for latest protobuf source')
        _protobuf_path = task_utils.get_java_tool_path(toolkit_path, 'protobufJavaDir')
    return _protobuf_path
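Because the result is cached in the module-level _protobuf_path, only the
first call pays the lookup cost; a small sketch (the toolkit path is an
assumed value):

first = _find_protobuf_path('/path/to/toolkit')   # resolves and caches
second = _find_protobuf_path('/path/to/toolkit')  # returns the cached value
assert first is second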
Example #6
 def execute(self,
             src_proto_path,
             import_proto_path,
             output_dir,
             api_name,
             api_version,
             organization_name,
             toolkit_path,
             root_dir,
             excluded_proto_path=[],
             proto_deps=[]):
     desc_proto_paths = []
     for dep in proto_deps:
         if 'proto_path' in dep and dep['proto_path']:
             desc_proto_paths.append(
                 os.path.join(root_dir, dep['proto_path']))
     desc_protos = list(
         protoc_utils.find_protos(src_proto_path + desc_proto_paths,
                                  excluded_proto_path))
     header_proto_path = import_proto_path + desc_proto_paths
     header_proto_path.extend(src_proto_path)
     desc_out_file = task_utils.api_full_name(api_name, api_version,
                                              organization_name) + '.desc'
     logger.debug('Compiling descriptors for {0}'.format(desc_protos))
     self.exec_command(['mkdir', '-p', output_dir])
     # DescGen doesn't group protos by package right now because:
     #   - it doesn't have to
     #   - multiple invocations would overwrite the desc_out_file
     self.exec_command(
         ['protoc'] + protoc_utils.protoc_header_params(
             header_proto_path, toolkit_path) +
         protoc_utils.protoc_desc_params(output_dir, desc_out_file) +
         desc_protos)
     return os.path.join(output_dir, desc_out_file)
Example #7
 def execute(self, src_proto_path, import_proto_path, output_dir,
             api_name, api_version, organization_name, toolkit_path,
             root_dir, excluded_proto_path=[], proto_deps=[]):
     desc_proto_paths = []
     for dep in proto_deps:
         if 'proto_path' in dep and dep['proto_path']:
             desc_proto_paths.append(os.path.join(root_dir, dep['proto_path']))
     desc_protos = list(
         protoc_utils.find_protos(src_proto_path + desc_proto_paths,
                                  excluded_proto_path))
     header_proto_path = import_proto_path + desc_proto_paths
     header_proto_path.extend(src_proto_path)
     desc_out_file = task_utils.api_full_name(
         api_name, api_version, organization_name) + '.desc'
     logger.debug('Compiling descriptors for {0}'.format(desc_protos))
     self.exec_command(['mkdir', '-p', output_dir])
     # DescGen doesn't use _group_by_dirname right now because:
     #   - it doesn't have to
     #   - multiple invocations would overwrite the desc_out_file
     self.exec_command(
         ['protoc'] +
         protoc_utils.protoc_header_params(header_proto_path, toolkit_path) +
         protoc_utils.protoc_desc_params(output_dir, desc_out_file) +
         desc_protos)
     return os.path.join(output_dir, desc_out_file)
Example #8
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        root_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.
    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    root_dir = flags.root_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    docker_image = flags.image

    inner_artman_cmd_str = ' '.join(sys.argv[1:])
    # Because artman now supports setting the root dir either on the command
    # line or in the user config, make sure the `--root-dir` flag is passed
    # explicitly to the artman command running inside the Docker container.
    if '--root-dir' not in inner_artman_cmd_str:
        inner_artman_cmd_str = '--root-dir %s %s' % (
            root_dir, inner_artman_cmd_str)

    # TODO(ethanbao): Such folder-to-folder mounting won't work on Windows.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e', 'HOST_USER_ID=%s' % os.getuid(),
        '-e', 'HOST_GROUP_ID=%s' % os.getgid(),
        '-e', '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN,
        '-v', '%s:%s' % (root_dir, root_dir),
        '-v', '%s:%s' % (output_dir, output_dir),
        '-v', '%s:%s' % (artman_config_dirname, artman_config_dirname),
        '-w', root_dir
    ]
    base_cmd.extend([docker_image, '/bin/bash', '-c'])

    inner_artman_debug_cmd_str = inner_artman_cmd_str
    # Because debug_cmd is run inside the Docker image, we want to
    # make sure --local is set
    if '--local' not in inner_artman_debug_cmd_str:
        inner_artman_debug_cmd_str = '--local %s' % inner_artman_debug_cmd_str
    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman %s; bash"' % inner_artman_debug_cmd_str)

    cmd = base_cmd
    cmd.append('artman --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        sys.exit(32)
    finally:
        logger.debug('For further inspection inside docker container, run `%s`'
                     % ' '.join(debug_cmd))
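For illustration, the flags namespace this function expects can be faked
with argparse.Namespace; every value below is an assumption, and note that
the function also rebuilds the inner artman command from sys.argv.

import argparse

# Hypothetical flags; real values come from the artman argument parser.
flags = argparse.Namespace(
    root_dir='/home/user/googleapis',
    output_dir='/home/user/artman-genfiles',
    config='/home/user/googleapis/artman_library.yaml',
    image='googleapis/artman:latest')
output_dir = _run_artman_in_docker(flags)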
Example #9
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        root_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.
    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    root_dir = flags.root_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    docker_image = flags.image

    inner_artman_cmd_str = ' '.join(["'" + arg + "'" for arg in sys.argv[1:]])
    # Because artman now supports setting the root dir either on the command
    # line or in the user config, make sure the `--root-dir` flag is passed
    # explicitly to the artman command running inside the Docker container.
    if '--root-dir' not in inner_artman_cmd_str:
        inner_artman_cmd_str = '--root-dir %s %s' % (
            root_dir, inner_artman_cmd_str)

    # TODO(ethanbao): Such folder-to-folder mounting won't work on Windows.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e', 'HOST_USER_ID=%s' % os.getuid(),
        '-e', 'HOST_GROUP_ID=%s' % os.getgid(),
        '-e', '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN,
        '-v', '%s:%s' % (root_dir, root_dir),
        '-v', '%s:%s' % (output_dir, output_dir),
        '-v', '%s:%s' % (artman_config_dirname, artman_config_dirname),
        '-w', root_dir
    ]
    base_cmd.extend([docker_image, '/bin/bash', '-c'])

    inner_artman_debug_cmd_str = inner_artman_cmd_str
    # Because debug_cmd is run inside the Docker image, we want to
    # make sure --local is set
    if '--local' not in inner_artman_debug_cmd_str:
        inner_artman_debug_cmd_str = '--local %s' % inner_artman_debug_cmd_str
    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman %s; bash"' % inner_artman_debug_cmd_str)

    cmd = base_cmd
    cmd.append('artman --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        sys.exit(32)
    finally:
        logger.debug('For further inspection inside docker container, run `%s`'
                     % ' '.join(debug_cmd))
Example #10
 def execute(self, gapic_code_dir, toolkit_path):
     logger.debug('Formatting files in %s.' %
                 os.path.abspath(gapic_code_dir))
     path = task_utils.get_java_tool_path(toolkit_path, 'googleJavaFormatJar')
     target_files = []
     for root, dirs, files in os.walk(gapic_code_dir):
         for filename in files:
             if filename.endswith('.java'):
                 target_file = os.path.abspath(os.path.join(root, filename))
                 target_files.append(target_file)
     self.exec_command(
             ['java', '-jar', path, '--replace'] + target_files)
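A brief usage sketch; JavaFormatTask is an assumed name for the class that
owns this method, and both paths are illustrative.

# Hypothetical invocation of the Java formatting task.
JavaFormatTask().execute(
    gapic_code_dir='/tmp/java-gapic-code',  # assumed generated-code dir
    toolkit_path='/toolkit')                # assumed toolkit checkout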
Example #11
 def execute(self, gapic_code_dir):
     logger.debug('Formatting files in %s.' %
                 os.path.abspath(gapic_code_dir))
     target_files = []
     for root, dirs, files in os.walk(gapic_code_dir):
         for filename in files:
             if filename.endswith('.py'):
                 target_file = os.path.abspath(os.path.join(root, filename))
                 target_files.append(target_file)
     # yapf returns code 2 when it formats, so we can't use `check_call`.
     exit_code = subprocess.call(['yapf', '-i'] + target_files)
     if exit_code not in [0, 2]:
         raise subprocess.CalledProcessError(exit_code, 'yapf')
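The exit-code handling above reflects the convention noted in the comment
(yapf reports 2 when it reformatted files); the same pattern in isolation,
with an assumed file name:

import subprocess

# Treat 0 (nothing to change) and 2 (files reformatted) as success;
# anything else is surfaced as CalledProcessError, as in the task above.
exit_code = subprocess.call(['yapf', '-i', 'example.py'])
if exit_code not in (0, 2):
    raise subprocess.CalledProcessError(exit_code, 'yapf')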
Example #12
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        input_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.
    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    input_dir = flags.input_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    user_config = os.path.join(os.path.expanduser('~'), '.artman')
    docker_image = flags.image

    inner_artman_cmd_str = ' '.join(sys.argv[1:])

    # TODO(ethanbao): Such folder-to-folder mounting won't work on Windows.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e',
        'HOST_USER_ID=%s' % os.getuid(), '-e',
        'HOST_GROUP_ID=%s' % os.getgid(), '-e',
        '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN, '-v',
        '%s:%s' % (input_dir, input_dir), '-v',
        '%s:%s' % (output_dir, output_dir), '-v',
        '%s:%s' % (artman_config_dirname, artman_config_dirname), '-v',
        '%s:/home/.artman' % user_config, '-w', input_dir, docker_image,
        '/bin/bash', '-c'
    ]

    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman2 %s; bash"' % inner_artman_cmd_str)

    cmd = base_cmd
    cmd.append('artman2 --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        raise
    finally:
        logger.debug(
            'For further inspection inside docker container, run `%s`' %
            ' '.join(debug_cmd))
Example #13
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        root_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.
    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    root_dir = flags.root_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    user_config = os.path.join(os.path.expanduser('~'), '.artman')
    docker_image = flags.image

    inner_artman_cmd_str = ' '.join(sys.argv[1:])

    # TODO(ethanbao): Such folder-to-folder mounting won't work on Windows.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e', 'HOST_USER_ID=%s' % os.getuid(),
        '-e', 'HOST_GROUP_ID=%s' % os.getgid(),
        '-e', '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN,
        '-v', '%s:%s' % (root_dir, root_dir),
        '-v', '%s:%s' % (output_dir, output_dir),
        '-v', '%s:%s' % (artman_config_dirname, artman_config_dirname),
        '-v', '%s:/home/.artman' % user_config,
        '-w', root_dir,
        docker_image, '/bin/bash', '-c'
    ]

    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman %s; bash"' % inner_artman_cmd_str)

    cmd = base_cmd
    cmd.append('artman --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        raise
    finally:
        logger.debug('For further inspection inside docker container, run `%s`'
                     % ' '.join(debug_cmd))
Example #14
 def execute(self, gapic_code_dir):
     abs_code_dir = os.path.abspath(gapic_code_dir)
     logger.debug('Formatting files using php-cs-fixer in %s.' % abs_code_dir)
     subprocess.call(['php-cs-fixer', 'fix',
                      '--rules=@Symfony,-phpdoc_annotation_without_dot',
                      gapic_code_dir])
     # We require a second call to php-cs-fixer because instances of @type
     # have been converted to @var. We cannot disable this conversion in
     # the first call without affecting other aspects of the formatting.
     subprocess.call(['php-cs-fixer',
                      'fix',
                      '--rules={"phpdoc_no_alias_tag" : {"replacements" : '
                      '{"var" : "type"}}}',
                      gapic_code_dir])
     logger.debug('Formatting files using phpcbf in %s.' % abs_code_dir)
     subprocess.call(['phpcbf', '--standard=PSR2', '--no-patch',
                      gapic_code_dir])
Example #15
    def execute(self,
                src_proto_path,
                import_proto_path,
                output_dir,
                api_name,
                api_version,
                organization_name,
                toolkit_path,
                root_dir,
                excluded_proto_path=[],
                proto_deps=[],
                language='python'):
        desc_proto_paths = []
        for dep in proto_deps:
            if 'proto_path' in dep and dep['proto_path']:
                desc_proto_paths.append(
                    os.path.join(root_dir, dep['proto_path']))
        desc_protos = list(
            protoc_utils.find_protos(src_proto_path + desc_proto_paths,
                                     excluded_proto_path))
        header_proto_path = import_proto_path + desc_proto_paths
        header_proto_path.extend(src_proto_path)
        desc_out_file = task_utils.api_full_name(api_name, api_version,
                                                 organization_name) + '.desc'
        logger.debug('Compiling descriptors for {0}'.format(desc_protos))
        self.exec_command(['mkdir', '-p', output_dir])

        proto_params = protoc_utils.PROTO_PARAMS_MAP[language]

        proto_compiler_command = proto_params.proto_compiler_command
        logger.debug(
            'Using protoc command: {0}'.format(proto_compiler_command))
        # DescGen doesn't group protos by package right now because:
        #   - it doesn't have to
        #   - multiple invocations would overwrite the desc_out_file
        (common_resources_includes, common_resources_paths) = \
            protoc_utils.protoc_common_resources_params(root_dir)
        params = proto_params.proto_compiler_command + \
            common_resources_includes + \
            protoc_utils.protoc_header_params(header_proto_path, toolkit_path) + \
            protoc_utils.protoc_desc_params(output_dir, desc_out_file) + \
            common_resources_paths + \
            desc_protos

        self.exec_command(params)
        return os.path.join(output_dir, desc_out_file)
Example #16
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = INFO
    if getattr(flags, 'verbosity', None):
        verbosity = getattr(flags, 'verbosity')
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on their machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit'] = user_config.local.toolkit

    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    config = ','.join([
        '{artman_config_path}',
        '{googleapis}/gapic/lang/{shared_config_name}',
    ]).format(
        artman_config_path=tmp_legacy_config_yaml,
        googleapis=root_dir,
        shared_config_name=shared_config_name,
    )

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    # Note: variable replacement is still needed because these variables are
    # still used in some shared/common config YAMLs.
    config_sections = ['common']
    for config_spec in config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={
                'GOOGLEAPIS': root_dir,
                'DISCOVERY_ARTIFACT_MANAGER': root_dir,
                'TOOLKIT': user_config.local.toolkit
            },
            language=language, )
        pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags,
                    github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Clean up the tmp legacy artman config.
    os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
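A hedged usage sketch; the attribute names mirror what this function reads,
but the concrete values, and the pre-loaded user_config object, are
assumptions.

import argparse

# Hypothetical invocation; user_config is assumed to be the object loaded
# from ~/.artman/config.yaml by the surrounding CLI code.
flags = argparse.Namespace(
    root_dir='/home/user/googleapis',
    output_dir='./artman-genfiles',
    config='artman_library.yaml',
    artifact_name='java_gapic',
    subcommand='generate',
    verbosity=None)
pipeline_name, pipeline_args = normalize_flags(flags, user_config)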
Example #17
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = INFO
    if getattr(flags, 'verbosity', None):
        verbosity = getattr(flags, 'verbosity')
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on their machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit_path'] = user_config.local.toolkit
    pipeline_args['generator_args'] = flags.generator_args

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.aspect)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    pipeline_args['aspect'] = Artifact.Aspect.Name(artifact_config.aspect)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
        if os.path.abspath(flags.output_dir) != os.path.abspath(DEFAULT_OUTPUT_DIR):
            logger.warning('`output_dir` is ignored in DiscoGapicConfigGen. '
                           'YAMLs are saved at the path specified by '
                           '`gapic_yaml`.')
        pipeline_args['output_dir'] = tempfile.mkdtemp()
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    config_args.update(pipeline_args)
    pipeline_args = config_args
    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args
Example #18
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    flags.input_dir = os.path.abspath(flags.input_dir)
    flags.output_dir = os.path.abspath(flags.output_dir)
    flags.config = os.path.abspath(flags.config)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on their machine.
    pipeline_args['local_paths'] = support.parse_local_paths(
        user_config, flags.input_dir)

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.input_dir)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, flags.input_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    googleapis = os.path.realpath(
        os.path.expanduser(
            pipeline_args['local_paths']['googleapis'], ))
    config = ','.join([
        '{artman_config_path}',
        '{googleapis}/gapic/lang/{shared_config_name}',
    ]).format(
        artman_config_path=tmp_legacy_config_yaml,
        googleapis=googleapis,
        shared_config_name=shared_config_name, )

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline as well as package_type and packaging
    artifact_type = artifact_config.type
    if artifact_type in (Artifact.GAPIC, Artifact.GAPIC_ONLY):
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type in (Artifact.GRPC, Artifact.GRPC_COMMON):
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    # Note: variable replacement is still needed because these variables are
    # still used in some shared/common config YAMLs.
    config_sections = ['common']
    for config_spec in config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={
                k.upper(): v
                for k, v in pipeline_args['local_paths'].items()
            },
            language=language, )
        pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            pipeline_args['publish'] = 'local' if flags.dry_run else 'github'
            pipeline_args['github'] = support.parse_github_credentials(
                argv_flags=flags,
                config=user_config.get('github', {}), )
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Clean up the tmp legacy artman config.
    os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
Example #19
 def execute(self, gapic_code_dir):
     logger.debug('Formatting files in %s.' %
                 os.path.abspath(gapic_code_dir))
     self.exec_command(['gofmt', '-w', gapic_code_dir])
Example #20
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.input_dir:
        flags.input_dir = os.path.abspath(flags.input_dir)
    flags.output_dir = os.path.abspath(flags.output_dir)
    flags.config = os.path.abspath(flags.config)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on their machine.
    pipeline_args['local_paths'] = support.parse_local_paths(
        user_config, flags.input_dir)

    # Save the input directory back to flags if it was not explicitly set.
    if not flags.input_dir:
        flags.input_dir = pipeline_args['local_paths']['googleapis']

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.input_dir)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, flags.input_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    googleapis = os.path.realpath(
        os.path.expanduser(
            pipeline_args['local_paths']['googleapis'], ))
    config = ','.join([
        '{artman_config_path}',
        '{googleapis}/gapic/lang/{shared_config_name}',
    ]).format(
        artman_config_path=tmp_legacy_config_yaml,
        googleapis=googleapis,
        shared_config_name=shared_config_name,
    )

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline as well as package_type and packaging
    artifact_type = artifact_config.type
    if artifact_type in (Artifact.GAPIC, Artifact.GAPIC_ONLY):
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type in (Artifact.GRPC, Artifact.GRPC_COMMON):
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    # Note: variable replacement is still needed because these variables are
    # still used in some shared/common config YAMLs.
    config_sections = ['common']
    for config_spec in config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={
                k.upper(): v
                for k, v in pipeline_args['local_paths'].items()
            },
            language=language, )
        pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            pipeline_args['publish'] = 'local' if flags.dry_run else 'github'
            pipeline_args['github'] = support.parse_github_credentials(
                argv_flags=flags,
                config=user_config.get('github', {}), )
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Clean up the tmp legacy artman config.
    os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
Example #21
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = INFO
    if getattr(flags, 'verbosity', None):
        verbosity = getattr(flags, 'verbosity')
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on their machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit_path'] = user_config.local.toolkit

    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.aspect)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    pipeline_args['aspect'] = Artifact.Aspect.Name(artifact_config.aspect)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags,
                    github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args
Example #22
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = INFO
    if getattr(flags, 'verbosity', None):
        verbosity = getattr(flags, 'verbosity')
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on their machine.
    pipeline_args['root_dir'] = root_dir
    # TODO two args reference the same concept - clean this up
    pipeline_args['toolkit'] = user_config.local.toolkit
    pipeline_args['toolkit_path'] = user_config.local.toolkit

    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error('Artman config file `%s` doesn\'t exist.' %
                     artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(artman_config_path,
                                                      flags.artifact_name)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(artifact_config.language).lower()

    # Set the pipeline
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags, github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2,
    )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args