Example #1
def test_read_from_config_file_and_argv(self):
    # If we get values both in the config file and in sys.argv, the latter
    # should win.
    user_config = {'username': '******', 'token': 'bar'}
    flags = argparse.Namespace(github_username='******', github_token='eggs')
    result = support.parse_github_credentials(user_config, flags)
    assert result == {'username': '******', 'token': 'eggs'}
Example #2
def test_read_from_config_file(self):
    # Values set only on the GitHubConfig object should be returned when
    # no CLI flags are given.
    github_config = GitHubConfig()
    github_config.username = '******'
    github_config.token = 'bar'
    flags = argparse.Namespace(github_username=None, github_token=None)
    result = support.parse_github_credentials(github_config, flags)
    assert {'username': '******', 'token': 'bar'} == result
Example #3
def test_no_credentials_error(self):
    # With neither config values nor CLI flags, the helper should abort.
    user_config = {}
    flags = argparse.Namespace(github_username=None, github_token=None)
    with pytest.raises(SystemExit):
        support.parse_github_credentials(user_config, flags)
Example #4
def test_read_from_argv(self):
    # With an empty config, the credentials come entirely from the CLI flags.
    user_config = {}
    flags = argparse.Namespace(github_username='******', github_token='eggs')
    result = support.parse_github_credentials(user_config, flags)
    assert result == {'username': '******', 'token': 'eggs'}
Example #5
def test_read_from_config_file(self):
    # With no CLI flags set, the config values are returned unchanged.
    user_config = {'username': '******', 'token': 'bar'}
    flags = argparse.Namespace(github_username=None, github_token=None)
    result = support.parse_github_credentials(user_config, flags)
    assert result == user_config
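
The credential tests above pin down the behaviour of support.parse_github_credentials: command-line flags win over the user configuration, and the helper aborts via SystemExit when no credentials can be resolved. Below is a minimal sketch of that behaviour, not the artman source; the dict-based signature and the exit code are assumptions, and the exact argument names vary across the versions shown elsewhere in this listing (some pass a GitHubConfig object, others use keyword arguments such as argv_flags and github_config or config).

import sys


def parse_github_credentials(user_config, flags):
    """Sketch only: resolve GitHub credentials, letting CLI flags win."""
    # Prefer the argparse values; fall back to the ~/.artman/config.yaml ones.
    username = flags.github_username or user_config.get('username')
    token = flags.github_token or user_config.get('token')
    if not (username and token):
        # The tests only require SystemExit; the exit code here is arbitrary.
        sys.exit(64)
    return {'username': username, 'token': token}
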
Example #6
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = INFO
    if getattr(flags, 'verbosity', None):
        verbosity = getattr(flags, 'verbosity')
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit_path'] = user_config.local.toolkit

    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.aspect)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    pipeline_args['aspect'] = Artifact.Aspect.Name(artifact_config.aspect)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags,
                    github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args
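
Every normalize_flags variant in these examples ends with the same step: dump pipeline_args to YAML and log it line by line with token values masked. Pulled out as a standalone helper purely for illustration, it might look like the sketch below; the originals pass options such as block_seq_indent (which suggests ruamel.yaml), dropped here so the sketch runs with plain PyYAML.

import yaml  # PyYAML assumed here; the originals appear to use ruamel.yaml


def log_final_args(pipeline_args, logger):
    """Sketch of the shared 'Final args' logging with token redaction."""
    dumped = yaml.dump(pipeline_args, default_flow_style=False, indent=2)
    logger.info('Final args:')
    for line in dumped.split('\n'):
        # Mask anything that looks like a token; the ':' check is a small
        # hardening over the originals, which assume the line contains one.
        if 'token' in line and ':' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))
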
Example #7
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = INFO
    if getattr(flags, 'verbosity', None):
        verbosity = getattr(flags, 'verbosity')
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['root_dir'] = root_dir
    # TODO two args reference the same concept - clean this up
    pipeline_args['toolkit'] = user_config.local.toolkit
    pipeline_args['toolkit_path'] = user_config.local.toolkit

    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error('Artman config file `%s` doesn\'t exist.' %
                     artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(artman_config_path,
                                                      flags.artifact_name)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(artifact_config.language).lower()

    # Set the pipeline
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags, github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2,
    )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args
Example #8
def test_no_credentials_error(self):
    # An empty GitHubConfig and no CLI flags should abort via SystemExit.
    github_config = GitHubConfig()
    flags = argparse.Namespace(github_username=None, github_token=None)
    with pytest.raises(SystemExit):
        support.parse_github_credentials(github_config, flags)
Example #9
def test_read_from_argv(self):
    # With an empty GitHubConfig, the CLI flags supply the credentials.
    github_config = GitHubConfig()
    flags = argparse.Namespace(github_username='******', github_token='eggs')
    result = support.parse_github_credentials(github_config, flags)
    assert {'username': '******', 'token': 'eggs'} == result
Example #10
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    flags.input_dir = os.path.abspath(flags.input_dir)
    flags.output_dir = os.path.abspath(flags.output_dir)
    flags.config = os.path.abspath(flags.config)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['local_paths'] = support.parse_local_paths(
        user_config, flags.input_dir)

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.input_dir)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, flags.input_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    googleapis = os.path.realpath(
        os.path.expanduser(
            pipeline_args['local_paths']['googleapis'], ))
    config = ','.join([
        '{artman_config_path}',
        '{googleapis}/gapic/lang/{shared_config_name}',
    ]).format(
        artman_config_path=tmp_legacy_config_yaml,
        googleapis=googleapis,
        shared_config_name=shared_config_name, )

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline as well as package_type and packaging
    artifact_type = artifact_config.type
    if artifact_type in (Artifact.GAPIC, Artifact.GAPIC_ONLY):
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type in (Artifact.GRPC, Artifact.GRPC_COMMON):
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    # Note: the var replacement is still needed because they are still being
    # used in some shared/common config yamls.
    config_sections = ['common']
    for config_spec in config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={
                k.upper(): v
                for k, v in pipeline_args['local_paths'].items()
            },
            language=language, )
        pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            pipeline_args['publish'] = 'local' if flags.dry_run else 'github'
            pipeline_args['github'] = support.parse_github_credentials(
                argv_flags=flags,
                config=user_config.get('github', {}), )
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Clean up the tmp legacy artman config.
    os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
Example #11
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.input_dir:
        flags.input_dir = os.path.abspath(flags.input_dir)
    flags.output_dir = os.path.abspath(flags.output_dir)
    flags.config = os.path.abspath(flags.config)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['local_paths'] = support.parse_local_paths(
        user_config, flags.input_dir)

    # Save the input directory back to flags if it was not explicitly set.
    if not flags.input_dir:
        flags.input_dir = pipeline_args['local_paths']['googleapis']

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.input_dir)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, flags.input_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    googleapis = os.path.realpath(
        os.path.expanduser(
            pipeline_args['local_paths']['googleapis'], ))
    config = ','.join([
        '{artman_config_path}',
        '{googleapis}/gapic/lang/{shared_config_name}',
    ]).format(
        artman_config_path=tmp_legacy_config_yaml,
        googleapis=googleapis,
        shared_config_name=shared_config_name,
    )

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline as well as package_type and packaging
    artifact_type = artifact_config.type
    if artifact_type in (Artifact.GAPIC, Artifact.GAPIC_ONLY):
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type in (Artifact.GRPC, Artifact.GRPC_COMMON):
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    # Note: the var replacement is still needed because they are still being
    # used in some shared/common config yamls.
    config_sections = ['common']
    for config_spec in config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={
                k.upper(): v
                for k, v in pipeline_args['local_paths'].items()
            },
            language=language, )
        pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            pipeline_args['publish'] = 'local' if flags.dry_run else 'github'
            pipeline_args['github'] = support.parse_github_credentials(
                argv_flags=flags,
                config=user_config.get('github', {}), )
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Clean up the tmp legacy artman config.
    os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
Example #12
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from ~/.artman/config.yaml.

    Returns:
        tuple (str, dict, str): 3-tuple containing:
            - pipeline name
            - pipeline arguments
            - 'remote' or None
    """
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['local_paths'] = support.parse_local_paths(user_config,
                                                             flags.googleapis)

    # In most cases, we get a language.
    if flags.language:
        pipeline_args['language'] = flags.language
    elif flags.pipeline_name != 'GapicConfigPipeline':
        logger.critical('--language is required for every pipeline except '
                        'GapicConfigPipeline.')
        sys.exit(64)

    # If this is remote execution, configure that.
    if flags.remote:
        pipeline_id = str(uuid.uuid4())
        # Use a unique random temp directory for remote execution.
        # TODO(ethanbao): Let remote artman decide the temp directory.
        pipeline_args['local_paths']['reporoot'] = '/tmp/artman/{id}'.format(
            id=pipeline_id,
        )
        pipeline_args['pipeline_id'] = pipeline_id

    # Specify the default pipeline settings - this may change if
    # BATCH is set
    default_pipeline = 'GapicClientPipeline'

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    if flags.api:
        shared_config_name = 'common.yaml'
        if flags.language == 'ruby':
            shared_config_name = 'doc.yaml'

        googleapis = os.path.realpath(os.path.expanduser(
            pipeline_args['local_paths']['googleapis'],
        ))
        flags.config = ','.join([
            '{googleapis}/gapic/api/artman_{api}.yaml',
            '{googleapis}/gapic/lang/{shared_config_name}',
        ]).format(
            api=flags.api,
            googleapis=googleapis,
            shared_config_name=shared_config_name,
        )
    elif flags.batch:
        googleapis = os.path.realpath(os.path.expanduser(
            pipeline_args['local_paths']['googleapis'],
        ))
        flags.config = '{googleapis}/gapic/batch/common.yaml'.format(
            googleapis=googleapis,
        )
        default_pipeline = 'GapicClientBatchPipeline'
        if not flags.publish:
            # If publish flag was not set by the user, set it here.
            # This prevents the user config yaml from causing a
            # publish event when batch mode is used.
            flags.publish = 'noop'
        if flags.target:
            logger.critical('--target and --batch cannot both be specified; '
                            'when using --batch, the repo must be the default '
                            'specified in the artman config yaml file (or '
                            'staging if no default is provided).')
            sys.exit(64)

    # Set the pipeline if none was specified
    if not flags.pipeline_name:
        flags.pipeline_name = default_pipeline

    # Determine where to publish.
    pipeline_args['publish'] = support.resolve('publish', user_config, flags,
        default='local',
    )

    # Parse out the GitHub credentials iff we are publishing to GitHub.
    if pipeline_args['publish'] == 'github':
        pipeline_args['github'] = support.parse_github_credentials(
            argv_flags=flags,
            config=user_config.get('github', {}),
        )

    # Parse out the full configuration.
    config_sections = ['common']
    for config_spec in flags.config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={k.upper(): v for k, v in
                       pipeline_args['local_paths'].items()},
            language=flags.language,
        )
        pipeline_args.update(config_args)

    # Add any arbitrary keyword arguments.
    if flags.pipeline_kwargs != '{}':
        logger.warn('The use of --pipeline-kwargs is discouraged.')
        cmd_args = ast.literal_eval(flags.pipeline_kwargs)
        pipeline_args.update(cmd_args)

    # Coerce `git_repos` and `target_repo` into a single git_repo.
    if pipeline_args['publish'] in ('github', 'local') and not flags.batch:
        # Temporarily give our users a nice error if they have an older
        # googleapis checkout.
        # DEPRECATED: 2017-04-20
        # REMOVE: 2017-05-20
        if 'git_repo' in pipeline_args:
            logger.error('Your git repos are configured in your artman YAML '
                         'using an older format. Please git pull.')
            sys.exit(96)

        # Pop the git repos off of the pipeline args and select the
        # correct one.
        repos = pipeline_args.pop('git_repos')
        pipeline_args['git_repo'] = support.select_git_repo(repos, flags.target)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2,
    )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    # This includes a pipeline to run, arguments, and whether to run remotely.
    return (
        flags.pipeline_name,
        pipeline_args,
        'remote' if flags.remote else None,
    )
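
Several variants in this listing resolve settings through support.resolve('verbosity', ...) and support.resolve('publish', ...), where user_config is a plain dict. A hedged sketch of what those call sites imply, assuming command-line flags take precedence over the user config and that empty values fall through to the default:

def resolve(name, user_config, flags, default=None):
    """Sketch only: pick a setting from flags, then user config, then default."""
    flag_value = getattr(flags, name, None)
    if flag_value:
        return flag_value
    config_value = user_config.get(name) if isinstance(user_config, dict) else None
    if config_value:
        return config_value
    return default
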
Example #13
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from ~/.artman/config.yaml.

    Returns:
        tuple (str, dict, str): 3-tuple containing:
            - pipeline name
            - pipeline arguments
            - 'remote' or None
    """
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['local_paths'] = support.parse_local_paths(
        user_config, flags.googleapis)

    # In most cases, we get a language.
    if flags.language:
        pipeline_args['language'] = flags.language
    elif flags.pipeline_name != 'GapicConfigPipeline':
        logger.critical('--language is required for every pipeline except '
                        'GapicConfigPipeline.')
        sys.exit(64)

    # If this is remote execution, configure that.
    if flags.remote:
        pipeline_id = str(uuid.uuid4())
        # Use a unique random temp directory for remote execution.
        # TODO(ethanbao): Let remote artman decide the temp directory.
        pipeline_args['local_paths']['reporoot'] = '/tmp/artman/{id}'.format(
            id=pipeline_id, )
        pipeline_args['pipeline_id'] = pipeline_id

    # Specify the default pipeline settings - this may change if
    # BATCH is set
    default_pipeline = 'GapicClientPipeline'

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    if flags.api:
        shared_config_name = 'common.yaml'
        if flags.language in (
                'ruby',
                'nodejs',
        ):
            shared_config_name = 'doc.yaml'

        googleapis = os.path.realpath(
            os.path.expanduser(pipeline_args['local_paths']['googleapis'], ))
        flags.config = ','.join([
            '{googleapis}/gapic/api/artman_{api}.yaml',
            '{googleapis}/gapic/lang/{shared_config_name}',
        ]).format(
            api=flags.api,
            googleapis=googleapis,
            shared_config_name=shared_config_name,
        )
    elif flags.batch:
        googleapis = os.path.realpath(
            os.path.expanduser(pipeline_args['local_paths']['googleapis'], ))
        flags.config = '{googleapis}/gapic/batch/common.yaml'.format(
            googleapis=googleapis, )
        default_pipeline = 'GapicClientBatchPipeline'
        if not flags.publish:
            # If publish flag was not set by the user, set it here.
            # This prevents the user config yaml from causing a
            # publish event when batch mode is used.
            flags.publish = 'noop'
        if flags.target:
            logger.critical('--target and --batch cannot both be specified; '
                            'when using --batch, the repo must be the default '
                            'specified in the artman config yaml file (or '
                            'staging if no default is provided).')
            sys.exit(64)

    # Set the pipeline if none was specified
    if not flags.pipeline_name:
        flags.pipeline_name = default_pipeline

    # Determine where to publish.
    pipeline_args['publish'] = support.resolve(
        'publish',
        user_config,
        flags,
        default='local',
    )

    # Parse out the GitHub credentials iff we are publishing to GitHub.
    if pipeline_args['publish'] == 'github':
        pipeline_args['github'] = support.parse_github_credentials(
            argv_flags=flags,
            config=user_config.get('github', {}),
        )

    # Parse out the full configuration.
    config_sections = ['common']
    for config_spec in flags.config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={
                k.upper(): v
                for k, v in pipeline_args['local_paths'].items()
            },
            language=flags.language,
        )
        pipeline_args.update(config_args)

    # Add any arbitrary keyword arguments.
    if flags.pipeline_kwargs != '{}':
        logger.warn('The use of --pipeline-kwargs is discouraged.')
        cmd_args = ast.literal_eval(flags.pipeline_kwargs)
        pipeline_args.update(cmd_args)

    # Coerce `git_repos` and `target_repo` into a single git_repo.
    if pipeline_args['publish'] in ('github', 'local') and not flags.batch:
        # Temporarily give our users a nice error if they have an older
        # googleapis checkout.
        # DEPRECATED: 2017-04-20
        # REMOVE: 2017-05-20
        if 'git_repo' in pipeline_args:
            logger.error('Your git repos are configured in your artman YAML '
                         'using an older format. Please git pull.')
            sys.exit(96)

        # Pop the git repos off of the pipeline args and select the
        # correct one.
        repos = pipeline_args.pop('git_repos')
        pipeline_args['git_repo'] = support.select_git_repo(
            repos, flags.target)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2,
    )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    # This includes a pipeline to run, arguments, and whether to run remotely.
    return (
        flags.pipeline_name,
        pipeline_args,
        'remote' if flags.remote else None,
    )
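
The two variants that return a 3-tuple (this example and the previous one) hand back the pipeline name, its keyword arguments, and whether to run remotely. A hypothetical caller, shown only to illustrate how that return value is consumed; artman's real dispatch code is not part of these examples:

pipeline_name, pipeline_args, env = normalize_flags(flags, user_config)
if env == 'remote':
    # A unique pipeline_id was generated above for remote execution.
    print('Running %s remotely (id: %s)'
          % (pipeline_name, pipeline_args.get('pipeline_id')))
else:
    print('Running %s locally with %d pipeline args'
          % (pipeline_name, len(pipeline_args)))
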
Example #14
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = INFO
    if getattr(flags, 'verbosity', None):
        verbosity = getattr(flags, 'verbosity')
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit'] = user_config.local.toolkit

    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    config = ','.join([
        '{artman_config_path}',
        '{googleapis}/gapic/lang/{shared_config_name}',
    ]).format(
        artman_config_path=tmp_legacy_config_yaml,
        googleapis=root_dir,
        shared_config_name=shared_config_name,
    )

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Set the pipeline
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration.
    # Note: the var replacement is still needed because they are still being
    # used in some shared/common config yamls.
    config_sections = ['common']
    for config_spec in config.split(','):
        config_args = config_util.load_config_spec(
            config_spec=config_spec,
            config_sections=config_sections,
            repl_vars={
                'GOOGLEAPIS': root_dir,
                'DISCOVERY_ARTIFACT_MANAGER': root_dir,
                'TOOLKIT': user_config.local.toolkit
            },
            language=language, )
        pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags,
                    github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        if 'token' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Clean up the tmp legacy artman config.
    os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
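
The comment inside the configuration loop above notes that variable replacement is still needed because the shared config YAMLs reference GOOGLEAPIS, DISCOVERY_ARTIFACT_MANAGER and TOOLKIT. The toy substitution below only illustrates that idea; it is not artman's config_util.load_config_spec, and the ${VAR} placeholder syntax is an assumption:

def substitute_vars(text, repl_vars):
    """Toy placeholder substitution; the real loader and syntax may differ."""
    for key, value in repl_vars.items():
        text = text.replace('${%s}' % key, value)
    return text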