Esempio n. 1
0
 def _test(self, artman_yaml, artifact_name, expected_legacy_config):
     """Assert that converting ``artman_yaml`` yields the expected legacy config.

     Loads the artifact config from the testdata directory, converts it to
     the legacy dict format, and compares it against the expected YAML
     fixture ``expected_legacy_config``.
     """
     artifact_config = loader.load_artifact_config(os.path.join(
         self.TESTDATA, artman_yaml), artifact_name, '/tmp/input')
     actual_legacy_config_dict = converter.convert_to_legacy_config_dict(
         artifact_config, '/tmp/input', '/tmp/output')
     with io.open(os.path.join(
             self.TESTDATA, expected_legacy_config), 'r') as yaml_file:
         # safe_load: the fixtures are plain YAML, and it avoids both the
         # yaml.load() Loader deprecation warning and arbitrary-object
         # construction from the file contents.
         expected_legacy_config_dict = yaml.safe_load(yaml_file)
         self.assertDictEqual(
             expected_legacy_config_dict, actual_legacy_config_dict)
Esempio n. 2
0
 def _test(self, artman_yaml, artifact_name, expected_legacy_config):
     """Assert that converting ``artman_yaml`` yields the expected legacy config.

     Loads the artifact config from the testdata directory, converts it to
     the legacy dict format, and compares it against the expected YAML
     fixture ``expected_legacy_config``.
     """
     artifact_config = loader.load_artifact_config(
         os.path.join(self.TESTDATA, artman_yaml), artifact_name,
         '/tmp/input')
     actual_legacy_config_dict = converter.convert_to_legacy_config_dict(
         artifact_config, '/tmp/input', '/tmp/output')
     with io.open(os.path.join(self.TESTDATA, expected_legacy_config),
                  'r') as yaml_file:
         # safe_load: the fixtures are plain YAML, and it avoids both the
         # yaml.load() Loader deprecation warning and arbitrary-object
         # construction from the file contents.
         expected_legacy_config_dict = yaml.safe_load(yaml_file)
         self.assertDictEqual(expected_legacy_config_dict,
                              actual_legacy_config_dict)
Esempio n. 3
0
 def _test(self, artman_yaml, artifact_name, expected_legacy_config):
     """Assert that converting ``artman_yaml`` yields the expected legacy config.

     On mismatch, the assertion message embeds both YAML dumps so the diff
     is readable in the test output.
     """
     artifact_config = loader.load_artifact_config(os.path.join(
         self.TESTDATA, artman_yaml), artifact_name)
     actual_legacy_config_dict = converter.convert_to_legacy_config_dict(
         artifact_config, '/tmp/input', '/tmp/output')
     with io.open(os.path.join(
             self.TESTDATA, expected_legacy_config), 'r') as yaml_file:
         # safe_load: the fixtures are plain YAML, and it avoids both the
         # yaml.load() Loader deprecation warning and arbitrary-object
         # construction from the file contents.
         expected_legacy_config_dict = yaml.safe_load(yaml_file)
         self.assertDictEqual(
             expected_legacy_config_dict, actual_legacy_config_dict,
             'Actual yaml is:\n{}\nExpected yaml is:\n{}\n'.format(
                 yaml.dump(actual_legacy_config_dict, default_flow_style=False),
                 yaml.dump(expected_legacy_config_dict, default_flow_style=False)))
Esempio n. 4
0
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config: The user configuration taken from
            ~/.artman/config.yaml. Accessed via attributes (e.g.
            ``user_config.local.toolkit``), so this is a config object
            rather than a plain dict.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    # Resolve all paths to absolute form up front so downstream tasks do
    # not depend on the current working directory.
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = getattr(flags, 'verbosity', None) or INFO
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit_path'] = user_config.local.toolkit
    pipeline_args['generator_args'] = flags.generator_args

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)  # 96 is the config-error exit code used in this module

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.aspect)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Map the artifact type onto a pipeline name plus type-specific args.
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    pipeline_args['aspect'] = Artifact.Aspect.Name(artifact_config.aspect)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
        # Output goes to a temp dir here; warn if the user passed a custom
        # output_dir that would be silently ignored.
        if os.path.abspath(flags.output_dir) != os.path.abspath(DEFAULT_OUTPUT_DIR):
            logger.warning("`output_dir` is ignored in DiscoGapicConfigGen. "
                           + "Yamls are saved at the path specified by `gapic_yaml`.")
        pipeline_args['output_dir'] = tempfile.mkdtemp()
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration. pipeline_args is merged on top of
    # the loaded config, so the explicit settings above take precedence.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    config_args.update(pipeline_args)
    pipeline_args = config_args
    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        # Redact credential-looking values. Guard on ':' so a line that
        # merely contains 'token' without a colon cannot raise ValueError
        # from line.index().
        if 'token' in line and ':' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args
Esempio n. 5
0
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config: The user configuration taken from
            ~/.artman/config.yaml. Accessed via attributes (e.g.
            ``user_config.local.toolkit``), so this is a config object
            rather than a plain dict.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    # Resolve all paths to absolute form up front so downstream tasks do
    # not depend on the current working directory.
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = getattr(flags, 'verbosity', None) or INFO
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit'] = user_config.local.toolkit

    # `--local-repo-dir` is only meaningful for a dry-run publish.
    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)  # 96 is the config-error exit code used in this module

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    # The tmp legacy config now exists on disk; run the rest under
    # try/finally so it is removed even when an error path raises or
    # calls sys.exit() (which raises SystemExit).
    try:
        config = ','.join([
            '{artman_config_path}',
            '{googleapis}/gapic/lang/{shared_config_name}',
        ]).format(
            artman_config_path=tmp_legacy_config_yaml,
            googleapis=root_dir,
            shared_config_name=shared_config_name,
        )

        language = Artifact.Language.Name(
            artifact_config.language).lower()

        # Map the artifact type onto a pipeline name plus type-specific args.
        artifact_type = artifact_config.type
        pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
        if artifact_type == Artifact.GAPIC_ONLY:
            pipeline_name = 'GapicOnlyClientPipeline'
            pipeline_args['language'] = language
        elif artifact_type == Artifact.GAPIC:
            pipeline_name = 'GapicClientPipeline'
            pipeline_args['language'] = language
        elif artifact_type == Artifact.DISCOGAPIC:
            pipeline_name = 'DiscoGapicClientPipeline'
            pipeline_args['language'] = language
            pipeline_args['discovery_doc'] = artifact_config.discovery_doc
        elif artifact_type == Artifact.GRPC:
            pipeline_name = 'GrpcClientPipeline'
            pipeline_args['language'] = language
        elif artifact_type == Artifact.GAPIC_CONFIG:
            pipeline_name = 'GapicConfigPipeline'
        elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
            pipeline_name = 'DiscoGapicConfigPipeline'
            pipeline_args['discovery_doc'] = artifact_config.discovery_doc
        elif artifact_type == Artifact.PROTOBUF:
            pipeline_name = 'ProtoClientPipeline'
            pipeline_args['language'] = language
        else:
            raise ValueError('Unrecognized artifact.')

        # Parse out the full configuration.
        # Note: the var replacement is still needed because they are still
        # being used in some shared/common config yamls.
        config_sections = ['common']
        for config_spec in config.split(','):
            config_args = config_util.load_config_spec(
                config_spec=config_spec,
                config_sections=config_sections,
                repl_vars={
                    'GOOGLEAPIS': root_dir,
                    'DISCOVERY_ARTIFACT_MANAGER': root_dir,
                    'TOOLKIT': user_config.local.toolkit
                },
                language=language, )
            pipeline_args.update(config_args)

        # Setup publishing related config if needed.
        if flags.subcommand == 'generate':
            pipeline_args['publish'] = 'noop'
        elif flags.subcommand == 'publish':
            publishing_config = _get_publishing_config(artifact_config,
                                                       flags.target)
            if publishing_config.type == Artifact.PublishTarget.GITHUB:
                if flags.dry_run:
                    pipeline_args['publish'] = 'local'
                else:
                    pipeline_args['publish'] = 'github'
                    pipeline_args['github'] = support.parse_github_credentials(
                        argv_flags=flags,
                        github_config=user_config.github)
                repos = pipeline_args.pop('git_repos')
                pipeline_args['git_repo'] = support.select_git_repo(
                    repos, publishing_config.name)
            else:
                logger.error(
                    'Publishing type `%s` is not supported yet.' %
                    Artifact.PublishTarget.Type.Name(publishing_config.type))
                sys.exit(96)

        # Print out the final arguments to stdout, to help the user with
        # possible debugging.
        pipeline_args_repr = yaml.dump(
            pipeline_args,
            block_seq_indent=2,
            default_flow_style=False,
            indent=2, )
        logger.info('Final args:')
        for line in pipeline_args_repr.split('\n'):
            # Redact credential-looking values. Guard on ':' so a line that
            # merely contains 'token' without a colon cannot raise
            # ValueError from line.index().
            if 'token' in line and ':' in line:
                index = line.index(':')
                line = line[:index + 2] + '<< REDACTED >>'
            logger.info('  {0}'.format(line))
    finally:
        # Clean up the tmp legacy artman config on all exit paths.
        os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
Esempio n. 6
0
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config: The user configuration taken from
            ~/.artman/config.yaml. Accessed via attributes (e.g.
            ``user_config.local.toolkit``), so this is a config object
            rather than a plain dict.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    # Resolve all paths to absolute form up front so downstream tasks do
    # not depend on the current working directory.
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = getattr(flags, 'verbosity', None) or INFO
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['root_dir'] = root_dir
    pipeline_args['toolkit_path'] = user_config.local.toolkit

    # `--local-repo-dir` is only meaningful for a dry-run publish.
    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)  # 96 is the config-error exit code used in this module

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.aspect)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(
        artifact_config.language).lower()

    # Map the artifact type onto a pipeline name plus type-specific args.
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    pipeline_args['aspect'] = Artifact.Aspect.Name(artifact_config.aspect)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration; loaded config values are merged on
    # top of (and can override) the defaults set above.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags,
                    github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2, )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        # Redact credential-looking values. Guard on ':' so a line that
        # merely contains 'token' without a colon cannot raise ValueError
        # from line.index().
        if 'token' in line and ':' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args
Esempio n. 7
0
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config: The user configuration taken from
            ~/.artman/config.yaml. Accessed via attributes (e.g.
            ``user_config.local.toolkit``), so this is a config object
            rather than a plain dict.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    # Resolve all paths to absolute form up front so downstream tasks do
    # not depend on the current working directory.
    if flags.root_dir:
        flags.root_dir = os.path.abspath(flags.root_dir)
        flags.config = os.path.join(flags.root_dir, flags.config)
    else:
        flags.root_dir = os.getcwd()
        flags.config = os.path.abspath(flags.config)
    root_dir = flags.root_dir
    flags.output_dir = os.path.abspath(flags.output_dir)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = getattr(flags, 'verbosity', None) or INFO
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['root_dir'] = root_dir
    # TODO two args reference the same concept - clean this up
    pipeline_args['toolkit'] = user_config.local.toolkit
    pipeline_args['toolkit_path'] = user_config.local.toolkit

    # `--local-repo-dir` is only meaningful for a dry-run publish.
    if flags.subcommand == 'publish' and flags.local_repo_dir:
        if not flags.dry_run:
            logger.error('`--dry-run` flag must be passed when '
                         '`--local-repo-dir` is specified')
            sys.exit(96)
        flags.local_repo_dir = os.path.abspath(flags.local_repo_dir)
        pipeline_args['local_repo_dir'] = flags.local_repo_dir

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error('Artman config file `%s` doesn\'t exist.' %
                     artman_config_path)
        sys.exit(96)  # 96 is the config-error exit code used in this module

    try:
        artifact_config = loader.load_artifact_config(artman_config_path,
                                                      flags.artifact_name)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, root_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))

    language = Artifact.Language.Name(artifact_config.language).lower()

    # Map the artifact type onto a pipeline name plus type-specific args.
    artifact_type = artifact_config.type
    pipeline_args['artifact_type'] = Artifact.Type.Name(artifact_type)
    if artifact_type == Artifact.GAPIC_ONLY:
        pipeline_name = 'GapicOnlyClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC:
        pipeline_name = 'GapicClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.DISCOGAPIC:
        pipeline_name = 'DiscoGapicClientPipeline'
        pipeline_args['language'] = language
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.GRPC:
        pipeline_name = 'GrpcClientPipeline'
        pipeline_args['language'] = language
    elif artifact_type == Artifact.GAPIC_CONFIG:
        pipeline_name = 'GapicConfigPipeline'
    elif artifact_type == Artifact.DISCOGAPIC_CONFIG:
        pipeline_name = 'DiscoGapicConfigPipeline'
        pipeline_args['discovery_doc'] = artifact_config.discovery_doc
    elif artifact_type == Artifact.PROTOBUF:
        pipeline_name = 'ProtoClientPipeline'
        pipeline_args['language'] = language
    else:
        raise ValueError('Unrecognized artifact.')

    # Parse out the full configuration; loaded config values are merged on
    # top of (and can override) the defaults set above.
    config_args = config_util.load_config_spec(legacy_config_dict, language)
    pipeline_args.update(config_args)

    # Setup publishing related config if needed.
    if flags.subcommand == 'generate':
        pipeline_args['publish'] = 'noop'
    elif flags.subcommand == 'publish':
        publishing_config = _get_publishing_config(artifact_config,
                                                   flags.target)
        if publishing_config.type == Artifact.PublishTarget.GITHUB:
            if flags.dry_run:
                pipeline_args['publish'] = 'local'
            else:
                pipeline_args['publish'] = 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags, github_config=user_config.github)
            repos = pipeline_args.pop('git_repos')
            pipeline_args['git_repo'] = support.select_git_repo(
                repos, publishing_config.name)
        else:
            logger.error(
                'Publishing type `%s` is not supported yet.' %
                Artifact.PublishTarget.Type.Name(publishing_config.type))
            sys.exit(96)

    # Print out the final arguments to stdout, to help the user with
    # possible debugging.
    pipeline_args_repr = yaml.dump(
        pipeline_args,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2,
    )
    logger.info('Final args:')
    for line in pipeline_args_repr.split('\n'):
        # Redact credential-looking values. Guard on ':' so a line that
        # merely contains 'token' without a colon cannot raise ValueError
        # from line.index().
        if 'token' in line and ':' in line:
            index = line.index(':')
            line = line[:index + 2] + '<< REDACTED >>'
        logger.info('  {0}'.format(line))

    # Return the final arguments.
    return pipeline_name, pipeline_args
Esempio n. 8
0
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    # Resolve all paths to absolute form up front so downstream tasks do
    # not depend on the current working directory.
    flags.input_dir = os.path.abspath(flags.input_dir)
    flags.output_dir = os.path.abspath(flags.output_dir)
    flags.config = os.path.abspath(flags.config)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['local_paths'] = support.parse_local_paths(
        user_config, flags.input_dir)

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)  # 96 is the config-error exit code used in this module

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.input_dir)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, flags.input_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    # The tmp legacy config now exists on disk; run the rest under
    # try/finally so it is removed even when an error path raises or
    # calls sys.exit() (which raises SystemExit).
    try:
        googleapis = os.path.realpath(
            os.path.expanduser(
                pipeline_args['local_paths']['googleapis'], ))
        config = ','.join([
            '{artman_config_path}',
            '{googleapis}/gapic/lang/{shared_config_name}',
        ]).format(
            artman_config_path=tmp_legacy_config_yaml,
            googleapis=googleapis,
            shared_config_name=shared_config_name, )

        language = Artifact.Language.Name(
            artifact_config.language).lower()

        # Set the pipeline as well as package_type and packaging
        artifact_type = artifact_config.type
        if artifact_type in (Artifact.GAPIC, Artifact.GAPIC_ONLY):
            pipeline_name = 'GapicClientPipeline'
            pipeline_args['language'] = language
        elif artifact_type in (Artifact.GRPC, Artifact.GRPC_COMMON):
            pipeline_name = 'GrpcClientPipeline'
            pipeline_args['language'] = language
        elif artifact_type == Artifact.GAPIC_CONFIG:
            pipeline_name = 'GapicConfigPipeline'
        else:
            raise ValueError('Unrecognized artifact.')

        # Parse out the full configuration.
        # Note: the var replacement is still needed because they are still
        # being used in some shared/common config yamls.
        config_sections = ['common']
        for config_spec in config.split(','):
            config_args = config_util.load_config_spec(
                config_spec=config_spec,
                config_sections=config_sections,
                repl_vars={
                    k.upper(): v
                    for k, v in pipeline_args['local_paths'].items()
                },
                language=language, )
            pipeline_args.update(config_args)

        # Setup publishing related config if needed.
        if flags.subcommand == 'generate':
            pipeline_args['publish'] = 'noop'
        elif flags.subcommand == 'publish':
            publishing_config = _get_publishing_config(artifact_config,
                                                       flags.target)
            if publishing_config.type == Artifact.PublishTarget.GITHUB:
                pipeline_args['publish'] = 'local' if flags.dry_run else 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags,
                    config=user_config.get('github', {}), )
                repos = pipeline_args.pop('git_repos')
                pipeline_args['git_repo'] = support.select_git_repo(
                    repos, publishing_config.name)
            else:
                logger.error(
                    'Publishing type `%s` is not supported yet.' %
                    Artifact.PublishTarget.Type.Name(publishing_config.type))
                sys.exit(96)

        # Print out the final arguments to stdout, to help the user with
        # possible debugging.
        pipeline_args_repr = yaml.dump(
            pipeline_args,
            block_seq_indent=2,
            default_flow_style=False,
            indent=2, )
        logger.info('Final args:')
        for line in pipeline_args_repr.split('\n'):
            # Redact credential-looking values. Guard on ':' so a line that
            # merely contains 'token' without a colon cannot raise
            # ValueError from line.index().
            if 'token' in line and ':' in line:
                index = line.index(':')
                line = line[:index + 2] + '<< REDACTED >>'
            logger.info('  {0}'.format(line))
    finally:
        # Clean up the tmp legacy artman config on all exit paths.
        os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args
Esempio n. 9
0
def normalize_flags(flags, user_config):
    """Combine the argparse flags and user configuration together.

    Args:
        flags (argparse.Namespace): The flags parsed from sys.argv
        user_config (dict): The user configuration taken from
                            ~/.artman/config.yaml.

    Returns:
        tuple (str, dict): 2-tuple containing:
            - pipeline name
            - pipeline arguments
    """
    # Resolve all paths to absolute form up front so downstream tasks do
    # not depend on the current working directory.
    if flags.input_dir:
        flags.input_dir = os.path.abspath(flags.input_dir)
    flags.output_dir = os.path.abspath(flags.output_dir)
    flags.config = os.path.abspath(flags.config)
    pipeline_args = {}

    # Determine logging verbosity and then set up logging.
    verbosity = support.resolve('verbosity', user_config, flags, default=INFO)
    setup_logging(verbosity)

    # Save local paths, if applicable.
    # This allows the user to override the path to api-client-staging or
    # toolkit on his or her machine.
    pipeline_args['local_paths'] = support.parse_local_paths(
        user_config, flags.input_dir)

    # Save the input directory back to flags if it was not explicitly set.
    if not flags.input_dir:
        flags.input_dir = pipeline_args['local_paths']['googleapis']

    artman_config_path = flags.config
    if not os.path.isfile(artman_config_path):
        logger.error(
            'Artman config file `%s` doesn\'t exist.' % artman_config_path)
        sys.exit(96)  # 96 is the config-error exit code used in this module

    try:
        artifact_config = loader.load_artifact_config(
            artman_config_path, flags.artifact_name, flags.input_dir)
    except ValueError as ve:
        logger.error('Artifact config loading failed with `%s`' % ve)
        sys.exit(96)

    # If we were given just an API or BATCH, then expand it into the --config
    # syntax.
    shared_config_name = 'common.yaml'
    if artifact_config.language in (Artifact.RUBY, Artifact.NODEJS,):
        shared_config_name = 'doc.yaml'

    legacy_config_dict = converter.convert_to_legacy_config_dict(
        artifact_config, flags.input_dir, flags.output_dir)
    logger.debug('Below is the legacy config after conversion:\n%s' %
                 pprint.pformat(legacy_config_dict))
    tmp_legacy_config_yaml = '%s.tmp' % artman_config_path
    with io.open(tmp_legacy_config_yaml, 'w') as outfile:
        yaml.dump(legacy_config_dict, outfile, default_flow_style=False)

    # The tmp legacy config now exists on disk; run the rest under
    # try/finally so it is removed even when an error path raises or
    # calls sys.exit() (which raises SystemExit).
    try:
        googleapis = os.path.realpath(
            os.path.expanduser(
                pipeline_args['local_paths']['googleapis'], ))
        config = ','.join([
            '{artman_config_path}',
            '{googleapis}/gapic/lang/{shared_config_name}',
        ]).format(
            artman_config_path=tmp_legacy_config_yaml,
            googleapis=googleapis,
            shared_config_name=shared_config_name,
        )

        language = Artifact.Language.Name(
            artifact_config.language).lower()

        # Set the pipeline as well as package_type and packaging
        artifact_type = artifact_config.type
        if artifact_type in (Artifact.GAPIC, Artifact.GAPIC_ONLY):
            pipeline_name = 'GapicClientPipeline'
            pipeline_args['language'] = language
        elif artifact_type in (Artifact.GRPC, Artifact.GRPC_COMMON):
            pipeline_name = 'GrpcClientPipeline'
            pipeline_args['language'] = language
        elif artifact_type == Artifact.GAPIC_CONFIG:
            pipeline_name = 'GapicConfigPipeline'
        else:
            raise ValueError('Unrecognized artifact.')

        # Parse out the full configuration.
        # Note: the var replacement is still needed because they are still
        # being used in some shared/common config yamls.
        config_sections = ['common']
        for config_spec in config.split(','):
            config_args = config_util.load_config_spec(
                config_spec=config_spec,
                config_sections=config_sections,
                repl_vars={
                    k.upper(): v
                    for k, v in pipeline_args['local_paths'].items()
                },
                language=language, )
            pipeline_args.update(config_args)

        # Setup publishing related config if needed.
        if flags.subcommand == 'generate':
            pipeline_args['publish'] = 'noop'
        elif flags.subcommand == 'publish':
            publishing_config = _get_publishing_config(artifact_config,
                                                       flags.target)
            if publishing_config.type == Artifact.PublishTarget.GITHUB:
                pipeline_args['publish'] = 'local' if flags.dry_run else 'github'
                pipeline_args['github'] = support.parse_github_credentials(
                    argv_flags=flags,
                    config=user_config.get('github', {}), )
                repos = pipeline_args.pop('git_repos')
                pipeline_args['git_repo'] = support.select_git_repo(
                    repos, publishing_config.name)
            else:
                logger.error(
                    'Publishing type `%s` is not supported yet.' %
                    Artifact.PublishTarget.Type.Name(publishing_config.type))
                sys.exit(96)

        # Print out the final arguments to stdout, to help the user with
        # possible debugging.
        pipeline_args_repr = yaml.dump(
            pipeline_args,
            block_seq_indent=2,
            default_flow_style=False,
            indent=2, )
        logger.info('Final args:')
        for line in pipeline_args_repr.split('\n'):
            # Redact credential-looking values. Guard on ':' so a line that
            # merely contains 'token' without a colon cannot raise
            # ValueError from line.index().
            if 'token' in line and ':' in line:
                index = line.index(':')
                line = line[:index + 2] + '<< REDACTED >>'
            logger.info('  {0}'.format(line))
    finally:
        # Clean up the tmp legacy artman config on all exit paths.
        os.remove(tmp_legacy_config_yaml)

    # Return the final arguments.
    return pipeline_name, pipeline_args