Example #1
def find_config_files(metadata_or_path,
                      additional_files=None,
                      ignore_system_config=False):
    """Find files to load variables from.  Note that order here determines clobbering.

    Later files clobber earlier ones.  Preference is system-wide, then recipe-local, then any additional files."""
    files = []

    if hasattr(metadata_or_path, 'path'):
        recipe_config = os.path.join(metadata_or_path.path,
                                     "conda_build_config.yaml")
    else:
        recipe_config = os.path.join(metadata_or_path,
                                     "conda_build_config.yaml")

    if not ignore_system_config:
        if cc_conda_build.get('config_file'):
            system_path = cc_conda_build['config_file']
        else:
            system_path = os.path.join(os.path.expanduser('~'),
                                       "conda_build_config.yaml")
        if os.path.isfile(system_path):
            files.append(system_path)
    if os.path.isfile(recipe_config):
        files.append(recipe_config)
    if additional_files:
        files.extend([
            os.path.expanduser(additional_file)
            for additional_file in additional_files
        ])
    return files
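The clobbering rule implies the caller folds the returned files left to right so later entries win. A minimal consumption sketch, assuming PyYAML is available; the load_yaml helper and merge loop are illustrative stand-ins, not conda-build's actual consumer:

import yaml

def load_yaml(path):
    # hypothetical helper: parse one config file into a dict
    with open(path) as f:
        return yaml.safe_load(f) or {}

merged = {}
for config_file in find_config_files("./my-recipe"):
    # later files clobber earlier keys, matching the docstring's rule
    merged.update(load_yaml(config_file))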
Example #2
def find_config_files(metadata_or_path, additional_files=None, ignore_system_config=False,
                      exclusive_config_files=None):
    """Find files to load variables from.  Note that order here determines clobbering.

    Later files clobber earlier ones.  Order is user-wide < cwd < recipe dir < additional files."""
    files = [
        os.path.abspath(os.path.expanduser(config_file))
        for config_file in (exclusive_config_files or [])
    ]

    if not ignore_system_config and not exclusive_config_files:
        if cc_conda_build.get('config_file'):
            system_path = abspath(expanduser(expandvars(cc_conda_build['config_file'])))
        else:
            system_path = os.path.join(expanduser('~'), "conda_build_config.yaml")
        if os.path.isfile(system_path):
            files.append(system_path)

        cwd = os.path.join(os.getcwd(), 'conda_build_config.yaml')
        if os.path.isfile(cwd):
            files.append(cwd)

    if hasattr(metadata_or_path, 'path'):
        recipe_config = os.path.join(metadata_or_path.path, "conda_build_config.yaml")
    else:
        recipe_config = os.path.join(metadata_or_path, "conda_build_config.yaml")
    if os.path.isfile(recipe_config):
        files.append(recipe_config)

    if additional_files:
        files.extend([os.path.expanduser(additional_file) for additional_file in additional_files])

    return files
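A hedged call sketch for the new parameter: passing exclusive_config_files skips the user-wide and cwd lookups entirely, while the recipe-local config and any additional_files are still appended. The paths below are illustrative:

files = find_config_files(
    "./my-recipe",
    exclusive_config_files=["~/pinned/conda_build_config.yaml"])
# e.g. ['/home/user/pinned/conda_build_config.yaml',
#       './my-recipe/conda_build_config.yaml']
# (exclusive paths are expanded to absolute; the recipe config is appended as-is)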
Example #3
def find_config_files(metadata_or_path, additional_files=None, ignore_system_config=False,
                      exclusive_config_file=None):
    """Find files to load variables from.  Note that order here determines clobbering.

    Later files clobber earlier ones.  Order is user-wide < cwd < recipe dir < additional files."""
    files = ([os.path.abspath(os.path.expanduser(exclusive_config_file))]
             if exclusive_config_file else [])

    if not ignore_system_config and not exclusive_config_file:
        if cc_conda_build.get('config_file'):
            system_path = abspath(expanduser(expandvars(cc_conda_build['config_file'])))
        else:
            system_path = os.path.join(expanduser('~'), "conda_build_config.yaml")
        if os.path.isfile(system_path):
            files.append(system_path)

        cwd = os.path.join(os.getcwd(), 'conda_build_config.yaml')
        if os.path.isfile(cwd):
            files.append(cwd)

    if hasattr(metadata_or_path, 'path'):
        recipe_config = os.path.join(metadata_or_path.path, "conda_build_config.yaml")
    else:
        recipe_config = os.path.join(metadata_or_path, "conda_build_config.yaml")
    if os.path.isfile(recipe_config):
        files.append(recipe_config)

    if additional_files:
        files.extend([os.path.expanduser(additional_file) for additional_file in additional_files])

    return files
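This variant narrows the previous signature: exclusive_config_file takes a single path rather than a list. A hedged sketch of the equivalent call:

files = find_config_files(
    "./my-recipe",
    exclusive_config_file="~/pinned/conda_build_config.yaml")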
Example #4
def find_config_files(metadata_or_path, config):
    """
    Find config files to load. Config files are stacked in the following order:
        1. exclusive config files (see config.exclusive_config_files)
        2. user config files
           (see context.conda_build["config_file"] or ~/conda_build_config.yaml)
        3. cwd config files (see ./conda_build_config.yaml)
        4. recipe config files (see ${RECIPE_DIR}/conda_build_config.yaml)
        5. additional config files (see config.variant_config_files)

    .. note::
        Order determines clobbering with later files clobbering earlier ones.

    :param metadata_or_path: the metadata or path within which to find recipe config files
    :type metadata_or_path: :class:`MetaData` or `str`
    :param config: config object specifying config file settings
                   (see exclusive_config_files, ignore_system_variants, and variant_config_files)
    :type config: :class:`Config`
    :return: List of config files
    :rtype: `list` of paths (`str`)
    """
    resolve = lambda p: os.path.abspath(
        os.path.expanduser(os.path.expandvars(p)))

    # exclusive configs
    files = [resolve(f) for f in ensure_list(config.exclusive_config_files)]

    if not files and not config.ignore_system_variants:
        # user config
        if cc_conda_build.get('config_file'):
            cfg = resolve(cc_conda_build['config_file'])
        else:
            cfg = resolve(os.path.join('~', "conda_build_config.yaml"))
        if os.path.isfile(cfg):
            files.append(cfg)

        cfg = resolve('conda_build_config.yaml')
        if os.path.isfile(cfg):
            files.append(cfg)

    path = getattr(metadata_or_path, "path", metadata_or_path)
    cfg = resolve(os.path.join(path, "conda_build_config.yaml"))
    if os.path.isfile(cfg):
        files.append(cfg)

    files.extend(
        [resolve(f) for f in ensure_list(config.variant_config_files)])

    return files
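Because this variant reads its settings from a config object, a caller needs something exposing exclusive_config_files, ignore_system_variants, and variant_config_files. A minimal stand-in sketch; the real Config class ships with conda-build, so the SimpleNamespace here is illustrative only:

from types import SimpleNamespace

# stand-in for conda-build's Config object (illustrative, not the real class)
cfg = SimpleNamespace(exclusive_config_files=[],
                      ignore_system_variants=False,
                      variant_config_files=["./variants.yaml"])
files = find_config_files("./my-recipe", cfg)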
Example #5
def find_config_files(metadata_or_path, additional_files=None, ignore_system_config=False):
    """Find files to load variables from.  Note that order here determines clobbering.

    Later files clobber earlier ones.  Preference is system-wide, then recipe-local, then any additional files."""
    files = []

    if hasattr(metadata_or_path, 'path'):
        recipe_config = os.path.join(metadata_or_path.path, "conda_build_config.yaml")
    else:
        recipe_config = os.path.join(metadata_or_path, "conda_build_config.yaml")

    if not ignore_system_config:
        if cc_conda_build.get('config_file'):
            system_path = cc_conda_build['config_file']
        else:
            system_path = os.path.join(os.path.expanduser('~'), "conda_build_config.yaml")
        if os.path.isfile(system_path):
            files.append(system_path)
    if os.path.isfile(recipe_config):
        files.append(recipe_config)
    if additional_files:
        files.extend([os.path.expanduser(additional_file) for additional_file in additional_files])
    return files
Example #6
def parse_args(args):
    p = get_render_parser()
    p.description = """
Tool for building conda packages. A conda package is a binary tarball
containing system-level libraries, Python modules, executable programs, or
other components. conda keeps track of dependencies between packages and
platform specifics, making it simple to create working environments from
different sets of packages."""
    p.add_argument(
        "--check",
        action="store_true",
        help="Only check (validate) the recipe.",
    )
    p.add_argument(
        "--no-anaconda-upload",
        action="store_false",
        help="Do not ask to upload the package to anaconda.org.",
        dest='anaconda_upload',
        default=binstar_upload,
    )
    p.add_argument(
        "--no-binstar-upload",
        action="store_false",
        help=argparse.SUPPRESS,
        dest='anaconda_upload',
        default=binstar_upload,
    )
    p.add_argument(
        "--no-include-recipe",
        action="store_false",
        help="Don't include the recipe inside the built package.",
        dest='include_recipe',
        default=cc_conda_build.get('include_recipe', 'true').lower() == 'true',
    )
    p.add_argument(
        '-s',
        "--source",
        action="store_true",
        help="Only obtain the source (but don't build).",
    )
    p.add_argument(
        '-t',
        "--test",
        action="store_true",
        help=
        "Test package (assumes package is already built).  RECIPE_DIR argument can be either "
        "recipe directory, in which case source download may be necessary to resolve package "
        "version, or path to built package .tar.bz2 file, in which case no source is necessary.",
    )
    p.add_argument(
        '--no-test',
        action='store_true',
        dest='notest',
        help="Do not test the package.",
    )
    p.add_argument(
        '-b',
        '--build-only',
        action="store_true",
        help="""Only run the build, without any post processing or
        testing. Implies --no-test and --no-anaconda-upload.""",
    )
    p.add_argument(
        '-p',
        '--post',
        action="store_true",
        help=
        "Run the post-build logic. Implies --no-test and --no-anaconda-upload.",
    )
    p.add_argument(
        'recipe',
        metavar='RECIPE_PATH',
        nargs='+',
        help="Path to recipe directory.  Pass 'purge' here to clean the "
        "work and test intermediates.",
    )
    p.add_argument(
        '--skip-existing',
        action='store_true',
        help=("Skip recipes for which there already exists an existing build "
              "(locally or in the channels)."),
        default=cc_conda_build.get('skip_existing', 'false').lower() == 'true',
    )
    p.add_argument(
        '--keep-old-work',
        action='store_true',
        dest='keep_old_work',
        help="Do not remove anything from environment, even after successful "
        "build and test.")
    p.add_argument(
        '--dirty',
        action='store_true',
        help='Do not remove work directory or _build environment, '
        'to speed up debugging.  Does not apply patches or download source.')
    p.add_argument(
        '-q',
        "--quiet",
        action="store_true",
        help="do not display progress bar",
        default=cc_conda_build.get('quiet', 'false').lower() == 'true',
    )
    p.add_argument(
        '--debug',
        action="store_true",
        help="Show debug output from source checkouts and conda",
    )
    p.add_argument(
        '--token',
        help="Token to pass through to anaconda upload",
        default=cc_conda_build.get('anaconda_token'),
    )
    p.add_argument(
        '--user',
        help="User/organization to upload packages to on anaconda.org or pypi",
        default=cc_conda_build.get('user'),
    )
    p.add_argument(
        '--label',
        action='append',
        dest='labels',
        default=[],
        help="Label argument to pass through to anaconda upload",
    )
    p.add_argument(
        '--no-force-upload',
        help=
        "Disable force upload to anaconda.org, preventing overwriting any existing packages",
        dest='force_upload',
        default=True,
        action='store_false',
    )
    pypi_grp = p.add_argument_group("PyPI upload parameters (twine)")
    pypi_grp.add_argument(
        '--password',
        help="password to use when uploading packages to pypi",
    )
    pypi_grp.add_argument('--sign',
                          default=False,
                          help="sign files when uploading to pypi")
    pypi_grp.add_argument(
        '--sign-with',
        default='gpg',
        dest='sign_with',
        help="program to use to sign files when uploading to pypi")
    pypi_grp.add_argument(
        '--identity',
        help="GPG identity to use to sign files when uploading to pypi")
    pypi_grp.add_argument(
        '--config-file',
        help="path to .pypirc file to use when uploading to pypi",
        default=(abspath(expanduser(expandvars(cc_conda_build.get('pypirc'))))
                 if cc_conda_build.get('pypirc') else
                 cc_conda_build.get('pypirc')),
    )
    pypi_grp.add_argument(
        '--repository',
        '-r',
        help="PyPI repository to upload to",
        default=cc_conda_build.get('pypi_repository', 'pypitest'),
    )
    p.add_argument(
        "--no-activate",
        action="store_false",
        help="do not activate the build and test envs; just prepend to PATH",
        dest='activate',
        default=cc_conda_build.get('activate', 'true').lower() == 'true',
    )
    p.add_argument(
        "--no-build-id",
        action="store_false",
        help=
        ("do not generate unique build folder names.  Use if having issues with "
         "paths being too long."),
        dest='set_build_id',
        # note: inverted - dest stores positive logic
        default=cc_conda_build.get('set_build_id', 'true').lower() == 'true',
    )
    p.add_argument(
        "--croot",
        help=
        ("Build root folder.  Equivalent to CONDA_BLD_PATH, but applies only "
         "to this call of conda-build."))
    p.add_argument(
        "--verify",
        action="store_true",
        help="run verification on recipes or packages when building",
        default=cc_conda_build.get('verify', 'true').lower() == 'true',
    )
    p.add_argument(
        "--no-verify",
        action="store_false",
        dest="verify",
        help="do not run verification on recipes or packages when building",
        default=cc_conda_build.get('verify', 'true').lower() == 'true',
    )
    p.add_argument(
        "--strict-verify",
        action="store_true",
        dest="exit_on_verify_error",
        help=
        "Exit if any conda-verify check fail, instead of only printing them",
        default=cc_conda_build.get('exit_on_verify_error',
                                   'false').lower() == 'true',
    )
    p.add_argument(
        "--output-folder",
        help=
        ("folder to dump output package to.  Package are moved here if build or test succeeds."
         "  Destination folder must exist prior to using this."),
        default=cc_conda_build.get('output_folder'))
    p.add_argument(
        "--no-prefix-length-fallback",
        dest='prefix_length_fallback',
        action="store_false",
        help=
        ("Disable fallback to older 80 character prefix length if environment creation"
         " fails due to insufficient prefix length in dependency packages"),
        default=True,
    )
    p.add_argument(
        "--prefix-length-fallback",
        dest='prefix_length_fallback',
        action="store_true",
        help=
        ("Disable fallback to older 80 character prefix length if environment creation"
         " fails due to insufficient prefix length in dependency packages"),
        # this default will change to false in the future, when we deem that the community has
        #     had enough time to build long-prefix length packages.
        default=True,
    )
    p.add_argument(
        "--prefix-length",
        dest='_prefix_length',
        help=
        ("length of build prefix.  For packages with binaries that embed the path, this is"
         " critical to ensuring that your package can run as many places as possible.  Note"
         "that this value can be altered by the OS below conda-build (e.g. encrypted "
         "filesystems on Linux), and you should prefer to set --croot to a non-encrypted "
         "location instead, so that you maintain a known prefix length."),
        default=255,
        type=int,
    )
    p.add_argument(
        "--no-locking",
        dest='locking',
        default=True,
        action="store_false",
        help=
        ("Disable locking, to avoid unresolved race condition issues.  Unsafe to run multiple "
         "builds at once on one system with this set."))
    p.add_argument(
        "--no-remove-work-dir",
        dest='remove_work_dir',
        default=True,
        action="store_false",
        help=
        ("Disable removal of the work dir before testing.  Be careful using this option, as"
         " you package may depend on files that are not included in the package, and may pass "
         "tests, but ultimately fail on installed systems."))
    p.add_argument(
        "--error-overlinking",
        dest='error_overlinking',
        action="store_true",
        help=
        ("Enable error when shared libraries from transitive dependencies are directly "
         "linked to any executables or shared libraries in built packages.  This is disabled "
         "by default, but will be enabled by default in conda-build 4.0."),
        default=cc_conda_build.get('error_overlinking',
                                   'false').lower() == 'true',
    )
    p.add_argument(
        "--no-error-overlinking",
        dest='error_overlinking',
        action="store_false",
        help=
        ("Disable error when shared libraries from transitive dependencies are directly "
         "linked to any executables or shared libraries in built packages.  This is currently "
         "the default behavior, but will change in conda-build 4.0."),
        default=cc_conda_build.get('error_overlinking',
                                   'false').lower() == 'true',
    )
    p.add_argument(
        "--error-overdepending",
        dest='error_overdepending',
        action="store_true",
        help=
        ("Enable error when packages with names beginning `lib` or which have "
         "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by "
         "any of the files in this package."),
        default=cc_conda_build.get('error_overdepending',
                                   'false').lower() == 'true',
    )
    p.add_argument(
        "--no-error-overdepending",
        dest='error_overdepending',
        action="store_false",
        help=
        ("Disable error when packages with names beginning `lib` or which have "
         "`run_exports` are not auto-loaded by the OSes DSO loading mechanism by "
         "any of the files in this package."),
        default=cc_conda_build.get('error_overdepending',
                                   'false').lower() == 'true',
    )
    p.add_argument(
        "--long-test-prefix",
        action="store_true",
        help=
        ("Use a long prefix for the test prefix, as well as the build prefix.  Affects only "
         "Linux and Mac.  Prefix length matches the --prefix-length flag.  This is on by "
         "default in conda-build 3.0+"),
        default=cc_conda_build.get('long_test_prefix',
                                   'true').lower() == 'true',
    )
    p.add_argument(
        "--no-long-test-prefix",
        dest="long_test_prefix",
        action="store_false",
        help=
        ("Do not use a long prefix for the test prefix, as well as the build prefix."
         "  Affects only Linux and Mac.  Prefix length matches the --prefix-length flag.  "
         ),
        default=cc_conda_build.get('long_test_prefix',
                                   'true').lower() == 'true',
    )
    p.add_argument(
        '--keep-going',
        '-k',
        action='store_true',
        help=
        ("When running tests, keep going after each failure.  Default is to stop on the first "
         "failure."))
    p.add_argument(
        '--cache-dir',
        help=
        ('Path to store the source files (archives, git clones, etc.) during the build.'
         ),
        default=(abspath(
            expanduser(expandvars(cc_conda_build.get('cache_dir'))))
                 if cc_conda_build.get('cache_dir') else
                 cc_conda_build.get('cache_dir')),
    )
    p.add_argument(
        "--no-copy-test-source-files",
        dest="copy_test_source_files",
        action="store_false",
        default=cc_conda_build.get('copy_test_source_files',
                                   'true').lower() == 'true',
        help=
        ("Disables copying the files necessary for testing the package into "
         "the info/test folder.  Passing this argument means it may not be possible "
         "to test the package without internet access.  There is also a danger that "
         "the source archive(s) containing the files could become unavailable sometime "
         "in the future."))
    p.add_argument(
        '--merge-build-host',
        action="store_true",
        help=
        ('Merge the build and host directories, even when host section or compiler '
         'jinja2 is present'),
        default=cc_conda_build.get('merge_build_host',
                                   'false').lower() == 'true',
    )
    p.add_argument(
        '--stats-file',
        help=('File path to save build statistics to.  Stats are '
              'in JSON format'),
    )
    p.add_argument(
        '--extra-deps',
        nargs='+',
        help=
        ('Extra dependencies to add to all environment creation steps.  This '
         'is only enabled for testing with the -t or --test flag.  Change '
         'meta.yaml or use templates otherwise.'),
    )

    add_parser_channels(p)

    args = p.parse_args(args)
    return p, args
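A hedged usage sketch: the function returns both the parser and the parsed namespace, with the recipe path as the positional argument; flags inherited from get_render_parser() (see the later examples) are accepted as well. The argv below is illustrative:

parser, parsed = parse_args(["./my-recipe", "--no-test", "--skip-existing"])
print(parsed.recipe)         # ['./my-recipe']  (nargs='+' yields a list)
print(parsed.notest)         # True
print(parsed.skip_existing)  # True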
Example #7
def parse_args(parse_this=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--version',
                        action='version',
                        help='Show the conda-concourse-ci version number and exit.',
                        version='conda-concourse-ci %s' % __version__)
    sp = parser.add_subparsers(title='subcommands', dest='subparser_name')
    examine_parser = sp.add_parser('examine',
                                   help='examine path for changed recipes')
    examine_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")
    examine_parser.add_argument(
        "path",
        default='.',
        nargs='?',
        help="path in which to examine/build/test recipes")
    examine_parser.add_argument(
        '--folders',
        default=[],
        nargs="+",
        help="Rather than determine tree from git, specify folders to build")
    examine_parser.add_argument(
        '--steps',
        type=int,
        help=("Number of downstream steps to follow in the DAG when "
              "computing what to test.  Used for making sure that an "
              "update does not break downstream packages.  Set to -1 "
              "to follow the complete dependency tree."),
        default=0)
    examine_parser.add_argument(
        '--max-downstream',
        default=5,
        type=int,
        help=
        ("Limit the total number of downstream packages built.  Only applies "
         "if steps != 0.  Set to -1 for unlimited."))
    examine_parser.add_argument(
        '--git-rev',
        default='HEAD',
        help=('start revision to examine.  If stop not '
              'provided, changes are THIS_VAL~1..THIS_VAL'))
    examine_parser.add_argument(
        '--stop-rev',
        default=None,
        help=('stop revision to examine.  When provided, '
              'changes are git_rev..stop_rev'))
    examine_parser.add_argument(
        '--test',
        action='store_true',
        help='test packages (instead of building AND testing them)')
    examine_parser.add_argument(
        '--matrix-base-dir',
        help='path to matrix configuration, if different from recipe path',
        default=cc_conda_build.get('matrix_base_dir'))
    examine_parser.add_argument(
        '--output-dir',
        help="folder where output plan and recipes live",
        default='../output')
    examine_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    examine_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    examine_parser.add_argument(
        '--worker-tag',
        '-t',
        action='append',
        help="set worker tag(s) to limit where jobs will run.  Applies "
        "to all jobs.  For finer control, use extra/worker_tags in "
        "meta.yaml with selectors.",
        dest='worker_tags')
    examine_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    examine_parser.add_argument('--no-skip-existing',
                                help="Do not skip existing builds",
                                dest="skip_existing",
                                action="store_false")
    submit_parser = sp.add_parser(
        'submit', help="submit plan director to configured server")
    submit_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")
    submit_parser.add_argument('--pipeline-name',
                               help="name for the submitted pipeline",
                               default='{base_name} plan director')
    submit_parser.add_argument(
        '--pipeline-file',
        default='plan_director.yml',
        help="path to pipeline .yml file containing plan")
    submit_parser.add_argument(
        '--config-root-dir',
        help=
        "path containing build-config.yml (optional), config.yml and matrix definitions"
    )

    submit_parser.add_argument(
        '--src-dir',
        help="folder where git repo of source code lives",
        default=os.getcwd())
    submit_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')

    bootstrap_parser = sp.add_parser(
        'bootstrap',
        help="create default configuration files to help you start")
    bootstrap_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")

    one_off_parser = sp.add_parser(
        'one-off', help="submit local recipes and plan to configured server")
    one_off_parser.add_argument(
        'pipeline_label',
        help="name of your project, to distinguish it from other projects")
    one_off_parser.add_argument(
        '--build-config',
        nargs="+",
        help=(
            "Specify VAR=VAL to override values defined in build-config.yml"))
    one_off_parser.add_argument(
        'folders',
        nargs="+",
        help=("Specify folders, relative to --recipe-root-dir, to upload "
              "and build"))
    one_off_parser.add_argument(
        '--automated-pipeline',
        action='store_true',
        default=False,
        help=
        "Flag to run this one_off command as an automated pipeline. Default is False",
    )
    one_off_parser.add_argument(
        '--branches',
        nargs='+',
        default=None,
        help=
        ("Only used when --automated_pipeline is specified. "
         "List of repository branches that recipes will be pulled from. "
         "Either pass in one branch or n number of branches where "
         "n is equal to the number of recipes you are building. "
         "The default is to use the 'automated-build' branch. "
         "Specific this option after the list of folders to avoid "
         "confusing which arguments are folders and which are branches, "
         "for example: "
         "c3i one-off pipeline_label folder1 folder2 --branches branch1 branch2"
         ))
    one_off_parser.add_argument(
        "--pr-num",
        action="store",
        help=
        "The PR number on which to make a comment when using the automated pipeline"
    )
    one_off_parser.add_argument(
        "--repository",
        action="store",
        help="The git repo where the PR lives. This should look like: Org/Repo"
    )
    one_off_parser.add_argument("--pr-file",
                                action="store",
                                help="File added to the git repo by the PR")
    one_off_parser.add_argument(
        '--stage-for-upload',
        action='store_true',
        help="create job that stages package for upload as part of the pipeline"
    )
    one_off_parser.add_argument(
        '--push-branch',
        action='store_true',
        help=
        "create a job that push the branch(es) used for the build to master")
    one_off_parser.add_argument(
        '--destroy-pipeline',
        action='store_true',
        help="destroys the pipeline once the review branch has been merged, "
        "the artifacts have been staged, and the reciepe repo has been updated. "
        "This requires --stage-for-upload and --push-branch options.")
    one_off_parser.add_argument(
        '--commit-msg',
        action='store',
        help=("git commit message to record when packages are uploaded, "
              "required when --stage-for-upload specified"))
    one_off_parser.add_argument(
        '--recipe-root-dir',
        default=os.getcwd(),
        help="path containing recipe folders to upload")
    one_off_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    one_off_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')
    one_off_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    one_off_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    one_off_parser.add_argument(
        '--worker-tag',
        '-t',
        action='append',
        help="set worker tag(s) to limit where jobs will run.  Applies "
        "to all jobs.  For finer control, use extra/worker_tags in "
        "meta.yaml with selectors.",
        dest='worker_tags')
    one_off_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    one_off_parser.add_argument(
        '--output-dir',
        help=("folder where output plan and recipes live."
              "Defaults to temp folder.  Set to something to save output."))
    one_off_parser.add_argument(
        '--append-file',
        help=
        """Append data in meta.yaml with fields from this file.  Jinja2 is not done
        on appended fields""",
        dest='append_sections_file',
    )
    one_off_parser.add_argument(
        '--clobber-file',
        help=
        """Clobber data in meta.yaml with fields from this file.  Jinja2 is not done
        on clobbered fields.""",
        dest='clobber_sections_file',
    )
    one_off_parser.add_argument('--no-skip-existing',
                                help="Do not skip existing builds",
                                dest="skip_existing",
                                action="store_false")
    one_off_parser.add_argument(
        '--use-repo-access',
        help="Pass the repo access credentials to the workers",
        action="store_true",
    )

    one_off_parser.add_argument(
        '--use-staging-channel',
        help="Uploads built packages to staging channel",
        action="store_true",
    )
    one_off_parser.add_argument(
        '--dry-run',
        action="store_true",
        help=
        ("Dry run, prepare concourse plan and files but do not submit. "
         "Best used with the --output-dir option so the output can be inspected"
         ),
    )

    batch_parser = sp.add_parser('batch',
                                 help="submit a batch of one-off jobs.")
    batch_parser.add_argument(
        'batch_file',
        help="""File describing batch job.  Each lines defines a seperate
        one-off job.  List one or more folders on each line.  Job specific
        arguments can be specified after a ';' using param=value, multiple
        arguments are seperated by a ','.  For example:

            recipe-feedstock; channel=conda-forge,clobber_sections_file=clobber.yaml
        """)

    # batch specific arguments
    batch_parser.add_argument(
        '--max-builds',
        default=6,
        type=int,
        help=("maximum number of activate builds allowed before starting a new"
              "job, default is 6"))
    batch_parser.add_argument(
        '--poll-time',
        default=120,
        type=int,
        help=("time in seconds between checking concourse server for active "
              "builds, default is 120 seconds."))
    batch_parser.add_argument(
        '--build-lookback',
        default=500,
        type=int,
        help="number of builds to examine for active builds, default is 500")
    batch_parser.add_argument(
        '--label-prefix',
        default='autobot_',
        help="prefix for pipeline labels, default is autobot_")

    # one-off arguments
    batch_parser.add_argument('--recipe-root-dir',
                              default=os.getcwd(),
                              help="path containing recipe folders to upload")
    batch_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    batch_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')
    batch_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    batch_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    batch_parser.add_argument(
        '--worker-tag',
        '-t',
        action='append',
        help="set worker tag(s) to limit where jobs will run.  Applies "
        "to all jobs.  For finer control, use extra/worker_tags in "
        "meta.yaml with selectors.",
        dest='worker_tags')
    batch_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    batch_parser.add_argument(
        '--output-dir',
        help=("folder where output plan and recipes live."
              "Defaults to temp folder.  Set to something to save output."))
    batch_parser.add_argument(
        '--append-file',
        help=
        """Append data in meta.yaml with fields from this file.  Jinja2 is not done
        on appended fields""",
        dest='append_sections_file',
    )
    batch_parser.add_argument(
        '--clobber-file',
        help=
        """Clobber data in meta.yaml with fields from this file.  Jinja2 is not done
        on clobbered fields.""",
        dest='clobber_sections_file',
    )
    batch_parser.add_argument('--no-skip-existing',
                              help="Do not skip existing builds",
                              dest="skip_existing",
                              action="store_false")
    batch_parser.add_argument(
        '--use-repo-access',
        help="Pass the repo access credentials to the workers",
        action="store_true",
    )
    batch_parser.add_argument(
        '--use-staging-channel',
        help="Uploads built packages to staging channel",
        action="store_true",
    )
    rm_parser = sp.add_parser('rm', help='remove pipelines from server')
    rm_parser.add_argument('pipeline_names',
                           nargs="+",
                           help=("Specify pipeline names on server to remove"))
    rm_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    rm_parser.add_argument('--do-it-dammit',
                           '-y',
                           help="YOLO",
                           action="store_true")
    rm_parser.add_argument(
        '--days',
        help='only remove specified pipelines older than n days',
        action='store')

    pause_parser = sp.add_parser('pause', help='pause pipelines on the server')
    pause_parser.add_argument(
        'pipeline_names',
        nargs="+",
        help=("Specify pipeline names on server to pause"))
    pause_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    pause_parser.add_argument('--do-it-dammit',
                              '-y',
                              help="YOLO",
                              action="store_true")

    unpause_parser = sp.add_parser('unpause',
                                   help='unpause pipelines on the server')
    unpause_parser.add_argument(
        'pipeline_names',
        nargs="+",
        help=("Specify pipeline names on server to pause"))
    unpause_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    unpause_parser.add_argument('--do-it-dammit',
                                '-y',
                                help="YOLO",
                                action="store_true")

    trigger_parser = sp.add_parser('trigger',
                                   help='trigger (failed) jobs of a pipeline')
    trigger_parser.add_argument('pipeline_names',
                                nargs='+',
                                help=("Specify pipeline names to trigger"))
    trigger_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    trigger_parser.add_argument('--all',
                                dest="trigger_all",
                                action="store_true",
                                help="trigger all jobs")

    abort_parser = sp.add_parser('abort', help='abort jobs of a pipeline')
    abort_parser.add_argument('pipeline_names',
                              nargs='+',
                              help=("Specify pipeline names to abort"))
    abort_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))

    return parser.parse_known_args(parse_this)
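Since this returns parser.parse_known_args(...), callers receive a (namespace, leftovers) pair, so unrecognized flags pass through instead of erroring, and dispatch typically keys off subparser_name. A hedged sketch with an illustrative argv:

args, unknown = parse_args(["examine", "my-project", "--steps", "2"])
if args.subparser_name == "examine":
    print(args.base_name)  # 'my-project'
    print(args.steps)      # 2
    print(unknown)         # []  (any unrecognized flags would land here)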
Example #8
def get_render_parser():
    p = ArgumentParser(description="""
Tool for building conda packages. A conda package is a binary tarball
containing system-level libraries, Python modules, executable programs, or
other components. conda keeps track of dependencies between packages and
platform specifics, making it simple to create working environments from
        different sets of packages.""",
                       conflict_handler='resolve')
    p.add_argument(
        '-V',
        '--version',
        action='version',
        help='Show the conda-build version number and exit.',
        version='conda-build %s' % __version__,
    )
    p.add_argument(
        '-n',
        "--no-source",
        action="store_true",
        help="When templating can't be completed, do not obtain the \
source to try to fill in related template variables.",
    )
    p.add_argument(
        "--output",
        action="store_true",
        help="Output the conda package filename which would have been "
        "created",
    )
    p.add_argument(
        '--python',
        action="append",
        help="Set the Python version used by conda build.",
    )
    p.add_argument(
        '--perl',
        action="append",
        help="Set the Perl version used by conda build.",
    )
    p.add_argument(
        '--numpy',
        action="append",
        help="Set the NumPy version used by conda build.",
    )
    p.add_argument('--R',
                   action="append",
                   help="""Set the R version used by conda build.""",
                   dest="r_base")
    p.add_argument(
        '--lua',
        action="append",
        help="Set the Lua version used by conda build.",
    )
    p.add_argument(
        '--bootstrap',
        help="""Provide initial configuration in addition to recipe.
        Can be a path to or name of an environment, which will be emulated
        in the package.""",
    )
    p.add_argument(
        '--append-file',
        help=
        """Append data in meta.yaml with fields from this file.  Jinja2 is not done
        on appended fields""",
        dest='append_sections_file',
    )
    p.add_argument(
        '--clobber-file',
        help=
        """Clobber data in meta.yaml with fields from this file.  Jinja2 is not done
        on clobbered fields.""",
        dest='clobber_sections_file',
    )
    p.add_argument(
        '-m',
        '--variant-config-files',
        dest='variant_config_files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    p.add_argument(
        "--old-build-string",
        dest="filename_hashing",
        action="store_false",
        default=cc_conda_build.get('filename_hashing',
                                   'true').lower() == 'true',
        help=("Disable hash additions to filenames to distinguish package "
              "variants from one another. NOTE: any filename collisions are "
              "yours to handle. Any variants with overlapping names within a "
              "build will clobber each other."))

    add_parser_channels(p)
    return p
Example #9
def get_render_parser():
    p = ArgumentParser(
        description="""
Tool for building conda packages. A conda package is a binary tarball
containing system-level libraries, Python modules, executable programs, or
other components. conda keeps track of dependencies between packages and
platform specifics, making it simple to create working environments from
        different sets of packages.""",
        conflict_handler='resolve'
    )
    p.add_argument(
        '-V', '--version',
        action='version',
        help='Show the conda-build version number and exit.',
        version='conda-build %s' % __version__,
    )
    p.add_argument(
        '-n', "--no-source",
        action="store_true",
        help="When templating can't be completed, do not obtain the \
source to try to fill in related template variables.",
    )
    p.add_argument(
        "--output",
        action="store_true",
        help="Output the conda package filename which would have been "
               "created",
    )
    p.add_argument(
        '--python',
        action="append",
        help="Set the Python version used by conda build.",
    )
    p.add_argument(
        '--perl',
        action="append",
        help="Set the Perl version used by conda build.",
    )
    p.add_argument(
        '--numpy',
        action="append",
        help="Set the NumPy version used by conda build.",
    )
    p.add_argument(
        '--R',
        action="append",
        help="""Set the R version used by conda build.""",
        dest="r_base"
    )
    p.add_argument(
        '--lua',
        action="append",
        help="Set the Lua version used by conda build.",
    )
    p.add_argument(
        '--bootstrap',
        help="""Provide initial configuration in addition to recipe.
        Can be a path to or name of an environment, which will be emulated
        in the package.""",
    )
    p.add_argument(
        '--append-file',
        help="""Append data in meta.yaml with fields from this file.  Jinja2 is not done
        on appended fields""",
        dest='append_sections_file',
    )
    p.add_argument(
        '--clobber-file',
        help="""Clobber data in meta.yaml with fields from this file.  Jinja2 is not done
        on clobbered fields.""",
        dest='clobber_sections_file',
    )
    p.add_argument(
        '-m', '--variant-config-files',
        dest='variant_config_files',
        action="append",
        help="""Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    p.add_argument(
        "--old-build-string", dest="filename_hashing", action="store_false",
        default=cc_conda_build.get('filename_hashing', 'true').lower() == 'true',
        help=("Disable hash additions to filenames to distinguish package "
              "variants from one another. NOTE: any filename collisions are "
              "yours to handle. Any variants with overlapping names within a "
              "build will clobber each other.")
    )

    add_parser_channels(p)
    return p
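The version flags all use action="append", so repeating a flag accumulates values; that is what lets one invocation request several interpreter versions for the build matrix. A hedged sketch (the parser defines no required positionals, so it can be exercised directly):

p = get_render_parser()
ns = p.parse_args(["--python", "3.8", "--python", "3.9", "-m", "variants.yaml"])
print(ns.python)                # ['3.8', '3.9']
print(ns.variant_config_files)  # ['variants.yaml']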
Example #10
def compute_builds(path,
                   base_name,
                   folders,
                   matrix_base_dir=None,
                   steps=0,
                   max_downstream=5,
                   test=False,
                   public=True,
                   output_dir='../output',
                   output_folder_label='git',
                   config_overrides=None,
                   platform_filters=None,
                   worker_tags=None,
                   clobber_sections_file=None,
                   append_sections_file=None,
                   pass_throughs=None,
                   skip_existing=True,
                   use_repo_access=False,
                   use_staging_channel=False,
                   **kw):
    build_config = kw.get('build_config', []) or []
    if kw.get('stage_for_upload', False):
        if kw.get('commit_msg') is None:
            raise ValueError(
                "--stage-for-upload requires --commit-msg to be specified")
    if kw.get('destroy_pipeline', False):
        if not kw.get('stage_for_upload', False) or not kw.get(
                'push_branch', False):
            raise ValueError("--destroy-pipeline requires that --push-branch "
                             "and stage-for-upload be specified as well.")
    path = path.replace('"', '')
    if not folders:
        print(
            "No folders specified to build, and nothing changed in git.  Exiting."
        )
        return
    matrix_base_dir = os.path.expanduser(matrix_base_dir or path)
    # clean up quoting from concourse template evaluation
    matrix_base_dir = matrix_base_dir.replace('"', '')

    append_sections_file = append_sections_file or cc_conda_build.get(
        'append_sections_file')
    clobber_sections_file = clobber_sections_file or cc_conda_build.get(
        'clobber_sections_file')

    repo_commit = ''
    git_identifier = ''

    build_config_yml = os.path.join(matrix_base_dir, 'build-config.yml')
    matched = {}
    build_config_vars = {}
    try:
        with open(build_config_yml) as build_config_file:
            build_config_vars = yaml.safe_load(build_config_file)
    except (OSError, IOError):
        print('WARNING :: open(build_config_yml={}) failed'.format(
            build_config_yml))
    for bcv in build_config_vars:
        for var_val in build_config:
            var = var_val.split('=', 1)[0]
            val = var_val.split('=', 1)[1]
            if fnmatch(bcv, var):
                matched[var_val] = 1
                log.info("Overriding build-config.yaml with {}={}".format(
                    var, val))
                build_config_vars[bcv] = val
    for var_val in build_config:
        if var_val not in matched:
            var = var_val.split('=', 1)[0]
            if '*' in var:
                log.warning(
                    "Did not find match for --build-config={} (it has no effect)"
                    .format(var_val))
            else:
                val = var_val.split('=', 1)[1]
                log.info("Adding {}={} to build configuration".format(
                    var, val))
                build_config_vars[var] = val
    task_graph = collect_tasks(path,
                               folders=folders,
                               steps=steps,
                               max_downstream=max_downstream,
                               test=test,
                               matrix_base_dir=matrix_base_dir,
                               channels=kw.get('channel', []),
                               variant_config_files=kw.get(
                                   'variant_config_files', []),
                               platform_filters=platform_filters,
                               append_sections_file=append_sections_file,
                               clobber_sections_file=clobber_sections_file,
                               pass_throughs=pass_throughs,
                               skip_existing=skip_existing,
                               build_config_vars=build_config_vars)

    with open(os.path.join(matrix_base_dir, 'config.yml')) as src:
        config_vars = yaml.safe_load(src)
    config_vars['recipe-repo-commit'] = repo_commit

    if config_overrides:
        config_vars.update(config_overrides)

    plconfig = graph_to_plan_with_jobs(os.path.abspath(path),
                                       task_graph,
                                       commit_id=repo_commit,
                                       matrix_base_dir=matrix_base_dir,
                                       config_vars=config_vars,
                                       public=public,
                                       worker_tags=worker_tags,
                                       pass_throughs=pass_throughs,
                                       use_repo_access=use_repo_access,
                                       use_staging_channel=use_staging_channel,
                                       automated_pipeline=kw.get(
                                           "automated_pipeline", False),
                                       branches=kw.get("branches", None),
                                       pr_num=kw.get("pr_num", None),
                                       repository=kw.get("repository", None),
                                       folders=folders)

    if kw.get('pr_file'):
        pr_merged_resource = "pr-merged"  # TODO actually a name
        plconfig.add_pr_merged_resource(config_vars['pr-repo'],
                                        kw.get("pr_file"))
    else:
        pr_merged_resource = None

    if kw.get('stage_for_upload', False):
        # TODO move this
        if 'stage-for-upload-config' not in config_vars:
            raise Exception((
                "--stage-for-upload specified but configuration file contains "
                "to 'stage-for-upload-config entry"))
        plconfig.add_upload_job(config_vars, kw['commit_msg'],
                                pr_merged_resource)

    if kw.get('push_branch', False):
        # TODO move this
        if 'push-branch-config' not in config_vars:
            raise Exception(
                ("--push-branch specified but configuration file contains "
                 "to 'push-branch-config entry"))
        if kw.get('stage_for_upload', False):
            stage_job_name = 'stage_for_upload'
        else:
            stage_job_name = None
        plconfig.add_push_branch_job(config_vars, folders, kw['branches'],
                                     kw['feedstock_pr_num'],
                                     pr_merged_resource, stage_job_name)
    if kw.get('destroy_pipeline', False):
        # TODO move this
        if 'destroy-pipeline-config' not in config_vars:
            raise Exception(
                "--destroy-pipeline specified but configuration file does not "
                "have that entry.")
        plconfig.add_destroy_pipeline_job(config_vars, folders)
    output_dir = output_dir.format(base_name=base_name,
                                   git_identifier=git_identifier)

    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    with open(os.path.join(output_dir, 'plan.yml'), 'w') as f:
        yaml.dump(plconfig.to_dict(), f, default_flow_style=False)

    # expand folders to include any dependency builds or tests
    if not os.path.isabs(path):
        path = os.path.normpath(os.path.join(os.getcwd(), path))
    for fn in glob.glob(os.path.join(output_dir, 'output_order*')):
        os.remove(fn)
    last_recipe_dir = None
    nodes = list(nx.topological_sort(task_graph))
    nodes.reverse()
    for node in nodes:
        meta = task_graph.nodes[node]['meta']
        if meta.meta_path:
            recipe = os.path.dirname(meta.meta_path)
        else:
            recipe = meta.meta.get('extra', {}).get('parent_recipe',
                                                    {}).get('path', '')
        assert recipe, ("no parent recipe set, and no path associated "
                        "with this metadata")
        # make recipe path relative
        recipe = recipe.replace(path + '/', '')
        # copy base recipe into a folder named for this node
        out_folder = os.path.join(output_dir, node)
        if os.path.isdir(out_folder):
            shutil.rmtree(out_folder)

        try:
            shutil.copytree(os.path.join(path, recipe), out_folder)
        except:  # noqa
            os.system("cp -Rf '{}' '{}'".format(os.path.join(path, recipe),
                                                out_folder))

        # write the conda_build_config.yml for this particular metadata into that recipe
        #   This should sit alongside meta.yaml, where conda-build will be able to find it
        with open(os.path.join(out_folder, 'conda_build_config.yaml'),
                  'w') as f:
            yaml.dump(meta.config.squished_variants,
                      f,
                      default_flow_style=False)

        # copy any clobber or append file that is specified either on CLI or via condarc
        if clobber_sections_file:
            shutil.copyfile(clobber_sections_file,
                            os.path.join(out_folder, 'recipe_clobber.yaml'))

        if append_sections_file:
            shutil.copyfile(append_sections_file,
                            os.path.join(out_folder, 'recipe_append.yaml'))

        order_fn = 'output_order_' + task_graph.nodes[node]['worker']['label']
        with open(os.path.join(output_dir, order_fn), 'a') as f:
            f.write(node + '\n')
        recipe_dir = os.path.dirname(recipe) if os.sep in recipe else recipe
        if not last_recipe_dir or last_recipe_dir != recipe_dir:
            order_recipes_fn = 'output_order_recipes_' + task_graph.nodes[
                node]['worker']['label']
            with open(os.path.join(output_dir, order_recipes_fn), 'a') as f:
                f.write(recipe_dir + '\n')
            last_recipe_dir = recipe_dir

    # clean up recipe_log.txt so that we don't leave a dirty git state
    for node in nodes:
        meta = task_graph.nodes[node]['meta']
        if meta.meta_path:
            recipe = os.path.dirname(meta.meta_path)
        else:
            recipe = meta.meta.get('extra', {}).get('parent_recipe',
                                                    {}).get('path', '')
        if os.path.isfile(os.path.join(recipe, 'recipe_log.json')):
            os.remove(os.path.join(recipe, 'recipe_log.json'))
        if os.path.isfile(os.path.join(recipe, 'recipe_log.txt')):
            os.remove(os.path.join(recipe, 'recipe_log.txt'))
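The override-matching loop above pairs VAR=VAL strings against keys loaded from build-config.yml: existing keys are overridden on a glob match, and literal (non-glob) unmatched vars are added as new keys. A standalone sketch of that behavior with illustrative keys and values:

from fnmatch import fnmatch

build_config_vars = {"cuda_version": "11.2", "docker_image": "centos7"}
for var_val in ["cuda*=12.0", "new_key=1"]:
    var, val = var_val.split("=", 1)
    matched = [k for k in build_config_vars if fnmatch(k, var)]
    for k in matched:
        build_config_vars[k] = val      # glob match: override the existing key
    if not matched and "*" not in var:
        build_config_vars[var] = val    # literal new key: add it
print(build_config_vars)
# {'cuda_version': '12.0', 'docker_image': 'centos7', 'new_key': '1'}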
Example #11
def compute_builds(path, base_name, git_rev=None, stop_rev=None, folders=None, matrix_base_dir=None,
                   steps=0, max_downstream=5, test=False, public=True, output_dir='../output',
                   output_folder_label='git', config_overrides=None, platform_filters=None,
                   worker_tags=None, clobber_sections_file=None, append_sections_file=None,
                   pass_throughs=None, skip_existing=True, **kw):
    if not git_rev and not folders:
        raise ValueError("Either git_rev or folders list are required to know what to compute")
    checkout_rev = stop_rev or git_rev
    path = path.replace('"', '')
    if not folders:
        folders = git_changed_recipes(git_rev, stop_rev, git_root=path)
    if not folders:
        print("No folders specified to build, and nothing changed in git.  Exiting.")
        return
    matrix_base_dir = os.path.expanduser(matrix_base_dir or path)
    # clean up quoting from concourse template evaluation
    matrix_base_dir = matrix_base_dir.replace('"', '')

    append_sections_file = append_sections_file or cc_conda_build.get('append_sections_file')
    clobber_sections_file = clobber_sections_file or cc_conda_build.get('clobber_sections_file')

    repo_commit = ''
    git_identifier = ''
    if checkout_rev:
        with checkout_git_rev(checkout_rev, path):
            git_identifier = _get_current_git_rev(path)
            task_graph = collect_tasks(path, folders=folders, steps=steps,
                                       max_downstream=max_downstream, test=test,
                                       matrix_base_dir=matrix_base_dir,
                                       channels=kw.get('channel', []),
                                       variant_config_files=kw.get('variant_config_files', []),
                                       platform_filters=platform_filters,
                                       append_sections_file=append_sections_file,
                                       clobber_sections_file=clobber_sections_file,
                                       pass_throughs=pass_throughs, skip_existing=skip_existing)
            try:
                repo_commit = _get_current_git_rev(path)
            except subprocess.CalledProcessError:
                repo_commit = 'master'
    else:
        task_graph = collect_tasks(path, folders=folders, steps=steps,
                                   max_downstream=max_downstream, test=test,
                                   matrix_base_dir=matrix_base_dir,
                                   channels=kw.get('channel', []),
                                   variant_config_files=kw.get('variant_config_files', []),
                                   platform_filters=platform_filters,
                                   append_sections_file=append_sections_file,
                                   clobber_sections_file=clobber_sections_file,
                                   pass_throughs=pass_throughs, skip_existing=skip_existing)

    with open(os.path.join(matrix_base_dir, 'config.yml')) as src:
        data = yaml.load(src, Loader=yaml.SafeLoader)
    data['recipe-repo-commit'] = repo_commit

    if config_overrides:
        data.update(config_overrides)

    plan = graph_to_plan_with_jobs(os.path.abspath(path), task_graph,
                                   commit_id=repo_commit, matrix_base_dir=matrix_base_dir,
                                   config_vars=data, public=public, worker_tags=worker_tags,
                                   pass_throughs=pass_throughs)

    output_dir = output_dir.format(base_name=base_name, git_identifier=git_identifier)

    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    with open(os.path.join(output_dir, 'plan.yml'), 'w') as f:
        yaml.dump(plan, f, default_flow_style=False)

    # expand folders to include any dependency builds or tests
    if not os.path.isabs(path):
        path = os.path.normpath(os.path.join(os.getcwd(), path))
    for fn in glob.glob(os.path.join(output_dir, 'output_order*')):
        os.remove(fn)
    last_recipe_dir = None
    nodes = list(nx.topological_sort(task_graph))
    nodes.reverse()
    for node in nodes:
        meta = task_graph.nodes[node]['meta']
        if meta.meta_path:
            recipe = os.path.dirname(meta.meta_path)
        else:
            recipe = meta.meta.get('extra', {}).get('parent_recipe', {}).get('path', '')
        assert recipe, ("no parent recipe set, and no path associated "
                                "with this metadata")
        # make recipe path relative
        recipe = recipe.replace(path + '/', '')
        # copy base recipe into a folder named for this node
        out_folder = os.path.join(output_dir, node)
        if os.path.isdir(out_folder):
            shutil.rmtree(out_folder)

        shutil.copytree(os.path.join(path, recipe), out_folder)
        # write the conda_build_config.yml for this particular metadata into that recipe
        #   This should sit alongside meta.yaml, where conda-build will be able to find it
        with open(os.path.join(out_folder, 'conda_build_config.yaml'), 'w') as f:
            yaml.dump(meta.config.squished_variants, f, default_flow_style=False)

        # copy any clobber or append file that is specified either on CLI or via condarc
        if clobber_sections_file:
            shutil.copyfile(clobber_sections_file, os.path.join(out_folder, 'recipe_clobber.yaml'))

        if append_sections_file:
            shutil.copyfile(append_sections_file, os.path.join(out_folder, 'recipe_append.yaml'))

        order_fn = 'output_order_' + task_graph.nodes[node]['worker']['label']
        with open(os.path.join(output_dir, order_fn), 'a') as f:
            f.write(node + '\n')
        recipe_dir = os.path.dirname(recipe) if os.sep in recipe else recipe
        if not last_recipe_dir or last_recipe_dir != recipe_dir:
            order_recipes_fn = 'output_order_recipes_' + task_graph.nodes[node]['worker']['label']
            with open(os.path.join(output_dir, order_recipes_fn), 'a') as f:
                f.write(recipe_dir + '\n')
            last_recipe_dir = recipe_dir

    # clean up recipe_log.txt so that we don't leave a dirty git state
    for node in nodes:
        meta = task_graph.nodes[node]['meta']
        if meta.meta_path:
            recipe = os.path.dirname(meta.meta_path)
        else:
            recipe = meta.meta.get('extra', {}).get('parent_recipe', {}).get('path', '')
        if os.path.isfile(os.path.join(recipe, 'recipe_log.json')):
            os.remove(os.path.join(recipe, 'recipe_log.json'))
        if os.path.isfile(os.path.join(recipe, 'recipe_log.txt')):
            os.remove(os.path.join(recipe, 'recipe_log.txt'))
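Every name and path in the following invocation sketch is a placeholder; it only illustrates the calling convention of compute_builds, including the {base_name}/{git_identifier} formatting the function applies to output_dir:

# Hypothetical call: build two explicitly listed recipe folders from a
# local checkout, bypassing git change detection.
compute_builds(
    path='./my-recipes',                 # placeholder recipe repo
    base_name='example-project',
    folders=['libfoo', 'libbar'],        # skips git_changed_recipes()
    matrix_base_dir='./ci-config',       # must contain config.yml
    output_dir='../output/{base_name}',  # formatted inside the function
)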
Example #12
def parse_args(args):
    p = get_render_parser()
    p.description = """
Tool for building conda packages. A conda package is a binary tarball
containing system-level libraries, Python modules, executable programs, or
other components. conda keeps track of dependencies between packages and
platform specifics, making it simple to create working environments from
different sets of packages."""
    p.add_argument(
        "--check",
        action="store_true",
        help="Only check (validate) the recipe.",
    )
    p.add_argument(
        "--no-anaconda-upload",
        action="store_false",
        help="Do not ask to upload the package to anaconda.org.",
        dest='anaconda_upload',
        default=binstar_upload,
    )
    p.add_argument(
        "--no-binstar-upload",
        action="store_false",
        help=argparse.SUPPRESS,
        dest='anaconda_upload',
        default=binstar_upload,
    )
    p.add_argument(
        "--no-include-recipe",
        action="store_false",
        help="Don't include the recipe inside the built package.",
        dest='include_recipe',
        default=cc_conda_build.get('include_recipe', 'true').lower() == 'true',
    )
    p.add_argument(
        '-s',
        "--source",
        action="store_true",
        help="Only obtain the source (but don't build).",
    )
    p.add_argument(
        '-t',
        "--test",
        action="store_true",
        help=
        "Test package (assumes package is already built).  RECIPE_DIR argument can be either "
        "recipe directory, in which case source download may be necessary to resolve package"
        "version, or path to built package .tar.bz2 file, in which case no source is necessary.",
    )
    p.add_argument(
        '--no-test',
        action='store_true',
        dest='notest',
        help="Do not test the package.",
    )
    p.add_argument(
        '-b',
        '--build-only',
        action="store_true",
        help="""Only run the build, without any post processing or
        testing. Implies --no-test and --no-anaconda-upload.""",
    )
    p.add_argument(
        '-p',
        '--post',
        action="store_true",
        help=
        "Run the post-build logic. Implies --no-test and --no-anaconda-upload.",
    )
    p.add_argument(
        'recipe',
        metavar='RECIPE_PATH',
        nargs='+',
        help="Path to recipe directory.  Pass 'purge' here to clean the "
        "work and test intermediates.",
    )
    p.add_argument(
        '--skip-existing',
        action='store_true',
        help=("Skip recipes for which there already exists an existing build"
              "(locally or in the channels)."),
        default=cc_conda_build.get('skip_existing', 'false').lower() == 'true',
    )
    p.add_argument('--keep-old-work',
                   action='store_true',
                   dest='dirty',
                   help="Deprecated.  Same as --dirty.")
    p.add_argument(
        '--dirty',
        action='store_true',
        help='Do not remove work directory or _build environment, '
        'to speed up debugging.  Does not apply patches or download source.')
    p.add_argument(
        '-q',
        "--quiet",
        action="store_true",
        help="do not display progress bar",
        default=cc_conda_build.get('quiet', 'false').lower() == 'true',
    )
    p.add_argument(
        '--debug',
        action="store_true",
        help="Show debug output from source checkouts and conda",
    )
    p.add_argument(
        '--token',
        help="Token to pass through to anaconda upload",
        default=cc_conda_build.get('anaconda_token'),
    )
    p.add_argument(
        '--user',
        help="User/organization to upload packages to on anaconda.org or pypi",
        default=cc_conda_build.get('user'),
    )
    pypi_grp = p.add_argument_group("PyPI upload parameters (twine)")
    pypi_grp.add_argument(
        '--password',
        help="password to use when uploading packages to pypi",
    )
    pypi_grp.add_argument('--sign',
                          default=False,
                          help="sign files when uploading to pypi")
    pypi_grp.add_argument(
        '--sign-with',
        default='gpg',
        dest='sign_with',
        help="program to use to sign files when uploading to pypi")
    pypi_grp.add_argument(
        '--identity',
        help="GPG identity to use to sign files when uploading to pypi")
    pypi_grp.add_argument(
        '--config-file',
        help="path to .pypirc file to use when uploading to pypi",
        default=cc_conda_build.get('pypirc'),
    )
    pypi_grp.add_argument(
        '--repository',
        '-r',
        help="PyPI repository to upload to",
        default=cc_conda_build.get('pypi_repository', 'pypitest'),
    )
    p.add_argument(
        "--no-activate",
        action="store_false",
        help="do not activate the build and test envs; just prepend to PATH",
        dest='activate',
        default=cc_conda_build.get('activate', 'true').lower() == 'true',
    )
    p.add_argument(
        "--no-build-id",
        action="store_false",
        help=
        ("do not generate unique build folder names.  Use if having issues with "
         "paths being too long."),
        dest='set_build_id',
        # note: inverted - dest stores positive logic
        default=cc_conda_build.get('set_build_id', 'true').lower() == 'true',
    )
    p.add_argument(
        "--croot",
        help=
        ("Build root folder.  Equivalent to CONDA_BLD_PATH, but applies only "
         "to this call of conda-build."))
    p.add_argument(
        "--no-verify",
        action="store_true",
        help="do not run verification on recipes or packages when building",
        default=cc_conda_build.get('no_verify', 'false').lower() == 'true',
    )
    p.add_argument(
        "--output-folder",
        help=
        ("folder to dump output package to.  Package are moved here if build or test succeeds."
         "  Destination folder must exist prior to using this."))
    p.add_argument(
        "--no-prefix-length-fallback",
        dest='prefix_length_fallback',
        action="store_false",
        help=
        ("Disable fallback to older 80 character prefix length if environment creation"
         " fails due to insufficient prefix length in dependency packages"),
        default=True,
    )
    p.add_argument(
        "--prefix-length-fallback",
        dest='prefix_length_fallback',
        action="store_true",
        help=
        ("Disable fallback to older 80 character prefix length if environment creation"
         " fails due to insufficient prefix length in dependency packages"),
        # this default will change to false in the future, when we deem that the community has
        #     had enough time to build long-prefix length packages.
        default=True,
    )
    p.add_argument(
        "--prefix-length",
        dest='_prefix_length',
        help=
        ("length of build prefix.  For packages with binaries that embed the path, this is"
         " critical to ensuring that your package can run as many places as possible.  Note"
         "that this value can be altered by the OS below conda-build (e.g. encrypted "
         "filesystems on Linux), and you should prefer to set --croot to a non-encrypted "
         "location instead, so that you maintain a known prefix length."),
        # this default will change to false in the future, when we deem that the community has
        #     had enough time to build long-prefix length packages.
        default=255,
        type=int,
    )
    p.add_argument(
        "--no-locking",
        dest='locking',
        default=True,
        action="store_false",
        help=
        ("Disable locking, to avoid unresolved race condition issues.  Unsafe to run multiple"
         "builds at once on one system with this set."))
    p.add_argument(
        "--no-remove-work-dir",
        dest='remove_work_dir',
        default=True,
        action="store_false",
        help=
        ("Disable removal of the work dir before testing.  Be careful using this option, as"
         " you package may depend on files that are not included in the package, and may pass"
         "tests, but ultimately fail on installed systems."))
    p.add_argument(
        "--long-test-prefix",
        default=True,
        action="store_false",
        help=
        ("Use a long prefix for the test prefix, as well as the build prefix.  Affects only "
         "Linux and Mac.  Prefix length matches the --prefix-length flag.  This is on by "
         "default in conda-build 3.0+"))
    p.add_argument(
        "--no-long-test-prefix",
        dest="long_test_prefix",
        action="store_false",
        help=
        ("Do not use a long prefix for the test prefix, as well as the build prefix."
         "  Affects only Linux and Mac.  Prefix length matches the --prefix-length flag.  "
         ))
    add_parser_channels(p)

    args = p.parse_args(args)
    return p, args
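Both parse_args variants return the parser together with the parsed namespace, so a caller can reuse the parser for error reporting. A minimal driver sketch:

import sys

# Hypothetical driver: parse the command line and branch on one of the
# flags defined above.
p, args = parse_args(sys.argv[1:])
if args.check:
    print("validating recipe(s):", args.recipe)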
Example #13
def parse_args(args):
    p = get_render_parser()
    p.description = """
Tool for building conda packages. A conda package is a binary tarball
containing system-level libraries, Python modules, executable programs, or
other components. conda keeps track of dependencies between packages and
platform specifics, making it simple to create working environments from
different sets of packages."""
    p.add_argument(
        "--check",
        action="store_true",
        help="Only check (validate) the recipe.",
    )
    p.add_argument(
        "--no-anaconda-upload",
        action="store_false",
        help="Do not ask to upload the package to anaconda.org.",
        dest='anaconda_upload',
        default=binstar_upload,
    )
    p.add_argument(
        "--no-binstar-upload",
        action="store_false",
        help=argparse.SUPPRESS,
        dest='anaconda_upload',
        default=binstar_upload,
    )
    p.add_argument(
        "--no-include-recipe",
        action="store_false",
        help="Don't include the recipe inside the built package.",
        dest='include_recipe',
        default=cc_conda_build.get('include_recipe', 'true').lower() == 'true',
    )
    p.add_argument(
        '-s', "--source",
        action="store_true",
        help="Only obtain the source (but don't build).",
    )
    p.add_argument(
        '-t', "--test",
        action="store_true",
        help="Test package (assumes package is already built).  RECIPE_DIR argument can be either "
        "recipe directory, in which case source download may be necessary to resolve package "
        "version, or path to built package .tar.bz2 file, in which case no source is necessary.",
    )
    p.add_argument(
        '--no-test',
        action='store_true',
        dest='notest',
        help="Do not test the package.",
    )
    p.add_argument(
        '-b', '--build-only',
        action="store_true",
        help="""Only run the build, without any post processing or
        testing. Implies --no-test and --no-anaconda-upload.""",
    )
    p.add_argument(
        '-p', '--post',
        action="store_true",
        help="Run the post-build logic. Implies --no-test and --no-anaconda-upload.",
    )
    p.add_argument(
        'recipe',
        metavar='RECIPE_PATH',
        nargs='+',
        help="Path to recipe directory.  Pass 'purge' here to clean the "
        "work and test intermediates.",
    )
    p.add_argument(
        '--skip-existing',
        action='store_true',
        help=("Skip recipes for which there already exists an existing build "
              "(locally or in the channels)."),
        default=cc_conda_build.get('skip_existing', 'false').lower() == 'true',
    )
    p.add_argument(
        '--keep-old-work',
        action='store_true',
        dest='keep_old_work',
        help="Do not remove anything from environment, even after successful "
             "build and test."
    )
    p.add_argument(
        '--dirty',
        action='store_true',
        help='Do not remove work directory or _build environment, '
        'to speed up debugging.  Does not apply patches or download source.'
    )
    p.add_argument(
        '-q', "--quiet",
        action="store_true",
        help="do not display progress bar",
        default=cc_conda_build.get('quiet', 'false').lower() == 'true',
    )
    p.add_argument(
        '--debug',
        action="store_true",
        help="Show debug output from source checkouts and conda",
    )
    p.add_argument(
        '--token',
        help="Token to pass through to anaconda upload",
        default=cc_conda_build.get('anaconda_token'),
    )
    p.add_argument(
        '--user',
        help="User/organization to upload packages to on anaconda.org or pypi",
        default=cc_conda_build.get('user'),
    )
    p.add_argument(
        '--label', action='append', dest='labels', default=[],
        help="Label argument to pass through to anaconda upload",
    )
    p.add_argument(
        '--no-force-upload',
        help="Disable force upload to anaconda.org, preventing overwriting any existing packages",
        dest='force_upload',
        default=True,
        action='store_false',
    )
    pypi_grp = p.add_argument_group("PyPI upload parameters (twine)")
    pypi_grp.add_argument(
        '--password',
        help="password to use when uploading packages to pypi",
    )
    pypi_grp.add_argument(
        '--sign', default=False,
        help="sign files when uploading to pypi"
    )
    pypi_grp.add_argument(
        '--sign-with', default='gpg', dest='sign_with',
        help="program to use to sign files when uploading to pypi"
    )
    pypi_grp.add_argument(
        '--identity',
        help="GPG identity to use to sign files when uploading to pypi"
    )
    pypi_grp.add_argument(
        '--config-file',
        help="path to .pypirc file to use when uploading to pypi",
        default=(abspath(expanduser(expandvars(cc_conda_build.get('pypirc'))))
                 if cc_conda_build.get('pypirc')
                 else cc_conda_build.get('pypirc')),
    )
    pypi_grp.add_argument(
        '--repository', '-r', help="PyPI repository to upload to",
        default=cc_conda_build.get('pypi_repository', 'pypitest'),
    )
    p.add_argument(
        "--no-activate",
        action="store_false",
        help="do not activate the build and test envs; just prepend to PATH",
        dest='activate',
        default=cc_conda_build.get('activate', 'true').lower() == 'true',
    )
    p.add_argument(
        "--no-build-id",
        action="store_false",
        help=("do not generate unique build folder names.  Use if having issues with "
              "paths being too long."),
        dest='set_build_id',
        # note: inverted - dest stores positive logic
        default=cc_conda_build.get('set_build_id', 'true').lower() == 'true',
    )
    p.add_argument(
        "--croot",
        help=("Build root folder.  Equivalent to CONDA_BLD_PATH, but applies only "
              "to this call of conda-build.")
    )
    p.add_argument(
        "--verify",
        action="store_true",
        help="run verification on recipes or packages when building",
        default=cc_conda_build.get('verify', 'true').lower() == 'true',
    )
    p.add_argument(
        "--no-verify",
        action="store_false",
        dest="verify",
        help="do not run verification on recipes or packages when building",
        default=cc_conda_build.get('verify', 'true').lower() == 'true',
    )
    p.add_argument(
        "--strict-verify",
        action="store_true",
        dest="exit_on_verify_error",
        help="Exit if any conda-verify check fail, instead of only printing them",
        default=cc_conda_build.get('exit_on_verify_error', 'false').lower() == 'true',
    )
    p.add_argument(
        "--output-folder",
        help=("folder to dump output package to.  Package are moved here if build or test succeeds."
              "  Destination folder must exist prior to using this.")
    )
    p.add_argument(
        "--no-prefix-length-fallback", dest='prefix_length_fallback',
        action="store_false",
        help=("Disable fallback to older 80 character prefix length if environment creation"
              " fails due to insufficient prefix length in dependency packages"),
        default=True,
    )
    p.add_argument(
        "--prefix-length-fallback", dest='prefix_length_fallback',
        action="store_true",
        help=("Disable fallback to older 80 character prefix length if environment creation"
              " fails due to insufficient prefix length in dependency packages"),
        # this default will change to false in the future, when we deem that the community has
        #     had enough time to build long-prefix length packages.
        default=True,
    )
    p.add_argument(
        "--prefix-length", dest='_prefix_length',
        help=("length of build prefix.  For packages with binaries that embed the path, this is"
              " critical to ensuring that your package can run as many places as possible.  Note"
              "that this value can be altered by the OS below conda-build (e.g. encrypted "
              "filesystems on Linux), and you should prefer to set --croot to a non-encrypted "
              "location instead, so that you maintain a known prefix length."),
        # this default will change to false in the future, when we deem that the community has
        #     had enough time to build long-prefix length packages.
        default=255, type=int,
    )
    p.add_argument(
        "--no-locking", dest='locking', default=True, action="store_false",
        help=("Disable locking, to avoid unresolved race condition issues.  Unsafe to run multiple "
              "builds at once on one system with this set.")
    )
    p.add_argument(
        "--no-remove-work-dir", dest='remove_work_dir', default=True, action="store_false",
        help=("Disable removal of the work dir before testing.  Be careful using this option, as"
              " you package may depend on files that are not included in the package, and may pass "
              "tests, but ultimately fail on installed systems.")
    )
    p.add_argument(
        "--error-overlinking", dest='error_overlinking', action="store_true",
        help=("Enable error when shared libraries from transitive dependencies are directly "
              "linked to any executables or shared libraries in built packages.  This is disabled "
              "by default, but will be enabled by default in conda-build 4.0."),
        default=cc_conda_build.get('error_overlinking', 'false').lower() == 'true',
    )
    p.add_argument(
        "--no-error-overlinking", dest='error_overlinking', action="store_false",
        help=("Disable error when shared libraries from transitive dependencies are directly "
              "linked to any executables or shared libraries in built packages.  This is currently "
              "the default behavior, but will change in conda-build 4.0."),
        default=cc_conda_build.get('error_overlinking', 'false').lower() == 'true',
    )
    p.add_argument(
        "--long-test-prefix", default=True, action="store_false",
        help=("Use a long prefix for the test prefix, as well as the build prefix.  Affects only "
              "Linux and Mac.  Prefix length matches the --prefix-length flag.  This is on by "
              "default in conda-build 3.0+")
    )
    p.add_argument(
        "--no-long-test-prefix", dest="long_test_prefix", action="store_false",
        help=("Do not use a long prefix for the test prefix, as well as the build prefix."
              "  Affects only Linux and Mac.  Prefix length matches the --prefix-length flag.  ")
    )
    p.add_argument(
        '--keep-going', '-k', action='store_true',
        help=("When running tests, keep going after each failure.  Default is to stop on the first "
              "failure.")
    )
    p.add_argument(
        '--cache-dir',
        help=('Path to store the source files (archives, git clones, etc.) during the build.'),
        default=(abspath(expanduser(expandvars(cc_conda_build.get('cache_dir'))))
                 if cc_conda_build.get('cache_dir')
                 else cc_conda_build.get('cache_dir')),
    )
    p.add_argument(
        "--no-copy-test-source-files", dest="copy_test_source_files", action="store_false",
        default=cc_conda_build.get('copy_test_source_files', 'true').lower() == 'true',
        help=("Disables copying the files necessary for testing the package into "
              "the info/test folder.  Passing this argument means it may not be possible "
              "to test the package without internet access.  There is also a danger that "
              "the source archive(s) containing the files could become unavailable sometime "
              "in the future.")
    )
    p.add_argument(
        '--merge-build-host', action="store_true",
        help=('Merge the build and host directories, even when host section or compiler '
              'jinja2 is present'),
        default=cc_conda_build.get('merge_build_host', 'false').lower() == 'true',
    )
    p.add_argument('--stats-file', help=('File path to save build statistics to.  Stats are '
                                         'in JSON format'), )
    p.add_argument('--extra-deps',
                   nargs='+',
                   help=('Extra dependencies to add to all environment creation steps.  This '
                         'is only enabled for testing with the -t or --test flag.  Change '
                         'meta.yaml or use templates otherwise.'), )

    add_parser_channels(p)

    args = p.parse_args(args)
    return p, args
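A pattern repeated throughout these parsers is reading boolean defaults from the condarc-backed cc_conda_build mapping, whose values arrive as strings. A small sketch of that idiom in isolation, using a stand-in dict:

# Stand-in for the condarc-backed cc_conda_build mapping; keys and
# values here are illustrative only.
condarc = {'quiet': 'True', 'verify': 'false'}

def condarc_bool(key, default='false'):
    # Mirrors the parsers above: lower-case the string, compare to 'true'.
    return condarc.get(key, default).lower() == 'true'

assert condarc_bool('quiet') is True
assert condarc_bool('verify', 'true') is False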
Example #14
File: cran.py Project: tjd2002/conda-build
def add_parser(repos):
    # for loading default variant info
    cran = repos.add_parser(
        "cran",
        help="""
    Create recipe skeleton for packages hosted on the Comprehensive R Archive
    Network (CRAN) (cran.r-project.org).
        """,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    cran.add_argument(
        "packages",
        nargs='+',
        help="""CRAN packages to create recipe skeletons for.""",
    )
    cran.add_argument(
        "--output-dir",
        help="Directory to write recipes to (default: %(default)s).",
        default=".",
    )
    cran.add_argument(
        "--output-suffix",
        help="Suffix to add to recipe dir, can contain other dirs (eg: -feedstock/recipe).",
        default="",
    )
    cran.add_argument(
        "--add-maintainer",
        help="Add this github username as a maintainer if not already present.",
    )
    cran.add_argument(
        "--version",
        help="Version to use. Applies to all packages.",
    )
    cran.add_argument(
        "--git-tag",
        help="Git tag to use for GitHub recipes.",
    )
    cran.add_argument(
        "--all-urls",
        action="store_true",
        help="""Look at all URLs, not just source URLs. Use this if it can't
                find the right URL.""",
    )
    cran.add_argument(
        "--cran-url",
        help="URL to use for as source package repository",
    )
    cran.add_argument(
        "--r-interp",
        default='r-base',
        help="Declare R interpreter package",
    )
    cran.add_argument(
        "--use-binaries-ver",
        help=("Repackage binaries from version provided by argument instead of building "
              "from source."),
    )
    cran.add_argument(
        "--use-noarch-generic",
        action='store_true',
        dest='use_noarch_generic',
        help=("Mark packages that do not need compilation as `noarch: generic`"),
    )
    cran.add_argument(
        "--use-rtools-win",
        action='store_true',
        help="Use Rtools when building from source on Windows",
    )
    cran.add_argument(
        "--recursive",
        action='store_true',
        help='Create recipes for dependencies if they do not already exist.',
    )
    cran.add_argument(
        "--no-recursive",
        action='store_false',
        dest='recursive',
        help="Don't create recipes for dependencies if they do not already exist.",
    )
    cran.add_argument(
        '--no-archive',
        action='store_false',
        dest='archive',
        help="Don't include an Archive download url.",
    )
    cran.add_argument(
        "--version-compare",
        action='store_true',
        help="""Compare the package version of the recipe with the one available
        on CRAN. Exits 1 if a newer version is available and 0 otherwise."""
    )
    cran.add_argument(
        "--update-policy",
        action='store',
        choices=('error',
                 'skip-up-to-date',
                 'skip-existing',
                 'overwrite',
                 'merge-keep-build-num',
                 'merge-incr-build-num'),
        default='error',
        help="""Dictates what to do when existing packages are encountered in the
        output directory (set by --output-dir). In the present implementation, the
        merge options avoid overwriting bld.bat and build.sh and only manage copying
        across patches, and the `build/{number,script_env}` fields. When the version
        changes, both merge options reset `build/number` to 0. When the version does
        not change they either keep the old `build/number` or else increase it by one."""
    )
    cran.add_argument(
        '-m', '--variant-config-files',
        default=cc_conda_build.get('skeleton_config_yaml', None),
        help="""Variant config file to add.  These yaml files can contain
        keys such as `cran_mirror`.  Only one can be provided here."""
    )
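add_parser expects the subparsers object of an existing argparse parser. A hedged wiring sketch follows; the top-level parser is a stand-in, and it assumes the module-level cc_conda_build mapping used above is in scope:

import argparse

# Hypothetical top-level parser standing in for the real skeleton CLI.
top = argparse.ArgumentParser(prog='conda skeleton')
repos = top.add_subparsers(dest='repo')
add_parser(repos)   # requires cc_conda_build to be importable/in scope
parsed = top.parse_args(['cran', 'ggplot2', '--output-dir', './recipes'])
print(parsed.packages)   # ['ggplot2']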
Example #15
def parse_args(parse_this=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--version',
                        action='version',
                        help='Show the conda-concourse-ci version number and exit.',
                        version='conda-concourse-ci %s' % __version__)
    sp = parser.add_subparsers(title='subcommands', dest='subparser_name')
    examine_parser = sp.add_parser('examine',
                                   help='examine path for changed recipes')
    examine_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")
    examine_parser.add_argument(
        "path",
        default='.',
        nargs='?',
        help="path in which to examine/build/test recipes")
    examine_parser.add_argument(
        '--folders',
        default=[],
        nargs="+",
        help="Rather than determine tree from git, specify folders to build")
    examine_parser.add_argument(
        '--steps',
        type=int,
        help=("Number of downstream steps to follow in the DAG when "
              "computing what to test.  Used for making sure that an "
              "update does not break downstream packages.  Set to -1 "
              "to follow the complete dependency tree."),
        default=0)
    examine_parser.add_argument(
        '--max-downstream',
        default=5,
        type=int,
        help=
        ("Limit the total number of downstream packages built.  Only applies "
         "if steps != 0.  Set to -1 for unlimited."))
    examine_parser.add_argument(
        '--git-rev',
        default='HEAD',
        help=('start revision to examine.  If stop not '
              'provided, changes are THIS_VAL~1..THIS_VAL'))
    examine_parser.add_argument(
        '--stop-rev',
        default=None,
        help=('stop revision to examine.  When provided, '
              'changes are git_rev..stop_rev'))
    examine_parser.add_argument(
        '--test',
        action='store_true',
        help='test packages (instead of building AND testing them)')
    examine_parser.add_argument(
        '--matrix-base-dir',
        help='path to matrix configuration, if different from recipe path',
        default=cc_conda_build.get('matrix_base_dir'))
    examine_parser.add_argument(
        '--output-dir',
        help="folder where output plan and recipes live",
        default='../output')
    examine_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    examine_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    examine_parser.add_argument(
        '--worker-tag',
        '-t',
        action='append',
        help="set worker tag(s) to limit where jobs will run.  Applies "
        "to all jobs.  For finer control, use extra/worker_tags in "
        "meta.yaml with selectors.",
        dest='worker_tags')
    examine_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    examine_parser.add_argument('--no-skip-existing',
                                help="Do not skip existing builds",
                                dest="skip_existing",
                                action="store_false")

    submit_parser = sp.add_parser(
        'submit', help="submit plan director to configured server")
    submit_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")
    submit_parser.add_argument('--pipeline-name',
                               help="name for the submitted pipeline",
                               default='{base_name} plan director')
    submit_parser.add_argument(
        '--pipeline-file',
        default='plan_director.yml',
        help="path to pipeline .yml file containing plan")
    submit_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions")
    submit_parser.add_argument(
        '--src-dir',
        help="folder where git repo of source code lives",
        default=os.getcwd())
    submit_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')

    bootstrap_parser = sp.add_parser(
        'bootstrap',
        help="create default configuration files to help you start")
    bootstrap_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")

    one_off_parser = sp.add_parser(
        'one-off', help="submit local recipes and plan to configured server")
    one_off_parser.add_argument(
        'pipeline_label',
        help="name of your project, to distinguish it from other projects")
    one_off_parser.add_argument(
        'folders',
        nargs="+",
        help=("Specify folders, relative to --recipe-root-dir, to upload "
              "and build"))
    one_off_parser.add_argument(
        '--recipe-root-dir',
        default=os.getcwd(),
        help="path containing recipe folders to upload")
    one_off_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    one_off_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')
    one_off_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    one_off_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    one_off_parser.add_argument(
        '--worker-tag',
        '-t',
        action='append',
        help="set worker tag(s) to limit where jobs will run.  Applies "
        "to all jobs.  For finer control, use extra/worker_tags in "
        "meta.yaml with selectors.",
        dest='worker_tags')
    one_off_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    one_off_parser.add_argument(
        '--output-dir',
        help=("folder where output plan and recipes live."
              "Defaults to temp folder.  Set to something to save output."))
    one_off_parser.add_argument(
        '--append-file',
        help=
        """Append data in meta.yaml with fields from this file.  Jinja2 is not done
        on appended fields""",
        dest='append_sections_file',
    )
    one_off_parser.add_argument(
        '--clobber-file',
        help=
        """Clobber data in meta.yaml with fields from this file.  Jinja2 is not done
        on clobbered fields.""",
        dest='clobber_sections_file',
    )
    one_off_parser.add_argument('--no-skip-existing',
                                help="Do not skip existing builds",
                                dest="skip_existing",
                                action="store_false")

    batch_parser = sp.add_parser('batch',
                                 help="submit a batch of one-off jobs.")
    batch_parser.add_argument(
        'batch_file',
        help="""File describing batch job.  Each lines defines a seperate
        one-off job.  List one or more folders on each line.  Job specific
        arguments can be specified after a ';' using param=value, multiple
        arguments are seperated by a ','.  For example:

            recipe-feedstock; channel=conda-forge,clobber_sections_file=clobber.yaml
        """)

    # batch specific arguments
    batch_parser.add_argument(
        '--max-builds',
        default=36,
        help=
        "maximum number of activate builds allowed before starting a new job")
    batch_parser.add_argument(
        '--poll-time',
        default=120,
        help=
        "time in seconds between checking concourse server for active builds")
    batch_parser.add_argument(
        '--build-lookback',
        default=500,
        help="number of builds to examine for active builds.")
    batch_parser.add_argument('--label-prefix',
                              default='autobot_',
                              help="prefix for pipeline labels.")

    # one-off arguments
    batch_parser.add_argument('--recipe-root-dir',
                              default=os.getcwd(),
                              help="path containing recipe folders to upload")
    batch_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    batch_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')
    batch_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    batch_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    batch_parser.add_argument(
        '--worker-tag',
        '-t',
        action='append',
        help="set worker tag(s) to limit where jobs will run.  Applies "
        "to all jobs.  For finer control, use extra/worker_tags in "
        "meta.yaml with selectors.",
        dest='worker_tags')
    batch_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    batch_parser.add_argument(
        '--output-dir',
        help=("folder where output plan and recipes live."
              "Defaults to temp folder.  Set to something to save output."))
    batch_parser.add_argument(
        '--append-file',
        help=
        """Append data in meta.yaml with fields from this file.  Jinja2 is not done
        on appended fields""",
        dest='append_sections_file',
    )
    batch_parser.add_argument(
        '--clobber-file',
        help=
        """Clobber data in meta.yaml with fields from this file.  Jinja2 is not done
        on clobbered fields.""",
        dest='clobber_sections_file',
    )
    batch_parser.add_argument('--no-skip-existing',
                              help="Do not skip existing builds",
                              dest="skip_existing",
                              action="store_false")

    rm_parser = sp.add_parser('rm', help='remove pipelines from server')
    rm_parser.add_argument('pipeline_names',
                           nargs="+",
                           help=("Specify pipeline names on server to remove"))
    rm_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    rm_parser.add_argument('--do-it-dammit',
                           '-y',
                           help="YOLO",
                           action="store_true")
    return parser.parse_known_args(parse_this)
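Because this variant ends with parse_known_args, unrecognized tokens are returned rather than rejected, which is presumably how pass-through flags survive to later stages. A minimal sketch:

# Hypothetical call: declared options are parsed; anything unknown is
# handed back instead of raising a usage error.
args, pass_throughs = parse_args(['examine', 'my-project', '--steps', '2',
                                  '--some-unknown-flag'])
print(args.subparser_name)   # 'examine'
print(pass_throughs)         # ['--some-unknown-flag']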
Example #16
def parse_args(parse_this=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--version',
                        action='version',
                        help='Show the conda-concourse-ci version number and exit.',
                        version='conda-concourse-ci %s' % __version__)
    sp = parser.add_subparsers(title='subcommands', dest='subparser_name')
    examine_parser = sp.add_parser('examine',
                                   help='examine path for changed recipes')
    examine_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")
    examine_parser.add_argument(
        "path",
        default='.',
        nargs='?',
        help="path in which to examine/build/test recipes")
    examine_parser.add_argument(
        '--folders',
        default=[],
        nargs="+",
        help="Rather than determine tree from git, specify folders to build")
    examine_parser.add_argument(
        '--steps',
        type=int,
        help=("Number of downstream steps to follow in the DAG when "
              "computing what to test.  Used for making sure that an "
              "update does not break downstream packages.  Set to -1 "
              "to follow the complete dependency tree."),
        default=0)
    examine_parser.add_argument(
        '--max-downstream',
        default=5,
        type=int,
        help=
        ("Limit the total number of downstream packages built.  Only applies "
         "if steps != 0.  Set to -1 for unlimited."))
    examine_parser.add_argument(
        '--git-rev',
        default='HEAD',
        help=('start revision to examine.  If stop not '
              'provided, changes are THIS_VAL~1..THIS_VAL'))
    examine_parser.add_argument(
        '--stop-rev',
        default=None,
        help=('stop revision to examine.  When provided, '
              'changes are git_rev..stop_rev'))
    examine_parser.add_argument(
        '--test',
        action='store_true',
        help='test packages (instead of building AND testing them)')
    examine_parser.add_argument(
        '--matrix-base-dir',
        help='path to matrix configuration, if different from recipe path',
        default=cc_conda_build.get('matrix_base_dir'))
    examine_parser.add_argument(
        '--output-dir',
        help="folder where output plan and recipes live",
        default='../output')
    examine_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    examine_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    examine_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )

    submit_parser = sp.add_parser(
        'submit', help="submit plan director to configured server")
    submit_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")
    submit_parser.add_argument('--pipeline-name',
                               help="name for the submitted pipeline",
                               default='{base_name} plan director')
    submit_parser.add_argument(
        '--pipeline-file',
        default='plan_director.yml',
        help="path to pipeline .yml file containing plan")
    submit_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions")
    submit_parser.add_argument(
        '--src-dir',
        help="folder where git repo of source code lives",
        default=os.getcwd())
    submit_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')

    bootstrap_parser = sp.add_parser(
        'bootstrap',
        help="create default configuration files to help you start")
    bootstrap_parser.add_argument(
        'base_name',
        help="name of your project, to distinguish it from other projects")

    one_off_parser = sp.add_parser(
        'one-off', help="submit local recipes and plan to configured server")
    one_off_parser.add_argument(
        'pipeline_label',
        help="name of your project, to distinguish it from other projects")
    one_off_parser.add_argument(
        'folders',
        nargs="+",
        help=("Specify folders, relative to --recipe-root-dir, to upload "
              "and build"))
    one_off_parser.add_argument(
        '--recipe-root-dir',
        default=os.getcwd(),
        help="path containing recipe folders to upload")
    one_off_parser.add_argument(
        '--config-root-dir',
        help="path containing config.yml and matrix definitions",
        default=cc_conda_build.get('matrix_base_dir'))
    one_off_parser.add_argument(
        '--private',
        action='store_false',
        help=
        'hide build logs (overall graph still shown in Concourse web view)',
        dest='public')
    one_off_parser.add_argument(
        '--channel',
        '-c',
        action='append',
        help="Additional channel to use when building packages")
    one_off_parser.add_argument(
        '--platform-filter',
        '-p',
        action='append',
        help="glob pattern(s) to filter build platforms.  For example, "
        "linux* will build all platform files whose filenames start with "
        "linux",
        dest='platform_filters')
    one_off_parser.add_argument(
        '-m',
        '--variant-config-files',
        action="append",
        help=
        """Additional variant config files to add.  These yaml files can contain
        keys such as `c_compiler` and `target_platform` to form a build matrix."""
    )
    one_off_parser.add_argument(
        '--output-dir',
        help=("folder where output plan and recipes live."
              "Defaults to temp folder.  Set to something to save output."),
        default=None)

    return parser.parse_args(parse_this)
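This variant calls parse_args rather than parse_known_args, so unrecognized flags abort with a usage error. A sketch under that assumption:

# Hypothetical call: strict parsing, so only options declared above are
# accepted.
args = parse_args(['one-off', 'nightly-builds', 'libfoo',
                   '--output-dir', './out'])
print(args.folders)   # ['libfoo']
print(args.public)    # True unless --private was passed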