Ejemplo n.º 1
0
def test_filter_files():
    """filter_files must drop anything inside a '.git' directory while
    keeping look-alike directory names such as 'x.git'."""
    # Paths under a '.git' directory (either path separator) are removed.
    excluded = ['.git/a', 'something/.git/a', '.git\\a', 'something\\.git\\a']
    assert not utils.filter_files(excluded, '')

    # Directories merely *ending* in '.git' are legitimate and must survive.
    # Example of valid 'x.git' directory:
    #    lib/python3.4/site-packages/craftr/stl/craftr.utils.git/Craftrfile
    kept = ['a', 'x.git/a', 'something/x.git/a',
            'x.git\\a', 'something\\x.git\\a']
    assert len(utils.filter_files(kept, '')) == len(kept)
Ejemplo n.º 2
0
def test_filter_files():
    """Ensure '.git' internals are filtered out but 'x.git' paths are not."""
    git_internal = ['.git/a', 'something/.git/a', '.git\\a', 'something\\.git\\a']
    result = utils.filter_files(git_internal, '')
    assert not result

    # A directory that merely ends with '.git' is valid, e.g.:
    #    lib/python3.4/site-packages/craftr/stl/craftr.utils.git/Craftrfile
    legit = ['a', 'x.git/a', 'something/x.git/a',
             'x.git\\a', 'something\\x.git\\a']
    assert len(utils.filter_files(legit, '')) == len(legit)
Ejemplo n.º 3
0
def test_filter_files():
    """filter_files removes VCS internals, libtool archives and conda trash
    leftovers, while keeping innocent look-alike names."""
    # Each of these must be filtered out: '.git' internals, '.la' files,
    # and '*.conda_trash*' leftovers.
    dropped = ['.git/a', 'something/.git/a', '.git\\a', 'something\\.git\\a',
               'file.la', 'something/file.la', 'python.exe.conda_trash',
               'bla.dll.conda_trash_1', 'bla.dll.conda_trash.conda_trash']
    assert not utils.filter_files(dropped, '')

    # None of these should be filtered out.
    # Example of valid 'x.git' directory:
    #    lib/python3.4/site-packages/craftr/stl/craftr.utils.git/Craftrfile
    survivors = ['a', 'x.git/a', 'something/x.git/a',
                 'x.git\\a', 'something\\x.git\\a', 'something/.gitmodules',
                 'some/template/directory/.gitignore', 'another.lab',
                 'miniconda_trashcan.py', 'conda_trash_avoider.py']
    assert len(utils.filter_files(survivors, '')) == len(survivors)
Ejemplo n.º 4
0
def test_filter_files():
    """filter_files drops VCS/libtool/trash files and keeps everything else."""
    filtered_out = [
        '.git/a', 'something/.git/a', '.git\\a', 'something\\.git\\a',
        'file.la', 'something/file.la', 'python.exe.conda_trash',
        'bla.dll.conda_trash_1', 'bla.dll.conda_trash.conda_trash',
    ]
    # Everything above should be removed, leaving nothing.
    assert len(utils.filter_files(filtered_out, '')) == 0

    # These names only *resemble* filtered patterns; all must be retained.
    # Example of valid 'x.git' directory:
    #    lib/python3.4/site-packages/craftr/stl/craftr.utils.git/Craftrfile
    retained = [
        'a', 'x.git/a', 'something/x.git/a',
        'x.git\\a', 'something\\x.git\\a', 'something/.gitmodules',
        'some/template/directory/.gitignore', 'another.lab',
        'miniconda_trashcan.py', 'conda_trash_avoider.py',
    ]
    assert len(utils.filter_files(retained, '')) == len(retained)
Ejemplo n.º 5
0
    def _get_hash_contents(self):
        """Collect the inputs that feed into this recipe's build hash.

        Returns
        -------
        tuple(HashableDict, list)
            ``composite`` holds the hash-relevant metadata (source,
            requirements, build -- plus outputs with their dynamically
            determined 'files' entries removed); ``file_paths`` is the
            sorted list of recipe files that also contribute to the hash.
        """
        sections = ['source', 'requirements', 'build']
        # make a copy of values, so that no sorting occurs in place
        composite = HashableDict({section: copy.copy(self.get_section(section))
                                  for section in sections})
        outputs = self.get_section('outputs')
        if outputs:
            outs = []
            for out in outputs:
                out = copy.copy(out)
                # files are dynamically determined, and there's no way to match them at render time.
                #    we need to exclude them from the hash.
                if 'files' in out:
                    del out['files']
                outs.append(out)
            composite.update({'outputs': [HashableDict(out) for out in outs]})

        # filter build requirements for ones that should not be in the hash
        requirements = composite.get('requirements', {})
        build_reqs = requirements.get('build', [])
        excludes = self.config.variant.get('exclude_from_build_hash', [])
        if excludes:
            # Raw string: '\s' in a non-raw literal is an invalid escape
            # sequence (SyntaxWarning, and a future SyntaxError, on modern
            # Python).  Inside the character class, '$' is a literal dollar
            # sign, not an anchor.
            exclude_pattern = re.compile('|'.join(r'{}[\s$]?.*'.format(exc) for exc in excludes))
            build_reqs = [req for req in build_reqs if not exclude_pattern.match(req)]
        requirements['build'] = build_reqs
        composite['requirements'] = requirements

        # remove the build number from the hash, so that we can bump it without changing the hash
        if 'number' in composite['build']:
            del composite['build']['number']
        # remove the build string, so that hashes don't affect themselves
        if 'string' in composite['build']:
            del composite['build']['string']
        if not composite['build']:
            del composite['build']
        # drop empty build/run requirement lists so they don't perturb the hash
        for key in 'build', 'run':
            if key in composite['requirements'] and not composite['requirements'].get(key):
                del composite['requirements'][key]
        trim_empty_keys(composite)
        file_paths = []

        if self.path:
            # A recorded list of input files (written by an earlier build)
            # takes precedence over re-globbing the recipe directory.
            recorded_input_files = os.path.join(self.path, '..', 'hash_input_files')
            if os.path.exists(recorded_input_files):
                with open(recorded_input_files) as f:
                    file_paths = f.read().splitlines()
            else:
                files = utils.rec_glob(self.path, "*")
                file_paths = sorted([f.replace(self.path + os.sep, '') for f in files])
                # exclude meta.yaml and meta.yaml.template, because the json dictionary captures
                #    their content
                file_paths = [f for f in file_paths if not f.startswith('meta.yaml')]
                file_paths = sorted(filter_files(file_paths, self.path))

        return composite, file_paths
Ejemplo n.º 6
0
def _copy_output_recipe(m, dest_dir):
    """Copy the recipe files belonging to a single output into *dest_dir*."""
    # Also record the parent (top-level) recipe under a 'parent' subdirectory.
    _copy_top_level_recipe(m.path, m.config, dest_dir, "parent")

    rendered_output = m.get_rendered_output(m.name()) or {}
    candidate_inputs = [rendered_output.get("script")]
    # Keep only inputs that are actually set, then apply the standard filters.
    recipe_files = utils.filter_files(
        [path for path in candidate_inputs if path], m.path)

    for relpath in recipe_files:
        utils.copy_into(
            join(m.path, relpath),
            join(dest_dir, relpath),
            timeout=m.config.timeout,
            locking=m.config.locking,
            clobber=True,
        )
Ejemplo n.º 7
0
def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None):
    """Copy the top-level recipe files from *path* into *dest_dir*."""
    all_files = utils.rec_glob(path, "*")
    rel_paths = sorted(f.replace(path + os.sep, "") for f in all_files)

    if destination_subdir:
        # when this actually has a value, we're copying the top-level recipe into a subdirectory,
        #    so that we have record of what parent recipe produced subpackages.
        dest_dir = join(dest_dir, destination_subdir)
    else:
        # exclude recipe.yaml because the json dictionary captures its content
        rel_paths = [
            f for f in rel_paths
            if f not in ("recipe.yaml", "conda_build_config.yaml")
        ]

    for relpath in utils.filter_files(rel_paths, path):
        utils.copy_into(
            join(path, relpath),
            join(dest_dir, relpath),
            timeout=config.timeout,
            locking=config.locking,
            clobber=True,
        )
Ejemplo n.º 8
0
def bundle_conda(metadata, initial_files, env, files_selector=None):
    """Package the files newly installed into the host prefix as a conda package.

    Parameters
    ----------
    metadata : recipe metadata object; supplies ``name()``, ``dist()`` and
        ``config`` (host prefix, output folders, package format, timeouts).
    initial_files : set of prefix-relative paths that existed before this
        output was built; only files beyond these are packaged.
    env : not referenced in this function body -- presumably kept for API
        compatibility with sibling bundlers; TODO confirm.
    files_selector : optional dict with 'include'/'exclude' glob lists used
        to narrow the packaged file set.

    Returns
    -------
    list of str
        Paths to the final package archive(s) written under the output folder.
    """

    files = post_process_files(metadata, initial_files)

    # first filter is so that info_files does not pick up ignored files
    files = utils.filter_files(files, prefix=metadata.config.host_prefix)
    if files_selector:
        files = select_files(files, files_selector.get("include"),
                             files_selector.get("exclude"))

    console.print(f"\n[yellow]Adding files for {metadata.name()}[/yellow]\n")
    if files:
        for f in sorted(files):
            console.print(f"- {f}")
    else:
        # An empty file list is legal but usually a recipe mistake, so shout.
        console.print(
            f"[red]ATTENTION: No files added in target [bold]{metadata.name()}[/bold][/red]"
        )
    console.print("\n")

    # this is also copying things like run_test.sh into info/recipe
    utils.rm_rf(os.path.join(metadata.config.info_dir, "test"))

    output = {}

    # create_info_files works with paths relative to the prefix, hence chdir.
    with tmp_chdir(metadata.config.host_prefix):
        output["checksums"] = create_info_files(
            metadata, files, prefix=metadata.config.host_prefix)

    # here we add the info files into the prefix, so we want to re-collect the files list
    prefix_files = set(utils.prefix_files(metadata.config.host_prefix))
    files = utils.filter_files(prefix_files - initial_files,
                               prefix=metadata.config.host_prefix)
    if files_selector:
        # info/* must always ship, even under a restrictive include selector.
        include_files = files_selector.get("include")
        if include_files:
            include_files += ["info/*"]
        files = select_files(files, include_files,
                             files_selector.get("exclude"))

    basename = metadata.dist()
    tmp_archives = []
    final_outputs = []
    # Archive extension: legacy .tar.bz2 unless the v2 (.conda) format is
    # requested via output type or config.
    ext = ".tar.bz2"
    if output.get(
            "type") == "conda_v2" or metadata.config.conda_pkg_format == "2":
        ext = ".conda"

    with TemporaryDirectory() as tmp:
        conda_package_handling.api.create(metadata.config.host_prefix,
                                          files,
                                          basename + ext,
                                          out_folder=tmp)
        tmp_archives = [os.path.join(tmp, basename + ext)]

        # we're done building, perform some checks
        for tmp_path in tmp_archives:
            #     if tmp_path.endswith('.tar.bz2'):
            #         tarcheck.check_all(tmp_path, metadata.config)
            output_filename = os.path.basename(tmp_path)

            #     # we do the import here because we want to respect logger level context
            #     try:
            #         from conda_verify.verify import Verify
            #     except ImportError:
            #         Verify = None
            #         log.warn("Importing conda-verify failed.  Please be sure to test your packages.  "
            #             "conda install conda-verify to make this message go away.")
            #     if getattr(metadata.config, "verify", False) and Verify:
            #         verifier = Verify()
            #         checks_to_ignore = (utils.ensure_list(metadata.config.ignore_verify_codes) +
            #                             metadata.ignore_verify_codes())
            #         try:
            #             verifier.verify_package(path_to_package=tmp_path, checks_to_ignore=checks_to_ignore,
            #                                     exit_on_error=metadata.config.exit_on_verify_error)
            #         except KeyError as e:
            #             log.warn("Package doesn't have necessary files.  It might be too old to inspect."
            #                      "Legacy noarch packages are known to fail.  Full message was {}".format(e))
            # target_subdir only exists on cross-compiling configs; fall back
            # to the host subdir otherwise.
            try:
                crossed_subdir = metadata.config.target_subdir
            except AttributeError:
                crossed_subdir = metadata.config.host_subdir
            subdir = ("noarch" if (metadata.noarch or metadata.noarch_python)
                      else crossed_subdir)
            if metadata.config.output_folder:
                output_folder = os.path.join(metadata.config.output_folder,
                                             subdir)
            else:
                output_folder = os.path.join(
                    os.path.dirname(metadata.config.bldpkgs_dir), subdir)
            final_output = os.path.join(output_folder, output_filename)
            if os.path.isfile(final_output):
                utils.rm_rf(final_output)

            # disable locking here. It's just a temp folder getting locked.
            # Having it proved a major bottleneck.
            utils.copy_into(tmp_path,
                            final_output,
                            metadata.config.timeout,
                            locking=False)
            final_outputs.append(final_output)

    # Refresh the channel index so the new package is immediately resolvable.
    update_index(os.path.dirname(output_folder),
                 verbose=metadata.config.debug,
                 threads=1)

    # clean out host prefix so that this output's files don't interfere with other outputs
    # We have a backup of how things were before any output scripts ran.  That's
    # restored elsewhere.
    if metadata.config.keep_old_work:
        prefix = metadata.config.host_prefix
        dest = os.path.join(
            os.path.dirname(prefix),
            "_".join(("_h_env_moved", metadata.dist(),
                      metadata.config.host_subdir)),
        )
        console.print("Renaming host env directory, ", prefix, " to ", dest)
        if os.path.exists(dest):
            utils.rm_rf(dest)
        shutil.move(prefix, dest)
    else:
        utils.rm_rf(metadata.config.host_prefix)

    return final_outputs
Ejemplo n.º 9
0
def post_process_files(m, initial_prefix_files):
    """Run post-build processing over files newly added to the host prefix.

    Parameters
    ----------
    m : recipe metadata object; supplies config, noarch flags and
        ``get_value`` accessors for build settings.
    initial_prefix_files : set of prefix paths present before the build;
        the difference against the current prefix defines the new files.

    Returns
    -------
    set
        The prefix files added by the build (re-collected after all
        post-processing, so deletions/additions made here are reflected).
    """
    get_build_metadata(m)
    create_post_scripts(m)

    # NOTE(review): entry points are created here only when the package is
    # *not* 'noarch: python' -- the noarch-python path handles its entry
    # points via noarch_python.populate_files further down.
    if m.noarch != "python":
        utils.create_entry_points(m.get_value("build/entry_points"),
                                  config=m.config)
    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)

    # Prefer the build python when it exists on disk (e.g. cross builds);
    # otherwise fall back to the host python.
    python = (m.config.build_python if os.path.isfile(m.config.build_python)
              else m.config.host_python)
    post_process(
        m.get_value("package/name"),
        m.get_value("package/version"),
        sorted(current_prefix_files - initial_prefix_files),
        prefix=m.config.host_prefix,
        config=m.config,
        preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")),
        noarch=m.get_value("build/noarch"),
        skip_compile_pyc=m.get_value("build/skip_compile_pyc"),
    )

    # The post processing may have deleted some files (like easy-install.pth)
    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)
    new_files = sorted(current_prefix_files - initial_prefix_files)
    new_files = utils.filter_files(new_files, prefix=m.config.host_prefix)

    # Any new file landing in conda-meta means conda was run inside the build
    # script -- refuse to package such a build.
    host_prefix = m.config.host_prefix
    meta_dir = m.config.meta_dir
    if any(meta_dir in join(host_prefix, f) for f in new_files):
        meta_files = (tuple(
            f for f in new_files
            if m.config.meta_dir in join(m.config.host_prefix, f)), )
        sys.exit(
            indent(
                """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
    post_build(m, new_files, build_python=python)

    # For noarch-python, entry point scripts are generated at install time,
    # so exclude them from the packaged file list.
    entry_point_script_names = get_entry_point_script_names(
        m.get_value("build/entry_points"))
    if m.noarch == "python":
        pkg_files = [
            fi for fi in new_files if fi not in entry_point_script_names
        ]
    else:
        pkg_files = new_files

    # the legacy noarch
    if m.get_value("build/noarch_python"):
        noarch_python.transform(m, new_files, m.config.host_prefix)
    # new way: build/noarch: python
    elif m.noarch == "python":
        noarch_python.populate_files(m, pkg_files, m.config.host_prefix,
                                     entry_point_script_names)

    # Re-collect once more: the noarch transforms above may move files around.
    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)
    new_files = current_prefix_files - initial_prefix_files
    fix_permissions(new_files, m.config.host_prefix)

    return new_files