def create_files(dir_path, m, config):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False if the package has no tests (for any configuration), and
    True if it does.
    """
    has_files = False
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        copy_into(path, join(dir_path, fn), config.timeout)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(config.work_dir):
        source.provide(m.path, m.get_section('source'), config=config)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                                "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(config.work_dir, pattern))
        if not files:
            raise RuntimeError("Did not find any source_files for test with pattern %s", pattern)
        for f in files:
            copy_into(f, f.replace(config.work_dir, config.test_dir), config.timeout)
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(config.test_dir, ext):
                os.remove(f)
    return has_files
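For experimenting with the glob-then-copy pattern above outside of conda-build, here is a self-contained stdlib sketch (Python 3.8+; shutil stands in for copy_into, and the helper name is made up for illustration):

import glob
import os
import shutil

def copy_matching(work_dir, test_dir, pattern):
    # glob relative to the work dir, then mirror each match into test_dir
    matches = glob.glob(os.path.join(work_dir, pattern))
    if not matches:
        raise RuntimeError("Did not find any source_files for test with pattern %s" % pattern)
    for src in matches:
        dst = src.replace(work_dir, test_dir)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        if os.path.isdir(src):
            shutil.copytree(src, dst, dirs_exist_ok=True)
        else:
            shutil.copy2(src, dst)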
Example #2
def test_new_pkg_format_stat_cache_used(testing_workdir, mocker):
    # if we have old .tar.bz2 index cache stuff, assert that we pick up the correct md5, sha256 and size for the .conda
    test_package_path = join(testing_workdir, 'osx-64', 'conda-index-pkg-a-1.0-py27h5e241af_0')
    copy_into(os.path.join(archive_dir, 'conda-index-pkg-a-1.0-py27h5e241af_0' + '.tar.bz2'), test_package_path + '.tar.bz2')
    conda_build.index.update_index(testing_workdir, channel_name='test-channel')

    # mock the extract function, so that we can assert that it is not called, because the stat cache should exist
    #    if this doesn't work, something about the stat cache is confused.  It's a little convoluted, because
    #    the index has keys for .tar.bz2's, but the stat cache comes from .conda files when they are available
    #    because extracting them is much, much faster.
    copy_into(os.path.join(archive_dir, 'conda-index-pkg-a-1.0-py27h5e241af_0' + '.conda'), test_package_path + '.conda')
    cph_extract = mocker.spy(conda_package_handling.api, 'extract')
    conda_build.index.update_index(testing_workdir, channel_name='test-channel', debug=True)
    cph_extract.assert_not_called()

    with open(join(testing_workdir, 'osx-64', 'repodata.json')) as fh:
        actual_repodata_json = json.loads(fh.read())

    expected_repodata_json = {
        "info": {
            'subdir': 'osx-64',
        },
        "packages": {
            "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": {
                "build": "py27h5e241af_0",
                "build_number": 0,
                "depends": [
                    "python >=2.7,<2.8.0a0"
                ],
                "license": "BSD",
                "md5": "37861df8111170f5eed4bff27868df59",
                "name": "conda-index-pkg-a",
                "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30",
                "size": 8733,
                "subdir": "osx-64",
                "timestamp": 1508520039632,
                "version": "1.0",
            },
        },
        "packages.conda": {
            "conda-index-pkg-a-1.0-py27h5e241af_0.conda": {
                "build": "py27h5e241af_0",
                "build_number": 0,
                "depends": [
                    "python >=2.7,<2.8.0a0"
                ],
                "license": "BSD",
                "md5": "4ed4b435f400dac1aabdc1fff06f78ff",
                "name": "conda-index-pkg-a",
                "sha256": "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19",
                "size": 9296,
                "subdir": "osx-64",
                "timestamp": 1508520039632,
                "version": "1.0",
            },
        },
        "removed": [],
        "repodata_version": 1,
    }
    assert actual_repodata_json == expected_repodata_json
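The md5/sha256/size triple asserted in the expected repodata can be recomputed with the stdlib alone; a small sketch:

import hashlib
import os

def file_stats(path):
    # Stream the file once, feeding both digests, to avoid loading it whole.
    md5, sha256 = hashlib.md5(), hashlib.sha256()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b''):
            md5.update(chunk)
            sha256.update(chunk)
    return md5.hexdigest(), sha256.hexdigest(), os.path.getsize(path)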
Example #3
def copy_recipe(m, config):
    if config.include_recipe and m.include_recipe():
        recipe_dir = join(config.info_dir, 'recipe')
        os.makedirs(recipe_dir)

        if os.path.isdir(m.path):
            for fn in os.listdir(m.path):
                if fn.startswith('.'):
                    continue
                src_path = join(m.path, fn)
                dst_path = join(recipe_dir, fn)
                copy_into(src_path, dst_path, timeout=config.timeout)

            # store the rendered meta.yaml file, plus information about where it came from
            #    and what version of conda-build created it
            original_recipe = os.path.join(m.path, 'meta.yaml')
        else:
            original_recipe = ""

        rendered = output_yaml(m)
        if not original_recipe or open(original_recipe).read() != rendered:
            with open(join(recipe_dir, "meta.yaml"), 'w') as f:
                f.write("# This file created by conda-build {}\n".format(__version__))
                if original_recipe:
                    f.write("# meta.yaml template originally from:\n")
                    f.write("# " + source.get_repository_info(m.path) + "\n")
                f.write("# ------------------------------------------------\n\n")
                f.write(rendered)
            if original_recipe:
                copy_into(original_recipe, os.path.join(recipe_dir, 'meta.yaml.template'),
                          timeout=config.timeout)
Example #4
def create_shell_files(dir_path, m, config):
    has_tests = False
    ext = '.bat' if sys.platform == 'win32' else '.sh'
    name = 'no-file'

    for out in m.meta.get('outputs', []):
        if m.name() == out['name']:
            out_test_script = out.get('test', {}).get('script', 'no-file')
            if os.path.splitext(out_test_script)[1].lower() == ext:
                name = out_test_script
                break
    else:
        name = "run_test{}".format(ext)

    if exists(join(m.path, name)):
        copy_into(join(m.path, name), dir_path, config.timeout)
        has_tests = True

    with open(join(dir_path, name), 'a') as f:
        f.write('\n\n')
        for cmd in ensure_list(m.get_value('test/commands', [])):
            f.write(cmd)
            f.write('\n')
            if sys.platform == 'win32':
                f.write("if errorlevel 1 exit 1\n")
            has_tests = True

    return has_tests
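The for/else at the top of this function is easy to misread: the else branch runs only when the loop finishes without hitting break. A runnable distillation of that selection logic (the function name is hypothetical):

def pick_test_script(outputs, pkg_name, ext):
    name = 'no-file'
    for out in outputs:
        if out.get('name') == pkg_name:
            script = out.get('test', {}).get('script', 'no-file')
            if script.endswith(ext):
                name = script
                break
    else:  # the loop never broke: fall back to the conventional name
        name = "run_test{}".format(ext)
    return name

assert pick_test_script([], 'pkg', '.sh') == 'run_test.sh'
assert pick_test_script([{'name': 'pkg', 'test': {'script': 't.sh'}}], 'pkg', '.sh') == 't.sh'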
Example #5
def provide(metadata):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    meta = metadata.get_section('source')
    if not os.path.isdir(metadata.config.build_folder):
        os.makedirs(metadata.config.build_folder)
    git = None

    if hasattr(meta, 'keys'):
        dicts = [meta]
    else:
        dicts = meta

    try:
        for source_dict in dicts:
            folder = source_dict.get('folder')
            src_dir = (os.path.join(metadata.config.work_dir, folder) if folder else
                    metadata.config.work_dir)
            if any(k in source_dict for k in ('fn', 'url')):
                unpack(source_dict, src_dir, metadata.config.src_cache, recipe_path=metadata.path,
                    croot=metadata.config.croot, verbose=metadata.config.verbose,
                    timeout=metadata.config.timeout, locking=metadata.config.locking)
            elif 'git_url' in source_dict:
                git = git_source(source_dict, metadata.config.git_cache, src_dir, metadata.path,
                                verbose=metadata.config.verbose)
            # build to make sure we have a work directory with source in it. We
            #    want to make sure that whatever version that is does not
            #    interfere with the test we run next.
            elif 'hg_url' in source_dict:
                hg_source(source_dict, src_dir, metadata.config.hg_cache,
                        verbose=metadata.config.verbose)
            elif 'svn_url' in source_dict:
                svn_source(source_dict, src_dir, metadata.config.svn_cache,
                        verbose=metadata.config.verbose, timeout=metadata.config.timeout,
                        locking=metadata.config.locking)
            elif 'path' in source_dict:
                path = normpath(abspath(join(metadata.path, source_dict['path'])))
                if metadata.config.verbose:
                    print("Copying %s to %s" % (path, src_dir))
                # careful here: we set test path to be outside of conda-build root in setup.cfg.
                #    If you don't do that, this is a recursive function
                copy_into(path, src_dir, metadata.config.timeout, symlinks=True,
                        locking=metadata.config.locking, clobber=True)
            else:  # no source
                if not isdir(src_dir):
                    os.makedirs(src_dir)

            patches = ensure_list(source_dict.get('patches', []))
            for patch in patches:
                apply_patch(src_dir, join(metadata.path, patch), metadata.config, git)

    except CalledProcessError:
        os.rename(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide')
        raise

    return metadata.config.work_dir
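The chain of elif branches above is a first-match dispatch on the keys of each source dict; reduced to its skeleton (the handler labels are placeholders, not conda-build API):

HANDLERS = [
    (('fn', 'url'), 'unpack'),
    (('git_url',), 'git'),
    (('hg_url',), 'hg'),
    (('svn_url',), 'svn'),
    (('path',), 'local-copy'),
]

def classify(source_dict):
    # The first entry whose keys intersect the source dict wins.
    for keys, handler in HANDLERS:
        if any(k in source_dict for k in keys):
            return handler
    return 'no-source'

assert classify({'url': 'https://example.com/x.tar.gz'}) == 'unpack'
assert classify({'git_url': 'https://example.com/r.git'}) == 'git'
assert classify({}) == 'no-source'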
Example #6
def create_shell_files(m):
    has_tests = False
    ext = '.bat' if sys.platform == 'win32' else '.sh'
    name = 'no-file'

    # the way this works is that each output needs to explicitly define a test script to run.
    #   They do not automatically pick up run_test.*, but can be pointed at that explicitly.
    for out in m.meta.get('outputs', []):
        if m.name() == out.get('name'):
            out_test_script = out.get('test', {}).get('script', 'no-file')
            if os.path.splitext(out_test_script)[1].lower() == ext:
                name = out_test_script
                break
    else:
        name = "run_test{}".format(ext)

    if exists(join(m.path, name)):
        copy_into(join(m.path, name), m.config.test_dir, m.config.timeout, locking=m.config.locking)
        has_tests = True

    commands = ensure_list(m.get_value('test/commands', []))
    if commands:
        with open(join(m.config.test_dir, name), 'a') as f:
            f.write('\n\n')
            for cmd in commands:
                f.write(cmd)
                f.write('\n')
                if sys.platform == 'win32':
                    f.write("if errorlevel 1 exit 1\n")
                has_tests = True

    return has_tests
Example #7
def create_files(m, test_dir=None):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False if the package has no tests (for any configuration), and
    True if it does.
    """
    if not test_dir:
        test_dir = m.config.test_dir
    has_files = False
    if not os.path.isdir(test_dir):
        os.makedirs(test_dir)

    recipe_dir = m.path or m.meta.get('extra', {}).get('parent_recipe', {}).get('path')

    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(recipe_dir, fn)
        if not os.path.isdir(os.path.dirname(path)):
            os.makedirs(os.path.dirname(path))
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(path, join(test_dir, fn), m.config.timeout, locking=False,
                  clobber=True)
    return has_files
Example #8
def test_skip_existing_url(testing_metadata, testing_workdir, capfd):
    # make sure that it is built
    outputs = api.build(testing_metadata)

    # Copy our package into some new folder
    output_dir = os.path.join(testing_workdir, 'someoutput')
    platform = os.path.join(output_dir, testing_metadata.config.host_subdir)
    os.makedirs(platform)
    copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0])))

    # create the index so conda can find the file
    api.update_index(platform, config=testing_metadata.config)

    # HACK: manually create noarch location there, so that conda 4.3.2+ considers a valid channel
    noarch = os.path.join(output_dir, 'noarch')
    os.makedirs(noarch)
    api.update_index(noarch, config=testing_metadata.config)

    testing_metadata.config.skip_existing = True
    testing_metadata.config.channel_urls = [url_path(output_dir)]

    api.build(testing_metadata)

    output, error = capfd.readouterr()
    assert "are already built" in output
    assert url_path(testing_metadata.config.croot) in output
Example #9
def unpack(source_dict, src_dir, cache_folder, recipe_path, croot, verbose=False,
           timeout=90, locking=True):
    ''' Uncompress a downloaded source. '''
    src_path, unhashed_fn = download_to_cache(cache_folder, recipe_path, source_dict)

    if not isdir(src_dir):
        os.makedirs(src_dir)
    if verbose:
        print("Extracting download")
    with TemporaryDirectory(dir=croot) as tmpdir:
        unhashed_dest = os.path.join(tmpdir, unhashed_fn)
        if src_path.lower().endswith(decompressible_exts):
            tar_xf(src_path, tmpdir)
        else:
            # In this case, the build script will need to deal with unpacking the source
            print("Warning: Unrecognized source format. Source file will be copied to the SRC_DIR")
            copy_into(src_path, unhashed_dest, timeout, locking=locking)
        if src_path.lower().endswith('.whl'):
            # copy wheel itself *and* unpack it
            # This allows test_files or about.license_file to locate files in the wheel,
            # as well as `pip install name-version.whl` as install command
            copy_into(src_path, unhashed_dest, timeout, locking=locking)
        flist = os.listdir(tmpdir)
        if len(flist) == 1 and os.path.isdir(os.path.join(tmpdir, flist[0])):
            hoist_single_extracted_folder(os.path.join(tmpdir, flist[0]))
        flist = os.listdir(tmpdir)
        for f in flist:
            shutil.move(os.path.join(tmpdir, f), os.path.join(src_dir, f))
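The extract-into-a-temporary-directory-then-move dance above keeps src_dir clean if extraction fails halfway. A minimal stdlib equivalent, assuming a plain tar archive:

import os
import shutil
import tarfile
from tempfile import TemporaryDirectory

def safe_extract(archive, src_dir):
    os.makedirs(src_dir, exist_ok=True)
    with TemporaryDirectory() as tmpdir:
        with tarfile.open(archive) as t:
            t.extractall(tmpdir)  # a failure here leaves src_dir untouched
        for name in os.listdir(tmpdir):
            shutil.move(os.path.join(tmpdir, name), os.path.join(src_dir, name))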
Example #10
def fix_staged_scripts(scripts_dir, config):
    """
    Fixes scripts which have been installed unix-style to have a .bat
    helper
    """
    if not isdir(scripts_dir):
        return
    for fn in os.listdir(scripts_dir):
        # process all the extensionless files
        if not isfile(join(scripts_dir, fn)) or '.' in fn:
            continue

        # read as binary file to ensure we don't run into encoding errors, see #1632
        with open(join(scripts_dir, fn), 'rb') as f:
            line = f.readline()
            # If it's a #!python script
            if not (line.startswith(b'#!') and b'python' in line.lower()):
                continue
            print('Adjusting unix-style #! script %s, '
                  'and adding a .bat file for it' % fn)
            # copy it with a .py extension (skipping that first #! line)
            with open(join(scripts_dir, fn + '-script.py'), 'wb') as fo:
                fo.write(f.read())
            # now create the .exe file
            copy_into(join(dirname(__file__), 'cli-%s.exe' % config.host_arch),
                            join(scripts_dir, fn + '.exe'))

        # remove the original script
        os.remove(join(scripts_dir, fn))
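The shebang detection above can be isolated into a tiny helper; reading in binary mode sidesteps the encoding errors the comment refers to:

def is_python_shebang_script(path):
    # Only the first line matters: b'#!' at the start plus 'python' anywhere in it.
    with open(path, 'rb') as f:
        first = f.readline()
    return first.startswith(b'#!') and b'python' in first.lower()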
Example #11
def fix_staged_scripts(scripts_dir):
    """
    Fixes scripts which have been installed unix-style to have a .bat
    helper
    """
    if not isdir(scripts_dir):
        return
    for fn in os.listdir(scripts_dir):
        # process all the extensionless files
        if not isfile(join(scripts_dir, fn)) or '.' in fn:
            continue

        with open(join(scripts_dir, fn)) as f:
            line = bs4.UnicodeDammit(f.readline()).unicode_markup
            # If it's a #!python script
            if not (line.startswith('#!') and 'python' in line.lower()):
                continue
            print('Adjusting unix-style #! script %s, '
                  'and adding a .bat file for it' % fn)
            # copy it with a .py extension (skipping that first #! line)
            with open(join(scripts_dir, fn + '-script.py'), 'w') as fo:
                fo.write(f.read())
            # now create the .exe file
            copy_into(join(dirname(__file__), 'cli-%d.exe' % bits),
                            join(scripts_dir, fn + '.exe'))

        # remove the original script
        os.remove(join(scripts_dir, fn))
Example #12
def test_merge_namespace_trees(namespace_setup):
    dep = os.path.join(namespace_setup, 'other_tree', 'namespace', 'package', 'dependency.py')
    makefile(dep)

    utils.copy_into(os.path.join(namespace_setup, 'other_tree'), namespace_setup)
    assert os.path.isfile(os.path.join(namespace_setup, 'namespace', 'package',
                                                'module.py'))
    assert os.path.isfile(dep)
Example #13
    def return_to_saved_path():
        if os.path.isdir(os.path.join(saved_path, 'prof')):
            profdir = tmpdir.join('prof')
            files = profdir.listdir('*.prof') if profdir.isdir() else []

            for f in files:
                copy_into(str(f), os.path.join(saved_path, 'prof', f.basename))
        os.chdir(saved_path)
Example #14
def create_info_files(m, files, config, prefix):
    '''
    Creates the metadata files that will be stored in the built package.

    :param m: Package metadata
    :type m: Metadata
    :param files: Paths to files to include in package
    :type files: list of str
    '''

    copy_recipe(m, config)
    copy_readme(m, config)
    copy_license(m, config)

    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}

    write_info_json(m, config, mode_dict)
    write_about_json(m, config)

    entry_point_scripts = get_entry_points(config, m)

    if is_noarch_python(m):
        noarch_python.create_entry_point_information(
            "python", entry_point_scripts, config
        )

    entry_point_script_names = get_entry_point_script_names(entry_point_scripts)

    if on_win:
        # make sure we use '/' path separators in metadata
        files = [_f.replace('\\', '/') for _f in files]

    with open(join(config.info_dir, 'files'), **mode_dict) as fo:
        if m.get_value('build/noarch_python'):
            fo.write('\n')
        elif is_noarch_python(m):
            for f in files:
                if f.find("site-packages") > 0:
                    fo.write(f[f.find("site-packages"):] + '\n')
                elif f.startswith("bin") and (f not in entry_point_script_names):
                    fo.write(f.replace("bin", "python-scripts") + '\n')
                elif f.startswith("Scripts") and (f not in entry_point_script_names):
                    fo.write(f.replace("Scripts", "python-scripts") + '\n')
        else:
            for f in files:
                fo.write(f + '\n')

    detect_and_record_prefix_files(m, files, prefix, config)
    write_no_link(m, config, files)

    if m.get_value('source/git_url'):
        with io.open(join(config.info_dir, 'git'), 'w', encoding='utf-8') as fo:
            source.git_info(config, fo)

    if m.get_value('app/icon'):
        copy_into(join(m.path, m.get_value('app/icon')),
                        join(config.info_dir, 'icon.png'),
                  config.timeout)
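The noarch branch above rewrites prefix-relative paths when writing the info/files manifest; the same mapping as a pure function, with illustrative sample inputs (these names are not conda-build API):

def noarch_rewrite(f, entry_point_script_names):
    idx = f.find("site-packages")
    if idx > 0:
        return f[idx:]
    if f.startswith("bin") and f not in entry_point_script_names:
        return f.replace("bin", "python-scripts")
    if f.startswith("Scripts") and f not in entry_point_script_names:
        return f.replace("Scripts", "python-scripts")
    return None  # files matching no rule are not listed

assert (noarch_rewrite("lib/python2.7/site-packages/pkg/__init__.py", [])
        == "site-packages/pkg/__init__.py")
assert noarch_rewrite("bin/mytool", []) == "python-scripts/mytool"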
Example #15
    def test_merge_namespace_trees(self):

        dep = os.path.join(self.dst, 'namespace', 'package', 'dependency.py')
        self.makefile(dep)

        utils.copy_into(self.src, self.dst)
        self.assertTrue(os.path.isfile(os.path.join(self.dst, 'namespace', 'package',
                                                    'module.py')))
        self.assertTrue(os.path.isfile(dep))
Example #16
def test_copy_read_only_file_with_xattr(testing_config, testing_workdir):
    src_recipe = os.path.join(metadata_dir, '_xattr_copy')
    recipe = os.path.join(testing_workdir, '_xattr_copy')
    copy_into(src_recipe, recipe)
    # file is r/w for owner, but we change it to 400 after setting the attribute
    ro_file = os.path.join(recipe, 'mode_400_file')
    subprocess.check_call('setfattr -n user.attrib -v somevalue {}'.format(ro_file), shell=True)
    subprocess.check_call('chmod 400 {}'.format(ro_file), shell=True)
    api.build(recipe, config=testing_config)
Example #17
def copy_readme(m, config):
    readme = m.get_value('about/readme')
    if readme:
        src = join(config.work_dir, readme)
        if not isfile(src):
            sys.exit("Error: no readme file: %s" % readme)
        dst = join(config.info_dir, readme)
        copy_into(src, dst, config.timeout)
        if os.path.split(readme)[1] not in {"README.md", "README.rst", "README"}:
            print("WARNING: anaconda.org only recognizes about/readme "
                  "as README.md and README.rst", file=sys.stderr)
Example #18
def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    absfiles = [os.path.join(prefix, f) for f in files]
    py_ver = '.'.join(config.variant['python'].split('.')[:2])
    sp_dir = utils.get_site_packages(prefix, py_ver)
    for egg_path in glob(os.path.join(sp_dir, '*-py*.egg')):
        if os.path.isdir(egg_path):
            if preserve_egg_dir or not any(os.path.join(egg_path, i) in absfiles for i
                    in walk_prefix(egg_path, False, windows_forward_slashes=False)):
                write_pth(egg_path, config=config)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(os.path.join(egg_path, 'EGG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(os.path.join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(os.path.join(egg_path, fn))
                else:
                    # this might be a name-space package
                    # so the package directory already exists
                    # from another installed dependency
                    if os.path.exists(os.path.join(sp_dir, fn)):
                        try:
                            utils.copy_into(os.path.join(egg_path, fn),
                                            os.path.join(sp_dir, fn), config.timeout,
                                            locking=config.locking)
                            utils.rm_rf(os.path.join(egg_path, fn))
                        except IOError as e:
                            fn = os.path.basename(str(e).split()[-1])
                            raise IOError("Tried to merge folder {egg_path} into {sp_dir}, but {fn}"
                                          " exists in both locations.  Please either add "
                                          "build/preserve_egg_dir: True to meta.yaml, or manually "
                                          "remove the file during your install process to avoid "
                                          "this conflict."
                                          .format(egg_path=egg_path, sp_dir=sp_dir, fn=fn))
                    else:
                        os.rename(os.path.join(egg_path, fn), os.path.join(sp_dir, fn))

        elif os.path.isfile(egg_path):
            if egg_path not in absfiles:
                continue
            print('found egg:', egg_path)
            write_pth(egg_path, config=config)

    utils.rm_rf(os.path.join(sp_dir, 'easy-install.pth'))
Example #19
def make_hardlink_copy(path, prefix):
    """Hardlinks create invalid packages.  Copy files to break the link.
    Symlinks are OK, and unaffected here."""
    if not os.path.isabs(path):
        path = os.path.normpath(os.path.join(prefix, path))
    fn = os.path.basename(path)
    if os.lstat(path).st_nlink > 1:
        with TemporaryDirectory() as dest:
            # copy file to new name
            utils.copy_into(path, dest)
            # remove old file
            utils.rm_rf(path)
            # rename copy to original filename
            #   It is essential here to use copying (as opposed to os.rename), so that
            #        crossing volume boundaries works
            utils.copy_into(os.path.join(dest, fn), path)
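The st_nlink check that drives this function is plain os machinery; a self-contained demonstration:

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    a = os.path.join(d, 'a')
    b = os.path.join(d, 'b')
    open(a, 'w').close()
    os.link(a, b)                     # second hard link to the same inode
    assert os.lstat(a).st_nlink == 2  # both names now count toward st_nlink
    os.remove(b)
    assert os.lstat(a).st_nlink == 1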
Example #20
def unpack(meta, config):
    """ Uncompress a downloaded source. """
    src_path = download_to_cache(meta, config)

    if not isdir(config.work_dir):
        os.makedirs(config.work_dir)
    if config.verbose:
        print("Extracting download")
    if src_path.lower().endswith((".tar.gz", ".tar.bz2", ".tgz", ".tar.xz", ".tar", "tar.z")):
        tar_xf(src_path, get_dir(config))
    elif src_path.lower().endswith(".zip"):
        unzip(src_path, get_dir(config))
    else:
        # In this case, the build script will need to deal with unpacking the source
        print("Warning: Unrecognized source format. Source file will be copied to the SRC_DIR")
        copy_into(src_path, get_dir(config), config.timeout)
Example #21
def create_post_scripts(m, config):
    '''
    Create scripts to run after build step
    '''
    recipe_dir = m.path
    ext = '.bat' if on_win else '.sh'
    for tp in 'pre-link', 'post-link', 'pre-unlink':
        src = join(recipe_dir, tp + ext)
        if not isfile(src):
            continue
        dst_dir = join(config.build_prefix,
                       'Scripts' if on_win else 'bin')
        if not isdir(dst_dir):
            os.makedirs(dst_dir, int('755', 8))
        dst = join(dst_dir, '.%s-%s%s' % (m.name(), tp, ext))
        copy_into(src, dst, config.timeout)
        os.chmod(dst, int('755', 8))
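A side note on the mode argument: int('755', 8) is just the Python-2-compatible spelling of the octal literal 0o755.

assert int('755', 8) == 0o755 == 493  # rwxr-xr-x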
Example #22
def test_skip_existing_url(testing_workdir, test_config, capfd):
    # make sure that it is built
    api.build(empty_sections, config=test_config)
    output_file = os.path.join(test_config.croot, test_config.subdir, "empty_sections-0.0-0.tar.bz2")

    platform = os.path.join(testing_workdir, test_config.subdir)
    copy_into(output_file, os.path.join(platform, os.path.basename(output_file)))

    # create the index so conda can find the file
    api.update_index(platform, config=test_config)

    api.build(os.path.join(metadata_dir, "empty_sections"), skip_existing=True,
              config=test_config, channel_urls=[url_path(testing_workdir)])

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_config.croot) in output
Example #23
def make_hardlink_copy(path, prefix):
    """Hardlinks create invalid packages.  Copy files to break the link.
    Symlinks are OK, and unaffected here."""
    if not os.path.isabs(path) and not os.path.exists(path):
        path = os.path.normpath(os.path.join(prefix, path))
    nlinks = os.lstat(path).st_nlink
    dest = 'tmpfile'
    if os.path.isabs(path):
        dest = os.path.join(os.getcwd(), dest)
    if nlinks > 1:
        # copy file to new name
        utils.copy_into(path, dest)
        # remove old file
        utils.rm_rf(path)
        # rename copy to original filename
        utils.copy_into(dest, path)
        utils.rm_rf(dest)
Example #24
def unpack(meta, config):
    ''' Uncompress a downloaded source. '''
    src_path = download_to_cache(meta, config)

    if not isdir(config.work_dir):
        os.makedirs(config.work_dir)
    if config.verbose:
        print("Extracting download")
    if src_path.lower().endswith(('.tar.gz', '.tar.bz2', '.tgz', '.tar.xz',
            '.tar', 'tar.z')):
        tar_xf(src_path, config.work_dir)
    elif src_path.lower().endswith('.zip'):
        unzip(src_path, config.work_dir)
    else:
        # In this case, the build script will need to deal with unpacking the source
        print("Warning: Unrecognized source format. Source file will be copied to the SRC_DIR")
        copy_into(src_path, config.work_dir, config.timeout, locking=config.locking)
Example #25
def create_info_files(m, files, config, prefix):
    '''
    Creates the metadata files that will be stored in the built package.

    :param m: Package metadata
    :type m: Metadata
    :param files: Paths to files to include in package
    :type files: list of str
    '''
    if not isdir(config.info_dir):
        os.makedirs(config.info_dir)

    copy_recipe(m, config)
    copy_readme(m, config)
    copy_license(m, config)

    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}

    write_info_json(m, config, mode_dict)
    write_about_json(m, config)

    if on_win:
        # make sure we use '/' path separators in metadata
        files = [_f.replace('\\', '/') for _f in files]

    with open(join(config.info_dir, 'files'), **mode_dict) as fo:
        if m.get_value('build/noarch_python'):
            fo.write('\n')
        else:
            for f in files:
                fo.write(f + '\n')

    detect_and_record_prefix_files(m, files, prefix, config)
    write_no_link(m, config, files)

    if m.get_value('source/git_url'):
        with io.open(join(config.info_dir, 'git'), 'w', encoding='utf-8') as fo:
            source.git_info(config, fo)

    if m.get_value('app/icon'):
        copy_into(join(m.path, m.get_value('app/icon')),
                        join(config.info_dir, 'icon.png'),
                  config.timeout)
Example #26
def svn_source(metadata, config):
    ''' Download a source from SVN repo. '''
    if config.verbose:
        stdout = None
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = FNULL
        stderr = FNULL

    meta = metadata.get_section('source')

    def parse_bool(s):
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn', config.build_prefix)
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(config.svn_cache):
        os.makedirs(config.svn_cache)
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(config.svn_cache, svn_dn)
    if svn_ignore_externals:
        extra_args = ['--ignore-externals']
    else:
        extra_args = []
    if isdir(cache_repo):
        check_call_env([svn, 'up', '-r', svn_revision] + extra_args, cwd=cache_repo,
                       stdout=stdout, stderr=stderr)
    else:
        check_call_env([svn, 'co', '-r', svn_revision] + extra_args + [svn_url, cache_repo],
                       stdout=stdout, stderr=stderr)
        assert isdir(cache_repo)

    # now copy into work directory
    copy_into(cache_repo, config.work_dir, config.timeout, symlinks=True, locking=config.locking)

    if not config.verbose:
        FNULL.close()

    return config.work_dir
Example #27
def test_skip_existing_url(test_metadata, testing_workdir, capfd):
    # make sure that it is built
    output_file = api.get_output_file_path(test_metadata)
    api.build(test_metadata)

    # Copy our package into some new folder
    platform = os.path.join(testing_workdir, test_metadata.config.subdir)
    copy_into(output_file, os.path.join(platform, os.path.basename(output_file)))

    # create the index so conda can find the file
    api.update_index(platform, config=test_metadata.config)

    test_metadata.config.skip_existing = True
    test_metadata.config.channel_urls = [url_path(testing_workdir)]
    api.build(test_metadata)

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_metadata.config.croot) in output
Example #28
def provide(metadata, config, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """

    if not os.path.isdir(config.build_folder):
        os.makedirs(config.build_folder)
    git = None

    meta = metadata.get_section('source')

    if any(k in meta for k in ('fn', 'url')):
        unpack(metadata, config=config)
    elif 'git_url' in meta:
        git = git_source(metadata, config=config)
    # build to make sure we have a work directory with source in it.  We want to make sure that
    #    whatever version that is does not interfere with the test we run next.
    elif 'hg_url' in meta:
        hg_source(metadata, config=config)
    elif 'svn_url' in meta:
        svn_source(metadata, config=config)
    elif 'path' in meta:
        path = normpath(abspath(join(metadata.path, metadata.get_value('source/path'))))
        if config.verbose:
            print("Copying %s to %s" % (path, config.work_dir))
        # careful here: we set test path to be outside of conda-build root in setup.cfg.
        #    If you don't do that, this is a recursive function
        copy_into(path, config.work_dir, config.timeout, locking=config.locking)
    else:  # no source
        if not isdir(config.work_dir):
            os.makedirs(config.work_dir)

    if patch:
        src_dir = config.work_dir
        patches = ensure_list(meta.get('patches', []))
        for patch in patches:
            apply_patch(src_dir, join(metadata.path, patch), config, git)

    return config.work_dir
Example #29
def svn_source(meta, config):
    """ Download a source from SVN repo. """
    if config.verbose:
        stdout = None
        stderr = None
    else:
        FNULL = open(os.devnull, "w")
        stdout = FNULL
        stderr = FNULL

    def parse_bool(s):
        return str(s).lower().strip() in ("yes", "true", "1", "on")

    svn = external.find_executable("svn", config.build_prefix)
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta["svn_url"]
    svn_revision = meta.get("svn_rev") or "head"
    svn_ignore_externals = parse_bool(meta.get("svn_ignore_externals") or "no")
    if not isdir(config.svn_cache):
        os.makedirs(config.svn_cache)
    svn_dn = svn_url.split(":", 1)[-1].replace("/", "_").replace(":", "_")
    cache_repo = join(config.svn_cache, svn_dn)
    if svn_ignore_externals:
        extra_args = ["--ignore-externals"]
    else:
        extra_args = []
    if isdir(cache_repo):
        check_call_env([svn, "up", "-r", svn_revision] + extra_args, cwd=cache_repo, stdout=stdout, stderr=stderr)
    else:
        check_call_env(
            [svn, "co", "-r", svn_revision] + extra_args + [svn_url, cache_repo], stdout=stdout, stderr=stderr
        )
        assert isdir(cache_repo)

    # now copy into work directory
    copy_into(cache_repo, config.work_dir, config.timeout, symlinks=True)

    if not config.verbose:
        FNULL.close()

    return config.work_dir
Example #30
def remove_easy_install_pth(files, preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    absfiles = [join(config.build_prefix, f) for f in files]
    sp_dir = environ.get_sp_dir()
    for egg_path in glob(join(sp_dir, '*-py*.egg')):
        if isdir(egg_path):
            if preserve_egg_dir or not any(
                    join(egg_path, i) in absfiles
                    for i in walk_prefix(egg_path, False)):
                write_pth(egg_path)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(join(egg_path, 'EGG-INFO/PKG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(join(egg_path, fn))
                else:
                    # this might be a name-space package
                    # so the package directory already exists
                    # from another installed dependency
                    if os.path.exists(join(sp_dir, fn)):
                        utils.copy_into(join(egg_path, fn), join(sp_dir, fn))
                        utils.rm_rf(join(egg_path, fn))
                    else:
                        os.rename(join(egg_path, fn), join(sp_dir, fn))

        elif isfile(egg_path):
            if egg_path not in absfiles:
                continue
            print('found egg:', egg_path)
            write_pth(egg_path)

    utils.rm_rf(join(sp_dir, 'easy-install.pth'))
Example #31
def check_symlinks(files, prefix, croot):
    if readlink is False:
        return  # Not on Unix system
    msgs = []
    real_build_prefix = os.path.realpath(prefix)
    for f in files:
        path = os.path.join(real_build_prefix, f)
        if os.path.islink(path):
            link_path = readlink(path)
            real_link_path = os.path.realpath(path)
            # symlinks to binaries outside of the same dir don't work.  RPATH stuff gets confused
            #    because ld.so follows symlinks in RPATHS
            #    If condition exists, then copy the file rather than symlink it.
            if (os.path.dirname(link_path) != os.path.dirname(real_link_path)
                    and codefile_type(f)):
                os.remove(path)
                utils.copy_into(real_link_path, path)
            elif real_link_path.startswith(real_build_prefix):
                # If the path is in the build prefix, this is fine, but
                # the link needs to be relative
                if not link_path.startswith('.'):
                    # Don't change the link structure if it is already a
                    # relative link. It's possible that ..'s later in the path
                    # can result in a broken link still, but we'll assume that
                    # such crazy things don't happen.
                    print("Making absolute symlink %s -> %s relative" %
                          (f, link_path))
                    os.unlink(path)
                    os.symlink(
                        os.path.relpath(real_link_path, os.path.dirname(path)),
                        path)
            else:
                # Symlinks to absolute paths on the system (like /usr) are fine.
                if real_link_path.startswith(croot):
                    msgs.append("%s is a symlink to a path that may not "
                                "exist after the build is completed (%s)" %
                                (f, link_path))

    if msgs:
        for msg in msgs:
            print("Error: %s" % msg, file=sys.stderr)
        sys.exit(1)
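Rewriting an absolute symlink as a relative one, as done above, takes only os.path.relpath; isolated into a helper (the link must already exist, and the name is illustrative):

import os

def make_symlink_relative(path):
    # Re-point `path` at the same target via a relative link.
    target = os.path.realpath(path)
    os.unlink(path)
    os.symlink(os.path.relpath(target, os.path.dirname(path)), path)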
Example #32
def unpack(source_dict,
           src_dir,
           cache_folder,
           recipe_path,
           croot,
           verbose=False,
           timeout=90,
           locking=True):
    ''' Uncompress a downloaded source. '''
    src_path, unhashed_fn = download_to_cache(cache_folder, recipe_path,
                                              source_dict)

    if not isdir(src_dir):
        os.makedirs(src_dir)
    if verbose:
        print("Extracting download")
    with TemporaryDirectory(dir=croot) as tmpdir:
        unhashed_dest = os.path.join(tmpdir, unhashed_fn)
        if src_path.lower().endswith(
            ('.tar.gz', '.tar.bz2', '.tgz', '.tar.xz', '.tar', 'tar.z')):
            tar_xf(src_path, tmpdir)
        elif src_path.lower().endswith('.zip'):
            unzip(src_path, tmpdir)
        elif src_path.lower().endswith('.whl'):
            # copy wheel itself *and* unpack it
            # This allows test_files or about.license_file to locate files in the wheel,
            # as well as `pip install name-version.whl` as install command
            unzip(src_path, tmpdir)
            copy_into(src_path, unhashed_dest, timeout, locking=locking)
        else:
            # In this case, the build script will need to deal with unpacking the source
            print(
                "Warning: Unrecognized source format. Source file will be copied to the SRC_DIR"
            )
            copy_into(src_path, unhashed_dest, timeout, locking=locking)
        flist = os.listdir(tmpdir)
        if len(flist) == 1 and os.path.isdir(os.path.join(tmpdir, flist[0])):
            hoist_single_extracted_folder(os.path.join(tmpdir, flist[0]))
        flist = os.listdir(tmpdir)
        for f in flist:
            shutil.move(os.path.join(tmpdir, f), os.path.join(src_dir, f))
Example #33
def create_shell_files(m, test_dir=None):
    if not test_dir:
        test_dir = m.config.test_dir
    has_tests = False
    ext = '.bat' if sys.platform == 'win32' else '.sh'
    name = 'no-file'

    # the way this works is that each output needs to explicitly define a test script to run.
    #   They do not automatically pick up run_test.*, but can be pointed at that explicitly.
    for out in m.meta.get('outputs', []):
        if m.name() == out.get('name'):
            out_test_script = out.get('test', {}).get('script', 'no-file')
            if os.path.splitext(out_test_script)[1].lower() == ext:
                name = out_test_script
                break
    else:
        name = "run_test{}".format(ext)

    if exists(join(m.path, name)):
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(join(m.path, name),
                  test_dir,
                  m.config.timeout,
                  locking=False)
        has_tests = True

    commands = ensure_list(m.get_value('test/commands', []))
    if commands:
        with open(join(test_dir, name), 'a') as f:
            f.write('\n\n')
            if not on_win:
                f.write('set -ex\n\n')
            f.write('\n\n')
            for cmd in commands:
                f.write(cmd)
                f.write('\n')
                if on_win:
                    f.write("if errorlevel 1 exit 1\n")
                has_tests = True
            f.write('exit 0\n')

    return has_tests
Example #34
def test_skip_existing_url(test_metadata, testing_workdir, capfd):
    # make sure that it is built
    outputs = api.build(test_metadata)

    # Copy our package into some new folder
    output_dir = os.path.join(testing_workdir, 'someoutput')
    platform = os.path.join(output_dir, test_metadata.config.subdir)
    os.makedirs(platform)
    copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0])))

    # create the index so conda can find the file
    api.update_index(platform, config=test_metadata.config)

    test_metadata.config.skip_existing = True
    test_metadata.config.channel_urls = [url_path(output_dir)]
    api.build(test_metadata)

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_metadata.config.croot) in output
Example #35
def test_new_pkg_format_stat_cache_used(testing_workdir, mocker):
    test_package_path = join(testing_workdir, 'osx-64',
                             'conda-index-pkg-a-1.0-py27h5e241af_0')
    exts = ('.tar.bz2', '.conda')
    for ext in exts:
        copy_into(
            os.path.join(archive_dir,
                         'conda-index-pkg-a-1.0-py27h5e241af_0' + ext),
            test_package_path + ext)
    conda_build.index.update_index(testing_workdir,
                                   channel_name='test-channel')

    # mock the extract function, so that we can assert that it is not called, because the stat cache should exist
    #    if this doesn't work, something about the stat cache is confused.  It's a little convoluted, because
    #    the index has keys for .tar.bz2's, but the stat cache comes from .conda files when they are available
    #    because extracting them is much, much faster.
    cph_extract = mocker.spy(conda_package_handling.api, 'extract')
    conda_build.index.update_index(testing_workdir,
                                   channel_name='test-channel')
    cph_extract.assert_not_called()
Example #36
def create_shell_files(m, test_dir=None):
    if not test_dir:
        test_dir = m.config.test_dir
    name = 'no-file'

    win_status = [on_win]

    if m.noarch:
        win_status = [False, True]

    shell_files = []
    for status in win_status:
        name = _get_output_script_name(m, status)
        dest_file = join(test_dir, name)
        if exists(join(m.path, name)):
            # disable locking to avoid locking a temporary directory (the extracted test folder)
            copy_into(join(m.path, name), dest_file, m.config.timeout, locking=False)
        if os.path.basename(test_dir) != 'test_tmp':
            commands = ensure_list(m.get_value('test/commands', []))
            if commands:
                if name == 'no-file':
                    name = 'run_test.{}'.format('bat' if status else 'sh')
                    if status == on_win:
                        dest_file = join(test_dir, name)
                with open(join(test_dir, name), 'a') as f:
                    f.write('\n\n')
                    if not status:
                        f.write('set -ex\n\n')
                    f.write('\n\n')
                    for cmd in commands:
                        f.write(cmd)
                        f.write('\n')
                        if status:
                            f.write("IF %ERRORLEVEL% NEQ 0 exit /B 1\n")
                    if status:
                        f.write('exit /B 0\n')
                    else:
                        f.write('exit 0\n')
        if os.path.isfile(dest_file):
            shell_files.append(dest_file)
    return shell_files
Example #37
def remove_easy_install_pth(files, preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    absfiles = [join(config.build_prefix, f) for f in files]
    sp_dir = environ.get_sp_dir()
    for egg_path in glob(join(sp_dir, '*-py*.egg')):
        if isdir(egg_path):
            if preserve_egg_dir or not any(join(egg_path, i) in absfiles for i
                in walk_prefix(egg_path, False, windows_forward_slashes=False)):
                write_pth(egg_path)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(join(egg_path, 'EGG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(join(egg_path, fn))
                else:
                    # this might be a name-space package
                    # so the package directory already exists
                    # from another installed dependency
                    if os.path.exists(join(sp_dir, fn)):
                        utils.copy_into(join(egg_path, fn), join(sp_dir, fn))
                        utils.rm_rf(join(egg_path, fn))
                    else:
                        os.rename(join(egg_path, fn), join(sp_dir, fn))

        elif isfile(egg_path):
            if egg_path not in absfiles:
                continue
            print('found egg:', egg_path)
            write_pth(egg_path)

    utils.rm_rf(join(sp_dir, 'easy-install.pth'))
Example #38
def test_skip_existing_url(testing_workdir, test_config, capfd):
    # make sure that it is built
    api.build(empty_sections, config=test_config)
    output_file = os.path.join(test_config.croot, test_config.subdir,
                               "empty_sections-0.0-0.tar.bz2")

    platform = os.path.join(testing_workdir, test_config.subdir)
    copy_into(output_file, os.path.join(platform,
                                        os.path.basename(output_file)))

    # create the index so conda can find the file
    api.update_index(platform, config=test_config)

    api.build(os.path.join(metadata_dir, "empty_sections"),
              skip_existing=True,
              config=test_config,
              channel_urls=[url_path(testing_workdir)])

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_config.croot) in output
Example #39
def create_files(m, test_dir=None):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False if the package has no tests (for any configuration), and
    True if it does.
    """
    if not test_dir:
        test_dir = m.config.test_dir
    has_files = False
    if not os.path.isdir(test_dir):
        os.makedirs(test_dir)

    for pattern in ensure_list(m.get_value('test/files', [])):
        has_files = True
        files = glob(join(m.path, pattern))
        for f in files:
            copy_into(f, f.replace(m.path, test_dir), m.config.timeout, locking=False,
                    clobber=True)
    return has_files
Example #40
def create_shell_files(dir_path, m, config):
    has_tests = False
    if sys.platform == 'win32':
        name = 'run_test.bat'
    else:
        name = 'run_test.sh'

    if exists(join(m.path, name)):
        copy_into(join(m.path, name), dir_path, config.timeout)
        has_tests = True

    with open(join(dir_path, name), 'a') as f:
        f.write('\n\n')
        for cmd in ensure_list(m.get_value('test/commands', [])):
            f.write(cmd)
            f.write('\n')
            if sys.platform == 'win32':
                f.write("if errorlevel 1 exit 1\n")
            has_tests = True

    return has_tests
Example #41
def provide(recipe_dir, meta, config, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """

    if not os.path.isdir(config.build_folder):
        os.makedirs(config.build_folder)
    git = None
    if any(k in meta for k in ('fn', 'url')):
        unpack(meta, config=config)
    elif 'git_url' in meta:
        git = git_source(meta, recipe_dir, config=config)
    # build to make sure we have a work directory with source in it.  We want to make sure that
    #    whatever version that is does not interfere with the test we run next.
    elif 'hg_url' in meta:
        hg_source(meta, config=config)
    elif 'svn_url' in meta:
        svn_source(meta, config=config)
    elif 'path' in meta:
        path = normpath(abspath(join(recipe_dir, meta.get('path'))))
        if config.verbose:
            print("Copying %s to %s" % (path, config.work_dir))
        # careful here: we set test path to be outside of conda-build root in setup.cfg.
        #    If you don't do that, this is a recursive function
        copy_into(path, config.work_dir, config.timeout)
    else:  # no source
        if not isdir(config.work_dir):
            os.makedirs(config.work_dir)

    if patch:
        src_dir = config.work_dir
        patches = ensure_list(meta.get('patches', []))
        for patch in patches:
            apply_patch(src_dir, join(recipe_dir, patch), config, git)

    return config.work_dir
Example #42
def svn_source(source_dict, src_dir, svn_cache, verbose=True, timeout=90, locking=True):
    ''' Download a source from SVN repo. '''
    if verbose:
        stdout = None
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = FNULL
        stderr = FNULL

    def parse_bool(s):
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn_url = source_dict['svn_url']
    svn_revision = source_dict.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(source_dict.get('svn_ignore_externals') or 'no')
    if not isdir(svn_cache):
        os.makedirs(svn_cache)
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(svn_cache, svn_dn)
    if svn_ignore_externals:
        extra_args = ['--ignore-externals']
    else:
        extra_args = []
    if isdir(cache_repo):
        check_call_env(['svn', 'up', '-r', svn_revision] + extra_args, cwd=cache_repo,
                       stdout=stdout, stderr=stderr)
    else:
        check_call_env(['svn', 'co', '-r', svn_revision] + extra_args + [svn_url, cache_repo],
                       stdout=stdout, stderr=stderr)
        assert isdir(cache_repo)

    # now copy into work directory
    copy_into(cache_repo, src_dir, timeout, symlinks=True, locking=locking)

    if not verbose:
        FNULL.close()

    return src_dir
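The cache directory name is derived by mangling the URL; a quick check of what that mangling produces (the URL is illustrative):

svn_url = 'https://svn.example.org/repo/trunk'
svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
assert svn_dn == '__svn.example.org_repo_trunk'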
Example #43
def test_no_shared_format_cache(testing_workdir, mocker):
    test_package_path = join(testing_workdir, 'osx-64',
                             'conda-index-pkg-a-1.0-py27h5e241af_0')
    exts = ('.tar.bz2', '.conda')
    for ext in exts:
        copy_into(
            os.path.join(archive_dir,
                         'conda-index-pkg-a-1.0-py27h5e241af_0' + ext),
            test_package_path + ext)
    # mock the extract function, so that we can assert that it is not called
    #     with the .tar.bz2, because the .conda should be preferred
    cph_extract = mocker.spy(conda_package_handling.api, 'extract')
    # debug here uses a single-threaded bypass of the ProcessPool.  Mocking doesn't work otherwise.
    conda_build.index.update_index(testing_workdir,
                                   channel_name='test-channel',
                                   shared_format_cache=False,
                                   debug=True)
    # extract will get called twice.  It's not really safe to assume that .conda files will be the
    #     exact same as .tar.bz2, since they can be uploaded separately.
    cph_extract.assert_any_call(test_package_path + '.conda', mock.ANY, 'info')
    cph_extract.assert_any_call(test_package_path + '.tar.bz2', mock.ANY,
                                'info')
Example #44
def unpack(meta, config):
    ''' Uncompress a downloaded source. '''
    src_path = download_to_cache(meta, config)

    if not isdir(config.work_dir):
        os.makedirs(config.work_dir)
    if config.verbose:
        print("Extracting download")
    if src_path.lower().endswith(
        ('.tar.gz', '.tar.bz2', '.tgz', '.tar.xz', '.tar', 'tar.z')):
        tar_xf(src_path, config.work_dir)
    elif src_path.lower().endswith('.zip'):
        unzip(src_path, config.work_dir)
    else:
        # In this case, the build script will need to deal with unpacking the source
        print(
            "Warning: Unrecognized source format. Source file will be copied to the SRC_DIR"
        )
        copy_into(src_path,
                  config.work_dir,
                  config.timeout,
                  locking=config.locking)
Example #45
def unpack(meta):
    ''' Uncompress a downloaded source. '''
    src_path = download_to_cache(meta)

    if not isdir(meta.config.work_dir):
        os.makedirs(meta.config.work_dir)
    if meta.config.verbose:
        print("Extracting download")
    if src_path.lower().endswith(('.tar.gz', '.tar.bz2', '.tgz', '.tar.xz',
            '.tar', 'tar.z')):
        tar_xf(src_path, meta.config.work_dir)
    elif src_path.lower().endswith('.zip'):
        unzip(src_path, meta.config.work_dir)
    elif src_path.lower().endswith('.whl'):
        # copy wheel itself *and* unpack it
        # This allows test_files or about.license_file to locate files in the wheel,
        # as well as `pip install name-version.whl` as install command
        unzip(src_path, meta.config.work_dir)
        copy_into(src_path, meta.config.work_dir, meta.config.timeout, locking=meta.config.locking)
    else:
        # In this case, the build script will need to deal with unpacking the source
        print("Warning: Unrecognized source format. Source file will be copied to the SRC_DIR")
        copy_into(src_path, meta.config.work_dir, meta.config.timeout, locking=meta.config.locking)
Example #46
def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None):
    files = utils.rec_glob(path, "*")
    file_paths = sorted([f.replace(path + os.sep, "") for f in files])

    # when this actually has a value, we're copying the top-level recipe into a subdirectory,
    #    so that we have record of what parent recipe produced subpackages.
    if destination_subdir:
        dest_dir = join(dest_dir, destination_subdir)
    else:
        # exclude recipe.yaml because the json dictionary captures its content
        file_paths = [
            f for f in file_paths
            if not (f == "recipe.yaml" or f == "conda_build_config.yaml")
        ]
    file_paths = utils.filter_files(file_paths, path)
    for f in file_paths:
        utils.copy_into(
            join(path, f),
            join(dest_dir, f),
            timeout=config.timeout,
            locking=config.locking,
            clobber=True,
        )
Example #47
def _extract_test_files_from_package(metadata):
    recipe_dir = (
        metadata.config.recipe_dir
        if hasattr(metadata.config, "recipe_dir")
        else metadata.path
    )
    if recipe_dir:
        info_dir = os.path.normpath(os.path.join(recipe_dir, "info"))
        test_files = os.path.join(info_dir, "test")
        if os.path.isdir(test_files):
            # things are re-extracted into the test dir because that's cwd when tests are run,
            #    and provides the most intuitive experience. This is a little
            #    tricky, because SRC_DIR still needs to point at the original
            #    work_dir, for legacy behavior where people aren't using
            #    test/source_files. It would be better to change SRC_DIR in
            #    test phase to always point to test_dir. Maybe one day.
            utils.copy_into(
                test_files,
                metadata.config.test_dir,
                metadata.config.timeout,
                symlinks=True,
                locking=metadata.config.locking,
                clobber=True,
            )
            dependencies_file = os.path.join(test_files, "test_time_dependencies.json")
            test_deps = []
            if os.path.isfile(dependencies_file):
                with open(dependencies_file) as f:
                    test_deps = json.load(f)
            test_section = metadata.meta.get("test", {})
            test_section["requires"] = test_deps
            metadata.meta["test"] = test_section

        else:
            if metadata.meta.get("test", {}).get("source_files"):
                if not metadata.source_provided:
                    try_download(metadata, no_download_source=False)
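
For reference, the test_time_dependencies.json read above is just a JSON list of test-time specs that gets spliced into the test section's requires key. A hedged sketch with hypothetical contents:

import json

# hypothetical contents of info/test/test_time_dependencies.json
test_deps = json.loads('["pytest", "pip"]')
test_section = {"imports": ["mypkg"]}   # hypothetical pre-existing test section
test_section["requires"] = test_deps
assert test_section == {"imports": ["mypkg"], "requires": ["pytest", "pip"]}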
Example #48
def hoist_single_extracted_folder(nested_folder):
    """Moves all files/folders one level up.

    This is for when your archive extracts into its own folder, so that we don't need to
    know exactly what that folder is called."""
    flist = os.listdir(nested_folder)
    parent = os.path.dirname(nested_folder)
    # the caller should invoke this only when the parent folder contains ONLY
    #    our nested folder
    nested_folders_to_remove = [nested_folder]
    for thing in flist:
        if not os.path.isdir(os.path.join(parent, thing)):
            shutil.move(os.path.join(nested_folder, thing),
                        os.path.join(parent, thing))
        else:
            copy_into(os.path.join(nested_folder, thing),
                      os.path.join(parent, thing))
            nested_folders_to_remove.append(os.path.join(nested_folder, thing))
    # if the archive nests a same-named folder (parent/foo/foo), the copy above
    #    landed on nested_folder itself, so keep nested_folder and delete only
    #    the inner duplicate
    fn = os.path.basename(nested_folder)
    if (os.path.join(nested_folder, fn) in nested_folders_to_remove
            and nested_folder in nested_folders_to_remove):
        nested_folders_to_remove.remove(nested_folder)
    for folder in nested_folders_to_remove:
        rm_rf(folder)
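
A hedged sketch of the call-site contract implied by the comment above: hoisting is only meaningful once the caller has verified that the work dir holds exactly one extracted folder. maybe_hoist is a hypothetical wrapper, not conda-build API:

import os

def maybe_hoist(work_dir):
    # hoist only when the archive created exactly one top-level directory
    flist = os.listdir(work_dir)
    if len(flist) == 1 and os.path.isdir(os.path.join(work_dir, flist[0])):
        hoist_single_extracted_folder(os.path.join(work_dir, flist[0]))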
Example #49
def create_info_files(m, files, config, prefix):
    '''
    Creates the metadata files that will be stored in the built package.

    :param m: Package metadata
    :type m: Metadata
    :param files: Paths to files to include in package
    :type files: list of str
    :param config: Build configuration
    :type config: Config
    :param prefix: Install prefix that the files live in
    :type prefix: str
    '''

    copy_recipe(m, config)
    copy_readme(m, config)
    copy_license(m, config)

    mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}

    write_info_json(m, config, mode_dict)
    write_about_json(m, config)

    entry_point_scripts = m.get_value('build/entry_points')

    if is_noarch_python(m):
        noarch_python.create_entry_point_information("python",
                                                     entry_point_scripts,
                                                     config)

    entry_point_script_names = get_entry_point_script_names(
        entry_point_scripts)

    if on_win:
        # make sure we use '/' path separators in metadata
        files = [_f.replace('\\', '/') for _f in files]

    with open(join(config.info_dir, 'files'), **mode_dict) as fo:
        if m.get_value('build/noarch_python'):
            fo.write('\n')
        elif is_noarch_python(m):
            for f in files:
                if f.find("site-packages") > 0:
                    fo.write(f[f.find("site-packages"):] + '\n')
                elif f.startswith("bin") and (f
                                              not in entry_point_script_names):
                    fo.write(f.replace("bin", "python-scripts") + '\n')
                elif f.startswith("Scripts") and (
                        f not in entry_point_script_names):
                    fo.write(f.replace("Scripts", "python-scripts") + '\n')
        else:
            for f in files:
                fo.write(f + '\n')

    detect_and_record_prefix_files(m, files, prefix, config)
    write_no_link(m, config, files)

    if m.get_value('source/git_url'):
        with io.open(join(config.info_dir, 'git'), 'w',
                     encoding='utf-8') as fo:
            source.git_info(config, fo)

    if m.get_value('app/icon'):
        copy_into(join(m.path, m.get_value('app/icon')),
                  join(config.info_dir, 'icon.png'), config.timeout)
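
To make the noarch_python branch above concrete, here is a standalone mirror of its path remapping (remap_noarch_path is a hypothetical helper; note that str.replace would also rewrite later occurrences of 'bin' in a path, a quirk inherited from the loop above):

def remap_noarch_path(f, entry_point_script_names=()):
    # returns the rewritten entry for info/files, or None if the path is dropped
    if f.find("site-packages") > 0:
        return f[f.find("site-packages"):]
    if f.startswith("bin") and f not in entry_point_script_names:
        return f.replace("bin", "python-scripts")
    if f.startswith("Scripts") and f not in entry_point_script_names:
        return f.replace("Scripts", "python-scripts")
    return None

assert (remap_noarch_path('lib/python2.7/site-packages/pkg/__init__.py')
        == 'site-packages/pkg/__init__.py')
assert remap_noarch_path('bin/mytool') == 'python-scripts/mytool'
assert remap_noarch_path('Scripts/mytool.exe') == 'python-scripts/mytool.exe'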
Example #50
def provide(metadata):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    meta = metadata.get_section('source')
    if not os.path.isdir(metadata.config.build_folder):
        os.makedirs(metadata.config.build_folder)
    git = None

    if hasattr(meta, 'keys'):
        dicts = [meta]
    else:
        dicts = meta

    try:
        for source_dict in dicts:
            folder = source_dict.get('folder')
            src_dir = os.path.join(metadata.config.work_dir,
                                   folder if folder else '')
            if any(k in source_dict for k in ('fn', 'url')):
                unpack(source_dict,
                       src_dir,
                       metadata.config.src_cache,
                       recipe_path=metadata.path,
                       croot=metadata.config.croot,
                       verbose=metadata.config.verbose,
                       timeout=metadata.config.timeout,
                       locking=metadata.config.locking)
            elif 'git_url' in source_dict:
                git = git_source(source_dict,
                                 metadata.config.git_cache,
                                 src_dir,
                                 metadata.path,
                                 verbose=metadata.config.verbose)
            # build to make sure we have a work directory with source in it. We
            #    want to make sure that whatever version that is does not
            #    interfere with the test we run next.
            elif 'hg_url' in source_dict:
                hg_source(source_dict,
                          src_dir,
                          metadata.config.hg_cache,
                          verbose=metadata.config.verbose)
            elif 'svn_url' in source_dict:
                svn_source(source_dict,
                           src_dir,
                           metadata.config.svn_cache,
                           verbose=metadata.config.verbose,
                           timeout=metadata.config.timeout,
                           locking=metadata.config.locking)
            elif 'path' in source_dict:
                source_path = os.path.expanduser(source_dict['path'])
                path = normpath(abspath(join(metadata.path, source_path)))
                path_via_symlink = 'path_via_symlink' in source_dict
                if path_via_symlink and not folder:
                    print(
                        "ERROR: `path_via_symlink` is too dangerous without specifying a folder,\n"
                        "  conda could end up changing - or deleting - your local source code!\n"
                        "  When using `path_via_symlink` you should also take care to run the\n"
                        "  build outside of your local source code folder(s), unless that is\n"
                        "  your intention.")
                    sys.exit(1)
                if path_via_symlink:
                    src_dir_symlink = os.path.dirname(src_dir)
                    if not isdir(src_dir_symlink):
                        os.makedirs(src_dir_symlink)
                    if metadata.config.verbose:
                        print("Creating sybmolic link pointing to %s at %s" %
                              (path, src_dir))
                    os.symlink(path, src_dir)
                else:
                    if metadata.config.verbose:
                        print("Copying %s to %s" % (path, src_dir))
                    # careful here: we set test path to be outside of conda-build root in setup.cfg.
                    #    If you don't do that, this is a recursive function
                    copy_into(path,
                              src_dir,
                              metadata.config.timeout,
                              symlinks=True,
                              locking=metadata.config.locking,
                              clobber=True)
            else:  # no source
                if not isdir(src_dir):
                    os.makedirs(src_dir)

            patches = ensure_list(source_dict.get('patches', []))
            patch_attributes_output = []
            for patch in patches:
                patch_attributes_output += [
                    apply_one_patch(src_dir, metadata.path, patch,
                                    metadata.config, git)
                ]
            _patch_attributes_debug_print(patch_attributes_output)

    except CalledProcessError:
        shutil.move(metadata.config.work_dir,
                    metadata.config.work_dir + '_failed_provide')
        raise

    return metadata.config.work_dir
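
The elif chain above dispatches on which keys each source dictionary carries. A hedged illustration of the accepted shapes (all values hypothetical):

# each dict corresponds to one entry in the recipe's `source` section;
# `folder` nests that source under work_dir
example_sources = [
    {'url': 'https://example.com/pkg-1.0.tar.gz', 'folder': 'pkg'},
    {'git_url': 'https://github.com/example/pkg.git'},
    {'hg_url': 'https://hg.example.com/pkg'},
    {'svn_url': 'https://svn.example.com/pkg/trunk'},
    {'path': '../local-src', 'patches': ['fix-build.patch']},
]
for d in example_sources:
    kind = next(k for k in ('fn', 'url', 'git_url', 'hg_url', 'svn_url', 'path')
                if k in d)
    print(kind, '->', d[kind])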
Example #51
def test_copy_source_tree(namespace_setup):
    dst = os.path.join(namespace_setup, 'dest')
    utils.copy_into(os.path.join(namespace_setup, 'namespace'), dst)
    assert os.path.isfile(os.path.join(dst, 'package', 'module.py'))
Example #52
def bundle_conda(metadata, initial_files, env, files_selector=None):

    files = post_process_files(metadata, initial_files)

    # first filter is so that info_files does not pick up ignored files
    files = utils.filter_files(files, prefix=metadata.config.host_prefix)
    if files_selector:
        files = select_files(files, files_selector.get("include"),
                             files_selector.get("exclude"))

    console.print(f"\n[yellow]Adding files for {metadata.name()}[/yellow]\n")
    if files:
        for f in sorted(files):
            console.print(f"- {f}")
    else:
        console.print(
            f"[red]ATTENTION: No files added in target [bold]{metadata.name()}[/bold][/red]"
        )
    console.print("\n")

    # clear out any stale test files; create_info_files below regenerates them
    #    (and also copies things like run_test.sh into info/recipe)
    utils.rm_rf(os.path.join(metadata.config.info_dir, "test"))

    output = {}

    with tmp_chdir(metadata.config.host_prefix):
        output["checksums"] = create_info_files(
            metadata, files, prefix=metadata.config.host_prefix)

    # here we add the info files into the prefix, so we want to re-collect the files list
    prefix_files = set(utils.prefix_files(metadata.config.host_prefix))
    files = utils.filter_files(prefix_files - initial_files,
                               prefix=metadata.config.host_prefix)
    if files_selector:
        include_files = files_selector.get("include")
        if include_files:
            include_files += ["info/*"]
        files = select_files(files, include_files,
                             files_selector.get("exclude"))

    basename = metadata.dist()
    tmp_archives = []
    final_outputs = []
    ext = ".tar.bz2"
    if output.get(
            "type") == "conda_v2" or metadata.config.conda_pkg_format == "2":
        ext = ".conda"

    with TemporaryDirectory() as tmp:
        conda_package_handling.api.create(metadata.config.host_prefix,
                                          files,
                                          basename + ext,
                                          out_folder=tmp)
        tmp_archives = [os.path.join(tmp, basename + ext)]

        # we're done building, perform some checks
        for tmp_path in tmp_archives:
            #     if tmp_path.endswith('.tar.bz2'):
            #         tarcheck.check_all(tmp_path, metadata.config)
            output_filename = os.path.basename(tmp_path)

            #     # we do the import here because we want to respect logger level context
            #     try:
            #         from conda_verify.verify import Verify
            #     except ImportError:
            #         Verify = None
            #         log.warn("Importing conda-verify failed.  Please be sure to test your packages.  "
            #             "conda install conda-verify to make this message go away.")
            #     if getattr(metadata.config, "verify", False) and Verify:
            #         verifier = Verify()
            #         checks_to_ignore = (utils.ensure_list(metadata.config.ignore_verify_codes) +
            #                             metadata.ignore_verify_codes())
            #         try:
            #             verifier.verify_package(path_to_package=tmp_path, checks_to_ignore=checks_to_ignore,
            #                                     exit_on_error=metadata.config.exit_on_verify_error)
            #         except KeyError as e:
            #             log.warn("Package doesn't have necessary files.  It might be too old to inspect."
            #                      "Legacy noarch packages are known to fail.  Full message was {}".format(e))
            try:
                crossed_subdir = metadata.config.target_subdir
            except AttributeError:
                crossed_subdir = metadata.config.host_subdir
            subdir = ("noarch" if (metadata.noarch or metadata.noarch_python)
                      else crossed_subdir)
            if metadata.config.output_folder:
                output_folder = os.path.join(metadata.config.output_folder,
                                             subdir)
            else:
                output_folder = os.path.join(
                    os.path.dirname(metadata.config.bldpkgs_dir), subdir)
            final_output = os.path.join(output_folder, output_filename)
            if os.path.isfile(final_output):
                utils.rm_rf(final_output)

            # disable locking here. It's just a temp folder getting locked.
            # Having it proved a major bottleneck.
            utils.copy_into(tmp_path,
                            final_output,
                            metadata.config.timeout,
                            locking=False)
            final_outputs.append(final_output)

    update_index(os.path.dirname(output_folder),
                 verbose=metadata.config.debug,
                 threads=1)

    # clean out host prefix so that this output's files don't interfere with other outputs
    # We have a backup of how things were before any output scripts ran.  That's
    # restored elsewhere.
    if metadata.config.keep_old_work:
        prefix = metadata.config.host_prefix
        dest = os.path.join(
            os.path.dirname(prefix),
            "_".join(("_h_env_moved", metadata.dist(),
                      metadata.config.host_subdir)),
        )
        console.print("Renaming host env directory, ", prefix, " to ", dest)
        if os.path.exists(dest):
            utils.rm_rf(dest)
        shutil.move(prefix, dest)
    else:
        utils.rm_rf(metadata.config.host_prefix)

    return final_outputs
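
files_selector, when supplied, is an include/exclude mapping of glob patterns; note how the second filtering pass above appends 'info/*' whenever include patterns exist, so the package metadata always ships. A hypothetical selector plus a mirror of that augmentation:

# hypothetical selector for a split output: ship shared libraries only
files_selector = {'include': ['lib/**'], 'exclude': ['lib/**/*.a']}

include_files = list(files_selector.get('include') or [])
if include_files:
    include_files += ['info/*']   # metadata is always bundled
print(include_files)              # -> ['lib/**', 'info/*']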
Example #53
def create_info_files(m, files, prefix):
    """
    Creates the metadata files that will be stored in the built package.

    :param m: Package metadata
    :type m: Metadata
    :param files: Paths to files to include in package
    :type files: list of str
    """
    if utils.on_win:
        # make sure we use '/' path separators in metadata
        files = [_f.replace("\\", "/") for _f in files]

    if m.config.filename_hashing:
        write_hash_input(m)

    write_info_json(m)  # actually index.json

    write_about_json(m)
    write_link_json(m)
    write_run_exports(m)

    # TODO
    copy_recipe(m)
    copy_readme(m)
    copy_license(m)
    copy_recipe_log(m)
    # files.extend(jsonify_info_yamls(m))

    create_all_test_files(m, test_dir=join(m.config.info_dir, "test"))
    if m.config.copy_test_source_files:
        copy_test_source_files(m, join(m.config.info_dir, "test"))

    write_info_files_file(m, files)

    files_with_prefix = get_files_with_prefix(m, [], files, prefix)
    record_prefix_files(m, files_with_prefix)
    checksums = create_info_files_json_v1(m, m.config.info_dir, prefix, files,
                                          files_with_prefix)

    # write_no_link(m, files)

    sources = m.get_section("source")
    if hasattr(sources, "keys"):
        sources = [sources]

    with io.open(join(m.config.info_dir, "git"), "w", encoding="utf-8") as fo:
        for src in sources:
            if src.get("git_url"):
                source.git_info(
                    os.path.join(m.config.work_dir, src.get("folder", "")),
                    build_prefix=m.config.build_prefix,
                    verbose=m.config.verbose,
                    fo=fo,
                )

    if m.get_value("app/icon"):
        utils.copy_into(
            join(m.path, m.get_value("app/icon")),
            join(m.config.info_dir, "icon.png"),
            m.config.timeout,
            locking=m.config.locking,
        )

    return checksums
Example #54
def test_new_pkg_format_preferred(testing_workdir, mocker):
    """Test that in one pass, the .conda file is extracted before the .tar.bz2, and the .tar.bz2 uses the cache"""
    test_package_path = join(testing_workdir, 'osx-64',
                             'conda-index-pkg-a-1.0-py27h5e241af_0')
    exts = ('.tar.bz2', '.conda')
    for ext in exts:
        copy_into(
            os.path.join(archive_dir,
                         'conda-index-pkg-a-1.0-py27h5e241af_0' + ext),
            test_package_path + ext)
    # mock the extract function, so that we can assert that it is not called
    #     with the .tar.bz2, because the .conda should be preferred
    cph_extract = mocker.spy(conda_package_handling.api, 'extract')
    conda_build.index.update_index(testing_workdir,
                                   channel_name='test-channel',
                                   debug=True)
    # extract should get called once by default.  Within a channel, we assume that a .tar.bz2 and .conda have the same contents.
    cph_extract.assert_called_once_with(test_package_path + '.conda', mock.ANY,
                                        'info')

    with open(join(testing_workdir, 'osx-64', 'repodata.json')) as fh:
        actual_repodata_json = json.loads(fh.read())

    expected_repodata_json = {
        "info": {
            'subdir': 'osx-64',
        },
        "packages": {
            "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": {
                "build": "py27h5e241af_0",
                "build_number": 0,
                "depends": ["python >=2.7,<2.8.0a0"],
                "license": "BSD",
                "md5": "37861df8111170f5eed4bff27868df59",
                "name": "conda-index-pkg-a",
                "sha256":
                "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30",
                "size": 8733,
                "subdir": "osx-64",
                "timestamp": 1508520039632,
                "version": "1.0",
            },
        },
        "packages.conda": {
            "conda-index-pkg-a-1.0-py27h5e241af_0.conda": {
                "build": "py27h5e241af_0",
                "build_number": 0,
                "depends": ["python >=2.7,<2.8.0a0"],
                "license": "BSD",
                "md5": "4ed4b435f400dac1aabdc1fff06f78ff",
                "name": "conda-index-pkg-a",
                "sha256":
                "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19",
                "size": 9296,
                "subdir": "osx-64",
                "timestamp": 1508520039632,
                "version": "1.0",
            },
        },
        "removed": [],
        "repodata_version": 1,
    }
    assert actual_repodata_json == expected_repodata_json

    # if we clear the stat cache, we force a re-examination.  This re-examination will load files
    #     from the cache.  This has been a source of bugs in the past, where the wrong cached file
    #     being loaded resulted in incorrect hashes/sizes for either the .tar.bz2 or .conda, depending
    #     on which of those 2 existed in the cache.
    rm_rf(os.path.join(testing_workdir, 'osx-64', 'stat.json'))
    conda_build.index.update_index(testing_workdir,
                                   channel_name='test-channel',
                                   debug=True)

    with open(join(testing_workdir, 'osx-64', 'repodata.json')) as fh:
        actual_repodata_json = json.loads(fh.read())

    assert actual_repodata_json == expected_repodata_json
Example #55
def provide(metadata):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    meta = metadata.get_section('source')
    if not os.path.isdir(metadata.config.build_folder):
        os.makedirs(metadata.config.build_folder)
    git = None

    if hasattr(meta, 'keys'):
        dicts = [meta]
    else:
        dicts = meta

    try:
        for source_dict in dicts:
            folder = source_dict.get('folder')
            src_dir = (os.path.join(metadata.config.work_dir, folder) if folder else
                    metadata.config.work_dir)
            if any(k in source_dict for k in ('fn', 'url')):
                unpack(source_dict, src_dir, metadata.config.src_cache, recipe_path=metadata.path,
                    croot=metadata.config.croot, verbose=metadata.config.verbose,
                    timeout=metadata.config.timeout, locking=metadata.config.locking)
            elif 'git_url' in source_dict:
                git = git_source(source_dict, metadata.config.git_cache, src_dir, metadata.path,
                                verbose=metadata.config.verbose)
            # build to make sure we have a work directory with source in it. We
            #    want to make sure that whatever version that is does not
            #    interfere with the test we run next.
            elif 'hg_url' in source_dict:
                hg_source(source_dict, src_dir, metadata.config.hg_cache,
                        verbose=metadata.config.verbose)
            elif 'svn_url' in source_dict:
                svn_source(source_dict, src_dir, metadata.config.svn_cache,
                        verbose=metadata.config.verbose, timeout=metadata.config.timeout,
                        locking=metadata.config.locking)
            elif 'path' in source_dict:
                source_path = os.path.expanduser(source_dict['path'])
                path = normpath(abspath(join(metadata.path, source_path)))
                if metadata.config.verbose:
                    print("Copying %s to %s" % (path, src_dir))
                # careful here: we set test path to be outside of conda-build root in setup.cfg.
                #    If you don't do that, this is a recursive function
                copy_into(path, src_dir, metadata.config.timeout, symlinks=True,
                        locking=metadata.config.locking, clobber=True)
            else:  # no source
                if not isdir(src_dir):
                    os.makedirs(src_dir)

            patches = ensure_list(source_dict.get('patches', []))
            recipe_dir = (metadata.path or
                          metadata.meta.get('extra', {}).get('parent_recipe', {}).get('path', ''))
            for patch in patches:
                apply_patch(src_dir, join(recipe_dir, patch), metadata.config, git)

    except CalledProcessError:
        shutil.move(metadata.config.work_dir, metadata.config.work_dir + '_failed_provide')
        raise

    return metadata.config.work_dir
Example #56
def build(m,
          config,
          post=None,
          need_source_download=True,
          need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param post: None means run the whole build; True means run the post
        phase only; False means stop just before the post phase.
    :type post: bool or None
    :param need_source_download: if rendering failed to download source
        (due to missing tools), retry here after the build env is populated
    :type need_source_download: bool
    :param need_reparse_in_env: re-parse the recipe inside the populated
        build environment (for recipes that need build-time tools to render)
    :type need_reparse_in_env: bool
    '''

    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(),
                  "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print(
                "    (actual version deferred until further download or env creation)"
            )

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix)
                or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or not any(
                        spec.startswith('python') and "3." in spec
                        for spec in specs)):
                    specs.append(vcs_source)

                    log.warn(
                        "Your recipe depends on %s at build time (for templates), "
                        "but you have not listed it as a build dependency.  Doing "
                        "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError(
                        "Your recipe uses mercurial in build, but mercurial"
                        " does not yet support Python 3.  Please handle all of "
                        "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            #    depending on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn(
                        "Glob %s from always_include_files does not match any files",
                        pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config),
                                         'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write(
                                    "source {conda_root}activate {build_prefix} &> "
                                    "/dev/null\n".format(
                                        conda_root=root_script_dir +
                                        os.path.sep,
                                        build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file,
                                          config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            meta_files = (tuple(
                f for f in files2 - files1
                if config.meta_dir in join(config.build_prefix, f)), )
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m,
                   sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m,
                          sorted(files2 - files1),
                          config=config,
                          prefix=config.build_prefix)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1),
                                    config.build_prefix)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # empty files sort last: a zero st_size falls back to 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* entries get 0 (sort first); everything else gets 1
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
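
The order() key above puts info/ entries first and then sorts by ascending size, with empty files pushed to the back via the 100000 fallback. A standalone sketch using hypothetical (path, size) pairs instead of os.stat calls:

import os

def order_key(path, size):
    fsize = size or 100000                             # empty files sort late
    info_order = int(os.path.dirname(path) != 'info')  # info/* -> 0, first
    return info_order, fsize

members = [('lib/big.so', 50000), ('info/index.json', 300),
           ('lib/empty.txt', 0), ('info/files', 120)]
assert [p for p, s in sorted(members, key=lambda ps: order_key(*ps))] == \
    ['info/files', 'info/index.json', 'lib/big.so', 'lib/empty.txt']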
Example #57
def _construct_metadata_for_test_from_package(package, config):
    recipe_dir, need_cleanup = utils.get_recipe_abspath(package)
    config.need_cleanup = need_cleanup
    config.recipe_dir = recipe_dir
    hash_input = {}

    info_dir = os.path.normpath(os.path.join(recipe_dir, "info"))
    with open(os.path.join(info_dir, "index.json")) as f:
        package_data = json.load(f)

    if package_data["subdir"] != "noarch":
        config.host_subdir = package_data["subdir"]
    # We may be testing an (old) package built without filename hashing.
    hash_input_path = os.path.join(info_dir, "hash_input.json")
    if os.path.isfile(hash_input_path):
        with open(hash_input_path) as f:
            hash_input = json.load(f)
    else:
        config.filename_hashing = False
        hash_input = {}
    # not actually used as a variant, since metadata will have been finalized.
    #    This is still necessary for computing the hash correctly though
    config.variant = hash_input
    log = utils.get_logger(__name__)

    # get absolute file location
    local_pkg_location = os.path.normpath(os.path.abspath(os.path.dirname(package)))

    # get last part of the path
    last_element = os.path.basename(local_pkg_location)
    is_channel = False
    for platform in ("win-", "linux-", "osx-", "noarch"):
        if last_element.startswith(platform):
            is_channel = True

    if not is_channel:
        log.warn(
            "Copying package to conda-build croot.  No packages otherwise alongside yours will"
            " be available unless you specify -c local.  To avoid this warning, your package "
            "must reside in a channel structure with platform-subfolders.  See more info on "
            "what a valid channel is at "
            "https://conda.io/docs/user-guide/tasks/create-custom-channels.html"
        )

        local_dir = os.path.join(config.croot, config.host_subdir)
        mkdir_p(local_dir)
        local_pkg_location = os.path.join(local_dir, os.path.basename(package))
        utils.copy_into(package, local_pkg_location)
        local_pkg_location = local_dir

    local_channel = os.path.dirname(local_pkg_location)

    # update indices in the channel
    update_index(local_channel, verbose=config.debug, threads=1)

    try:
        # raise IOError()
        # metadata = render_recipe(
        #     os.path.join(info_dir, "recipe"), config=config, reset_build_id=False
        # )[0][0]

        metadata = get_metadata(os.path.join(info_dir, "recipe", "recipe.yaml"), config)
        # with open(os.path.join(info_dir, "recipe", "recipe.yaml")) as fi:
        # metadata = yaml.load(fi)
    # no recipe in package.  Fudge metadata
    except SystemExit:
        # force the build string to line up - recomputing it would
        #    yield a different result
        metadata = MetaData.fromdict(
            {
                "package": {
                    "name": package_data["name"],
                    "version": package_data["version"],
                },
                "build": {
                    "number": int(package_data["build_number"]),
                    "string": package_data["build"],
                },
                "requirements": {"run": package_data["depends"]},
            },
            config=config,
        )
    # HACK: because the recipe is fully baked, detecting "used" variables no longer works.  The set
    #     of variables in the hash_input suffices, though.

    if metadata.noarch:
        metadata.config.variant["target_platform"] = "noarch"

    metadata.config.used_vars = list(hash_input.keys())
    urls = list(utils.ensure_list(metadata.config.channel_urls))
    local_path = url_path(local_channel)
    # replace local with the appropriate real channel.  Order is maintained.
    urls = [url if url != "local" else local_path for url in urls]
    if local_path not in urls:
        urls.insert(0, local_path)
    metadata.config.channel_urls = urls
    utils.rm_rf(metadata.config.test_dir)
    return metadata, hash_input
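
The platform-prefix loop above is the entire heuristic for deciding whether the package already sits inside a channel layout. A tiny standalone equivalent (looks_like_channel_subdir is a hypothetical name):

def looks_like_channel_subdir(dirname):
    return any(dirname.startswith(p) for p in ('win-', 'linux-', 'osx-', 'noarch'))

assert looks_like_channel_subdir('osx-64')
assert looks_like_channel_subdir('noarch')
assert not looks_like_channel_subdir('downloads')   # triggers the croot copy above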
Example #58
def test_new_pkg_format_stat_cache_used(testing_workdir, mocker):
    # if we have old .tar.bz2 index cache stuff, assert that we pick up correct md5, sha256 and size for .conda
    test_package_path = join(testing_workdir, 'osx-64',
                             'conda-index-pkg-a-1.0-py27h5e241af_0')
    copy_into(
        os.path.join(archive_dir,
                     'conda-index-pkg-a-1.0-py27h5e241af_0' + '.tar.bz2'),
        test_package_path + '.tar.bz2')
    conda_build.index.update_index(testing_workdir,
                                   channel_name='test-channel')

    # mock the extract function, so that we can assert that it is not called, because the stat cache should exist
    #    if this doesn't work, something about the stat cache is confused.  It's a little convoluted, because
    #    the index has keys for .tar.bz2's, but the stat cache comes from .conda files when they are available
    #    because extracting them is much, much faster.
    copy_into(
        os.path.join(archive_dir,
                     'conda-index-pkg-a-1.0-py27h5e241af_0' + '.conda'),
        test_package_path + '.conda')
    cph_extract = mocker.spy(conda_package_handling.api, 'extract')
    conda_build.index.update_index(testing_workdir,
                                   channel_name='test-channel',
                                   debug=True)
    cph_extract.assert_not_called()

    with open(join(testing_workdir, 'osx-64', 'repodata.json')) as fh:
        actual_repodata_json = json.loads(fh.read())

    expected_repodata_json = {
        "info": {
            'subdir': 'osx-64',
        },
        "packages": {
            "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": {
                "build": "py27h5e241af_0",
                "build_number": 0,
                "depends": ["python >=2.7,<2.8.0a0"],
                "license": "BSD",
                "md5": "37861df8111170f5eed4bff27868df59",
                "name": "conda-index-pkg-a",
                "sha256":
                "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30",
                "size": 8733,
                "subdir": "osx-64",
                "timestamp": 1508520039632,
                "version": "1.0",
            },
        },
        "packages.conda": {
            "conda-index-pkg-a-1.0-py27h5e241af_0.conda": {
                "build": "py27h5e241af_0",
                "build_number": 0,
                "depends": ["python >=2.7,<2.8.0a0"],
                "license": "BSD",
                "md5": "4ed4b435f400dac1aabdc1fff06f78ff",
                "name": "conda-index-pkg-a",
                "sha256":
                "67b07b644105439515cc5c8c22c86939514cacf30c8c574cd70f5f1267a40f19",
                "size": 9296,
                "subdir": "osx-64",
                "timestamp": 1508520039632,
                "version": "1.0",
            },
        },
        "removed": [],
        "repodata_version": 1,
    }
    assert actual_repodata_json == expected_repodata_json
Example #59
def copy_license(m, config):
    license_file = m.get_value('about/license_file')
    if license_file:
        copy_into(join(source.get_dir(config), license_file),
                  join(config.info_dir, 'LICENSE.txt'), config.timeout)