示例#1
0
    def _get_hash_contents(self):
        """Collect the recipe data that contributes to the build hash.

        Returns a 2-tuple ``(composite, file_paths)``:

        * ``composite`` — a HashableDict of the hash-relevant recipe sections
          ('source', 'requirements', 'build', plus trimmed 'outputs'), with
          volatile keys (build number/string, dynamically-determined output
          files, excluded build requirements) removed so that they do not
          perturb the hash.
        * ``file_paths`` — sorted recipe file paths (relative to ``self.path``)
          whose contents also feed into the hash; taken from a recorded
          ``hash_input_files`` listing when one exists.
        """
        sections = ['source', 'requirements', 'build']
        # make a copy of values, so that no sorting occurs in place
        composite = HashableDict({section: copy.copy(self.get_section(section))
                                  for section in sections})
        outputs = self.get_section('outputs')
        if outputs:
            outs = []
            for out in outputs:
                out = copy.copy(out)
                # files are dynamically determined, and there's no way to match them at render time.
                #    we need to exclude them from the hash.
                if 'files' in out:
                    del out['files']
                outs.append(out)
            composite.update({'outputs': [HashableDict(out) for out in outs]})

        # filter build requirements for ones that should not be in the hash
        requirements = composite.get('requirements', {})
        build_reqs = requirements.get('build', [])
        excludes = self.config.variant.get('exclude_from_build_hash', [])
        if excludes:
            # raw string: '\s' in a plain literal is an invalid escape sequence
            # (SyntaxWarning on modern Python).  The compiled regex is unchanged.
            exclude_pattern = re.compile('|'.join(r'{}[\s$]?.*'.format(exc) for exc in excludes))
            build_reqs = [req for req in build_reqs if not exclude_pattern.match(req)]
        requirements['build'] = build_reqs
        composite['requirements'] = requirements

        # remove the build number from the hash, so that we can bump it without changing the hash
        if 'number' in composite['build']:
            del composite['build']['number']
        # remove the build string, so that hashes don't affect themselves
        if 'string' in composite['build']:
            del composite['build']['string']
        if not composite['build']:
            del composite['build']
        for key in 'build', 'run':
            if key in composite['requirements'] and not composite['requirements'].get(key):
                del composite['requirements'][key]
        trim_empty_keys(composite)
        file_paths = []

        if self.path:
            recorded_input_files = os.path.join(self.path, '..', 'hash_input_files')
            if os.path.exists(recorded_input_files):
                with open(recorded_input_files) as f:
                    file_paths = f.read().splitlines()
            else:
                files = utils.rec_glob(self.path, "*")
                file_paths = sorted([f.replace(self.path + os.sep, '') for f in files])
                # exclude meta.yaml and meta.yaml.template, because the json dictionary captures
                #    their content
                file_paths = [f for f in file_paths if not f.startswith('meta.yaml')]
                file_paths = sorted(filter_files(file_paths, self.path))

        return composite, file_paths
示例#2
0
def find_recipe(path):
    """recurse through a folder, locating meta.yaml.  Raises error if more than one is found.

    Returns folder containing meta.yaml, to be built.

    If we have a base level meta.yaml and other supplemental ones, use that first"""
    matches = rec_glob(path, ["meta.yaml", "conda.yaml"])
    if not matches:
        raise IOError("No meta.yaml or conda.yaml files found in %s" % path)
    if len(matches) == 1:
        return matches[0]
    # several candidates: a meta.yaml sitting at the top level wins
    top_level = os.path.join(path, "meta.yaml")
    if top_level in matches:
        return top_level
    raise IOError("More than one meta.yaml files found in %s" % path)
示例#3
0
def find_recipe(path):
    """recurse through a folder, locating meta.yaml.  Raises error if more than one is found.

    Returns folder containing meta.yaml, to be built.

    If we have a base level meta.yaml and other supplemental ones, use that first"""
    found = rec_glob(path, ["meta.yaml", "conda.yaml"])
    count = len(found)
    if count > 1:
        # prefer the recipe that lives directly inside `path`
        preferred = os.path.join(path, "meta.yaml")
        if preferred not in found:
            raise IOError("More than one meta.yaml files found in %s" % path)
        return preferred
    if count == 0:
        raise IOError("No meta.yaml or conda.yaml files found in %s" % path)
    return found[0]
示例#4
0
def relink_sharedobjects(pkg_path, build_prefix):
    '''
    invokes functions in post module to relink to libraries in conda env

    :param pkg_path: look for shared objects to relink in pkg_path
    :param build_prefix: path to conda environment which contains lib/. to find
        runtime libraries.

    .. note:: develop mode builds the extensions in place and makes a link to
        package in site-packages/. The build_prefix points to conda environment
        since runtime libraries should be loaded from environment's lib/. first
    '''
    # find binaries in package dir and make them relocatable
    bin_files = rec_glob(pkg_path, ['.so'])
    # the platform test is loop-invariant; hoisting it also stops the
    # "nothing to do" notice from being printed once per shared object
    if sys.platform == 'darwin':
        for b_file in bin_files:
            mk_relative_osx(b_file, build_prefix)
    elif bin_files:
        print("Nothing to do on Linux or Windows.")
示例#5
0
def relink_sharedobjects(pkg_path, build_prefix):
    '''
    invokes functions in post module to relink to libraries in conda env

    :param pkg_path: look for shared objects to relink in pkg_path
    :param build_prefix: path to conda environment which contains lib/. to find
        runtime libraries.

    .. note:: develop mode builds the extensions in place and makes a link to
        package in site-packages/. The build_prefix points to conda environment
        since runtime libraries should be loaded from environment's lib/. first
    '''
    # locate shared objects under the package dir and make them relocatable
    shared_objects = rec_glob(pkg_path, ['.so'])
    for so_path in shared_objects:
        if sys.platform != 'darwin':
            print("Nothing to do on Linux or Windows.")
        else:
            mk_relative_osx(so_path, build_prefix)
示例#6
0
def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None):
    """Copy the recipe files found under *path* into *dest_dir*.

    When *destination_subdir* is given, the top-level recipe is copied into
    that subdirectory of *dest_dir* instead, preserving a record of which
    parent recipe produced subpackages.
    """
    found = utils.rec_glob(path, "*")
    prefix = path + os.sep
    rel_paths = sorted(f.replace(prefix, "") for f in found)

    # when this actually has a value, we're copying the top-level recipe into a subdirectory,
    #    so that we have record of what parent recipe produced subpackages.
    if destination_subdir:
        dest_dir = join(dest_dir, destination_subdir)
    else:
        # exclude recipe.yaml because the json dictionary captures its content
        skip = ("recipe.yaml", "conda_build_config.yaml")
        rel_paths = [f for f in rel_paths if f not in skip]
    for rel in utils.filter_files(rel_paths, path):
        utils.copy_into(
            join(path, rel),
            join(dest_dir, rel),
            timeout=config.timeout,
            locking=config.locking,
            clobber=True,
        )
示例#7
0
def get_static_lib_exports_dumpbin(filename):
    r'''
    Return the sorted list of external symbols exported by a static library,
    as reported by MSVC's dumpbin, or None if dumpbin is unavailable or fails.

    > dumpbin /SYMBOLS /NOLOGO C:\msys64\mingw64\lib\libasprintf.a
    > C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Tools\MSVC\14.20.27508\bin\Hostx64\x64\dumpbin.exe
    > 020 00000000 UNDEF  notype ()    External     | malloc
    > vs
    > 004 00000010 SECT1  notype ()    External     | _ZN3gnu11autosprintfC1EPKcz
    '''
    dumpbin_exe = find_executable('dumpbin')
    if not dumpbin_exe:
        '''
        Oh the fun:
        https://stackoverflow.com/questions/41106407/programmatically-finding-the-vs2017-installation-directory
        Nice to see MS avoiding the Windows Registry though, took them a while! Still, let's ignore that, we just
        want a good dumpbin!
        '''
        pfx86 = os.environ['PROGRAMFILES(X86)']
        programs = [
            p for p in os.listdir(pfx86)
            if p.startswith("Microsoft Visual Studio")
        ]
        results = []
        for p in programs:
            from conda_build.utils import rec_glob
            dumpbin = rec_glob(os.path.join(pfx86, p), ("dumpbin.exe", ))
            for result in dumpbin:
                try:
                    out, _ = Popen([result, filename],
                                   shell=False,
                                   stdout=PIPE).communicate()
                    lines = out.decode('utf-8').splitlines()
                    version = lines[0].split(' ')[-1]
                    results.append((result, version))
                except Exception:
                    # this candidate binary is unusable (bad permissions,
                    # unexpected output, ...); just try the next one
                    pass
        if results:
            # pick the newest working dumpbin found under Visual Studio
            from conda_build.conda_interface import VersionOrder
            results = sorted(results, key=lambda x: VersionOrder(x[1]))
            dumpbin_exe = results[-1][0]
    if not dumpbin_exe:
        return None
    flags = ['/NOLOGO']
    exports = []
    for flag in ('/SYMBOLS', '/EXPORTS'):
        try:
            out, _ = Popen([dumpbin_exe] + flags + [flag] + [filename],
                           shell=False,
                           stdout=PIPE).communicate()
            results = out.decode('utf-8').splitlines()
            if flag == '/EXPORTS':
                # exported names appear heavily indented in /EXPORTS output
                exports.extend([
                    r.split(' ')[-1] for r in results
                    if r.startswith('                  ')
                ])
            else:
                # /SYMBOLS: keep defined external symbols only
                exports.extend([
                    r.split(' ')[-1] for r in results
                    if ('External ' in r and 'UNDEF ' not in r)
                ])
        except OSError:
            # dumpbin may not be available or have the correct permissions,
            # this should not cause a failure, see gh-3287
            print('WARNING: dumpbin: failed to get_exports({})'.format(filename))
            return None
    exports.sort()
    return exports
示例#8
0
def test_rec_glob():
    # exercise rec_glob's pattern matching plus its directory-ignore options
    with _generate_tmp_tree() as (tmp, _, files):
        file_a1, file_b1, file_a2, file_b2 = files
        assert sorted(utils.rec_glob(tmp, "fileA")) == [file_a1, file_a2]
        assert sorted(utils.rec_glob(tmp, ("fileA", "fileB"),
                                     ignores="dirB")) == [file_a2, file_b2]
        assert sorted(utils.rec_glob(tmp, "fileB", ignores=("dirC", ))) == [file_b1]