Code Example #3
File: test_source.py Project: cav71/conda-build
def test_git_into_existing_populated_folder_raises(testing_metadata):
    """Git will not clone into a non-empty folder.  This should raise an exception."""
    testing_metadata.meta['source'] = [
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')},
        {'folder': 'f1', 'git_url': 'https://github.com/conda/conda_build_test_recipe'}]
    with pytest.raises(subprocess.CalledProcessError):
        source.provide(testing_metadata)
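Note: this test leans entirely on git's own behavior, since git refuses to clone into a directory that already has contents. A standalone illustration of that failure mode (a hypothetical script, not part of conda-build):

import pathlib
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as d:
    # Pre-populate the target so it is no longer empty.
    (pathlib.Path(d) / "existing.txt").write_text("not empty")
    try:
        subprocess.check_call(
            ["git", "clone",
             "https://github.com/conda/conda_build_test_recipe", d])
    except subprocess.CalledProcessError as exc:
        print("clone refused, as expected:", exc.returncode)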
Code Example #4
def test_append_hash_to_fn(testing_metadata, caplog):
    relative_zip = 'testfn.zip'
    assert source.append_hash_to_fn(relative_zip, '123') == 'testfn_123.zip'
    relative_tar_gz = 'testfn.tar.gz'
    assert source.append_hash_to_fn(relative_tar_gz,
                                    '123') == 'testfn_123.tar.gz'
    absolute_zip = '/abc/testfn.zip'
    assert source.append_hash_to_fn(absolute_zip,
                                    '123') == '/abc/testfn_123.zip'
    absolute_tar_gz = '/abc/testfn.tar.gz'
    assert source.append_hash_to_fn(absolute_tar_gz,
                                    '123') == '/abc/testfn_123.tar.gz'
    absolute_win_zip = 'C:\\abc\\testfn.zip'
    assert source.append_hash_to_fn(absolute_win_zip,
                                    '123') == 'C:\\abc\\testfn_123.zip'
    absolute_win_tar_gz = 'C:\\abc\\testfn.tar.gz'
    assert source.append_hash_to_fn(absolute_win_tar_gz,
                                    '123') == 'C:\\abc\\testfn_123.tar.gz'
    relative_whl = 'setuptools-36.4.0-py2.py3-none-any.whl'
    assert source.append_hash_to_fn(
        relative_whl, '123') == 'setuptools-36.4.0-py2.py3-none-any_123.whl'

    testing_metadata.meta['source'] = [{
        'folder':
        'f1',
        'url':
        os.path.join(thisdir, 'archives', 'a.tar.bz2')
    }]
    reset_deduplicator()
    source.provide(testing_metadata)
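The assertions above pin down append_hash_to_fn's contract: the hash goes in front of the extension, and compound suffixes such as .tar.gz are treated as a single unit. A minimal sketch that satisfies them (an illustration only; conda-build's actual implementation may differ):

import os

MULTI_PART_EXTS = ('.tar.gz', '.tar.bz2', '.tar.xz')

def append_hash_to_fn(fn, hash_value):
    # Compound suffixes must be checked before os.path.splitext, which
    # would otherwise split 'a.tar.gz' into ('a.tar', '.gz').
    for ext in MULTI_PART_EXTS:
        if fn.endswith(ext):
            return fn[:-len(ext)] + '_' + hash_value + ext
    base, ext = os.path.splitext(fn)
    return base + '_' + hash_value + ext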
Code Example #5
def create_files(dir_path, m, config):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        copy_into(path, join(dir_path, fn), config.timeout)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(config.work_dir):
        source.provide(m.path, m.get_section('source'), config=config)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                               "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(config.work_dir, pattern))
        if not files:
            raise RuntimeError(
                "Did not find any source_files for test with pattern %s"
                % pattern)
        for f in files:
            copy_into(f, f.replace(config.work_dir, config.test_dir),
                      config.timeout)
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(config.test_dir, ext):
                os.remove(f)
    return has_files
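create_files relies on a get_ext_files helper to purge stale bytecode from the test directory. Presumably it walks the tree collecting files with a given suffix; a minimal sketch under that assumption:

import os

def get_ext_files(start_path, ext):
    # Yield every file under start_path whose name ends with ext.
    for root, _dirs, files in os.walk(start_path):
        for fn in files:
            if fn.endswith(ext):
                yield os.path.join(root, fn)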
Code Example #6
File: render.py Project: saamc/conda-build
def parse_or_try_download(metadata,
                          no_download_source,
                          verbose,
                          force_download=False,
                          dirty=False):

    if (force_download
            or (not no_download_source and has_vcs_metadata(metadata))):
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            if not dirty:
                source.provide(metadata.path,
                               metadata.get_section('source'),
                               verbose=verbose)
            metadata.parse_again(permit_undefined_jinja=False)
            need_source_download = False
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                  "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)
            need_source_download = True
    else:
        # we have not downloaded source in the render phase.  Download it in
        #     the build phase
        need_source_download = True
    metadata.parse_again(permit_undefined_jinja=False)
    return metadata, need_source_download
Code Example #7
def create_files(dir_path, m, config):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        copy_into(path, join(dir_path, fn), config.timeout)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(config.work_dir):
        source.provide(m.path, m.get_section('source'), config=config)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                                "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(config.work_dir, pattern))
        if not files:
            raise RuntimeError("Did not find any source_files for test with pattern %s", pattern)
        for f in files:
            copy_into(f, f.replace(config.work_dir, config.test_dir), config.timeout)
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(config.test_dir, ext):
                os.remove(f)
    return has_files
Code Example #8
File: main_build.py Project: minrk/conda-build
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)
                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Code Example #9
def try_download(metadata, no_download_source, force_download=False):
    need_source_download = True
    if (force_download
            or (not no_download_source and metadata.needs_source_for_render)):
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            if not metadata.config.dirty or len(
                    os.listdir(metadata.config.work_dir)) == 0:
                source.provide(metadata)
            if not metadata.get_section('source') or len(
                    os.listdir(metadata.config.work_dir)) > 0:
                need_source_download = False
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                  "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)

    elif not metadata.get_section('source'):
        need_source_download = False

    if need_source_download and no_download_source:
        raise ValueError(
            "no_download_source specified, but can't fully render recipe without"
            " downloading source.  Please fix the recipe, or don't use "
            "no_download_source.")
Code Example #10
File: render.py Project: patricksnape/conda-build
def parse_or_try_download(metadata, no_download_source, config,
                          force_download=False):

    need_reparse_in_env = True
    need_source_download = True
    if (force_download or (not no_download_source and metadata.needs_source_for_render)):
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            if not config.dirty or len(os.listdir(config.work_dir)) == 0:
                source.provide(metadata, config=config)
            if not metadata.get_section('source') or len(os.listdir(config.work_dir)) > 0:
                need_source_download = False
            try:
                metadata.parse_again(config=config, permit_undefined_jinja=False)
                need_reparse_in_env = False
            except (ImportError, exceptions.UnableToParseMissingSetuptoolsDependencies):
                pass  # we just don't alter the need_reparse_in_env variable
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)

    elif not metadata.get_section('source'):
        need_source_download = False
        need_reparse_in_env = False
    if not need_reparse_in_env:
        try:
            metadata.parse_until_resolved(config=config)
        except exceptions.UnableToParseMissingSetuptoolsDependencies:
            need_reparse_in_env = True
    if metadata.get_value('build/noarch'):
        config.noarch = True
    return metadata, need_source_download, need_reparse_in_env
Code Example #11
File: test_source.py Project: timsnyder/conda-build
def test_multiple_different_sources(testing_metadata):
    testing_metadata.meta['source'] = [
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')},
        {'folder': 'f2', 'git_url': 'https://github.com/conda/conda_build_test_recipe'}]
    source.provide(testing_metadata)
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a'))
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2', 'README.md'))
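This test captures provide()'s contract for a list of sources: each dict is materialized under work_dir/<folder>, with 'url' entries unpacked and 'git_url' entries cloned. A rough sketch of that per-source dispatch (the names and the narrow handling here are illustrative, not conda-build's internals):

import os
import subprocess
import tarfile

def provide_all(sources, work_dir):
    for src in sources:
        dst = os.path.join(work_dir, src.get('folder', ''))
        os.makedirs(dst, exist_ok=True)
        if 'url' in src:
            # Assumes a local archive path; the real code also downloads.
            with tarfile.open(src['url']) as t:
                t.extractall(dst)
        elif 'git_url' in src:
            # git allows cloning into an existing *empty* directory.
            subprocess.check_call(['git', 'clone', '--recursive',
                                   src['git_url'], dst])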
Code Example #12
def test_git_repo_with_single_subdir_does_not_enter_subdir(testing_metadata):
    """Regression test for https://github.com/conda/conda-build/issues/1910 """
    testing_metadata.meta['source'] = {
        'git_url': 'https://github.com/conda/conda_build_single_folder_test'
    }
    source.provide(testing_metadata)
    assert os.path.basename(testing_metadata.config.work_dir) != 'one_folder'
Code Example #13
File: test_source.py Project: conda/conda-build
def test_alternative_url_no_fn(testing_metadata):
    testing_metadata.meta['source'] = {'url': [
        os.path.join(thisdir, 'archives', 'a.tar.bz2'),
        os.path.join(thisdir, 'archives', 'a.tar.bz2'),
    ]}
    source.provide(testing_metadata)
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'a'))
Code Example #14
def test_multiple_different_sources(testing_metadata):
    testing_metadata.meta['source'] = [{
        'folder':
        'f1',
        'url':
        os.path.join(thisdir, 'archives', 'a.tar.bz2')
    }, {
        'folder':
        'f2',
        'git_url':
        'https://github.com/conda/conda_build_test_recipe'
    }]
    source.provide(testing_metadata)
    assert os.path.exists(
        os.path.join(testing_metadata.config.work_dir, 'f1', 'a'))
    assert os.path.exists(
        os.path.join(testing_metadata.config.work_dir, 'f2', 'README.md'))

    # Test get_value() indexing syntax.
    assert testing_metadata.get_value(
        'source/url') == testing_metadata.meta['source'][0]['url']
    assert testing_metadata.get_value(
        'source/0/url') == testing_metadata.meta['source'][0]['url']
    assert (testing_metadata.get_value('source/1/git_url') ==
            testing_metadata.meta['source'][1]['git_url'])
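The final assertions exercise get_value()'s path syntax: an optional numeric segment ('source/0/url') indexes into a list of sources, and a bare 'source/url' falls back to the first entry. A simplified stand-in that satisfies exactly those assertions (the real method handles far more cases):

def get_value(meta, field, default=None):
    parts = field.split('/')
    section = meta.get(parts[0], default)
    rest = parts[1:]
    if rest and rest[0].isdigit():
        # A numeric segment selects one entry of a list-valued section.
        section = section[int(rest[0])]
        rest = rest[1:]
    if isinstance(section, list):
        # No index given: default to the first entry.
        section = section[0]
    return section.get(rest[0], default) if rest else section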
Code Example #15
File: test_source.py Project: gabm/conda-build
def test_append_hash_to_fn(testing_metadata, caplog):
    relative_zip = 'testfn.zip'
    assert source.append_hash_to_fn(relative_zip, '123') == 'testfn_123.zip'
    relative_tar_gz = 'testfn.tar.gz'
    assert source.append_hash_to_fn(relative_tar_gz,
                                    '123') == 'testfn_123.tar.gz'
    absolute_zip = '/abc/testfn.zip'
    assert source.append_hash_to_fn(absolute_zip,
                                    '123') == '/abc/testfn_123.zip'
    absolute_tar_gz = '/abc/testfn.tar.gz'
    assert source.append_hash_to_fn(absolute_tar_gz,
                                    '123') == '/abc/testfn_123.tar.gz'
    absolute_win_zip = 'C:\\abc\\testfn.zip'
    assert source.append_hash_to_fn(absolute_win_zip,
                                    '123') == 'C:\\abc\\testfn_123.zip'
    absolute_win_tar_gz = 'C:\\abc\\testfn.tar.gz'
    assert source.append_hash_to_fn(absolute_win_tar_gz,
                                    '123') == 'C:\\abc\\testfn_123.tar.gz'

    testing_metadata.meta['source'] = [{
        'folder':
        'f1',
        'url':
        os.path.join(thisdir, 'archives', 'a.tar.bz2')
    }]
    source.provide(testing_metadata)
Code Example #16
File: create_test.py Project: www3838438/conda-build
def create_files(m, test_dir=None):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    if not test_dir:
        test_dir = m.config.test_dir
    has_files = False
    rm_rf(test_dir)
    if not os.path.isdir(test_dir):
        os.makedirs(test_dir)
    info_test_dir = os.path.join(os.path.dirname(m.path), 'test')
    if re.search("info[\\\\/]recipe$",
                 m.path) and os.path.isdir(info_test_dir):
        src_dir = info_test_dir
    else:
        src_dir = m.config.work_dir

    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(path,
                  join(test_dir, fn),
                  m.config.timeout,
                  locking=False,
                  clobber=True)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and (not isdir(src_dir)
                                             or not os.listdir(src_dir)):
        source.provide(m)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                               "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(src_dir, pattern))
        if not files:
            raise RuntimeError(
                "Did not find any source_files for test with pattern %s"
                % pattern)
        for f in files:
            try:
                # disable locking to avoid locking a temporary directory (the extracted test folder)
                copy_into(f,
                          f.replace(src_dir, test_dir),
                          m.config.timeout,
                          locking=False)
            except OSError as e:
                log = logging.getLogger(__name__)
                log.warn("Failed to copy {0} into test files.  Error was: {1}".
                         format(f, str(e)))
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(test_dir, ext):
                os.remove(f)
    return has_files
Code Example #17
File: api.py Project: ESSS/conda-build
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    import os
    from conda_build.conda_interface import url_path
    from conda_build.build import test
    from conda_build.render import render_recipe
    from conda_build.utils import get_recipe_abspath, rm_rf
    from conda_build import source

    config = get_or_merge_config(config, **kwargs)

    # we want to know if we're dealing with package input.  If so, we can move the input on success.
    is_package = False

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    else:
        recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata)
        config.need_cleanup = need_cleanup

        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided
        metadata, _, _ = render_recipe(recipe_dir, config=config)
        recipe_config = config
        # this recipe came from an extracted tarball.
        if need_cleanup:
            # ensure that the local location of the package is indexed, so that conda can find the
            #    local package
            local_location = os.path.dirname(recipedir_or_package_or_metadata)
            # strip off extra subdir folders
            for platform in ('win', 'linux', 'osx'):
                if os.path.basename(local_location).startswith(platform + "-"):
                    local_location = os.path.dirname(local_location)
            update_index(local_location, config=config)
            local_url = url_path(local_location)
            # channel_urls is an iterable, but we don't know if it's a tuple or list.  Don't know
            #    how to add elements.
            recipe_config.channel_urls = list(recipe_config.channel_urls)
            recipe_config.channel_urls.insert(0, local_url)
            is_package = True
            if metadata.meta.get('test') and metadata.meta['test'].get('source_files'):
                source.provide(metadata.path, metadata.get_section('source'), config=config)
            rm_rf(recipe_dir)

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided

        recipe_config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)

        if (test_result and is_package and hasattr(recipe_config, 'output_folder') and
                recipe_config.output_folder):
            os.rename(recipedir_or_package_or_metadata,
                      os.path.join(recipe_config.output_folder,
                                   os.path.basename(recipedir_or_package_or_metadata)))
    return test_result
Code Example #18
def main():
    metadata = MetaData(os.environ["RECIPE_DIR"])
    build_id = os.getcwd().split(os.path.sep)[-3]
    print "build_id:", build_id
    for name, section in metadata.get_section("extra").items():
        source.provide(
            Source(section),
            config.Config(build_id=build_id))
Code Example #20
File: test_source.py Project: cav71/conda-build
def test_extract_tarball_with_subfolders_moves_files(testing_metadata):
    """Ensure that tarballs that contain only a single folder get their contents
    hoisted up one level"""
    testing_metadata.meta['source'] = {
        'url': os.path.join(thisdir, 'archives', 'subfolder.tar.bz2')}
    source.provide(testing_metadata)
    assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder'))
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'abc'))
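The behavior under test: when an extracted archive yields exactly one top-level directory, its contents are hoisted into the work directory itself. A bare-bones sketch of that step (illustrative only, not conda-build's actual code):

import os
import shutil

def hoist_single_subfolder(work_dir):
    entries = os.listdir(work_dir)
    if len(entries) == 1 and os.path.isdir(os.path.join(work_dir, entries[0])):
        lone = os.path.join(work_dir, entries[0])
        for item in os.listdir(lone):
            shutil.move(os.path.join(lone, item), os.path.join(work_dir, item))
        os.rmdir(lone)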
Code Example #21
File: test_source.py Project: cav71/conda-build
def test_multiple_url_sources_into_same_folder(testing_metadata):
    testing_metadata.meta['source'] = [
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')},
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'b.tar.bz2')}]
    source.provide(testing_metadata)
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1'))
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a'))
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'b'))
Code Example #23
def test_alternative_url_no_fn(testing_metadata):
    testing_metadata.meta['source'] = {
        'url': [
            os.path.join(thisdir, 'archives', 'a.tar.bz2'),
            os.path.join(thisdir, 'archives', 'a.tar.bz2'),
        ]
    }
    source.provide(testing_metadata)
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'a'))
Code Example #24
File: test_source.py Project: cav71/conda-build
def test_extract_multiple_tarballs_with_subfolders_flattens_all(testing_metadata):
    """Ensure that tarballs that contain only a single folder get their contents
    hoisted up one level"""
    testing_metadata.meta['source'] = [
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'subfolder.tar.bz2')},
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'subfolder2.tar.bz2')}]
    source.provide(testing_metadata)
    assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder'))
    assert not os.path.exists(os.path.join(testing_metadata.config.work_dir, 'subfolder2'))
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'abc'))
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'def'))
Code Example #26
File: test_source.py Project: cav71/conda-build
def test_multiple_different_sources(testing_metadata):
    testing_metadata.meta['source'] = [
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')},
        {'folder': 'f2', 'git_url': 'https://github.com/conda/conda_build_test_recipe'}]
    source.provide(testing_metadata)
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f1', 'a'))
    assert os.path.exists(os.path.join(testing_metadata.config.work_dir, 'f2', 'README.md'))

    # Test get_value() indexing syntax.
    assert testing_metadata.get_value('source/url') == testing_metadata.meta['source'][0]['url']
    assert testing_metadata.get_value('source/0/url') == testing_metadata.meta['source'][0]['url']
    assert (testing_metadata.get_value('source/1/git_url') ==
            testing_metadata.meta['source'][1]['git_url'])
Code Example #27
File: build.py Project: srossross/conda-build
def build(m, get_src=True, pypi=False):
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')], pypi)

    print("BUILD START:", m.dist())

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        cmd = ['/bin/bash', '-x', '-e', join(m.path, 'build.sh')]
        _check_call(cmd, env=env, cwd=source.get_dir())

    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(
            m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1))
    create_info_files(m, sorted(files2 - files1))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
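Note how this early build() detects what got installed: it snapshots the prefix before and after the build and diffs the two sets (files2 - files1). prefix_files above takes no arguments and reads a module-level prefix; a parameterized sketch of the same idea, assuming it returns prefix-relative paths of every file:

import os

def prefix_files(prefix):
    found = set()
    for root, _dirs, files in os.walk(prefix):
        for fn in files:
            found.add(os.path.relpath(os.path.join(root, fn), prefix))
    return found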
Code Example #28
def main():
    recipe_dir = os.environ["RECIPE_DIR"]
    src_dir = os.environ["SRC_DIR"]
    main_work_dir = source.WORK_DIR

    metadata = MetaData(recipe_dir)
    extra_sources_sections = metadata.get_section('extra')['sources']

    for name, source_section in extra_sources_sections.items():
        # Override the location to clone into
        source.WORK_DIR = main_work_dir + '/' + name
        os.makedirs(source.WORK_DIR)

        # Download source
        source.provide(recipe_dir, source_section)
Code Example #30
def parse_or_try_download(metadata, no_download_source):
    if not no_download_source:
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            source.provide(metadata.path, metadata.get_section('source'))
            metadata.parse_again(permit_undefined_jinja=False)
            need_source_download = False
        except subprocess.CalledProcessError:
            print("Warning: failed to download source.  If building, will try "
                  "again after downloading recipe dependencies.")
            need_source_download = True
    else:
        metadata.parse_again(permit_undefined_jinja=False)
        need_source_download = no_download_source
    return metadata, need_source_download
Code Example #31
def parse_or_try_download(metadata,
                          no_download_source,
                          config,
                          force_download=False):

    need_reparse_in_env = False
    # Default guards against the dirty path below leaving this unset.
    need_source_download = True
    if (force_download
            or (not no_download_source and metadata.needs_source_for_render)):
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            if not config.dirty:
                if len(os.listdir(config.work_dir)) == 0:
                    source.provide(metadata.path,
                                   metadata.get_section('source'),
                                   config=config)
                need_source_download = False
            try:
                metadata.parse_again(config=config,
                                     permit_undefined_jinja=False)
            except (ImportError,
                    exceptions.UnableToParseMissingSetuptoolsDependencies):
                need_reparse_in_env = True
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                  "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)
            need_source_download = True

    elif not metadata.get_section('source'):
        need_source_download = False
        if not os.path.isdir(config.work_dir):
            os.makedirs(config.work_dir)
    else:
        # we have not downloaded source in the render phase.  Download it in
        #     the build phase
        need_source_download = not no_download_source
    if not need_reparse_in_env:
        try:
            metadata.parse_until_resolved(config=config)
        except exceptions.UnableToParseMissingSetuptoolsDependencies:
            need_reparse_in_env = True
    if metadata.get_value('build/noarch'):
        config.noarch = True
    return metadata, need_source_download, need_reparse_in_env
Code Example #32
File: core.py Project: mbargull/cf-scripts
def _get_source_code(recipe_dir):
    # Use conda build to do all the downloading/extracting bits
    md = render(recipe_dir, config=Config(**CB_CONFIG))
    if not md:
        return
    md = md[0][0]
    # provide source dir
    return provide(md)
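Hypothetical usage of the helper above, assuming provide() hands back the populated source directory as cf-scripts expects (the recipe path is illustrative):

src_dir = _get_source_code("recipes/mypkg")
if src_dir:
    print("source unpacked into", src_dir)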
Code Example #33
File: create_test.py Project: evhub/conda-build
def create_files(m):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    rm_rf(m.config.test_dir)
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        copy_into(path,
                  join(m.config.test_dir, fn),
                  m.config.timeout,
                  locking=m.config.locking,
                  clobber=True)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(m.config.work_dir):
        source.provide(m)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                               "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(m.config.work_dir, pattern))
        if not files:
            raise RuntimeError(
                "Did not find any source_files for test with pattern %s"
                % pattern)
        for f in files:
            try:
                copy_into(f,
                          f.replace(m.config.work_dir, m.config.test_dir),
                          m.config.timeout,
                          locking=m.config.locking)
            except OSError as e:
                log = logging.getLogger(__name__)
                log.warn("Failed to copy {0} into test files.  Error was: {1}".
                         format(f, str(e)))
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(m.config.test_dir, ext):
                os.remove(f)
    return has_files
Code Example #34
File: test_source.py Project: Chilipp/conda-build
def test_git_repo_without_submodule(testing_metadata):
    testing_metadata.meta['source'] = [{
        'folder':
        'f1',
        'git_url':
        'https://github.com/Chilipp/conda_build_test_recipe'
    }, {
        'folder': 'f2',
        'git_url': 'https://github.com/Chilipp/conda_build_test_recipe',
        'git_recursive': False
    }]
    source.provide(testing_metadata)
    assert os.path.exists(
        os.path.join(testing_metadata.config.work_dir, 'f1', 'submodule',
                     'README.md'))
    assert not os.path.exists(
        os.path.join(testing_metadata.config.work_dir, 'f2', 'submodule',
                     'README.md'))
Code Example #35
File: build.py Project: stjordanis/boa
def _try_download(m, interactive):
    try:
        source.provide(m)
    except RuntimeError as e:
        if interactive:
            msg = str(e)

            if "mismatch: " in msg:
                # parse the mismatch
                parts = msg.split(" ")
                hash_type = parts[0]
                hash_pkg = parts[2]
                hash_recipe = parts[4]
                console.print(
                    f"[red]Error: {hash_type} hash mismatch![/red]\nExpected: [red]{hash_recipe}[/red]\nGot:      [green]{hash_pkg}"
                )

                answer = Confirm.ask(
                    "Do you want to automatically update your recipe with the new hash?",
                    default=False,
                )

                if answer:
                    sources = m.get_section("source")
                    hash_type = hash_type.lower()
                    hash_pkg = hash_pkg[1:-1]
                    hash_recipe = hash_recipe[1:-1]
                    for x in sources:
                        if hash_type in x and x[hash_type] == hash_recipe:
                            x[hash_type] = hash_pkg
                            console.print("Changed ", x)

                    with open(m.meta_path) as fi:
                        recipe_txt = fi.read()
                    recipe_txt = recipe_txt.replace(hash_recipe, hash_pkg)
                    with open(m.meta_path, "w") as fo:
                        fo.write(recipe_txt)
                    console.print(f"Written new hash to {m.meta_path}")
                    # call self again
                    return _try_download(m, interactive)

        raise e
Code Example #36
File: core.py Project: isuruf/cf-scripts
def _get_source_code(recipe_dir):
    # Use conda build to do all the downloading/extracting bits
    md = render(
        recipe_dir, config=Config(**CB_CONFIG), finalize=False, bypass_env_check=True,
    )
    if not md:
        return None
    md = md[0][0]
    # provide source dir
    return provide(md)
Code Example #37
File: render.py Project: conda/conda-build
def try_download(metadata, no_download_source, raise_error=False):
    if not metadata.source_provided and not no_download_source:
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            source.provide(metadata)
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)

    if not metadata.source_provided:
        if no_download_source:
            raise ValueError("no_download_source specified, but can't fully render recipe without"
                             " downloading source.  Please fix the recipe, or don't use "
                             "no_download_source.")
        elif raise_error:
            raise RuntimeError("Failed to download or patch source. Please see build log for info.")
Code Example #39
File: render.py Project: chaubold/conda-build
def try_download(metadata, no_download_source):
    need_source_download = len(os.listdir(metadata.config.work_dir)) == 0
    if need_source_download and not no_download_source:
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            source.provide(metadata)
            need_source_download = len(os.listdir(
                metadata.config.work_dir)) > 0
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                  "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)

    if need_source_download and no_download_source:
        raise ValueError(
            "no_download_source specified, but can't fully render recipe without"
            " downloading source.  Please fix the recipe, or don't use "
            "no_download_source.")
Code Example #40
File: test_source.py Project: cav71/conda-build
def test_append_hash_to_fn(testing_metadata, caplog):
    relative_zip = 'testfn.zip'
    assert source.append_hash_to_fn(relative_zip, '123') == 'testfn_123.zip'
    relative_tar_gz = 'testfn.tar.gz'
    assert source.append_hash_to_fn(relative_tar_gz, '123') == 'testfn_123.tar.gz'
    absolute_zip = '/abc/testfn.zip'
    assert source.append_hash_to_fn(absolute_zip, '123') == '/abc/testfn_123.zip'
    absolute_tar_gz = '/abc/testfn.tar.gz'
    assert source.append_hash_to_fn(absolute_tar_gz, '123') == '/abc/testfn_123.tar.gz'
    absolute_win_zip = 'C:\\abc\\testfn.zip'
    assert source.append_hash_to_fn(absolute_win_zip, '123') == 'C:\\abc\\testfn_123.zip'
    absolute_win_tar_gz = 'C:\\abc\\testfn.tar.gz'
    assert source.append_hash_to_fn(absolute_win_tar_gz, '123') == 'C:\\abc\\testfn_123.tar.gz'
    relative_whl = 'setuptools-36.4.0-py2.py3-none-any.whl'
    assert source.append_hash_to_fn(relative_whl, '123') == 'setuptools-36.4.0-py2.py3-none-any_123.whl'

    testing_metadata.meta['source'] = [
        {'folder': 'f1', 'url': os.path.join(thisdir, 'archives', 'a.tar.bz2')}]
    reset_deduplicator()
    source.provide(testing_metadata)
Code Example #41
File: create_test.py Project: jjhelmus/conda-build
def create_files(m):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    rm_rf(m.config.test_dir)
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(path, join(m.config.test_dir, fn), m.config.timeout, locking=False,
                  clobber=True)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(m.config.work_dir):
        source.provide(m)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                                "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(m.config.work_dir, pattern))
        if not files:
            raise RuntimeError("Did not find any source_files for test with pattern %s", pattern)
        for f in files:
            try:
                # disable locking to avoid locking a temporary directory (the extracted test folder)
                copy_into(f, f.replace(m.config.work_dir, m.config.test_dir), m.config.timeout,
                          locking=False)
            except OSError as e:
                log = logging.getLogger(__name__)
                log.warn("Failed to copy {0} into test files.  Error was: {1}".format(f, str(e)))
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(m.config.test_dir, ext):
                os.remove(f)
    return has_files
Code Example #42
File: render.py Project: patanijo/conda-build
def parse_or_try_download(metadata, no_download_source, config,
                          force_download=False):

    need_reparse_in_env = False
    # Default guards against the dirty path below leaving this unset.
    need_source_download = True
    if (force_download or (not no_download_source and (metadata.uses_vcs_in_meta or
                                                       metadata.uses_setup_py_in_meta))):

        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            if not config.dirty:
                if len(os.listdir(config.work_dir)) == 0:
                    source.provide(metadata.path, metadata.get_section('source'), config=config)
                need_source_download = False
            try:
                metadata.parse_again(config=config, permit_undefined_jinja=False)
            except (ImportError, exceptions.UnableToParseMissingSetuptoolsDependencies):
                need_reparse_in_env = True
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)
            need_source_download = True

    elif not metadata.get_section('source'):
        need_source_download = False
        if not os.path.isdir(config.work_dir):
            os.makedirs(config.work_dir)
    else:
        # we have not downloaded source in the render phase.  Download it in
        #     the build phase
        need_source_download = not no_download_source
    if not need_reparse_in_env:
        try:
            metadata.parse_until_resolved(config=config)
        except exceptions.UnableToParseMissingSetuptoolsDependencies:
            need_reparse_in_env = True
    return metadata, need_source_download, need_reparse_in_env
Code Example #43
File: render.py Project: filmor/conda-build
def parse_or_try_download(metadata, no_download_source, verbose,
                          force_download=False, dirty=False):
    if (force_download or (not no_download_source and
                           any(var.startswith('GIT_') for var in metadata.undefined_jinja_vars))):
        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            source.provide(metadata.path, metadata.get_section('source'),
                           verbose=verbose, dirty=dirty)
            metadata.parse_again(permit_undefined_jinja=False)
            need_source_download = False
        except subprocess.CalledProcessError:
            print("Warning: failed to download source.  If building, will try "
                "again after downloading recipe dependencies.")
            need_source_download = True
        else:
            need_source_download = no_download_source
    else:
        # we have not downloaded source in the render phase.  Download it in
        #     the build phase
        need_source_download = True
    metadata.parse_again(permit_undefined_jinja=False)
    return metadata, need_source_download
Code Example #44
File: render.py Project: ivoflipse/conda-build
def parse_or_try_download(metadata, no_download_source, verbose,
                          force_download=False, dirty=False):

    need_reparse_in_env = False
    if (force_download or (not no_download_source and (metadata.uses_vcs_in_meta() or
                                                       metadata.uses_setuptools_in_meta()))):
        if dirty:
            need_source_download = False

        # this try/catch is for when the tool to download source is actually in
        #    meta.yaml, and not previously installed in builder env.
        try:
            if not dirty:
                source.provide(metadata.path, metadata.get_section('source'),
                               verbose=verbose)
                need_source_download = False
            try:
                metadata.parse_again(permit_undefined_jinja=False)
            except exceptions.UnableToParseMissingSetuptoolsDependencies:
                need_reparse_in_env = True
        except subprocess.CalledProcessError as error:
            print("Warning: failed to download source.  If building, will try "
                "again after downloading recipe dependencies.")
            print("Error was: ")
            print(error)
            need_source_download = True
    elif not metadata.get_section('source'):
        need_source_download = False
    else:
        # we have not downloaded source in the render phase.  Download it in
        #     the build phase
        need_source_download = True
    try:
        metadata.parse_again(permit_undefined_jinja=False)
    except exceptions.UnableToParseMissingSetuptoolsDependencies:
        need_reparse_in_env = True
    return metadata, need_source_download, need_reparse_in_env
Code Example #45
File: utils.py Project: Prodyte/cf-scripts
def _get_source_code(recipe_dir):
    from conda_build.api import render
    from conda_build.config import Config
    from conda_build.source import provide

    # Use conda build to do all the downloading/extracting bits
    md = render(
        recipe_dir,
        config=Config(**CB_CONFIG),
        finalize=False,
        bypass_env_check=True,
    )
    if not md:
        return None
    md = md[0][0]
    # provide source dir
    try:
        return provide(md)
    except SystemExit:
        raise RuntimeError(f"Could not download source for {recipe_dir}!")
Code Example #46
File: utils.py Project: ryanvolz/cf-scripts
def _get_source_code(recipe_dir):
    try:
        from conda_build.api import render
        from conda_build.config import Config
        from conda_build.source import provide

        # Use conda build to do all the downloading/extracting bits
        md = render(
            recipe_dir,
            config=Config(**CB_CONFIG),
            finalize=False,
            bypass_env_check=True,
        )
        if not md:
            return None
        md = md[0][0]
        # provide source dir
        return provide(md)
    except (SystemExit, Exception) as e:
        raise RuntimeError("conda build src exception:" + str(e))
Code Example #47
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
        }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
            channel_urls=channel_urls,
            override_channels=args.override_channels)

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                    channel_urls=channel_urls, override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
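Note how this CLI encodes version flags: int(versions[0].replace('.', '')) collapses a major.minor string into the two-digit integer stored in CONDA_PY and friends, which is why the later check insists on exactly two digits. For example:

int("2.7".replace('.', ''))   # -> 27, assigned to CONDA_PY
int("1.9".replace('.', ''))   # -> 19, assigned to CONDA_NPY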
Code Example #48
File: render.py Project: chenghlee/conda-build
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package, and figure out
    # which python version we prefer. `noarch_python_build_age` can be used to
    # tweak which python gets used here.
    if metadata.noarch or metadata.noarch_python:
        from .conda_interface import VersionOrder
        age = int(
            metadata.get_value('build/noarch_python_build_age',
                               metadata.config.noarch_python_build_age))
        versions = []
        for variant in variants:
            if 'python' in variant:
                vo = variant['python']
                if vo not in versions:
                    versions.append(vo)
        version_indices = sorted(
            range(len(versions)),
            key=lambda k: VersionOrder(versions[k].split(' ')[0]))
        if age < 0:
            age = 0
        elif age > len(versions) - 1:
            age = len(versions) - 1
        build_ver = versions[version_indices[len(versions) - 1 - age]]
        variants = filter_by_key_value(variants, 'python', build_ver,
                                       'noarch_python_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set. 'variants' is the one that mostly gets
    #     used, and it can be reduced.
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)

    for variant in top_loop:
        from conda_build.build import get_all_replacements
        get_all_replacements(variant)
        mv = metadata.copy()
        mv.config.variant = variant

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # Constrain the stored variants to only this version in the
            #     output variant mapping.
            conform_dict[key] = variant[key]

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(
                mv.config.variants, key, values,
                'distribute_variants_reduction') or mv.config.variants)
        get_all_replacements(mv.config.variants)
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        try:
            mv.parse_until_resolved(
                allow_no_other_outputs=allow_no_other_outputs,
                bypass_env_check=bypass_env_check)
        except SystemExit:
            pass
        need_source_download = (not mv.needs_source_for_render
                                or not mv.source_provided)

        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
                                     (mv, need_source_download, None)
    # Each value is a 3-tuple:
    #     (metadata, need_download, need_reparse_in_env)
    return list(rendered_metadata.values())
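distribute_variants leans on two small variant utilities whose behavior can be inferred from their use above. The sketches below are assumptions based on the names and call sites, not the actual conda-build implementations:

def filter_by_key_value(variants, key, value, reduction_name):
    # keep only the variant dicts whose `key` equals `value`;
    # `reduction_name` is assumed to be just a label for logging
    return [v for v in variants if v.get(key) == value]

def list_of_dicts_to_dict_of_lists(variants):
    # [{'python': '3.8'}, {'python': '3.9'}] -> {'python': ['3.8', '3.9']}
    squished = {}
    for variant in variants:
        for key, value in variant.items():
            squished.setdefault(key, []).append(value)
    return squished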
Code Example #49
def build(m, get_src=True, verbose=True, post=None, channel_urls=(), override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :param post: None means run the whole build. True means run post only.
        False means stop just before the post.
    :type post: bool or None
    '''
    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
            # We must use a long prefix here as the package will only be
            # installable into prefixes shorter than this one.
            config.use_long_build_prefix = True
        else:
            # In case there are multiple builds in the same process
            config.use_long_build_prefix = False

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
            [ms.spec for ms in m.ms_depends('build')],
            verbose=verbose, channel_urls=channel_urls,
            override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files().difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post is True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Code Example #50
File: main_build.py Project: tkf/conda-build
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2 and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
            channel_urls=channel_urls,
            override_channels=args.override_channels)

    already_built = []
    to_build = args.recipe[:]
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                    channel_urls=channel_urls, override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels, include_recipe=args.include_recipe)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
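The language/version loop above normalizes --python and --numpy values into the small integers conda-build historically stored in CONDA_PY and CONDA_NPY (37 for Python 3.7, 110 for NumPy 1.10). A sketch of that conversion:

def to_conda_version(ver):
    # 'major.minor' with the dot stripped, as an int: '3.7' -> 37
    return int(str(ver).replace('.', ''))

assert to_conda_version('3.7') == 37
assert to_conda_version('1.10') == 110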
Code Example #51
File: test_source.py Project: cav71/conda-build
def test_git_repo_with_single_subdir_does_not_enter_subdir(testing_metadata):
    """Regression test for https://github.com/conda/conda-build/issues/1910 """
    testing_metadata.meta['source'] = {
        'git_url': 'https://github.com/conda/conda_build_single_folder_test'}
    source.provide(testing_metadata)
    assert os.path.basename(testing_metadata.config.work_dir) != 'one_folder'
Code Example #52
File: build.py Project: frol/conda-build
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
    override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :param post: None means run the whole build. True means run post only.
        False means stop just before the post.
    :type post: bool or None
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
        or m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        sys.exit(0)

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
            [ms.spec for ms in m.ms_depends('build')],
            verbose=verbose, channel_urls=channel_urls,
            override_channels=override_channels)

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." %
                m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls, override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post is True:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f) for f in
            files2 - files1):
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.  This error
usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                (tuple(f for f in files2 - files1 if config.meta_dir in
                    join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Code Example #53
File: main_build.py Project: qwhelan/conda-build
def source_action(recipe, config):
    metadata = api.render(recipe, config=config)[0][0]
    source.provide(metadata)
    print('Source tree in:', metadata.config.work_dir)
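A minimal sketch of invoking this action directly, assuming a recipe directory path (the './my-recipe' path is made up) and a default conda_build Config; exact Config construction varies across conda-build versions:

from conda_build.config import Config

source_action('./my-recipe', Config())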
Code Example #54
File: build.py Project: tienhv/conda-build
def build(m, get_src=True, verbose=True, post=None, channel_urls=(), override_channels=False):
    """
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :param post: None means run the whole build. True means run post only.
        False means stop just before the post.
    :type post: bool or None
    """

    if m.get_value("build/detect_binary_files_with_prefix") or m.binary_has_prefix_files():
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(
            config.build_prefix,
            [ms.spec for ms in m.ms_depends("build")],
            verbose=verbose,
            channel_urls=channel_urls,
            override_channels=override_channels,
        )

        if m.name() in [i.rsplit("-", 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls, override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section("source"))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for rx in m.always_include_files():
            pat = re.compile(rx)
            has_matches = False
            for f in set(files1):
                if pat.match(f):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Regex %s from always_include_files does not match any files" % rx)
        # Save this for later
        with open(join(config.croot, "prefix_files.txt"), "w") as f:
            f.write(u"\n".join(sorted(list(files1))))
            f.write(u"\n")

        if sys.platform == "win32":
            import conda_build.windows as windows

            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, "build.sh")

            script = m.get_value("build/script", None)
            if script:
                if isinstance(script, list):
                    script = "\n".join(script)
                build_file = join(source.get_dir(), "conda_build.sh")
                with open(build_file, "w") as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ["/bin/bash", "-x", "-e", build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post is True:
            with open(join(config.croot, "prefix_files.txt"), "r") as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value("build/entry_points"))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        assert not any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1)
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
        if m.get_value("build/noarch_python"):
            import conda_build.noarch_python as noarch_python

            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, "w:bz2")
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Code Example #55
File: main_build.py Project: jeanconn/conda-build
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Code Example #56
File: main_build.py Project: jni/conda-build
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)

                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs if pkg.split(' ')[0] not
                            in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)

                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
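The Hint: branch above scrapes conflicting specs out of a multi-line hint message and skips interpreters, since a conflict with python or r usually means the other package needs rebuilding. A standalone sketch of that extraction, assuming the same '- <spec>' bullet format (conflicting_pkgs is a hypothetical helper name):

def conflicting_pkgs(error_str, skip_names=('python', 'r')):
    # pull '  - foo 1.2*' style bullet lines out of the hint text
    lines = [ln for ln in error_str.splitlines() if ln.strip().startswith('- ')]
    pkgs = [ln.strip().lstrip('- ') for ln in lines]
    # keep just the package names, dropping the interpreters
    return [p.split(' ')[0] for p in pkgs if p.split(' ')[0] not in skip_names]

hint = "Hint: conflicts\n  - python 3.5*\n  - mypkg 2.0"
assert conflicting_pkgs(hint) == ['mypkg']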
Code Example #57
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                try:
                    build.build(m)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1].replace(' ', '-')
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Code Example #58
File: main_build.py Project: NewbiZ/conda-build
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY", "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ["python", "numpy"]:
            if all_versions[lang]:
                raise RuntimeError(
                    "%s must be major.minor, like %s, not %s"
                    % (conda_version[lang], all_versions[lang][-1] / 10, version)
                )
            else:
                raise RuntimeError("%s must be major.minor, not %s" % (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls, override_channels=args.override_channels
        )

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls, override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this " "configuration." % m.dist())
                        continue
                    build.build(
                        m,
                        verbose=not args.quiet,
                        post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe,
                    )
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith("No packages found") or error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(
                                    (
                                        "Missing dependency {0}, but found"
                                        + " recipe directory, so building "
                                        + "{0} first"
                                    ).format(dep_pkg)
                                )
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(
                                        (
                                            "Missing dependency {0}, but found"
                                            + " recipe directory, so building "
                                            + "{0} first"
                                        ).format(pkg)
                                    )
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls, override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
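The retry logic above hinges on `recipes` being a double-ended queue: when a missing dependency matches a local recipe directory, the current argument is pushed back onto the front of the queue, the dependency's recipe is pushed in front of it, and the outer loop restarts so the dependency builds first, while `to_build_recursive` stops the same package from being retried forever. A minimal sketch of that pattern, with the hypothetical names `build_one` and `MissingDependency` standing in for conda-build's build call and its parsed resolver error:

from collections import deque

class MissingDependency(Exception):
    """Hypothetical stand-in for the resolver error handled above."""
    def __init__(self, pkg):
        super().__init__(pkg)
        self.pkg = pkg

def build_all(initial_recipes, build_one):
    # Work queue of recipe directories; appendleft gives a missing
    # dependency priority over the recipe that required it.
    recipes = deque(initial_recipes)
    to_build_recursive = []
    while recipes:
        recipe = recipes.popleft()
        try:
            build_one(recipe)
        except MissingDependency as e:
            if e.pkg in to_build_recursive:
                # Second failure for the same package: treat it as a
                # real cycle or an unbuildable dependency and give up.
                raise
            # Push the failing recipe back, then its dependency in
            # front of it, and loop again.
            recipes.appendleft(recipe)
            recipes.appendleft(e.pkg)
            to_build_recursive.append(e.pkg)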
Code example #59
def build(m, get_src=True, verbose=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    '''
    rm_rf(prefix)

    print("BUILD START:", m.dist())
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose)

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build.sh')
        if exists(build_file):
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                with open(build_file, 'w', encoding='utf-8') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)
            cmd = ['/bin/bash', '-x', '-e', build_file]

            _check_call(cmd, env=env, cwd=source.get_dir())

    get_build_metadata(m)
    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1),
               binary_relocation=bool(
                   m.get_value('build/binary_relocation', True)))
    create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(config.bldpkgs_dir)
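This older single-pass `build()` decides what to package purely by set arithmetic over prefix snapshots: `files1` is taken before the build script runs, `files2` and `files3` afterwards, and only the differences land in the tarball. A toy illustration of the idea, where `snapshot` is a hypothetical stand-in for the real `prefix_files()`:

import os

def snapshot(prefix):
    """Hypothetical stand-in for prefix_files(): every file currently
    under the prefix, as prefix-relative paths."""
    found = set()
    for root, _dirs, files in os.walk(prefix):
        for fn in files:
            found.add(os.path.relpath(os.path.join(root, fn), prefix))
    return found

# files1 = snapshot(prefix)            # before build.sh runs
# ... build script installs into the prefix ...
# files2 = snapshot(prefix)            # after the build
# new_files = sorted(files2 - files1)  # exactly what gets packaged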
Code example #60
File: build.py Project: shoyer/conda-build
def build(m, get_src=True, verbose=True, post=None):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :param post: None means run the whole build; True means run only the
        post-build steps; False means stop just before the post-build steps.
    :type post: bool or None
    '''
    if post in [False, None]:
        rm_rf(prefix)

        print("BUILD START:", m.dist())
        create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose)

        if get_src:
            source.provide(m.path, m.get_section('source'))
        assert isdir(source.WORK_DIR)
        if os.listdir(source.get_dir()):
            print("source tree in:", source.get_dir())
        else:
            print("no source")

        rm_rf(info_dir)
        files1 = prefix_files()
        if post is False:
            # Save this for later
            with open(join(source.WORK_DIR, 'prefix_files'), 'w') as f:
                json.dump(list(files1), f)

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')
            if exists(build_file):
                script = m.get_value('build/script', None)
                if script:
                    if isinstance(script, list):
                        script = '\n'.join(script)
                    with open(build_file, 'w', encoding='utf-8') as bf:
                        bf.write(script)
                    os.chmod(build_file, 0o766)
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=source.get_dir())

    if post in [True, None]:
        if post is True:
            with open(join(source.WORK_DIR, 'prefix_files')) as f:
                files1 = set(json.load(f))

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        assert not exists(info_dir)
        files2 = prefix_files()

        post_build(sorted(files2 - files1),
                   binary_relocation=bool(
                       m.get_value('build/binary_relocation', True)))
        create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())