Example 1
def build(m,
          post=None,
          include_recipe=True,
          keep_old_work=False,
          need_source_download=True,
          need_reparse_in_env=False,
          verbose=True,
          dirty=False,
          activate=True,
          debug=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param post: None means run the whole build; True means run only the
        post-build phase; False means stop just before the post-build phase.
    :type post: bool or None
    :param keep_old_work: Keep any previous work directory.
    :type keep_old_work: bool
    :param need_source_download: If rendering failed to download the source
        (due to missing tools), retry the download here, once the build
        environment is populated.
    :type need_source_download: bool
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):

        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [
                name for name in os.listdir(source.WORK_DIR)
                if os.path.isdir(os.path.join(source.WORK_DIR, name))
            ]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s" %
                      (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub),
                                old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if need_source_download or need_reparse_in_env:
                print(
                    "    (actual version deferred until further download or env creation)"
                )
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            specs = [ms.spec for ms in m.ms_depends('build')]
            if activate:
                # If we activate the build environment, we need to be sure that we
                #    have the appropriate VCS available in the environment.  People
                #    are not used to explicitly listing it in recipes, though.
                #    We add it for them here, but warn them about it.
                vcs_source = m.uses_vcs_in_build()
                if vcs_source and vcs_source not in specs:
                    vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
                    has_vcs_available = os.path.isfile(
                        external.find_executable(vcs_executable) or "")
                    if not has_vcs_available:
                        if (vcs_source != "mercurial" or not any(
                                spec.startswith('python') and "3." in spec
                                for spec in specs)):
                            specs.append(vcs_source)

                            log.warn(
                                "Your recipe depends on {} at build time (for templates), "
                                "but you have not listed it as a build dependency.  Doing "
                                "so for this build.".format(vcs_source))
                        else:
                            raise ValueError(
                                "Your recipe uses mercurial in build, but mercurial"
                                " does not yet support Python 3.  Please handle all of "
                                "your mercurial actions outside of your build script."
                            )
            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix, specs, debug=debug)

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (e.g. git, hg, svn) as build
                # dependencies.
                if not activate:
                    _old_path = os.environ['PATH']
                    os.environ['PATH'] = prepend_bin_path(
                        {'PATH': _old_path}, config.build_prefix)['PATH']
                try:
                    m, need_source_download, need_reparse_in_env = parse_or_try_download(
                        m,
                        no_download_source=False,
                        force_download=True,
                        verbose=verbose,
                        dirty=dirty)
                    assert not need_source_download, "Source download failed.  Please investigate."
                finally:
                    if not activate:
                        os.environ['PATH'] = _old_path
                print("BUILD START:", m.dist())

            if need_reparse_in_env:
                reparse(m)
                print("BUILD START:", m.dist())

            if m.name() in [
                    i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
            ]:
                print("%s is installed as a build dependency. Removing." %
                      m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()],
                                              index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            src_dir = source.get_dir()
            if isdir(source.WORK_DIR):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(source.WORK_DIR)

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn(
                        "Glob %s from always_include_files does not match any files"
                        % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(source.WORK_DIR):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(source.get_dir(), 'bld.bat')
                        with open(join(source.get_dir(), 'bld.bat'),
                                  'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m,
                                  build_file,
                                  dirty=dirty,
                                  activate=activate)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        env = environ.get_dict(m, dirty=dirty)
                        work_file = join(source.get_dir(), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if activate:
                            source_file = build_file if isfile(build_file) else work_file
                            with open(source_file) as f_in:
                                data = f_in.read()
                            with open(work_file, 'w') as bf:
                                bf.write(
                                    "source activate {build_prefix}\n".format(
                                        build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                shutil.copy(build_file, work_file)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]

                            _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(
                             m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(
                    indent(
                        """Error: Untracked file(s) %s found in conda-meta directory.
    This error usually comes from using conda in the build script.  Avoid doing this, as it
    can lead to packages that include their dependencies.""" %
                        (tuple(f for f in files2 - files1 if config.meta_dir in
                               join(config.build_prefix, f)), )))
            post_build(m, sorted(files2 - files1))
            create_info_files(m,
                              sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # empty files don't matter here; sort them to the back by
                # treating st_size == 0 as 100000 bytes
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s" %
                  (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print(
                        "Not restoring old source directory %s over new build's version"
                        % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub),
                                source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
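A minimal usage sketch (not part of the original listing): assuming conda_build's MetaData is importable as in the later examples, a full build of a recipe directory could be driven like this; the recipe path is a placeholder.

from conda_build.metadata import MetaData

m = MetaData('./my-recipe')  # hypothetical recipe directory
build(m, post=None)          # post=None runs the build and the post phase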
Example 2
def download(url, dst_path, session=None, md5=None, urlstxt=False):
    pp = dst_path + '.part'
    dst_dir = os.path.split(dst_path)[0]
    session = session or CondaSession()

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407: # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives this
            # error and http gives the above error. Also, there is no status_code
            # attribute here. We have to just check if it looks like 407.  See
            # https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e): # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                    urlstxt=urlstxt)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    n += len(chunk)
                    if size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            raise RuntimeError("Could not open %r for writing (%s).  "
                "Permissions problem or missing directory?" % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" % (pp,
                dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
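A hedged usage sketch: the URL, destination path, and digest below are placeholders. With md5 given, the function verifies the checksum before renaming the .part file into place; urlstxt=True appends the URL to urls.txt next to the download.

download('https://repo.example.org/pkgs/example-1.0.tar.bz2',  # placeholder
         '/tmp/pkgs/example-1.0.tar.bz2',
         md5='0123456789abcdef0123456789abcdef',  # placeholder digest
         urlstxt=True)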
Example 3
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                sys.exit('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                         (src, dst, lt, e))

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        # make sure that the child environment behaves like the parent,
        #    wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            alt_files_path = join(prefix, 'conda-meta',
                                  dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {
            'source': source_dir,
            'type': link_name_map.get(linktype)
        }
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
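A hedged sketch of calling this link(): the prefix and dist name are placeholders, and the package must already be extracted so that is_extracted() can return its source directory.

link('/opt/conda/envs/myenv',  # target environment prefix (placeholder)
     'example-1.0-0',          # extracted dist name (placeholder)
     linktype=LINK_HARD,
     shortcuts=False)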
Example 4
def download(url,
             dst_path,
             session=None,
             md5=None,
             urlstxt=False,
             retries=None):
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url,
                               stream=True,
                               proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407:  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url,
                                dst_path,
                                session=session,
                                md5=md5,
                                urlstxt=urlstxt,
                                retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here.  We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url,
                                dst_path,
                                session=session,
                                md5=md5,
                                urlstxt=urlstxt,
                                retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                more = True
                while more:
                    # Use resp.raw so that requests doesn't decode gz files
                    chunk = resp.raw.read(2**14)
                    if not chunk:
                        more = False
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries:  # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url,
                                dst_path,
                                session=session,
                                md5=md5,
                                urlstxt=urlstxt,
                                retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url,
                                dst_path,
                                session=session,
                                md5=md5,
                                urlstxt=urlstxt,
                                retries=retries - 1)
            raise RuntimeError(
                "MD5 sums mismatch for download: %s (%s != %s)" %
                (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
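Unlike Example 2, this variant reads resp.raw directly so that requests does not transparently decode gzip-encoded payloads, tracks progress with resp.raw.tell(), and retries on connection resets (errno 104) and MD5 mismatches. A hedged invocation, with placeholder values:

download('https://repo.example.org/pkgs/example-1.0.tar.bz2',  # placeholder
         '/tmp/pkgs/example-1.0.tar.bz2',
         md5='0123456789abcdef0123456789abcdef',  # placeholder digest
         retries=3)  # overrides the module-level RETRIES default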
Example 5
def test(m, move_broken=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    '''

    with Locked(cc.root_dir):

        # remove from package cache
        rm_pkgs_cache(m.dist())

        tmp_dir = join(config.croot, 'test-tmp_dir')
        rm_rf(tmp_dir)
        if on_win:
            time.sleep(1)  # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
        os.makedirs(tmp_dir)
        create_files(tmp_dir, m)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())
        if on_win:
            if isdir(config.build_prefix):
                move_to_trash(config.build_prefix, '')
            if isdir(config.test_prefix):
                move_to_trash(config.test_prefix, '')
        else:
            rm_rf(config.build_prefix)
            rm_rf(config.test_prefix)

        get_build_metadata(m)
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            #  this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver()]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver()]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver()]

        create_env(config.test_prefix, specs)

        env = dict(os.environ)
        env.update(environ.get_dict(m, prefix=config.test_prefix))

        # prepend bin (or Scripts) directory
        env = prepend_bin_path(env, config.test_prefix, prepend_prefix=True)

        if sys.platform == 'win32':
            env['PATH'] = config.test_prefix + os.pathsep + env['PATH']
        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')
        env['PREFIX'] = config.test_prefix

        # Python 2 on Windows requires that env variables be strings, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        if py_files:
            try:
                subprocess.check_call([config.test_python, '-s',
                                    join(tmp_dir, 'run_test.py')],
                                    env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)

        if pl_files:
            try:
                subprocess.check_call([config.test_perl,
                                    join(tmp_dir, 'run_test.pl')],
                                    env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)

        if lua_files:
            try:
                subprocess.check_call([config.test_lua,
                                    join(tmp_dir, 'run_test.lua')],
                                    env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m, move_broken=move_broken)

        if shell_files:
            if sys.platform == 'win32':
                test_file = join(tmp_dir, 'run_test.bat')
                cmd = [os.environ['COMSPEC'], '/c', 'call', test_file]
                try:
                    subprocess.check_call(cmd, env=env, cwd=tmp_dir)
                except subprocess.CalledProcessError:
                    tests_failed(m, move_broken=move_broken)
            else:
                test_file = join(tmp_dir, 'run_test.sh')
                # TODO: Run the test/commands here instead of in run_test.py
                cmd = [shell_path, '-x', '-e', test_file]
                try:
                    subprocess.check_call(cmd, env=env, cwd=tmp_dir)
                except subprocess.CalledProcessError:
                    tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
Example 6
def rm_extracted(pkgs_dir, dist):
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
Example 7
def link(pkgs_dir,
         prefix,
         dist,
         linktype=LINK_HARD,
         index=None,
         target_prefix=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    if target_prefix is None:
        target_prefix = prefix
    index = index or {}
    log.debug(
        'pkgs_dir=%r, prefix=%r, target_prefix=%r, dist=%r, linktype=%r' %
        (pkgs_dir, prefix, target_prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix)
            and name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(prefix, dist, 'pre-link', target_prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    noarch = False
    # If the distribution is noarch, it will contain a `link.json` file in
    # the info_dir
    with open(join(info_dir, 'index.json'), 'r') as fh:
        index_data = json.loads(fh.read())
    if 'noarch' in index_data:
        noarch = index_data['noarch']
    elif 'noarch_python' in index_data:
        # `noarch_python` has been deprecated.
        if index_data['noarch_python'] is True:
            noarch = 'python'

    if noarch == 'python':
        if on_win:
            raise ValueError('Windows is not supported.')

        # Initialize so a missing link.json (or one without a 'noarch' key)
        # leaves an empty dict rather than an undefined name at the
        # entry_points check below.
        noarch_json = {}
        link_json = join(info_dir, 'link.json')
        if exists(link_json):
            with open(link_json, 'r') as fh:
                link_data = json.loads(fh.read())
            if 'noarch' in link_data:
                noarch_json = link_data['noarch']

        target_py_version = get_python_version(prefix)
        target_python_short_path = join('bin',
                                        'python{}'.format(target_py_version))
        target_site_packages = join('lib',
                                    'python{}'.format(target_py_version),
                                    'site-packages')

    # A list of the files, including pyc files and entrypoints, that will be
    # added to the metadata.
    all_files = []

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)

            if noarch == 'python':
                noarch_f = get_python_noarch_target_path(
                    f, target_site_packages)
                dst = join(prefix, noarch_f)
                all_files.append(noarch_f)
            # Non-noarch packages do not need special handling of the
            # site-packages
            else:
                dst = join(prefix, f)
                all_files.append(f)

            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error(
                    'failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                    (src, dst, lt, e))

        # noarch package specific installation steps
        if noarch == 'python':
            # Create entrypoints
            if 'entry_points' in noarch_json:
                for entry_point in noarch_json['entry_points']:

                    command, module, func = parse_entry_point_def(entry_point)
                    entry_point_file = create_python_entry_point(
                        join(prefix, 'bin', command),
                        join(prefix, target_python_short_path), module, func)
                    all_files.append(entry_point_file)

            # Compile pyc files
            for f in all_files:
                if f.endswith('.py'):
                    py_path = join(prefix, f)
                    pyc_filepath = compile_pyc(
                        join(prefix, target_python_short_path), py_path,
                        pyc_path(py_path, target_py_version))
                    if pyc_filepath.startswith(prefix):
                        all_files.append(pyc_filepath[len(prefix):])

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), target_prefix, placeholder,
                              mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link', target_prefix):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = all_files
        meta_dict['link'] = {
            'source': source_dir,
            'type': link_name_map.get(linktype)
        }
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
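The noarch detection at the top of this function can be exercised on its own. A hedged sketch with a hypothetical info directory:

import json
from os.path import join

info_dir = '/opt/conda/pkgs/example-1.0-0/info'  # hypothetical path
with open(join(info_dir, 'index.json')) as fh:
    index_data = json.load(fh)
noarch = index_data.get('noarch', False)
if not noarch and index_data.get('noarch_python') is True:
    # legacy flag, deprecated in favor of 'noarch'
    noarch = 'python'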
Example 8
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                try:
                    build.build(m, verbose=not args.quiet)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1].replace(' ', '-')
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
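A hedged sketch of driving this entry point programmatically: execute() only reads attributes off args, so an argparse.Namespace carrying the fields used above is sufficient; parser is not referenced in this example's body.

from argparse import Namespace

args = Namespace(recipe=['./my-recipe'],  # hypothetical recipe path
                 check=False, output=False, test=False,
                 source=False, notest=False, quiet=False,
                 binstar_upload=False)  # assumed: read by handle_binstar_upload
execute(args, parser=None)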
Example 9
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407: # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here. We have to just check if it looks
            # like 407. See https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e):  # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)
Example 10
    def run(self):
        # Make sure the metadata has the conda attributes, even if the
        # distclass isn't CondaDistribution. We primarily do this to simplify
        # the code below.

        metadata = self.distribution.metadata

        for attr in CondaDistribution.conda_attrs:
            if not hasattr(metadata, attr):
                setattr(metadata, attr,
                    CondaDistribution.conda_attrs[attr])

        # The command line takes precedence
        if self.buildnum is not None:
            metadata.conda_buildnum = self.buildnum

        with Locked(config.croot):
            d = defaultdict(dict)
            # PyPI allows uppercase letters but conda does not, so we fix the
            # name here.
            d['package']['name'] = metadata.name.lower()
            d['package']['version'] = metadata.version
            d['build']['number'] = metadata.conda_buildnum

            # MetaData does the auto stuff if the build string is None
            d['build']['string'] = metadata.conda_buildstr

            d['build']['binary_relocation'] = metadata.conda_binary_relocation
            d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
            d['build']['features'] = metadata.conda_features
            d['build']['track_features'] = metadata.conda_track_features

            # XXX: I'm not really sure if it is correct to combine requires
            # and install_requires
            d['requirements']['run'] = d['requirements']['build'] = \
                [spec_from_line(i) for i in
                    (metadata.requires or []) +
                    (getattr(self.distribution, 'install_requires', []) or
                        [])] + ['python']
            if hasattr(self.distribution, 'tests_require'):
                # A lot of packages use extras_require['test'], but
                # tests_require is the one that is officially supported by
                # setuptools.
                d['test']['requires'] = [spec_from_line(i) for i in
                    self.distribution.tests_require or []]

            d['about']['home'] = metadata.url
            # Don't worry about classifiers. This isn't skeleton pypi. We
            # don't need to make this work with random stuff in the wild. If
            # someone writes their setup.py wrong and this doesn't work, it's
            # their fault.
            d['about']['license'] = metadata.license
            d['about']['summary'] = metadata.description

            # This is similar logic from conda skeleton pypi
            entry_points = getattr(self.distribution, 'entry_points', [])
            if entry_points:
                if isinstance(entry_points, string_types):
                    # dedent: strip leading whitespace from every line
                    newstr = "\n".join(x.strip() for x in
                        entry_points.split('\n'))
                    c = configparser.ConfigParser()
                    entry_points = {}
                    try:
                        c.readfp(StringIO(newstr))
                    except Exception as err:
                        # This seems to be the best error here
                        raise DistutilsGetoptError(
                            "ERROR: entry-points not understood: " +
                            str(err) + "\nThe string was: " + newstr)
                    else:
                        for section in c.sections():
                            if section in ['console_scripts', 'gui_scripts']:
                                value = ['%s=%s' % (option, c.get(section, option))
                                         for option in c.options(section)]
                                entry_points[section] = value
                            else:
                                # Make sure setuptools is added as a dependency below
                                entry_points[section] = None

                if not isinstance(entry_points, dict):
                    raise DistutilsGetoptError(
                        "ERROR: Could not add entry points. They were:\n" +
                        str(entry_points))
                else:
                    cs = entry_points.get('console_scripts', [])
                    gs = entry_points.get('gui_scripts', [])
                    # We have *other* kinds of entry-points so we need
                    # setuptools at run-time
                    if not cs and not gs and len(entry_points) > 1:
                        d['requirements']['run'].append('setuptools')
                        d['requirements']['build'].append('setuptools')
                    entry_list = cs + gs
                    if gs and conda.config.platform == 'osx':
                        d['build']['osx_is_app'] = True
                    if len(cs + gs) != 0:
                        d['build']['entry_points'] = entry_list
                        if metadata.conda_command_tests is True:
                            d['test']['commands'] = list(map(unicode, pypi.make_entry_tests(entry_list)))

            if 'setuptools' in d['requirements']['run']:
                d['build']['preserve_egg_dir'] = True

            if metadata.conda_import_tests:
                if metadata.conda_import_tests is True:
                    d['test']['imports'] = ((self.distribution.packages or [])
                                            + (self.distribution.py_modules or []))
                else:
                    d['test']['imports'] = metadata.conda_import_tests

            if (metadata.conda_command_tests and
                    not isinstance(metadata.conda_command_tests, bool)):
                d['test']['commands'] = list(map(unicode, metadata.conda_command_tests))

            d = dict(d)
            m = MetaData.fromdict(d)
            # Shouldn't fail, but do you really trust the code above?
            m.check_fields()
            build.build(m, post=False)
            # Do the install
            if not PY3:
                # Command is an old-style class in Python 2
                install.run(self)
            else:
                super().run()
            build.build(m, post=True)
            build.test(m)
            if self.binstar_upload:
                class args:
                    binstar_upload = self.binstar_upload
                handle_binstar_upload(build.bldpkg_path(m), args)
            else:
                no_upload_message = """\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m)
                print(no_upload_message)
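The entry-point normalization above can be reproduced in isolation. A hedged Python 3 sketch (the snippet uses the older readfp() alias; read_string() is the modern equivalent):

import configparser

raw = '''
[console_scripts]
mytool = mypkg.cli:main
'''
newstr = "\n".join(x.strip() for x in raw.split('\n'))  # dedent every line
c = configparser.ConfigParser()
c.read_string(newstr)
specs = ['%s=%s' % (option, c.get(section, option))
         for section in c.sections()
         if section in ('console_scripts', 'gui_scripts')
         for option in c.options(section)]
# specs == ['mytool=mypkg.cli:main']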
Example 11
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix)
            and name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        print('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))

    try:
        has_prefix_files = set(yield_lines(join(info_dir, 'has_prefix')))
    except IOError:
        has_prefix_files = set()

    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error(
                    'failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                    (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            update_prefix(join(prefix, f), prefix)

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            # when the post-link step fails, we don't write any package
            # metadata and return here.  This way the package is not
            # considered installed.
            return

        create_meta(
            prefix, dist, info_dir, {
                'url': read_url(pkgs_dir, dist),
                'files': files,
                'link': {
                    'source': source_dir,
                    'type': link_name_map.get(linktype)
                },
            })
Example 12
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
    }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if len(str(version)) != 2:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10.0, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                channel_urls = args.channel or ()
                try:
                    build.build(m,
                                verbose=not args.quiet,
                                post=post,
                                channel_urls=channel_urls,
                                override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith(
                            'No packages found') or error_str.startswith(
                                'Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m,
                               verbose=not args.quiet,
                               channel_urls=channel_urls,
                               override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Example 13
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda.builder.build as build
    import conda.builder.source as source
    from conda.builder.config import config, croot
    from conda.builder.metadata import MetaData

    check_external()

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                # See if it's a spec and the directory is in conda-recipes
                recipe_dir = join(config.root_dir, 'conda-recipes', arg)
                if not isdir(recipe_dir):
                    # if --use-pypi and recipe_dir is a spec
                    # try to create the skeleton
                    if args.pypi:
                        from conda.from_pypi import create_recipe
                        try:
                            recipe_dir = create_recipe(arg)
                        except Exception:
                            # fall back to treating the argument as a path
                            recipe_dir = abspath(arg)
                    if not isdir(recipe_dir):
                        sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, pypi=args.pypi)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m, pypi=args.pypi)
                if not args.notest:
                    build.test(m, pypi=args.pypi)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
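Examples 13, 14 and 18 all normalize the recipe argument the same way: a recipe tarball is unpacked into a temporary directory that must be cleaned up afterwards, while anything else is taken as a directory path. That shared branch, extracted into a helper (a sketch; the "ignoring non-recipe file" case is simplified away):

import tarfile
import tempfile
from os.path import abspath, isfile

def normalize_recipe_arg(arg):
    # Returns (recipe_dir, need_cleanup), mirroring the branch above.
    if isfile(arg) and arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
        recipe_dir = tempfile.mkdtemp()
        with tarfile.open(arg, 'r:*') as t:
            t.extractall(path=recipe_dir)
        return recipe_dir, True
    return abspath(arg), False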
Example #14
def build_wheel(recipe,
                versions_combis={
                    "python": None,
                    "numpy": None
                },
                conda_channel_urls=(),
                conda_override_channels=(),
                upload=[],
                wheel_dir="./build"):
    import sys
    import shutil
    import tarfile
    import tempfile
    from locale import getpreferredencoding
    from os import environ as os_environ
    from os.path import abspath, isdir, isfile

    from conda.compat import PY3
    from conda.lock import Locked
    from conda_build.config import config
    from conda_build.metadata import MetaData

    import conda_build_wheel.build_wheel as build

    # Assumed module-level names: all_versions (a {lang: [versions]} mapping)
    # and the conda_build exceptions module used in the YAML error handler.

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
    }
    for lang in ['python', 'numpy']:
        versions = versions_combis[lang]
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                print("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                versions_combis[lang] = [str(ver)]
                build_wheel(recipe,
                            versions_combis,
                            conda_channel_urls=conda_channel_urls,
                            conda_override_channels=conda_override_channels,
                            upload=upload,
                            wheel_dir=wheel_dir)
                # This is necessary to make all combinations build.
                versions_combis[lang] = versions
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if len(str(version)) not in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    with Locked(config.croot):
        # Don't use byte literals for paths in Python 2
        if not PY3:
            recipe = recipe.decode(getpreferredencoding() or 'utf-8')
        if isfile(recipe):
            if recipe.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(recipe, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % recipe)
                return
        else:
            recipe_dir = abspath(recipe)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
            if m.get_value('build/noarch_python'):
                config.noarch = True
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)
        m.check_fields()

        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                  "configuration." % m.dist())
            return
        build.build(m,
                    channel_urls=conda_channel_urls,
                    override_channels=conda_override_channels,
                    wheel_dir=wheel_dir)

        if need_cleanup:
            shutil.rmtree(recipe_dir)
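The version-combination recursion above pins one language at a time, recursing until every list holds a single version, then builds that fully pinned combination. The same expansion without the in-place mutate-and-restore dance (a sketch that copies the mapping instead):

def expand_versions(combis, build_single):
    # combis maps a language to a list of versions, e.g.
    # {'python': ['2.7', '3.5'], 'numpy': ['1.11']}.
    for lang, versions in combis.items():
        if versions and len(versions) > 1:
            for ver in versions:
                pinned = dict(combis)
                pinned[lang] = [str(ver)]
                expand_versions(pinned, build_single)
            return
    build_single(combis)

Copying sidesteps the restore step at the end of the loop in build_wheel, at the cost of a few small dicts per combination.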
Example #15
def test(m, move_broken=True, activate=True, debug=False):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    '''

    with Locked(cc.root_dir):

        # remove from package cache
        rm_pkgs_cache(m.dist())

        tmp_dir = join(config.croot, 'test-tmp_dir')
        rm_rf(tmp_dir)
        if on_win:
            # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
            time.sleep(1)
        os.makedirs(tmp_dir)
        create_files(tmp_dir, m)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())
        if on_win:
            if isdir(config.build_prefix):
                move_to_trash(config.build_prefix, '')
            if isdir(config.test_prefix):
                move_to_trash(config.test_prefix, '')
        else:
            rm_rf(config.build_prefix)
            rm_rf(config.test_prefix)

        get_build_metadata(m)
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            #  this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver()]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver()]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver()]

        create_env(config.test_prefix, specs, debug=debug)
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m, prefix=config.test_prefix))

        if not activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env,
                                   config.test_prefix,
                                   prepend_prefix=True)

            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')
        env['PREFIX'] = config.test_prefix

        # Python 2 Windows requires that envs variables be string, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir,
                           "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if activate:
                source = "call " if on_win else "source "
                ext = ".bat" if on_win else ""
                tf.write("{source}activate{ext} {test_env}\n".format(
                    source=source, ext=ext, test_env=config.test_prefix))
                tf.write("if errorlevel 1 exit 1\n") if on_win else None

            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))
                tf.write("if errorlevel 1 exit 1\n") if on_win else None

            if pl_files:
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")

            if lua_files:
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))
                if on_win:
                    tf.write("if errorlevel 1 exit 1\n")

            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                    tf.write("if errorlevel 1 exit 1\n")
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))
        if on_win:
            cmd = [env["COMSPEC"], "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
Example #16
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix) and
              name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        create_meta(prefix, dist, info_dir, meta_dict)
Example #17
def rm_fetched(pkgs_dir, dist):
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
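rm_fetched is the smallest example on this page of the pattern every function here shares: take the Locked context manager on the directory being mutated, then do all filesystem work inside the with block. The pattern in isolation (remove_fetched_file is a hypothetical name):

import os
from conda.lock import Locked

def remove_fetched_file(pkgs_dir, filename):
    # Hold the directory lock so concurrent conda/conda-build processes
    # cannot race on the package cache while we delete.
    with Locked(pkgs_dir):
        path = os.path.join(pkgs_dir, filename)
        if os.path.exists(path):
            os.remove(path)  # stand-in for rm_rf(path) above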
Example #18
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    import warnings
    from collections import deque
    from glob import glob
    from locale import getpreferredencoding
    from os import environ as os_environ, makedirs
    from os.path import abspath, exists, isdir, isfile

    from conda.compat import PY3
    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    # on_win, all_versions, delete_trash, update_index, check_external,
    # handle_binstar_upload and the exceptions module are module-level
    # names in the surrounding file.

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except Exception:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn(
                "Cannot delete trash; some c extension has been "
                "imported that is hard-linked by files in the trash. "
                "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
    }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if len(str(version)) not in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))

                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)

                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m,
                                post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if (error_str.startswith('No packages found') or
                            error_str.startswith('Could not find some')):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [
                            line for line in error_str.splitlines()
                            if line.strip().startswith('- ')
                        ]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [
                            pkg for pkg in pkgs
                            if pkg.split(' ')[0] not in skip_names
                        ]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(error_str)
                                    print("Missing dependency {0}, but found "
                                          "recipe directory, so building "
                                          "{0} first".format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)

                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
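The "Hint:" branch above scrapes candidate package names out of a resolver conflict message. Its parsing step, isolated (conflict_packages is a hypothetical helper):

def conflict_packages(error_str, skip_names=('python', 'r')):
    # Keep the '- <pkg> <spec>' bullet lines and drop interpreter pins,
    # which only indicate the other package needs rebuilding.
    lines = [line.strip() for line in error_str.splitlines()
             if line.strip().startswith('- ')]
    pkgs = [line.lstrip('- ').split(' ')[0] for line in lines]
    return [pkg for pkg in pkgs if pkg not in skip_names]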
Example #19
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error(
                    'failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                    (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {
            'source': source_dir,
            'type': link_name_map.get(linktype)
        }
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
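Both link() variants downgrade the requested link type to a copy whenever sharing the inode would be unsafe. That decision, pulled out on its own (choose_link_type is a hypothetical helper; the constants mirror the LINK_* values assumed above):

from os.path import islink

LINK_HARD, LINK_SOFT, LINK_COPY = 1, 2, 3

def choose_link_type(src, f, requested, has_prefix_files, no_link):
    # Files that need prefix rewriting must not be hard-linked, or
    # update_prefix() would also edit the shared copy in the package
    # cache; no_link entries and symlinks are copied as well.
    if f in has_prefix_files or f in no_link or islink(src):
        return LINK_COPY
    return requested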
Example #20
def fetch_pkg(info, dst_dir=None):
    '''
    fetch a package given by `info` and store it into `dst_dir`
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)
    pp = path + '.part'

    with Locked(dst_dir):
        for x in range(retries):  # 'retries' is a module-level constant in the surrounding file
            try:
                fi = connectionhandled_urlopen(url)
            except IOError:
                log.debug("attempt %d failed at urlopen" % x)
                continue
            if fi is None:
                log.debug("could not fetch (urlopen returned None)")
                continue
            n = 0
            h = hashlib.new('md5')
            getLogger('fetch.start').info((fn, info['size']))
            need_retry = False
            try:
                fo = open(pp, 'wb')
            except IOError:
                raise RuntimeError(
                    "Could not open %r for writing.  "
                    "Permissions problem or missing directory?" % pp)
            while True:
                try:
                    chunk = fi.read(16384)
                except IOError:
                    need_retry = True
                    break
                if not chunk:
                    break
                try:
                    fo.write(chunk)
                except IOError:
                    raise RuntimeError("Failed to write to %r." % pp)
                h.update(chunk)
                n += len(chunk)
                getLogger('fetch.update').info(n)

            fo.close()
            if need_retry:
                fi.close()
                continue

            fi.close()
            getLogger('fetch.stop').info(None)
            if h.hexdigest() != info['md5']:
                raise RuntimeError(
                    "MD5 sums mismatch for download: %s (%s != %s)" %
                    (fn, h.hexdigest(), info['md5']))
            try:
                os.rename(pp, path)
            except OSError:
                raise RuntimeError("Could not rename %r to %r." % (pp, path))
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
            return

    raise RuntimeError("Could not locate '%s'" % url)