Example #1
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]  # strip the '.tar.bz2' extension
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files.  However, we want root to be
            # the owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        add_cached_package(pkgs_dir, url, overwrite=True)
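
Example #1 relies on a retry helper, exp_backoff_fn, that none of these snippets define. It wraps os.rename because the rename can fail transiently (notably on Windows, when another process briefly holds a file open). A minimal sketch of such a wrapper, assuming it retries on OSError with exponential backoff; the real conda helper may differ in signature and tuning:

import time

def exp_backoff_fn(fn, *args):
    # Call fn(*args), retrying with exponentially growing pauses when it
    # raises OSError.  Retry count and delays are assumptions.
    max_tries = 5
    for attempt in range(max_tries):
        try:
            return fn(*args)
        except OSError:
            if attempt == max_tries - 1:
                raise
            time.sleep(0.1 * (2 ** attempt))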
Example #2
def rm_fetched(dist):
    """
    Checks whether the requested package is in the cache; if so, removes both
    the package itself and its extracted contents.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
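
Neither package_cache() nor fname_table_ nor url_path is defined in these snippets. From the way Examples #1, #2, and #10 index them, the assumed shapes are roughly as follows (a sketch; the dist name and paths are illustrative, not conda's actual definitions):

# One record per dist: the URLs it was fetched from, the cached tarball
# path(s), and the extracted directory(ies).
package_cache_ = {
    'numpy-1.9.2-py27_0': {
        'urls': ['https://repo.continuum.io/pkgs/free/linux-64/'
                 'numpy-1.9.2-py27_0.tar.bz2'],
        'files': ['/opt/conda/pkgs/numpy-1.9.2-py27_0.tar.bz2'],
        'dirs': ['/opt/conda/pkgs/numpy-1.9.2-py27_0'],
    },
}

# Reverse index from a tarball path (and its url_path() form) back to the
# owning dist, so removals like Example #2 can drop both entries.
fname_table_ = {}

def package_cache():
    # Accessor for the module-level cache; the real function presumably
    # populates it lazily by scanning the packages directories.
    return package_cache_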
Example #3
def test_lock_passes(tmpdir):
    with Locked(tmpdir.strpath) as lock:
        path = os.path.basename(lock.lock_path)
        assert tmpdir.join(path).exists() and tmpdir.join(path).isdir()

    # lock should clean up after itself
    assert not tmpdir.join(path).exists()
    assert not tmpdir.exists()
Example #4
def test_lock_locks(tmpdir):
    with Locked(tmpdir.strpath) as lock1:
        path = os.path.basename(lock1.lock_path)
        assert tmpdir.join(path).exists() and tmpdir.join(path).isdir()

        with pytest.raises(LockError) as execinfo:
            with Locked(tmpdir.strpath, retries=1) as lock2:
                assert False  # this should never happen
            assert lock2.lock_path == lock1.lock_path
        assert "LOCKERROR" in str(execinfo)
        assert "conda is already doing something" in str(execinfo)

        assert tmpdir.join(path).exists() and tmpdir.join(path).isdir()

    # lock should clean up after itself
    assert not tmpdir.join(path).exists()
    assert not tmpdir.exists()
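
Examples #3 and #4 pin down Locked's observable contract: the lock is a directory created inside the locked path, acquisition retries a bounded number of times, a competing lock raises LockError whose message contains "LOCKERROR" and "conda is already doing something", and the lock directory is removed on exit. A minimal sketch consistent with most of that contract follows; it is not conda's actual conda.lock code, and it omits whatever cleanup makes the final "assert not tmpdir.exists()" pass in the tests above.

import os
import time

class LockError(Exception):
    pass

class Locked(object):
    # Directory-based lock: os.makedirs is atomic, so whichever process
    # creates the lock directory first wins.  Lock naming, message text,
    # and retry timing here are assumptions.
    def __init__(self, path, retries=10):
        self.path = path
        self.retries = retries
        self.lock_path = os.path.join(path, '.conda_lock')

    def __enter__(self):
        for attempt in range(self.retries):
            try:
                os.makedirs(self.lock_path)
                return self
            except OSError:
                time.sleep(0.05 * (2 ** attempt))
        raise LockError("LOCKERROR: conda is already doing something "
                        "in %s" % self.path)

    def __exit__(self, exc_type, exc_value, traceback):
        os.rmdir(self.lock_path)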
Example #5
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage.  We assume
    that the compressed package is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
Example #6
def build(meta, test=True):
    """Build (and optionally test) a recipe directory."""
    with Locked(conda_build.config.croot):
        meta.check_fields()
        if os.path.exists(conda_build.config.config.info_dir):
            shutil.rmtree(conda_build.config.config.info_dir)
        build_module.build(meta, verbose=False, post=None)
        if test:
            build_module.test(meta, verbose=False)
        return meta
Example #7
def execute(args, parser):
    import conda_build.pypi as pypi
    import conda_build.cpan as cpan
    from conda.lock import Locked
    from conda_build.config import croot

    with Locked(croot):
        if args.repo == "pypi":
            pypi.main(args, parser)
        elif args.repo == "cpan":
            cpan.main(args, parser)
Example #8
File: render.py  Project: ovz/conda-build
def render_recipe(recipe_path, no_download_source, verbose, dirty=False):
    if not isdir(config.croot):
        os.makedirs(config.croot)
    with Locked(config.croot):
        if not dirty:
            if sys.platform == 'win32':
                if isdir(source.WORK_DIR):
                    move_to_trash(source.WORK_DIR, '')
            else:
                rm_rf(source.WORK_DIR)

            assert not isdir(source.WORK_DIR), (
                "Failed to clean work directory.  Please close open"
                " programs/terminals/folders and try again.")

        arg = recipe_path
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                return
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)

        m, need_download, need_reparse_in_env = parse_or_try_download(
            m,
            no_download_source=no_download_source,
            verbose=verbose,
            dirty=dirty)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

    return m, need_download, need_reparse_in_env
Example #9
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment; it is an error if the
    package does not exist in the prefix.
    '''
    if (on_win and abspath(prefix) == abspath(sys.prefix)
            and name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)

        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_to_trash(prefix, f)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        pass

        # remove the meta-file last
        os.unlink(meta_path)

        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
Example #10
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        rec['dirs'] = []
    else:
        del package_cache_[dist]
Example #11
def execute(args, parser):
    import conda_build.pypi as pypi
    import conda_build.cpan as cpan
    import conda_build.cran as cran
    from conda.lock import Locked
    from conda_build.config import config

    if not args.repo:
        parser.print_help()
    with Locked(config.croot):
        if args.repo == "pypi":
            pypi.main(args, parser)
        elif args.repo == "cpan":
            cpan.main(args, parser)
        elif args.repo == 'cran':
            cran.main(args, parser)
Example #12
def unlink(prefix, dist):
    '''
    Remove a package from the specified environment; it is an error if the
    package does not exist in the prefix.
    '''
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')

        meta_path = join(prefix, 'conda-meta', dist + '.json')
        with open(meta_path) as fi:
            meta = json.load(fi)

        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()

        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            try:
                os.unlink(dst)
            except OSError:  # file might not exist
                log.debug("could not remove file: '%s'" % dst)
                if on_win and os.path.exists(join(prefix, f)):
                    try:
                        log.debug("moving to trash")
                        move_path_to_trash(dst)
                    except ImportError:
                        # This shouldn't be an issue in the installer anyway
                        #   but it can potentially happen with importing conda.config
                        log.debug(
                            "cannot import conda.config; probably not an issue"
                        )

        # remove the meta-file last
        os.unlink(meta_path)

        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)

        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
Example #13
def build(meta, test=True):
    """Build (and optionally test) a recipe directory."""
    with Locked(conda_build.config.croot):
        # Check whether verbose is a module-level variable in
        # this version of conda_build and set it properly if it is.
        if 'verbose' in dir(build_module):
            build_module.verbose = False
            kwd = {}
        else:
            kwd = {'verbose': False}
        meta.check_fields()
        if os.path.exists(conda_build.config.config.info_dir):
            shutil.rmtree(conda_build.config.config.info_dir)
        build_module.build(meta, post=None, **kwd)
        if test:
            build_module.test(meta, **kwd)
        return meta
Example #14
def extract(pkgs_dir, dist):
    """
    Extract a package, i.e. make a package available for linkage.  We assume
    that the compressed package is located in the packages directory.
    """
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        t = tarfile.open(path + '.tar.bz2')
        t.extractall(path=path)
        t.close()
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files.  However, we want root to be
            # the owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
Example #15
def execute(args, parser):
    import conda_build.pypi as pypi
    import conda_build.cpan as cpan
    import conda_build.cran as cran
    import conda_build.luarocks as luarocks
    from conda.lock import Locked
    from conda_build.config import config

    if not args.repo:
        parser.print_help()
    if not os.path.isdir(config.croot):
        os.makedirs(config.croot)
    with Locked(config.croot):
        if args.repo == "pypi":
            pypi.main(args, parser)
        elif args.repo == "cpan":
            cpan.main(args, parser)
        elif args.repo == 'cran':
            cran.main(args, parser)
        elif args.repo == 'luarocks':
            luarocks.main(args, parser)
Example #16
def render_recipe(recipe_path, no_download_source, verbose, dirty=False):
    with Locked(config.croot):
        arg = recipe_path
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                return
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)

        m = parse_or_try_download(m,
                                  no_download_source=no_download_source,
                                  verbose=verbose,
                                  dirty=dirty)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

    return m
Example #17
def rm_fetched(pkgs_dir, dist):
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist + '.tar.bz2')
        rm_rf(path)
Example #18
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix)
            and name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        print('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))

    try:
        has_prefix_files = set(yield_lines(join(info_dir, 'has_prefix')))
    except IOError:
        has_prefix_files = set()

    if linktype == LINK_SOFT:
        try:
            no_softlink = set(yield_lines(join(info_dir, 'no_softlink')))
        except IOError:
            no_softlink = set()

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            if (f in has_prefix_files
                    or (linktype == LINK_SOFT and f in no_softlink)
                    or islink(src)):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error(
                    'failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                    (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            update_prefix(join(prefix, f), prefix)

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            # when the post-link step fails, we don't write any package
            # metadata and return here.  This way the package is not
            # considered installed.
            return

        create_meta(
            prefix, dist, info_dir, {
                'url': read_url(pkgs_dir, dist),
                'files': files,
                'link': {
                    'source': source_dir,
                    'type': link_name_map.get(linktype)
                },
            })
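
_link and the LINK_HARD / LINK_SOFT / LINK_COPY constants used in Examples #18, #20, #21, and #24 live elsewhere in conda's install module. A plausible sketch of the dispatch (the numeric values and the copy behavior are assumptions; only the names appear above):

import os
import shutil

LINK_HARD, LINK_SOFT, LINK_COPY = 1, 2, 3
link_name_map = {LINK_HARD: 'hard-link',
                 LINK_SOFT: 'soft-link',
                 LINK_COPY: 'copy'}

def _link(src, dst, linktype=LINK_HARD):
    # Materialize src at dst using the requested link type.  Callers fall
    # back to LINK_COPY for files that embed the prefix, are listed in
    # no_link/no_softlink, or are themselves symlinks.
    if linktype == LINK_HARD:
        os.link(src, dst)
    elif linktype == LINK_SOFT:
        os.symlink(src, dst)
    elif linktype == LINK_COPY:
        shutil.copy2(src, dst)  # copies contents plus mode and timestamps
    else:
        raise ValueError('unknown link type: %r' % linktype)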
Example #19
    def run(self):
        # Make sure the metadata has the conda attributes, even if the
        # distclass isn't CondaDistribution. We primarily do this to simplify
        # the code below.

        metadata = self.distribution.metadata

        for attr in CondaDistribution.conda_attrs:
            if not hasattr(metadata, attr):
                setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

        # The command line takes precedence
        if self.buildnum is not None:
            metadata.conda_buildnum = self.buildnum

        with Locked(config.croot):
            d = defaultdict(dict)
            # Needs to be lowercase
            d['package']['name'] = metadata.name
            d['package']['version'] = metadata.version
            d['build']['number'] = metadata.conda_buildnum

            # MetaData does the auto stuff if the build string is None
            d['build']['string'] = metadata.conda_buildstr

            d['build']['binary_relocation'] = metadata.conda_binary_relocation
            d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
            d['build']['features'] = metadata.conda_features
            d['build']['track_features'] = metadata.conda_track_features

            # XXX: I'm not really sure if it is correct to combine requires
            # and install_requires
            d['requirements']['run'] = d['requirements']['build'] = \
                [spec_from_line(i) for i in
                    (metadata.requires or []) +
                    (getattr(self.distribution, 'install_requires', []) or
                        [])] + ['python']
            if hasattr(self.distribution, 'tests_require'):
                # A lot of packages use extras_require['test'], but
                # tests_require is the one that is officially supported by
                # setuptools.
                d['test']['requires'] = [
                    spec_from_line(i)
                    for i in self.distribution.tests_require or []
                ]

            d['about']['home'] = metadata.url
            # Don't worry about classifiers. This isn't skeleton pypi. We
            # don't need to make this work with random stuff in the wild. If
            # someone writes their setup.py wrong and this doesn't work, it's
            # their fault.
            d['about']['license'] = metadata.license
            d['about']['summary'] = self.distribution.description

            # This is similar logic from conda skeleton pypi
            entry_points = getattr(self.distribution, 'entry_points', [])
            if entry_points:
                if isinstance(entry_points, string_types):
                    # makes sure it is left-shifted
                    newstr = "\n".join(x.strip()
                                       for x in entry_points.split('\n'))
                    c = configparser.ConfigParser()
                    entry_points = {}
                    try:
                        c.readfp(StringIO(newstr))
                    except Exception as err:
                        # This seems to be the best error here
                        raise DistutilsGetoptError(
                            "ERROR: entry-points not understood: " + str(err) +
                            "\nThe string was:\n" + newstr)
                    else:
                        # iterate over the parsed entry points via the
                        # ConfigParser c, not the conda_build config module
                        for section in c.sections():
                            if section in ['console_scripts', 'gui_scripts']:
                                value = [
                                    '%s=%s' %
                                    (option, c.get(section, option))
                                    for option in c.options(section)
                                ]
                                entry_points[section] = value
                            else:
                                # Make sure setuptools is added as a dependency below
                                entry_points[section] = None

                if not isinstance(entry_points, dict):
                    raise DistutilsGetoptError(
                        "ERROR: Could not add entry points. They were:\n" +
                        entry_points)
                else:
                    cs = entry_points.get('console_scripts', [])
                    gs = entry_points.get('gui_scripts', [])
                    # We have *other* kinds of entry-points so we need
                    # setuptools at run-time
                    if not cs and not gs and len(entry_points) > 1:
                        d['requirements']['run'].append('setuptools')
                        d['requirements']['build'].append('setuptools')
                    entry_list = cs + gs
                    if gs and conda.config.platform == 'osx':
                        d['build']['osx_is_app'] = True
                    if len(cs + gs) != 0:
                        d['build']['entry_points'] = entry_list
                        if metadata.conda_command_tests is True:
                            d['test']['commands'] = list(
                                map(unicode,
                                    pypi.make_entry_tests(entry_list)))

            if 'setuptools' in d['requirements']['run']:
                d['build']['preserve_egg_dir'] = True

            if metadata.conda_import_tests:
                if metadata.conda_import_tests is True:
                    d['test']['imports'] = (
                        (self.distribution.packages or []) +
                        (self.distribution.py_modules or []))
                else:
                    d['test']['imports'] = metadata.conda_import_tests

            if (metadata.conda_command_tests
                    and not isinstance(metadata.conda_command_tests, bool)):
                d['test']['commands'] = list(
                    map(unicode, metadata.conda_command_tests))

            d = dict(d)
            m = MetaData.fromdict(d)
            # Shouldn't fail, but do you really trust the code above?
            m.check_fields()
            build.build(m, post=False)
            # Do the install
            if not PY3:
                # Command is an old-style class in Python 2
                install.run(self)
            else:
                super().run()
            build.build(m, post=True)
            build.test(m)
            if self.binstar_upload:

                class args:
                    binstar_upload = self.binstar_upload

                handle_binstar_upload(build.bldpkg_path(m), args)
            else:
                no_upload_message = """\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % build.bldpkg_path(m)
                print(no_upload_message)
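
Example #19 is the run() method of conda-build's bdist_conda distutils command. A project opts in from its setup.py by swapping in the Conda distribution class; a sketch of such a setup.py, assuming conda-build has installed bdist_conda into distutils.command (the project metadata here is illustrative):

from distutils.core import setup
import distutils.command.bdist_conda

setup(
    name="mypackage",
    version="1.0",
    distclass=distutils.command.bdist_conda.CondaDistribution,
    conda_buildnum=1,         # consumed as metadata.conda_buildnum above
    conda_import_tests=True,  # becomes d['test']['imports']
)

Building then goes through the usual command machinery: python setup.py bdist_conda.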
Example #20
def link(prefix, dist, linktype=LINK_HARD, index=None, shortcuts=False):
    """
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                sys.exit('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                         (src, dst, lt, e))

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        # make sure that the child environment behaves like the parent,
        #    wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()

        if shortcuts:
            mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            alt_files_path = join(prefix, 'conda-meta',
                                  dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {
            'source': source_dir,
            'type': link_name_map.get(linktype)
        }
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
Example #21
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if (on_win and abspath(prefix) == abspath(sys.prefix)
            and name_dist(dist) in win_ignore_root):
        # on Windows we have the file lock problem, so don't allow
        # linking or unlinking some packages
        log.warn('Ignored: %s' % dist)
        return

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error(
                    'failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                    (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        meta_dict['files'] = files
        meta_dict['link'] = {
            'source': source_dir,
            'type': link_name_map.get(linktype)
        }
        create_meta(prefix, dist, info_dir, meta_dict)
Example #22
def test(m, move_broken=True, activate=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    '''

    with Locked(cc.root_dir):

        # remove from package cache
        rm_pkgs_cache(m.dist())

        tmp_dir = join(config.croot, 'test-tmp_dir')
        rm_rf(tmp_dir)
        if on_win:
            # wait for rm_rf(tmp_dir) to finish before recreating tmp_dir
            time.sleep(1)
        os.makedirs(tmp_dir)
        create_files(tmp_dir, m)
        # Make Perl or Python-specific test files
        if m.name().startswith('perl-'):
            pl_files = create_pl_files(tmp_dir, m)
            py_files = False
            lua_files = False
        else:
            py_files = create_py_files(tmp_dir, m)
            pl_files = False
            lua_files = False
        shell_files = create_shell_files(tmp_dir, m)
        if not (py_files or shell_files or pl_files or lua_files):
            print("Nothing to test for:", m.dist())
            return

        print("TEST START:", m.dist())
        if on_win:
            if isdir(config.build_prefix):
                move_to_trash(config.build_prefix, '')
            if isdir(config.test_prefix):
                move_to_trash(config.test_prefix, '')
        else:
            rm_rf(config.build_prefix)
            rm_rf(config.test_prefix)

        get_build_metadata(m)
        specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

        # add packages listed in the run environment and test/requires
        specs.extend(ms.spec for ms in m.ms_depends('run'))
        specs += m.get_value('test/requires', [])

        if py_files:
            # as the tests are run by python, ensure that python is installed.
            # (If they already provided python as a run or test requirement,
            #  this won't hurt anything.)
            specs += ['python %s*' % environ.get_py_ver()]
        if pl_files:
            # as the tests are run by perl, we need to specify it
            specs += ['perl %s*' % environ.get_perl_ver()]
        if lua_files:
            # not sure how this shakes out
            specs += ['lua %s*' % environ.get_lua_ver()]

        create_env(config.test_prefix, specs)

        env = dict(os.environ)
        env.update(environ.get_dict(m, prefix=config.test_prefix))

        if not activate:
            # prepend bin (or Scripts) directory
            env = prepend_bin_path(env,
                                   config.test_prefix,
                                   prepend_prefix=True)

            if on_win:
                env['PATH'] = config.test_prefix + os.pathsep + env['PATH']

        for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL', 'CONDA_LUA':
            env[varname] = str(getattr(config, varname) or '')
        env['PREFIX'] = config.test_prefix

        # Python 2 on Windows requires environment variables to be str, not unicode
        env = {str(key): str(value) for key, value in env.items()}
        suffix = "bat" if on_win else "sh"
        test_script = join(tmp_dir,
                           "conda_test_runner.{suffix}".format(suffix=suffix))

        with open(test_script, 'w') as tf:
            if activate:
                tf.write("{source}activate _test\n".format(
                    source="" if on_win else "source "))
            if py_files:
                tf.write("{python} -s {test_file}\n".format(
                    python=config.test_python,
                    test_file=join(tmp_dir, 'run_test.py')))

            if pl_files:
                tf.write("{perl} {test_file}\n".format(
                    perl=config.test_perl,
                    test_file=join(tmp_dir, 'run_test.pl')))

            if lua_files:
                tf.write("{lua} {test_file}\n".format(
                    lua=config.test_lua,
                    test_file=join(tmp_dir, 'run_test.lua')))

            if shell_files:
                test_file = join(tmp_dir, 'run_test.' + suffix)
                if on_win:
                    tf.write("call {test_file}\n".format(test_file=test_file))
                else:
                    # TODO: Run the test/commands here instead of in run_test.py
                    tf.write("{shell_path} -x -e {test_file}\n".format(
                        shell_path=shell_path, test_file=test_file))
        if on_win:
            cmd = [env["COMSPEC"], "/d", "/c", test_script]
        else:
            cmd = [shell_path, '-x', '-e', test_script]
        try:
            subprocess.check_call(cmd, env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m, move_broken=move_broken)

    print("TEST END:", m.dist())
Example #23
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
        }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
            channel_urls=channel_urls,
            override_channels=args.override_channels)

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                    channel_urls=channel_urls, override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Example #24
File: install.py  Project: vmuriart/conda
def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None):
    '''
    Set up a package in a specified (environment) prefix.  We assume that
    the package has been extracted (using extract() above).
    '''
    index = index or {}
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))

    source_dir = join(pkgs_dir, dist)
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)

    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)

    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                try:
                    os.unlink(dst)
                except OSError:
                    log.error('failed to unlink: %r' % dst)
                    if on_win:
                        try:
                            move_path_to_trash(dst)
                        except ImportError:
                            # This shouldn't be an issue in the installer anyway
                            pass

            lt = linktype
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))

        if name_dist(dist) == '_cache':
            return

        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))

        mk_menus(prefix, files, remove=False)

        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)

        # Make sure the script stays standalone for the installer
        try:
            from conda.config import remove_binstar_tokens
        except ImportError:
            # There won't be any binstar tokens in the installer anyway
            def remove_binstar_tokens(url):
                return url

        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(pkgs_dir, dist)
        if meta_dict['url']:
            meta_dict['url'] = remove_binstar_tokens(meta_dict['url'])
        try:
            alt_files_path = join(prefix, 'conda-meta', dist + '.files')
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'channel' in meta_dict:
            meta_dict['channel'] = remove_binstar_tokens(meta_dict['channel'])
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)

        create_meta(prefix, dist, info_dir, meta_dict)
Example #25
def rm_extracted(pkgs_dir, dist):
    with Locked(pkgs_dir):
        path = join(pkgs_dir, dist)
        rm_rf(path)
Example #26
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18, 19]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Example #27
File: fetch.py  Project: moonbot/conda
def download(url, dst_path, session=None, md5=None, urlstxt=False,
             retries=None):
    pp = dst_path + '.part'
    dst_dir = dirname(dst_path)
    session = session or CondaSession()

    if not config.ssl_verify:
        try:
            from requests.packages.urllib3.connectionpool import InsecureRequestWarning
        except ImportError:
            pass
        else:
            warnings.simplefilter('ignore', InsecureRequestWarning)

    if retries is None:
        retries = RETRIES
    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407: # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives
            # this error and http gives the above error. Also, there is no
            # status_code attribute here.  We have to just check if it looks
            # like 407.
            # See: https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e): # Proxy Authentication Required
                handle_proxy_407(url, session)
                # try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    # update n with actual bytes read
                    n = resp.raw.tell()
                    if size and 0 <= n <= size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            if e.errno == 104 and retries: # Connection reset by peer
                # try again
                log.debug("%s, trying again" % e)
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("Could not open %r for writing (%s)." % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            if retries:
                # try again
                log.debug("MD5 sums mismatch for download: %s (%s != %s), "
                          "trying again" % (url, h.hexdigest(), md5))
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt, retries=retries - 1)
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" %
                               (pp, dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
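
Example #27's retries default comes from a module-level constant that the snippet doesn't show; something like the following, with the value assumed:

RETRIES = 3  # default retry budget for download(); actual value assumed

A typical call then fetches a tarball into the package cache and appends its URL to urls.txt (URL and paths illustrative):

download('https://repo.continuum.io/pkgs/free/linux-64/numpy-1.9.2-py27_0.tar.bz2',
         '/opt/conda/pkgs/numpy-1.9.2-py27_0.tar.bz2',
         md5=None, urlstxt=True)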
Example #28
def download(url, dst_path, session=None, md5=None, urlstxt=False):
    pp = dst_path + '.part'
    dst_dir = os.path.split(dst_path)[0]
    session = session or CondaSession()

    with Locked(dst_dir):
        try:
            resp = session.get(url, stream=True, proxies=session.proxies,
                               verify=config.ssl_verify)
            resp.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if e.response.status_code == 407: # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                                urlstxt=urlstxt)
            msg = "HTTPError: %s: %s\n" % (e, url)
            log.debug(msg)
            raise RuntimeError(msg)

        except requests.exceptions.ConnectionError as e:
            # requests isn't so nice here. For whatever reason, https gives this
            # error and http gives the above error. Also, there is no status_code
            # attribute here. We have to just check if it looks like 407.  See
            # https://github.com/kennethreitz/requests/issues/2061.
            if "407" in str(e): # Proxy Authentication Required
                handle_proxy_407(url, session)
                # Try again
                return download(url, dst_path, session=session, md5=md5,
                    urlstxt=urlstxt)
            msg = "Connection error: %s: %s\n" % (e, url)
            stderrlog.info('Could not connect to %s\n' % url)
            log.debug(msg)
            raise RuntimeError(msg)

        except IOError as e:
            raise RuntimeError("Could not open '%s': %s" % (url, e))

        size = resp.headers.get('Content-Length')
        if size:
            size = int(size)
            fn = basename(dst_path)
            getLogger('fetch.start').info((fn[:14], size))

        n = 0
        if md5:
            h = hashlib.new('md5')
        try:
            with open(pp, 'wb') as fo:
                for chunk in resp.iter_content(2**14):
                    try:
                        fo.write(chunk)
                    except IOError:
                        raise RuntimeError("Failed to write to %r." % pp)
                    if md5:
                        h.update(chunk)
                    n += len(chunk)
                    if size:
                        getLogger('fetch.update').info(n)
        except IOError as e:
            raise RuntimeError("Could not open %r for writing (%s).  "
                "Permissions problem or missing directory?" % (pp, e))

        if size:
            getLogger('fetch.stop').info(None)

        if md5 and h.hexdigest() != md5:
            raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)"
                               % (url, h.hexdigest(), md5))

        try:
            os.rename(pp, dst_path)
        except OSError as e:
            raise RuntimeError("Could not rename %r to %r: %r" % (pp,
                dst_path, e))

        if urlstxt:
            try:
                with open(join(dst_dir, 'urls.txt'), 'a') as fa:
                    fa.write('%s\n' % url)
            except IOError:
                pass
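
The `download` helper above follows a stream-verify-rename pattern: it writes into a `.part` file under a directory lock, retries once on HTTP 407 (proxy authentication) via `handle_proxy_407`, checks an optional MD5 sum, and only renames the partial file into place once everything has succeeded. Below is a minimal, self-contained sketch of the same pattern with the locking and proxy handling stripped out; the name `fetch_with_md5` and the use of a bare `requests` session are illustrative, not part of conda's API.

import hashlib
import os

import requests


def fetch_with_md5(url, dst_path, md5=None, chunk_size=2**14):
    """Stream url into dst_path via a .part file, verifying an MD5 sum."""
    part = dst_path + '.part'
    resp = requests.get(url, stream=True)
    resp.raise_for_status()
    h = hashlib.new('md5')
    with open(part, 'wb') as fo:
        for chunk in resp.iter_content(chunk_size):
            fo.write(chunk)
            h.update(chunk)
    if md5 and h.hexdigest() != md5:
        os.remove(part)
        raise RuntimeError('MD5 mismatch for %s' % url)
    # Atomic on POSIX when part and dst_path share a filesystem, which is
    # why the partial file is created right next to the final path.
    os.rename(part, dst_path)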
Example #29
def fetch_pkg(info, dst_dir=None):
    '''
    Fetch the package described by `info` and store it in `dst_dir`.
    '''
    if dst_dir is None:
        dst_dir = config.pkgs_dirs[0]

    fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info
    url = info['channel'] + fn
    log.debug("url=%r" % url)
    path = join(dst_dir, fn)
    pp = path + '.part'

    with Locked(dst_dir):
        for x in range(retries):
            try:
                fi = connectionhandled_urlopen(url)
            except IOError:
                log.debug("attempt %d failed at urlopen" % x)
                continue
            if fi is None:
                log.debug("could not fetch (urlopen returned None)")
                continue
            n = 0
            h = hashlib.new('md5')
            getLogger('fetch.start').info((fn, info['size']))
            need_retry = False
            try:
                fo = open(pp, 'wb')
            except IOError:
                raise RuntimeError(
                    "Could not open %r for writing.  "
                    "Permissions problem or missing directory?" % pp)
            while True:
                try:
                    chunk = fi.read(16384)
                except IOError:
                    need_retry = True
                    break
                if not chunk:
                    break
                try:
                    fo.write(chunk)
                except IOError:
                    raise RuntimeError("Failed to write to %r." % pp)
                h.update(chunk)
                n += len(chunk)
                getLogger('fetch.update').info(n)

            fo.close()
            if need_retry:
                continue

            fi.close()
            getLogger('fetch.stop').info(None)
            if h.hexdigest() != info['md5']:
                raise RuntimeError(
                    "MD5 sums mismatch for download: %s (%s != %s)" %
                    (fn, h.hexdigest(), info['md5']))
            try:
                os.rename(pp, path)
            except OSError:
                raise RuntimeError("Could not rename %r to %r." % (pp, path))
            return

    raise RuntimeError("Could not locate '%s'" % url)
Example #30
def build(m,
          post=None,
          include_recipe=True,
          keep_old_work=False,
          need_source_download=True,
          verbose=True,
          dirty=False,
          activate=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param post: None means run the whole build; True means run only the
        post-build step; False means stop just before the post-build step.
    :type post: bool or None
    :param keep_old_work: Keep any previous work directory.
    :type keep_old_work: bool
    :param need_source_download: If rendering failed to download the source
        (due to missing tools), retry here after the build env is populated.
    :type need_source_download: bool
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):

        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [
                name for name in os.listdir(source.WORK_DIR)
                if os.path.isdir(os.path.join(source.WORK_DIR, name))
            ]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s" %
                      (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub),
                                old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            # Display the name only; the version number may be missing here
            # because it can depend on information from the source.
            create_env(config.build_prefix,
                       [ms.spec for ms in m.ms_depends('build')])

            if need_source_download:
                # Execute any commands fetching the source (e.g. git) in the
                # _build environment. This makes it possible to provide source
                # fetchers (e.g. git, hg, svn) as build dependencies.
                m, need_source_download = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    verbose=verbose,
                    dirty=dirty)
                assert not need_source_download, "Source download failed.  Please investigate."

            if m.name() in [
                    i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
            ]:
                print("%s is installed as a build dependency. Removing." %
                      m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()],
                                              index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            assert isdir(source.WORK_DIR)
            src_dir = source.get_dir()
            contents = os.listdir(src_dir)
            if contents:
                print("source tree in:", src_dir)
            else:
                print("no source")

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    sys.exit(
                        "Error: Glob %s from always_include_files does not match any files"
                        % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if on_win:
                build_file = join(m.path, 'bld.bat')
                if script:
                    build_file = join(source.get_dir(), 'bld.bat')
                    with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, dirty=dirty, activate=activate)
            else:
                build_file = join(m.path, 'build.sh')

                # There is no sense in trying to run an empty build script.
                if isfile(build_file) or script:
                    env = environ.get_dict(m, dirty=dirty)
                    work_file = join(source.get_dir(), 'conda_build.sh')
                    if script:
                        with open(work_file, 'w') as bf:
                            bf.write(script)
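                    # If activation was requested, rewrite the script below so
                    # it starts with "source activate <build_prefix>" and thus
                    # runs inside the build environment.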
                    if activate:
                        if isfile(build_file):
                            data = open(build_file).read()
                        else:
                            data = open(work_file).read()
                        with open(work_file, 'w') as bf:
                            bf.write("source activate {build_prefix}\n".format(
                                build_prefix=config.build_prefix))
                            bf.write(data)
                    else:
                        if not isfile(work_file):
                            shutil.copy(build_file, work_file)
                    os.chmod(work_file, 0o766)

                    if isfile(work_file):
                        cmd = [shell_path, '-x', '-e', work_file]

                        _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(
                             m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(
                    indent(
                        """Error: Untracked file(s) %s found in conda-meta directory.
    This error usually comes from using conda in the build script.  Avoid doing this, as it
    can lead to packages that include their dependencies.""" %
                        (tuple(f for f in files2 - files1 if config.meta_dir in
                               join(config.build_prefix, f)), )))
            post_build(m, sorted(files2 - files1))
            create_info_files(m,
                              sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # We don't care about empty files, so push them to the back:
                # st_size == 0 is falsy and falls through to 100000.
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # Files under info/ get 0 (sorted first); everything else gets 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # Add files in this order: a) anything under the info directory,
            # b) everything else by increasing size, so the small manifest and
            # json files can be read without decompressing possibly large
            # binary or data files.
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s" %
                  (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print(
                        "Not restoring old source directory %s over new build's version"
                        % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub),
                                source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
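
One detail worth calling out in the archive step of the build above is the `order` key: members under `info/` are written first, then everything else in increasing size, so tools can read the small metadata files near the front of the `.tar.bz2` without decompressing large payloads. A quick standalone illustration, using made-up file names and sizes:

import os

# Hypothetical member list with fake sizes, just to show how the key sorts.
sizes = {'info/files': 50, 'info/index.json': 200,
         'bin/tool': 90000, 'lib/libbig.so': 5000000}

def order(f):
    info_order = int(os.path.dirname(f) != 'info')  # info/* sorts first
    return info_order, sizes[f]

print(sorted(sizes, key=order))
# -> ['info/files', 'info/index.json', 'bin/tool', 'lib/libbig.so']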