Example #1
File: share.py  Project: rhs2132/conda
def clone_bundle(path, prefix):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix`.
    The directory that `path` is located in should be a temp directory or
    some other directory OUTSIDE /opt/anaconda (this function handles
    copying of the file if necessary for you).  After calling this
    function, the original file (at `path`) may be removed.
    """
    assert not abspath(path).startswith(abspath(config.root_dir))
    assert not isdir(prefix)
    fn = basename(path)
    assert re.match(r'share-[0-9a-f]{40}-\d+\.tar\.bz2$', fn), fn
    dist = fn[:-8]

    if not install.is_extracted(config.pkgs_dir, dist):
        shutil.copyfile(path, join(config.pkgs_dir, dist + '.tar.bz2'))
        plan.execute_plan(['%s %s' % (plan.EXTRACT, dist)])
    assert install.is_extracted(config.pkgs_dir, dist)

    with open(join(config.pkgs_dir, dist, 'info', 'index.json')) as fi:
        meta = json.load(fi)

    # for backwards compatibility, use "requires" when "depends" is not there
    dists = ['-'.join(r.split())
             for r in meta.get('depends', meta.get('requires'))
             if not r.startswith('conda ')]
    dists.append(dist)

    actions = plan.ensure_linked_actions(dists, prefix)
    index = get_index()
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)

    os.unlink(join(prefix, 'conda-meta', dist + '.json'))
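
A minimal usage sketch for the variant above, assuming the function and its conda imports are already in scope; the paths are hypothetical. The bundle filename has to match the share-<40-hex-digit-hash>-<n>.tar.bz2 pattern asserted at the top, the file has to live outside config.root_dir, and the target prefix must not exist yet.

# Hypothetical paths, shown only to illustrate the expected inputs.
clone_bundle('/tmp/share-0123456789abcdef0123456789abcdef01234567-0.tar.bz2',
             '/opt/anaconda/envs/cloned-env')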
Example #2
def clone_bundle(path, prefix=None, bundle_name=None):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=True)

    bundle_dir = abspath(
        expanduser('~/bundles/%s' % (bundle_name or meta.get('bundle_name'))))
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(bundle_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)

    t.close()
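
A hedged usage sketch for this tarfile-based variant; the archive path, prefix, and bundle name are invented. BDP is a path-prefix constant defined elsewhere in the module (not shown here): members under it are routed to ~/bundles/<bundle_name> instead of into the environment prefix.

# Hypothetical call: unpacks the environment payload into `prefix`, links the
# dependencies listed in info/index.json, and drops BDP-prefixed members
# into ~/bundles/my-bundle.
clone_bundle('/tmp/my-bundle.tar.bz2',
             prefix='/opt/anaconda/envs/from-bundle',
             bundle_name='my-bundle')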
Example #3
def old_clone_bundle(path, prefix):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix`.
    The directory that `path` is located in should be a temp directory or
    some other directory OUTSIDE /opt/anaconda (this function handles
    copying of the file if necessary for you).  After calling this
    function, the original file (at `path`) may be removed.
    """
    assert not abspath(path).startswith(abspath(config.root_dir))
    assert not isdir(prefix)
    fn = basename(path)
    assert re.match(r'share-[0-9a-f]{40}-\d+\.tar\.bz2$', fn), fn
    dist = fn[:-8]

    pkgs_dir = config.pkgs_dirs[0]
    if not install.is_extracted(pkgs_dir, dist):
        shutil.copyfile(path, join(pkgs_dir, dist + '.tar.bz2'))
        inst.execute_instructions([(inst.EXTRACT, (dist,))])
    assert install.is_extracted(pkgs_dir, dist)

    with open(join(pkgs_dir, dist, 'info', 'index.json')) as fi:
        meta = json.load(fi)

    # for backwards compatibility, use "requires" when "depends" is not there
    dists = ['-'.join(r.split())
             for r in meta.get('depends', meta.get('requires', []))
             if not r.startswith('conda ')]
    dists.append(dist)

    actions = plan.ensure_linked_actions(dists, prefix)
    index = get_index()
    plan.execute_actions(actions, index, verbose=False)

    os.unlink(join(prefix, 'conda-meta', dist + '.json'))
Example #4
File: bundle.py  Project: AnddyWang/conda
def clone_bundle(path, prefix=None, bundle_name=None):
    """
    Clone the bundle (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))

    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=True)

    bundle_dir = abspath(expanduser('~/bundles/%s' %
                                    (bundle_name or meta.get('bundle_name'))))
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(bundle_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)

    t.close()
Example #5
    def test_ensure_linked_actions_all_linked(self, load_meta):
        dists = [Dist("test-88"), Dist("test-spec-42"), Dist("test-spec2-8.0.0.0.1-9")]
        prefix = "some/prefix"

        link_actions = plan.ensure_linked_actions(dists, prefix)

        expected_output = defaultdict(list)
        expected_output["PREFIX"] = prefix
        expected_output["op_order"] = ('CHECK_FETCH', 'RM_FETCHED', 'FETCH', 'CHECK_EXTRACT',
                                       'RM_EXTRACTED', 'EXTRACT', 'UNLINK', 'LINK',
                                       'SYMLINK_CONDA')
        self.assertEquals(link_actions, expected_output)
Example #6
    def test_ensure_linked_actions_all_linked(self, load_meta):
        dists = [Dist("test-88"), Dist("test-spec-42"), Dist("test-spec2-8.0.0.0.1-9")]
        prefix = "some/prefix"

        link_actions = plan.ensure_linked_actions(dists, prefix)

        expected_output = defaultdict(list)
        expected_output["PREFIX"] = prefix
        expected_output["op_order"] = ('CHECK_FETCH', 'RM_FETCHED', 'FETCH', 'CHECK_EXTRACT',
                                       'RM_EXTRACTED', 'EXTRACT', 'UNLINK', 'LINK',
                                       'SYMLINK_CONDA')
        self.assertEquals(link_actions, expected_output)
Example #7
def clone_env(prefix1, prefix2, verbose=True, quiet=False, index=None):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)
    dists = discard_conda(install.linked(prefix1))

    if verbose:
        print('Packages: %d' % len(dists))
        print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)
        if islink(src):
            os.symlink(os.readlink(src), dst)
            continue

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError:  # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    if index is None:
        index = get_index()

    r = Resolve(index)
    sorted_dists = r.dependency_sort(dists)

    actions = ensure_linked_actions(sorted_dists, prefix2)
    execute_actions(actions, index=index, verbose=not quiet)

    return actions, untracked_files
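
A usage sketch for clone_env with made-up environment paths. Untracked files are copied with a textual prefix substitution, so text files embedding the old prefix are rewritten to point at the new one, while binary files are copied unchanged; the function returns the executed action plan and the list of untracked files.

# Hypothetical prefixes, shown only to illustrate the call and return values.
actions, untracked = clone_env('/opt/anaconda/envs/work',
                               '/opt/anaconda/envs/work-copy',
                               verbose=True, quiet=False)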
Example #8
def clone_analysispackage(path,
                          prefix=None,
                          analysispackage_name=None,
                          data_path=None):
    """
    Clone the analysispackage (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))
    if prefix and isdir(prefix):
        print("erasing old environment at %s" % prefix)
        shutil.rmtree(prefix)
    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=False)
    if not data_path:
        analysispackage_dir = abspath(
            expanduser(
                '~/analysispackages/%s' %
                (analysispackage_name or meta.get('analysispackage_name'))))
    else:
        analysispackage_dir = data_path
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(analysispackage_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)
    with open(join(data_path, ".metadata.json"), "w+") as f:
        json.dump({'env': prefix}, f)
    t.close()
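
A usage sketch for clone_analysispackage; every path below is invented. Note that the final write of .metadata.json uses data_path unconditionally, so in this version data_path effectively needs to be supplied even though it defaults to None.

# Hypothetical call: unpacks the environment into `prefix`, links its
# dependencies, and places the analysis-package payload under `data_path`.
clone_analysispackage('/tmp/report.tar.bz2',
                      prefix='/opt/anaconda/envs/report-env',
                      analysispackage_name='report',
                      data_path='/data/analysispackages/report')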
Example #9
def clone_analysispackage(path, prefix=None, analysispackage_name=None, data_path=None):
    """
    Clone the analysispackage (located at `path`) by creating a new environment at
    `prefix` (unless prefix is None or the prefix directory already exists)
    """
    try:
        t = tarfile.open(path, 'r:*')
        meta = json.load(t.extractfile('info/index.json'))
    except tarfile.ReadError:
        raise RuntimeError('bad tar archive: %s' % path)
    except KeyError:
        raise RuntimeError("no archive 'info/index.json' in: %s" % (path))
    if prefix and isdir(prefix):
        print ("erasing old environment at %s" % prefix)
        shutil.rmtree(prefix)
    if prefix and not isdir(prefix):
        for m in t.getmembers():
            if m.path.startswith((BDP, 'info/')):
                continue
            t.extract(m, path=prefix)
        dists = discard_conda('-'.join(s.split())
                              for s in meta.get('depends', []))
        actions = plan.ensure_linked_actions(dists, prefix)
        index = get_index()
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=False)
    if not data_path:
        analysispackage_dir = abspath(expanduser('~/analysispackages/%s' %
                                        (analysispackage_name or meta.get('analysispackage_name'))))
    else:
        analysispackage_dir = data_path
    for m in t.getmembers():
        if m.path.startswith(BDP):
            targetpath = join(analysispackage_dir, m.path[len(BDP):])
            t._extract_member(m, targetpath)
    with open(join(data_path, ".metadata.json"), "w+") as f:
        json.dump({'env' : prefix}, f)
    t.close()
Example #10
File: misc.py  Project: nvdnkpr/conda
def clone_env(prefix1, prefix2, verbose=True):
    """
    clone existing prefix1 into new prefix2
    """
    untracked_files = untracked(prefix1)
    dists = install.linked(prefix1)
    print('Packages: %d' % len(dists))
    print('Files: %d' % len(untracked_files))

    for f in untracked_files:
        src = join(prefix1, f)
        dst = join(prefix2, f)
        dst_dir = dirname(dst)
        if islink(dst_dir) or isfile(dst_dir):
            os.unlink(dst_dir)
        if not isdir(dst_dir):
            os.makedirs(dst_dir)

        try:
            with open(src, 'rb') as fi:
                data = fi.read()
        except IOError:
            continue

        try:
            s = data.decode('utf-8')
            s = s.replace(prefix1, prefix2)
            data = s.encode('utf-8')
        except UnicodeDecodeError: # data is binary
            pass

        with open(dst, 'wb') as fo:
            fo.write(data)
        shutil.copystat(src, dst)

    actions = ensure_linked_actions(dists, prefix2)
    execute_actions(actions, index=get_index(), verbose=verbose)