def test_six_py27_osx_no_binary_unmanageable():
    """An unmanageable egg (no file listing in its metadata) should dump the
    basic pypi record fields with empty files and paths payloads."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/six-1.11.0-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "python 2.7.*"
        ],
        "fn": "six-1.11.0-py2.7.egg-info",
        "name": "six",
        "package_type": "virtual_python_egg_unmanageable",
        "subdir": "pypi",
        "version": "1.11.0"
    }
    assert dumped == expected
    assert not file_entries
    assert not record.paths_data.paths
def test_six_py27_osx_no_binary_unmanageable():
    """An unmanageable egg (no file listing in its metadata) should dump the
    basic pypi record fields with empty files and paths payloads."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/six-1.11.0-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "python 2.7.*"
        ],
        "fn": "six-1.11.0-py2.7.egg-info",
        "name": "six",
        "package_type": "virtual_python_egg_unmanageable",
        "subdir": "pypi",
        "version": "1.11.0"
    }
    assert dumped == expected
    assert not file_entries
    assert not record.paths_data.paths
def test_scrapy_py27_osx_no_binary():
    """A manageable egg install of Scrapy: validate the dumped record fields,
    the rendered dependency list, and a sampling of files/paths entries."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/Scrapy-1.5.1-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "cssselect >=0.9",
            "lxml",
            "parsel >=1.1",
            "pydispatcher >=2.0.5",
            "pyopenssl",
            "python 2.7.*",
            "queuelib",
            "service-identity",
            "six >=1.5.2",
            "twisted >=13.1.0",
            "w3lib >=1.17.0"
        ],
        "fn": "Scrapy-1.5.1-py2.7.egg-info",
        "name": "scrapy",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "1.5.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("2.7")
    scrapy_bin = "../bin/scrapy" if on_win else "bin/scrapy"
    assert site_packages + "/scrapy/contrib/downloadermiddleware/decompression.py" in file_entries
    assert site_packages + "/scrapy/downloadermiddlewares/decompression.pyc" in file_entries
    assert scrapy_bin in file_entries
    # Egg-info records carry no hashes, only path + link type.
    for expected_path in (
        site_packages + "/scrapy/contrib/downloadermiddleware/decompression.py",
        site_packages + "/scrapy/contrib/downloadermiddleware/decompression.pyc",
        scrapy_bin,
    ):
        assert {"_path": expected_path, "path_type": "hardlink"} in pdata["paths"]
def test_scrapy_py27_osx_no_binary():
    """A manageable egg install of Scrapy: validate the dumped record fields,
    the rendered dependency list, and a sampling of files/paths entries."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/Scrapy-1.5.1-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "cssselect >=0.9",
            "lxml",
            "parsel >=1.1",
            "pydispatcher >=2.0.5",
            "pyopenssl",
            "python 2.7.*",
            "queuelib",
            "service-identity",
            "six >=1.5.2",
            "twisted >=13.1.0",
            "w3lib >=1.17.0"
        ],
        "fn": "Scrapy-1.5.1-py2.7.egg-info",
        "name": "scrapy",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "1.5.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("2.7")
    scrapy_bin = "../bin/scrapy" if on_win else "bin/scrapy"
    assert site_packages + "/scrapy/contrib/downloadermiddleware/decompression.py" in file_entries
    assert site_packages + "/scrapy/downloadermiddlewares/decompression.pyc" in file_entries
    assert scrapy_bin in file_entries
    # Egg-info records carry no hashes, only path + link type.
    for expected_path in (
        site_packages + "/scrapy/contrib/downloadermiddleware/decompression.py",
        site_packages + "/scrapy/contrib/downloadermiddleware/decompression.pyc",
        scrapy_bin,
    ):
        assert {"_path": expected_path, "path_type": "hardlink"} in pdata["paths"]
def test_twilio_py36_osx_whl():
    """Wheel-installed twilio: verify record fields plus one hashed source
    entry and one hash-less (.pyc) path entry."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/twilio-6.16.1.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    pprint(record.depends)
    pprint(record.constrains)
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "pyjwt >=1.4.2",
            "pysocks",
            "python 3.6.*",
            "pytz",
            "requests >=2.0.0",
            "six"
        ],
        "fn": "twilio-6.16.1.dist-info",
        "name": "twilio",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "6.16.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("3.6")
    assert site_packages + "/twilio/compat.py" in file_entries
    assert site_packages + "/twilio/__pycache__/compat.cpython-36.pyc" in file_entries
    # Source files recorded in a wheel's RECORD include hash and size ...
    source_entry = {
        "_path": site_packages + "/twilio/compat.py",
        "path_type": "hardlink",
        "sha256": "sJ1t7CKvxpipiX5cyH1YwXTf3n_FsLf_taUhuCVsCwE",
        "size_in_bytes": 517
    }
    assert source_entry in pdata["paths"]
    # ... while compiled .pyc entries do not.
    pyc_entry = {
        "_path": site_packages + "/twilio/jwt/__pycache__/compat.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pyc_entry in pdata["paths"]
def test_twilio_py36_osx_whl():
    """Wheel-installed twilio: verify record fields plus one hashed source
    entry and one hash-less (.pyc) path entry."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/twilio-6.16.1.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    pprint(record.depends)
    pprint(record.constrains)
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "pyjwt >=1.4.2",
            "pysocks",
            "python 3.6.*",
            "pytz",
            "requests >=2.0.0",
            "six"
        ],
        "fn": "twilio-6.16.1.dist-info",
        "name": "twilio",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "6.16.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("3.6")
    assert site_packages + "/twilio/compat.py" in file_entries
    assert site_packages + "/twilio/__pycache__/compat.cpython-36.pyc" in file_entries
    # Source files recorded in a wheel's RECORD include hash and size ...
    source_entry = {
        "_path": site_packages + "/twilio/compat.py",
        "path_type": "hardlink",
        "sha256": "sJ1t7CKvxpipiX5cyH1YwXTf3n_FsLf_taUhuCVsCwE",
        "size_in_bytes": 517
    }
    assert source_entry in pdata["paths"]
    # ... while compiled .pyc entries do not.
    pyc_entry = {
        "_path": site_packages + "/twilio/jwt/__pycache__/compat.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pyc_entry in pdata["paths"]
def test_cherrypy_py36_osx_whl():
    """CherryPy wheel: depends/constrains are checked separately from the
    rest of the record because the depends list differs on Windows (pywin32)."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/CherryPy-17.2.0.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    dumped = json_load(json_dump(record.dump()))
    dumped.pop("files")
    dumped.pop("paths_data")
    print(json_dump(dumped))
    constrains = dumped.pop("constrains")
    depends = dumped.pop("depends")
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "fn": "CherryPy-17.2.0.dist-info",
        "name": "cherrypy",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "17.2.0"
    }
    assert dumped == expected
    assert constrains == [
        "jaraco-packaging >=3.2",
        # "pypiwin32 ==219",
        "pytest >=2.8",
        "python-memcached >=1.58",
        "routes >=2.3.1",
        "rst-linker >=1.9"
    ]
    common_depends = [
        "cheroot >=6.2.4",
        "more-itertools",
        "portend >=2.1.1",
        "python 3.6.*"
    ]
    if on_win:
        assert depends == common_depends + ["pywin32", "six >=1.11.0"]
    else:
        assert depends == common_depends + ["six >=1.11.0"]
def test_cherrypy_py36_osx_whl():
    """CherryPy wheel: depends/constrains are checked separately from the
    rest of the record because the depends list differs on Windows (pywin32)."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/CherryPy-17.2.0.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    dumped = json_load(json_dump(record.dump()))
    dumped.pop("files")
    dumped.pop("paths_data")
    print(json_dump(dumped))
    constrains = dumped.pop("constrains")
    depends = dumped.pop("depends")
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "fn": "CherryPy-17.2.0.dist-info",
        "name": "cherrypy",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "17.2.0"
    }
    assert dumped == expected
    assert constrains == [
        "jaraco-packaging >=3.2",
        # "pypiwin32 ==219",
        "pytest >=2.8",
        "python-memcached >=1.58",
        "routes >=2.3.1",
        "rst-linker >=1.9"
    ]
    common_depends = [
        "cheroot >=6.2.4",
        "more-itertools",
        "portend >=2.1.1",
        "python 3.6.*"
    ]
    if on_win:
        assert depends == common_depends + ["pywin32", "six >=1.11.0"]
    else:
        assert depends == common_depends + ["six >=1.11.0"]
def get_installed_jsonfile(prefix):
    """Serialize the packages installed in *prefix* to a temporary JSON file.

    Also refreshes the module-level ``installed_pkg_recs`` cache as a side
    effect. Returns ``(file, records)``; the caller owns the file and must
    close and unlink it (it is created with ``delete=False``).
    """
    global installed_pkg_recs
    installed_pkg_recs, output = get_installed_packages(prefix, show_channel_urls=True)
    serialized = json_dump(output)
    json_file = tempfile.NamedTemporaryFile("w", delete=False)
    json_file.write(serialized)
    # Flush so the solver can read the file by name while it stays open here.
    json_file.flush()
    return json_file, installed_pkg_recs
def test_pyjwt_py36_osx_whl():
    """Wheel-installed PyJWT: extras become constrains; the entry-point script
    is recorded with hash/size while .pyc entries carry neither."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/PyJWT-1.6.4.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [
            "cryptography >=1.4",
            "pytest <4,>3"
        ],
        "depends": [
            "python 3.6.*"
        ],
        "fn": "PyJWT-1.6.4.dist-info",
        "name": "pyjwt",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "1.6.4"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("3.6")
    pyjwt_bin = "../bin/pyjwt" if on_win else "bin/pyjwt"
    assert pyjwt_bin in file_entries
    assert site_packages + "/jwt/__pycache__/__init__.cpython-36.pyc" in file_entries
    script_entry = {
        "_path": pyjwt_bin,
        "path_type": "hardlink",
        "sha256": "wZET_24uZDEpsMdhAQ78Ass2k-76aQ59yPSE4DTE2To",
        "size_in_bytes": 260
    }
    assert script_entry in pdata["paths"]
    pyc_entry = {
        "_path": site_packages + "/jwt/contrib/__pycache__/__init__.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pyc_entry in pdata["paths"]
def test_pyjwt_py36_osx_whl():
    """Wheel-installed PyJWT: extras become constrains; the entry-point script
    is recorded with hash/size while .pyc entries carry neither."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/PyJWT-1.6.4.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [
            "cryptography >=1.4",
            "pytest <4,>3"
        ],
        "depends": [
            "python 3.6.*"
        ],
        "fn": "PyJWT-1.6.4.dist-info",
        "name": "pyjwt",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "1.6.4"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("3.6")
    pyjwt_bin = "../bin/pyjwt" if on_win else "bin/pyjwt"
    assert pyjwt_bin in file_entries
    assert site_packages + "/jwt/__pycache__/__init__.cpython-36.pyc" in file_entries
    script_entry = {
        "_path": pyjwt_bin,
        "path_type": "hardlink",
        "sha256": "wZET_24uZDEpsMdhAQ78Ass2k-76aQ59yPSE4DTE2To",
        "size_in_bytes": 260
    }
    assert script_entry in pdata["paths"]
    pyc_entry = {
        "_path": site_packages + "/jwt/contrib/__pycache__/__init__.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pyc_entry in pdata["paths"]
def test_twilio_py27_osx_no_binary():
    """Manageable egg install of twilio on py27: validate record fields and a
    sampling of file/paths entries (egg records carry no hashes)."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/twilio-6.16.1-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    pprint(record.depends)
    pprint(record.constrains)
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "pyjwt >=1.4.2",
            "python 2.7.*",
            "pytz",
            "requests >=2.0.0",
            "six"
        ],
        "fn": "twilio-6.16.1-py2.7.egg-info",
        "name": "twilio",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "6.16.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("2.7")
    assert site_packages + "/twilio/compat.py" in file_entries
    assert site_packages + "/twilio/compat.pyc" in file_entries
    source_entry = {
        "_path": site_packages + "/twilio/compat.py",
        "path_type": "hardlink"
    }
    assert source_entry in pdata["paths"]
    pyc_entry = {
        "_path": site_packages + "/twilio/jwt/compat.pyc",
        "path_type": "hardlink"
    }
    assert pyc_entry in pdata["paths"]
def test_twilio_py27_osx_no_binary():
    """Manageable egg install of twilio on py27: validate record fields and a
    sampling of file/paths entries (egg records carry no hashes)."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/twilio-6.16.1-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    pprint(record.depends)
    pprint(record.constrains)
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "pyjwt >=1.4.2",
            "python 2.7.*",
            "pytz",
            "requests >=2.0.0",
            "six"
        ],
        "fn": "twilio-6.16.1-py2.7.egg-info",
        "name": "twilio",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "6.16.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("2.7")
    assert site_packages + "/twilio/compat.py" in file_entries
    assert site_packages + "/twilio/compat.pyc" in file_entries
    source_entry = {
        "_path": site_packages + "/twilio/compat.py",
        "path_type": "hardlink"
    }
    assert source_entry in pdata["paths"]
    pyc_entry = {
        "_path": site_packages + "/twilio/jwt/compat.pyc",
        "path_type": "hardlink"
    }
    assert pyc_entry in pdata["paths"]
def test_pyjwt_py27_osx_no_binary():
    """Manageable egg install of PyJWT on py27: extras become constrains and
    the entry-point script plus compiled modules appear in files/paths."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/PyJWT-1.6.4-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [
            "cryptography >=1.4",
            "pytest <4,>3"
        ],
        "depends": [
            "python 2.7.*"
        ],
        "fn": "PyJWT-1.6.4-py2.7.egg-info",
        "name": "pyjwt",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "1.6.4"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("2.7")
    pyjwt_bin = "../bin/pyjwt" if on_win else "bin/pyjwt"
    assert pyjwt_bin in file_entries
    assert site_packages + "/jwt/__init__.pyc" in file_entries
    script_entry = {
        "_path": pyjwt_bin,
        "path_type": "hardlink"
    }
    assert script_entry in pdata["paths"]
    pyc_entry = {
        "_path": site_packages + "/jwt/contrib/__init__.pyc",
        "path_type": "hardlink"
    }
    assert pyc_entry in pdata["paths"]
def test_pyjwt_py27_osx_no_binary():
    """Manageable egg install of PyJWT on py27: extras become constrains and
    the entry-point script plus compiled modules appear in files/paths."""
    env_prefix = join(ENV_METADATA_DIR, "py27-osx-no-binary")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python2.7/site-packages/PyJWT-1.6.4-py2.7.egg-info/PKG-INFO"
    record = read_python_record(env_prefix, anchor, "2.7")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [
            "cryptography >=1.4",
            "pytest <4,>3"
        ],
        "depends": [
            "python 2.7.*"
        ],
        "fn": "PyJWT-1.6.4-py2.7.egg-info",
        "name": "pyjwt",
        "package_type": "virtual_python_egg_manageable",
        "subdir": "pypi",
        "version": "1.6.4"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("2.7")
    pyjwt_bin = "../bin/pyjwt" if on_win else "bin/pyjwt"
    assert pyjwt_bin in file_entries
    assert site_packages + "/jwt/__init__.pyc" in file_entries
    script_entry = {
        "_path": pyjwt_bin,
        "path_type": "hardlink"
    }
    assert script_entry in pdata["paths"]
    pyc_entry = {
        "_path": site_packages + "/jwt/contrib/__init__.pyc",
        "path_type": "hardlink"
    }
    assert pyc_entry in pdata["paths"]
def get_virtual_packages():
    """Dump conda's system ("virtual") packages to a temporary JSON file.

    Virtual packages describe system properties conda cannot manage (e.g.
    glibc); their names start with ``__`` and they exist nowhere on disk.
    If an incompatible version is needed, installation simply fails.
    Returns the open NamedTemporaryFile (delete=False); the caller owns it.
    """
    # Collect the virtual package records via conda's index helper.
    index = dict()
    _supplement_index_with_system(index)
    result = {"packages": {}}
    # Iterating the dict yields its keys, which are the package records here
    # (presumably PackageRecord objects -- the original code relied on this).
    for prec in list(index):
        entry = prec.dist_fields_dump()
        entry["depends"] = prec.depends
        entry["build"] = prec.build
        result["packages"][prec.fn] = entry
    json_file = tempfile.NamedTemporaryFile("w", delete=False)
    json_file.write(json_dump(result))
    # Flush so readers can open the file by name while it stays open here.
    json_file.flush()
    return json_file
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create

    Validates the target prefix, builds the channel index and the installed-
    package snapshot, resolves the requested specs with mamba's solver, and
    hands the resulting link/unlink plan to conda's transaction machinery.

    :param args: parsed argparse namespace for the sub-command
    :param parser: the argparse parser (unused here; kept for conda's CLI contract)
    :param command: one of 'install', 'update', 'create' -- selects behavior
    :raises CondaValueError: bad/missing specs, mixed spec+tarball input, etc.
    """
    context.validate_configuration()
    check_non_admin()

    newenv = bool(command == 'create')
    isupdate = bool(command == 'update')
    isinstall = bool(command == 'install')
    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    # context.__init__(argparse_args=args)

    prefix = context.target_prefix

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    args_packages = [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0]
                               for pkg in args_packages]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    # Explicit conda package tarballs bypass the solver entirely; they may
    # not be mixed with name/version specs.
    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages, prefix, verbose=not context.quiet)
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'], platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'], prefix=prefix)

    # Build (channel_url, repodata_cache_path, priority) triples for the
    # solver; explicitly-listed channels get descending priority, others 0.
    channel_json = []
    for x in index:
        if x.channel.name in index_args['channel_urls']:
            priority = (len(index_args['channel_urls'])
                        - index_args['channel_urls'].index(x.channel.name))
        else:
            priority = 0
        channel_json.append((str(x.channel), x.cache_path_json, priority))

    # Snapshot the currently-installed packages to a temp JSON file the
    # solver reads by name (hence delete=False + explicit cleanup below).
    installed_pkg_recs, output = get_installed_packages(prefix, show_channel_urls=True)
    installed_json_f = tempfile.NamedTemporaryFile('w', delete=False)
    installed_json_f.write(json_dump(output))
    installed_json_f.flush()

    specs = []
    if args.file:
        for fpath in args.file:
            try:
                specs.extend(specs_from_url(fpath, json=context.json))
            # BUGFIX: was `except Unicode:` -- an undefined name that would
            # itself raise NameError; non-text spec files raise UnicodeError.
            except UnicodeError:
                raise CondaError("Error reading file, file should be a text file containing"
                                 " packages \nconda create --help for details")
        if '@EXPLICIT' in specs:
            explicit(specs, prefix, verbose=not context.quiet, index_args=index_args)
            return
    specs.extend(specs_from_args(args_packages, json=context.json))

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        print("Currently, mamba can only update explicit packages! (e.g. mamba update numpy python ...)")
        exit()

    if isupdate and context.update_modifier != UpdateModifier.UPDATE_ALL:
        prefix_data = PrefixData(prefix)
        for spec in specs:
            spec = MatchSpec(spec)
            if not spec.is_name_only_spec:
                raise CondaError("Invalid spec for 'conda update': %s\n"
                                 "Use 'conda install' instead." % spec)
            if not prefix_data.get(spec.name, None):
                raise PackageNotInstalledError(prefix, spec.name)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(0, len(args.packages), list(args.packages),
                                        'did not expect any arguments for --clone')
        clone(args.clone, prefix, json=context.json, quiet=context.quiet,
              index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    specs = [MatchSpec(s) for s in specs]
    mamba_solve_specs = [s.conda_build_form() for s in specs]

    print("\n\nLooking for: {}\n\n".format(mamba_solve_specs))

    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    if strict_priority:
        raise Exception("Cannot use strict priority with mamba!")

    to_link, to_unlink = api.solve(channel_json, installed_json_f.name,
                                   mamba_solve_specs, isupdate, strict_priority)

    to_link_records, to_unlink_records = [], []

    final_precs = IndexedSet(PrefixData(prefix).iter_records())

    def get_channel(c):
        # Map a channel URL string back to its index entry.
        for x in index:
            if str(x.channel) == c:
                return x

    # Remove solver-unlinked packages from the final record set.
    for c, pkg in to_unlink:
        for i_rec in installed_pkg_recs:
            if i_rec.fn == pkg:
                final_precs.remove(i_rec)
                to_unlink_records.append(i_rec)
                break
        else:
            print("No package record found!")

    # Add solver-linked packages (parsed from the solver's repodata JSON).
    for c, pkg, jsn_s in to_link:
        sdir = get_channel(c)
        rec = to_package_record_from_subjson(sdir, pkg, jsn_s)
        final_precs.add(rec)
        to_link_records.append(rec)

    unlink_precs, link_precs = diff_for_unlink_link_precs(
        prefix,
        final_precs=IndexedSet(PrefixGraph(final_precs).graph),
        specs_to_add=specs,
        force_reinstall=context.force_reinstall)

    pref_setup = PrefixSetup(
        target_prefix=prefix,
        unlink_precs=unlink_precs,
        link_precs=link_precs,
        remove_specs=[],
        update_specs=specs
    )

    conda_transaction = UnlinkLinkTransaction(pref_setup)
    handle_txn(conda_transaction, prefix, args, newenv)

    # Best-effort cleanup of the installed-packages temp file; narrowed from
    # a bare `except:` so SystemExit/KeyboardInterrupt are not swallowed.
    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except Exception:
        pass
def test_scrapy_py36_osx_whl():
    """Wheel-installed Scrapy on py36: validate record fields and hashed /
    hash-less path entries, including the generated console script."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/Scrapy-1.5.1.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "cssselect >=0.9",
            "lxml",
            "parsel >=1.1",
            "pydispatcher >=2.0.5",
            "pyopenssl",
            "python 3.6.*",
            "queuelib",
            "service-identity",
            "six >=1.5.2",
            "twisted >=13.1.0",
            "w3lib >=1.17.0"
        ],
        "fn": "Scrapy-1.5.1.dist-info",
        "name": "scrapy",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "1.5.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("3.6")
    assert site_packages + "/scrapy/core/scraper.py" in file_entries
    assert site_packages + "/scrapy/core/__pycache__/scraper.cpython-36.pyc" in file_entries
    # Source files in RECORD carry hash and size ...
    source_entry = {
        "_path": site_packages + "/scrapy/core/scraper.py",
        "path_type": "hardlink",
        "sha256": "2559X9n2z1YKdFV9ElMRD6_88LIdqH1a2UwQimStt2k",
        "size_in_bytes": 9960
    }
    assert source_entry in pdata["paths"]
    # ... compiled .pyc entries carry neither.
    pyc_entry = {
        "_path": site_packages + "/scrapy/core/__pycache__/scraper.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pyc_entry in pdata["paths"]
    script_entry = {
        "_path": "../bin/scrapy" if on_win else "bin/scrapy",
        "path_type": "hardlink",
        "sha256": "RncAAoxSEnSi_0VIopaRxsq6kryQGL61YbEweN2TW3g",
        "size_in_bytes": 268
    }
    assert script_entry in pdata["paths"]
def test_scrapy_py36_osx_whl():
    """Wheel-installed Scrapy on py36: validate record fields and hashed /
    hash-less path entries, including the generated console script."""
    env_prefix = join(ENV_METADATA_DIR, "py36-osx-whl")
    if not isdir(env_prefix):
        pytest.skip("test files not found: %s" % env_prefix)
    anchor = "lib/python3.6/site-packages/Scrapy-1.5.1.dist-info/RECORD"
    record = read_python_record(env_prefix, anchor, "3.6")
    dumped = json_load(json_dump(record.dump()))
    file_entries = dumped.pop("files")
    pdata = dumped.pop("paths_data")
    print(json_dump(dumped))
    expected = {
        "build": "pypi_0",
        "build_number": 0,
        "channel": "https://conda.anaconda.org/pypi",
        "constrains": [],
        "depends": [
            "cssselect >=0.9",
            "lxml",
            "parsel >=1.1",
            "pydispatcher >=2.0.5",
            "pyopenssl",
            "python 3.6.*",
            "queuelib",
            "service-identity",
            "six >=1.5.2",
            "twisted >=13.1.0",
            "w3lib >=1.17.0"
        ],
        "fn": "Scrapy-1.5.1.dist-info",
        "name": "scrapy",
        "package_type": "virtual_python_wheel",
        "subdir": "pypi",
        "version": "1.5.1"
    }
    assert dumped == expected
    print(json_dump(file_entries))
    print(json_dump(pdata["paths"]))
    site_packages = get_python_site_packages_short_path("3.6")
    assert site_packages + "/scrapy/core/scraper.py" in file_entries
    assert site_packages + "/scrapy/core/__pycache__/scraper.cpython-36.pyc" in file_entries
    # Source files in RECORD carry hash and size ...
    source_entry = {
        "_path": site_packages + "/scrapy/core/scraper.py",
        "path_type": "hardlink",
        "sha256": "2559X9n2z1YKdFV9ElMRD6_88LIdqH1a2UwQimStt2k",
        "size_in_bytes": 9960
    }
    assert source_entry in pdata["paths"]
    # ... compiled .pyc entries carry neither.
    pyc_entry = {
        "_path": site_packages + "/scrapy/core/__pycache__/scraper.cpython-36.pyc",
        "path_type": "hardlink",
        "sha256": None,
        "size_in_bytes": None
    }
    assert pyc_entry in pdata["paths"]
    script_entry = {
        "_path": "../bin/scrapy" if on_win else "bin/scrapy",
        "path_type": "hardlink",
        "sha256": "RncAAoxSEnSi_0VIopaRxsq6kryQGL61YbEweN2TW3g",
        "size_in_bytes": 268
    }
    assert script_entry in pdata["paths"]