def execute(args, parser):
    """List packages for the target environment, or show its revision log."""
    prefix = common.get_prefix(args)
    pattern = args.regex
    if args.full_name:
        # Anchor the pattern so only exact package names match.
        pattern = r'^%s$' % pattern

    if args.revisions:
        from conda.history import History
        history = History(prefix)
        if not isfile(history.path):
            common.error_and_exit("No revision log found: %s\n" % history.path,
                                  json=args.json,
                                  error_type="NoRevisionLog")
        elif args.json:
            common.stdout_json(history.object_log())
        else:
            history.print_log()
        return

    # JSON output forces the canonical format regardless of other flags.
    if args.json or args.canonical:
        fmt = 'canonical'
    elif args.export:
        fmt = 'export'
    else:
        fmt = 'human'

    sys.exit(print_packages(prefix, pattern, fmt,
                            piplist=args.pip, json=args.json))
def execute(args, parser):
    """Print the environment's packages, or its revision history."""
    prefix = common.get_prefix(args)

    if args.revisions:
        from conda.history import History
        history = History(prefix)
        if isfile(history.path):
            if args.json:
                common.stdout_json(history.object_log())
            else:
                history.print_log()
        else:
            common.error_and_exit("No revision log found: %s\n" % history.path,
                                  json=args.json,
                                  error_type="NoRevisionLog")
        return

    if args.canonical:
        fmt = "canonical"
    elif args.export:
        fmt = "export"
    else:
        fmt = "human"
    if args.json:
        # JSON mode always emits canonical package strings.
        fmt = "canonical"

    print_packages(prefix, args.regex, fmt, piplist=args.pip, json=args.json)
def execute(args, parser):
    # List installed packages for the selected prefix; the mutually
    # exclusive output modes (--revisions, --explicit) are handled before
    # the normal listing.
    prefix = context.prefix_w_legacy_search
    regex = args.regex
    if args.full_name:
        # --full-name: anchor the regex so only exact name matches count.
        regex = r'^%s$' % regex
    if args.revisions:
        # --revisions: show the history log instead of the package list.
        from conda.history import History
        h = History(prefix)
        if isfile(h.path):
            if not context.json:
                h.print_log()
            else:
                stdout_json(h.object_log())
        else:
            raise CondaFileNotFoundError(h.path)
        return
    if args.explicit:
        # --explicit: print URL-based explicit specs (optionally with --md5).
        print_explicit(prefix, args.md5)
        return
    if args.canonical:
        format = 'canonical'
    elif args.export:
        format = 'export'
    else:
        format = 'human'
    if context.json:
        # JSON output always uses the canonical format.
        format = 'canonical'
    exitcode = print_packages(prefix, regex, format, piplist=args.pip,
                              json=context.json,
                              show_channel_urls=context.show_channel_urls)
    return exitcode
def execute(args, parser):
    # Entry point for the list command: prints the revision log when
    # --revisions is given, otherwise the package listing.
    prefix = common.get_prefix(args)
    regex = args.regex
    if args.full_name:
        # --full-name anchors the pattern to match whole names only.
        regex = r'^%s$' % regex
    if args.revisions:
        from conda.history import History
        h = History(prefix)
        if isfile(h.path):
            if not args.json:
                h.print_log()
            else:
                common.stdout_json(h.object_log())
        else:
            common.error_and_exit("No revision log found: %s\n" % h.path,
                                  json=args.json,
                                  error_type="NoRevisionLog")
        return
    if args.canonical:
        format = 'canonical'
    elif args.export:
        format = 'export'
    else:
        format = 'human'
    if args.json:
        # JSON output is always canonical.
        format = 'canonical'
    print_packages(prefix, regex, format, piplist=args.pip, json=args.json)
def user_installed_packages(self) -> Iterable[str]:
    """Names of the packages the user explicitly requested in this prefix.

    Relies on an undocumented conda Python API; see
    https://github.com/conda/conda/issues/4545#issuecomment-469984684
    """
    return History(self.prefix).get_requested_specs_map().keys()
def test_conda_comment_version_parsing(self):
    # Release, rc, and dev version strings must all be recognized.
    for version in ("4.5.1", "4.5.1rc1", "4.5.1dev0"):
        line = "# conda version: %s" % version
        assert History._parse_comment_line(line) == {"conda_version": version}
def from_environment(name, prefix, no_builds=False, ignore_channels=False, from_history=False): """ Get environment object from prefix Args: name: The name of environment prefix: The path of prefix no_builds: Whether has build requirement ignore_channels: whether ignore_channels from_history: Whether environment file should be based on explicit specs in history Returns: Environment object """ # requested_specs_map = History(prefix).get_requested_specs_map() if from_history: history = History(prefix).get_requested_specs_map() deps = [str(package) for package in history.values()] return Environment(name=name, dependencies=deps, channels=list(context.channels), prefix=prefix) pd = PrefixData(prefix, pip_interop_enabled=True) precs = tuple(PrefixGraph(pd.iter_records()).graph) grouped_precs = groupby(lambda x: x.package_type, precs) conda_precs = sorted(concatv( grouped_precs.get(None, ()), grouped_precs.get(PackageType.NOARCH_GENERIC, ()), grouped_precs.get(PackageType.NOARCH_PYTHON, ()), ), key=lambda x: x.name) pip_precs = sorted(concatv( grouped_precs.get(PackageType.VIRTUAL_PYTHON_WHEEL, ()), grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_MANAGEABLE, ()), grouped_precs.get(PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE, ()), # grouped_precs.get(PackageType.SHADOW_PYTHON_EGG_LINK, ()), ), key=lambda x: x.name) if no_builds: dependencies = ['='.join((a.name, a.version)) for a in conda_precs] else: dependencies = ['='.join((a.name, a.version, a.build)) for a in conda_precs] if pip_precs: dependencies.append({'pip': ["%s==%s" % (a.name, a.version) for a in pip_precs]}) channels = list(context.channels) if not ignore_channels: for prec in conda_precs: canonical_name = prec.channel.canonical_name if canonical_name not in channels: channels.insert(0, canonical_name) return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix)
def test_returns_history_object_as_context_object(self):
    # __enter__ must hand back the History instance itself.
    history = History("/path/to/prefix")
    with mock.patch.object(history, 'init_log_file', return_value=None), \
            mock.patch.object(history, 'update'):
        with history as entered:
            self.assertEqual(history, entered)
def test_empty_history_check_on_empty_env(self):
    # file_is_empty must not be consulted while the History context is
    # active; it is only called on demand afterwards.
    with mock.patch.object(History, 'file_is_empty') as mock_file_is_empty:
        with History(make_temp_prefix()) as h:
            self.assertEqual(mock_file_is_empty.call_count, 0)
        self.assertEqual(mock_file_is_empty.call_count, 0)
        # Mocked call: returns a truthy MagicMock and bumps call_count.
        assert h.file_is_empty()
        self.assertEqual(mock_file_is_empty.call_count, 1)
    # NOTE(review): with the patch removed the real file_is_empty reports
    # non-empty — presumably because exiting the History context wrote to
    # the log; confirm against History.__exit__.
    assert not h.file_is_empty()
def test_calls_update_on_exit(self):
    # update() must fire exactly once, and only at context exit.
    history = History("/path/to/prefix")
    with mock.patch.object(history, 'init_log_file', return_value=None):
        with mock.patch.object(history, 'update') as mocked_update:
            with history:
                self.assertEqual(0, mocked_update.call_count)
            self.assertEqual(1, mocked_update.call_count)
def execute(args, parser):
    # List command entry point: handles --revisions, --explicit and
    # --export before falling through to the regular package listing.
    prefix = context.prefix_w_legacy_search
    regex = args.regex
    if args.full_name:
        # Anchor so only whole-name matches are reported.
        regex = r'^%s$' % regex
    if args.revisions:
        from conda.history import History
        h = History(prefix)
        if isfile(h.path):
            if not args.json:
                h.print_log()
            else:
                stdout_json(h.object_log())
        else:
            raise CondaFileNotFoundError(
                h.path, "No revision log found: %s\n" % h.path)
        return
    if args.explicit:
        print_explicit(prefix, args.md5)
        return
    if args.canonical:
        format = 'canonical'
    elif args.export:
        # NOTE(review): --export takes the same path as --explicit here
        # (prints explicit specs and returns) instead of selecting the
        # 'export' format as sibling variants do — confirm this is
        # intentional and not a copy/paste slip.
        print_explicit(prefix, args.md5)
        return
    else:
        format = 'human'
    if args.json:
        # JSON output always uses the canonical format.
        format = 'canonical'
    exitcode = print_packages(prefix, regex, format, piplist=args.pip,
                              json=args.json,
                              show_channel_urls=args.show_channel_urls)
    return exitcode
def execute(args, parser):
    """Show the revision log, or exit with the package listing's status."""
    prefix = common.get_prefix(args)

    if args.revisions:
        from conda.history import History
        history = History(prefix)
        if not isfile(history.path):
            sys.stderr.write("No revision log found: %s\n" % history.path)
        else:
            history.print_log()
        return

    if args.canonical:
        fmt = 'canonical'
    elif args.export:
        fmt = 'export'
    else:
        fmt = 'human'
    sys.exit(list_packages(prefix, args.regex, format=fmt, piplist=args.pip))
def test_minimum_conda_version_error():
    # A history entry recorded by a newer conda must raise CondaUpgradeError
    # when the log is read back.
    with tempdir() as prefix:
        history_file = join(prefix, 'conda-meta', 'history')
        assert not isfile(history_file)
        mkdir_p(join(prefix, 'conda-meta'))
        copy2(join(dirname(__file__), 'conda-meta', 'history'), history_file)
        with open(history_file, 'a') as fh:
            fh.writelines([
                "==> 2018-07-09 11:18:09 <==\n",
                "# cmd: blarg\n",
                "# conda version: 42.42.4242\n",
            ])
        with pytest.raises(CondaUpgradeError) as exc:
            History(prefix).get_user_requests()
        exception_string = repr(exc.value)
        print(exception_string)
        assert "minimum conda version: 42.42" in exception_string
        assert "$ conda install -p" in exception_string
def test_specs_line_parsing_44(self):
    # New format (>=4.4): specs serialized as a JSON-style list.
    expected_specs = ["param[version='>=1.5.1,<2.0']"]
    item = History._parse_comment_line(
        "# update specs: [\"param[version='>=1.5.1,<2.0']\"]")
    pprint(item)
    assert item == {
        "action": "update",
        "specs": expected_specs,
        "update_specs": expected_specs,
    }
def revert_actions(prefix, revision=-1): h = History(prefix) h.update() try: state = h.get_state(revision) except IndexError: sys.exit("Error: no such revision: %d" % revision) curr = h.get_state() if state == curr: return {} actions = ensure_linked_actions(state, prefix) for dist in curr - state:
def test_works_as_context_manager(self):
    # Both context-manager hooks must be present on a History instance.
    history = History("/path/to/prefix")
    for hook in ('__enter__', '__exit__'):
        self.assertTrue(getattr(history, hook))
def test_conda_comment_version_parsing(self):
    # Table-driven check of the "# conda version:" comment parser.
    cases = {
        "# conda version: 4.5.1": "4.5.1",
        "# conda version: 4.5.1rc1": "4.5.1rc1",
        "# conda version: 4.5.1dev0": "4.5.1dev0",
    }
    for line, expected in cases.items():
        assert History._parse_comment_line(line) == {"conda_version": expected}
def test_specs_line_parsing_43(self):
    # Old format (<4.4): specs written as one comma-separated string.
    # Each case pairs the raw comment line with the spec list the parser
    # must recover from it.
    cases = [
        ('# install specs: param >=1.5.1,<2.0',
         ['param >=1.5.1,<2.0']),
        ('# install specs: param >=1.5.1,<2.0,0packagename >=1.0.0,<2.0',
         ['param >=1.5.1,<2.0', '0packagename >=1.0.0,<2.0']),
        ('# install specs: python>=3.5.1,jupyter >=1.0.0,<2.0,'
         'matplotlib >=1.5.1,<2.0,numpy >=1.11.0,<2.0,pandas >=0.19.2,<1.0,'
         'psycopg2 >=2.6.1,<3.0,pyyaml >=3.12,<4.0,scipy >=0.17.0,<1.0',
         ['python>=3.5.1', 'jupyter >=1.0.0,<2.0', 'matplotlib >=1.5.1,<2.0',
          'numpy >=1.11.0,<2.0', 'pandas >=0.19.2,<1.0',
          'psycopg2 >=2.6.1,<3.0', 'pyyaml >=3.12,<4.0',
          'scipy >=0.17.0,<1.0']),
        ('# install specs: _license >=1.0.0,<2.0',
         ['_license >=1.0.0,<2.0']),
        ('# install specs: pandas,_license >=1.0.0,<2.0',
         ['pandas', '_license >=1.0.0,<2.0']),
    ]
    for line, specs in cases:
        item = History._parse_comment_line(line)
        pprint(item)
        assert item == {
            'action': 'install',
            'specs': specs,
            'update_specs': specs,
        }
def execute_actions(actions, index=None, verbose=False):
    """Run the plan derived from *actions*, recorded in the prefix history."""
    plan = plan_from_actions(actions)
    target_prefix = actions[PREFIX]
    with History(target_prefix):
        execute_plan(plan, index, verbose)
# NOTE(review): this fragment starts mid-function and contains unresolved
# VCS merge-conflict markers (">>>>>>> princeofdarkness76/rpath",
# "<<<<<<< HEAD", "=======") — it cannot compile as-is. It must be
# resolved against both parent branches (History-wrapped instruction
# execution vs. the conda_build rpath-patching path) before use; left
# byte-identical here because either resolution would be a guess.
if prefix.endswith(build_suffix): saw_build_link = True link(prefix, arg, index=index) elif cmd == UNLINK: install.unlink(prefix, arg) elif cmd == SYMLINK_CONDA: install.symlink_conda(prefix, arg) else: raise Exception("Did not expect command: %r" % cmd) >>>>>>> princeofdarkness76/rpath if not isinstance(actions[inst.PREFIX], (list, tuple)): actions[inst.PREFIX] = [actions[inst.PREFIX]] <<<<<<< HEAD with History(actions[inst.PREFIX][0]): inst.execute_instructions(plan, index, verbose) ======= if saw_build_link and config.post_link_patch_rpaths: print("Patching build environment...") assert prefix != config.root_dir, prefix from conda_build.dll import BuildRoot build_root = BuildRoot( prefix=prefix, forgiving=True, is_build=False, ) build_root.make_relocatable(copy=True) if saw_build_link and config.post_link_patch_rpaths: print("Patching build environment...")
class UserRequestsTestCase(unittest.TestCase):
    """Exercises History parsing against the checked-in sample log."""

    h = History(dirname(__file__))
    user_requests = h.get_user_requests()

    def test_len(self):
        self.assertEqual(len(self.user_requests), 6)

    def test_0(self):
        expected = {
            'cmd': ['conda', 'update', 'conda'],
            'date': '2016-02-16 13:31:33',
            'unlink_dists': (),
            'link_dists': (),
        }
        self.assertEqual(self.user_requests[0], expected)

    def test_last(self):
        expected = {
            'action': 'install',
            'cmd': ['conda', 'install', 'pyflakes'],
            'date': '2016-02-18 22:53:20',
            'specs': ['pyflakes', 'conda', 'python 2.7*'],
            'update_specs': ['pyflakes', 'conda', 'python 2.7*'],
            'unlink_dists': (),
            'link_dists': ['+pyflakes-1.0.0-py27_0'],
        }
        self.assertEqual(self.user_requests[-1], expected)

    def test_conda_comment_version_parsing(self):
        # Release, rc, and dev version strings must all be recognized.
        for version in ("4.5.1", "4.5.1rc1", "4.5.1dev0"):
            parsed = History._parse_comment_line("# conda version: %s" % version)
            assert parsed == {"conda_version": version}

    def test_specs_line_parsing_44(self):
        # New format (>=4.4): specs serialized as a JSON-style list.
        item = History._parse_comment_line(
            "# update specs: [\"param[version='>=1.5.1,<2.0']\"]")
        pprint(item)
        wanted = ["param[version='>=1.5.1,<2.0']"]
        assert item == {
            "action": "update",
            "specs": wanted,
            "update_specs": wanted,
        }

    def test_specs_line_parsing_43(self):
        # Old format (<4.4): specs written as one comma-separated string.
        cases = [
            ('# install specs: param >=1.5.1,<2.0',
             ['param >=1.5.1,<2.0']),
            ('# install specs: param >=1.5.1,<2.0,0packagename >=1.0.0,<2.0',
             ['param >=1.5.1,<2.0', '0packagename >=1.0.0,<2.0']),
            ('# install specs: python>=3.5.1,jupyter >=1.0.0,<2.0,'
             'matplotlib >=1.5.1,<2.0,numpy >=1.11.0,<2.0,'
             'pandas >=0.19.2,<1.0,psycopg2 >=2.6.1,<3.0,'
             'pyyaml >=3.12,<4.0,scipy >=0.17.0,<1.0',
             ['python>=3.5.1', 'jupyter >=1.0.0,<2.0',
              'matplotlib >=1.5.1,<2.0', 'numpy >=1.11.0,<2.0',
              'pandas >=0.19.2,<1.0', 'psycopg2 >=2.6.1,<3.0',
              'pyyaml >=3.12,<4.0', 'scipy >=0.17.0,<1.0']),
            ('# install specs: _license >=1.0.0,<2.0',
             ['_license >=1.0.0,<2.0']),
            ('# install specs: pandas,_license >=1.0.0,<2.0',
             ['pandas', '_license >=1.0.0,<2.0']),
        ]
        for line, specs in cases:
            item = History._parse_comment_line(line)
            pprint(item)
            assert item == {
                'action': 'install',
                'specs': specs,
                'update_specs': specs,
            }
def test_parse_on_empty_env(self):
    # While parse() is patched out, entering the History context must not
    # call it.
    with mock.patch.object(History, 'parse') as mock_parse:
        with History(make_temp_prefix(name=text_type(self.tmpdir))) as h:
            self.assertEqual(mock_parse.call_count, 0)
            # MagicMock supports __len__ (defaulting to 0), so the mocked
            # parse() result reads as an empty history.
            self.assertEqual(len(h.parse()), 0)
    # NOTE(review): once the patch is removed, the real parse() reports one
    # entry — presumably written when the History context exited; confirm
    # against History.__exit__/update.
    self.assertEqual(len(h.parse()), 1)
def execute_actions(actions, index=None, verbose=False):
    """Translate *actions* into a plan and execute it under History logging."""
    plan = plan_from_actions(actions)
    target = actions[inst.PREFIX]
    with History(target):
        inst.execute_instructions(plan, index, verbose)
def install(args, parser, command='install'):
    """
    mamba install, mamba update, and mamba create
    """
    context.validate_configuration()
    check_non_admin()

    init_api_context(use_mamba_experimental)

    # Which subcommand are we servicing, and which libsolv task matches it.
    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE
        solver_options.clear()

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    # Validate the target prefix for install/update: it must look like an
    # existing conda environment (or be creatable via --mkdir).
    if not newenv:
        if isdir(prefix):
            if on_win:
                delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
                    if not path_is_clean(prefix):
                        raise DirectoryNotACondaEnvironmentError(prefix)
            else:
                # fall-through expected under normal operation
                pass
        else:
            if hasattr(args, "mkdir") and args.mkdir:
                try:
                    mkdir_p(prefix)
                except EnvironmentError as e:
                    raise CondaOSError("Could not create directory: %s" % prefix,
                                       caused_by=e)
            else:
                raise EnvironmentLocationNotFound(prefix)

    prefix = context.target_prefix

    #############################
    # Get SPECS                 #
    #############################

    args_packages = [s.strip('"\'') for s in args_packages] if False else \
        [s.strip('"\'') for s in args.packages]
    if newenv and not args.no_default_packages:
        # Override defaults if they are specified at the command line
        # TODO: rework in 4.4 branch using MatchSpec
        args_packages_names = [
            pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages
        ]
        for default_pkg in context.create_default_packages:
            default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0]
            if default_pkg_name not in args_packages_names:
                args_packages.append(default_pkg)

    # Tarball arguments are handled as an explicit transaction; mixing
    # tarballs with named specs is rejected.
    num_cp = sum(s.endswith('.tar.bz2') for s in args_packages)
    if num_cp:
        if num_cp == len(args_packages):
            explicit(args_packages, prefix,
                     verbose=not (context.quiet or context.json))
            return
        else:
            raise CondaValueError("cannot mix specifications with conda package"
                                  " filenames")

    specs = []

    index_args = {
        'use_cache': args.use_index_cache,
        'channel_urls': context.channels,
        'unknown': args.unknown,
        'prepend': not args.override_channels,
        'use_local': args.use_local
    }

    if args.file:
        file_specs = []
        for fpath in args.file:
            try:
                file_specs += specs_from_url(fpath, json=context.json)
            except Unicode:
                raise CondaValueError(
                    "Error reading file, file should be a text file containing"
                    " packages \nconda create --help for details")
        if '@EXPLICIT' in file_specs:
            # Explicit URL lists short-circuit the solver entirely.
            explicit(file_specs, prefix,
                     verbose=not (context.quiet or context.json),
                     index_args=index_args)
            return
        specs.extend([MatchSpec(s) for s in file_specs])

    specs.extend(specs_from_args(args_packages, json=context.json))

    # update channels from package specs (e.g. mychannel::mypackage adds mychannel)
    channels = [c for c in context.channels]
    for spec in specs:
        # CONDA TODO: correct handling for subdir isn't yet done
        spec_channel = spec.get_exact_value('channel')
        if spec_channel and spec_channel not in channels:
            channels.append(spec_channel)
    index_args['channel_urls'] = channels

    index = get_index(channel_urls=index_args['channel_urls'],
                      prepend=index_args['prepend'], platform=None,
                      use_local=index_args['use_local'],
                      use_cache=index_args['use_cache'],
                      unknown=index_args['unknown'], prefix=prefix)

    # Assign a (priority, subpriority) pair per channel/subdir; under strict
    # priority earlier channels strictly outrank later ones.
    channel_json = []
    strict_priority = (context.channel_priority == ChannelPriority.STRICT)
    subprio_index = len(index)
    if strict_priority:
        # first, count unique channels
        n_channels = len(set([channel.canonical_name for _, channel in index]))
        current_channel = index[0][1].canonical_name
        channel_prio = n_channels

    for subdir, chan in index:
        # add priority here
        if strict_priority:
            if chan.canonical_name != current_channel:
                channel_prio -= 1
                current_channel = chan.canonical_name
            priority = channel_prio
        else:
            priority = 0
        if strict_priority:
            subpriority = 0 if chan.platform == 'noarch' else 1
        else:
            subpriority = subprio_index
            subprio_index -= 1

        if subdir.loaded() == False and chan.platform != 'noarch':
            # ignore non-loaded subdir if channel is != noarch
            continue

        if context.verbosity != 0:
            print("Channel: {}, prio: {} : {}".format(chan, priority, subpriority))
            print("Cache path: ", subdir.cache_path())

        channel_json.append((chan, subdir, priority, subpriority))

    installed_json_f, installed_pkg_recs = get_installed_jsonfile(prefix)

    if isinstall and args.revision:
        get_revision(args.revision, json=context.json)
    elif isinstall and not (args.file or args_packages):
        raise CondaValueError("too few arguments, "
                              "must supply command line package specs or --file")

    installed_names = [i_rec.name for i_rec in installed_pkg_recs]
    # for 'conda update', make sure the requested specs actually exist in the prefix
    # and that they are name-only specs
    if isupdate and context.update_modifier == UpdateModifier.UPDATE_ALL:
        history_dict = History(prefix).get_requested_specs_map()
        pins = {pin.name: pin for pin in get_pinned_specs(prefix)}
        # for key, match_spec in history_dict.items():
        for key in installed_names:
            if key == 'python':
                i = installed_names.index('python')
                version = installed_pkg_recs[i].version
                py_ver = ".".join(version.split(".")[:2]) + '.*'
                # specs.append(MatchSpec(name="python", version=py_ver))
            else:
                if key in pins:
                    specs.append(pins[key])
                else:
                    specs.append(MatchSpec(key))

        prefix_data = PrefixData(prefix)
        for s in args_packages:
            s = MatchSpec(s)
            if not s.is_name_only_spec:
                raise CondaValueError("Invalid spec for 'conda update': %s\n"
                                      "Use 'conda install' instead." % s)
            if not prefix_data.get(s.name, None):
                raise PackageNotInstalledError(prefix, s.name)
    elif context.update_modifier == UpdateModifier.UPDATE_DEPS:
        # find the deps for each package and add to the update job
        # solver_task |= api.SOLVER_FORCEBEST
        final_specs = specs
        for spec in specs:
            prec = installed_pkg_recs[installed_names.index(spec.name)]
            for dep in prec.depends:
                ms = MatchSpec(dep)
                if ms.name != 'python':
                    final_specs.append(MatchSpec(ms.name))
        specs = set(final_specs)

    if newenv and args.clone:
        if args.packages:
            raise TooManyArgumentsError(
                0, len(args.packages), list(args.packages),
                'did not expect any arguments for --clone')

        clone(args.clone, prefix, json=context.json,
              quiet=(context.quiet or context.json), index_args=index_args)
        touch_nonadmin(prefix)
        print_activate(args.name if args.name else prefix)
        return

    if not (context.quiet or context.json):
        print("\nLooking for: {}\n".format([str(s) for s in specs]))

    spec_names = [s.name for s in specs]

    # If python was not specified, check if it is installed.
    # If yes, add the installed python to the specs to prevent updating it.
    python_constraint = None
    additional_specs = []  # NOTE(review): appears unused below — confirm.
    if 'python' not in spec_names:
        if 'python' in installed_names:
            i = installed_names.index('python')
            version = installed_pkg_recs[i].version
            python_constraint = MatchSpec('python==' + version).conda_build_form()

    mamba_solve_specs = [s.__str__() for s in specs]

    # Build the libsolv pool: installed packages plus one repo per
    # channel/subdir, each carrying its computed priority.
    pool = api.Pool()

    repos = []

    if use_mamba_experimental or context.force_reinstall:
        prefix_data = api.PrefixData(context.target_prefix)
        prefix_data.load()

    # add installed
    if use_mamba_experimental:
        repo = api.Repo(pool, prefix_data)
        repos.append(repo)
    else:
        repo = api.Repo(pool, "installed", installed_json_f.name, "")
        repo.set_installed()
        repos.append(repo)

    for channel, subdir, priority, subpriority in channel_json:
        repo = subdir.create_repo(pool)
        repo.set_priority(priority, subpriority)
        repos.append(repo)

    if context.force_reinstall:
        solver = api.Solver(pool, solver_options, prefix_data)
    else:
        solver = api.Solver(pool, solver_options)

    solver.set_postsolve_flags([
        (api.MAMBA_NO_DEPS, context.deps_modifier == DepsModifier.NO_DEPS),
        (api.MAMBA_ONLY_DEPS, context.deps_modifier == DepsModifier.ONLY_DEPS),
        (api.MAMBA_FORCE_REINSTALL, context.force_reinstall)
    ])

    solver.add_jobs(mamba_solve_specs, solver_task)

    # as a security feature this will _always_ attempt to upgrade certain packages
    for a_pkg in [_.name for _ in context.aggressive_update_packages]:
        if a_pkg in installed_names:
            solver.add_jobs([a_pkg], api.SOLVER_UPDATE)

    if python_constraint:
        solver.add_constraint(python_constraint)

    success = solver.solve()
    if not success:
        print(solver.problems_to_str())
        exit_code = 1
        return exit_code

    package_cache = api.MultiPackageCache(context.pkgs_dirs)
    transaction = api.Transaction(solver, package_cache)
    mmb_specs, to_link, to_unlink = transaction.to_conda()

    specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
    specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]

    transaction.log_json()
    # prompt() also downloads; a falsy result means the user declined.
    downloaded = transaction.prompt(PackageCacheData.first_writable().pkgs_dir,
                                    repos)
    if not downloaded:
        exit(0)
    PackageCacheData.first_writable().reload()

    # Experimental path executes the mamba transaction directly (not on
    # Windows); otherwise it is converted into a conda transaction.
    if use_mamba_experimental and not os.name == 'nt':
        if newenv and not isdir(context.target_prefix) and not context.dry_run:
            mkdir_p(prefix)
        transaction.execute(prefix_data,
                            PackageCacheData.first_writable().pkgs_dir)
    else:
        conda_transaction = to_txn(specs_to_add, specs_to_remove, prefix,
                                   to_link, to_unlink, installed_pkg_recs, index)
        handle_txn(conda_transaction, prefix, args, newenv)

    # Best-effort cleanup of the temp file holding the installed-package JSON.
    try:
        installed_json_f.close()
        os.unlink(installed_json_f.name)
    except:
        pass
def test_specs_line_parsing_43(self):
    # Old format (<4.4): one comma-separated spec string per comment line.
    def check(line, specs):
        item = History._parse_comment_line(line)
        pprint(item)
        assert item == {
            'action': 'install',
            'specs': specs,
            'update_specs': specs,
        }

    check('# install specs: param >=1.5.1,<2.0',
          ['param >=1.5.1,<2.0'])
    check('# install specs: param >=1.5.1,<2.0,0packagename >=1.0.0,<2.0',
          ['param >=1.5.1,<2.0', '0packagename >=1.0.0,<2.0'])
    check('# install specs: python>=3.5.1,jupyter >=1.0.0,<2.0,'
          'matplotlib >=1.5.1,<2.0,numpy >=1.11.0,<2.0,pandas >=0.19.2,<1.0,'
          'psycopg2 >=2.6.1,<3.0,pyyaml >=3.12,<4.0,scipy >=0.17.0,<1.0',
          ['python>=3.5.1', 'jupyter >=1.0.0,<2.0', 'matplotlib >=1.5.1,<2.0',
           'numpy >=1.11.0,<2.0', 'pandas >=0.19.2,<1.0',
           'psycopg2 >=2.6.1,<3.0', 'pyyaml >=3.12,<4.0',
           'scipy >=0.17.0,<1.0'])
    check('# install specs: _license >=1.0.0,<2.0',
          ['_license >=1.0.0,<2.0'])
    check('# install specs: pandas,_license >=1.0.0,<2.0',
          ['pandas', '_license >=1.0.0,<2.0'])