def get_sphinx_build_command(project, logger, builder):
    """Assemble the ``sphinx-build`` command line from project properties.

    :param project: the PyBuilder project (supplies properties and paths)
    :param logger: build logger; its threshold selects verbosity
    :param builder: sphinx builder name (e.g. "html"), passed to ``-b``
    :return: list of command-line tokens
    """
    cmd = ["sphinx", "-b", builder]
    # Verbosity: -v when "verbose" is set, bumped to -vvvv at DEBUG level.
    verbosity_flag = None
    if project.get_property("verbose"):
        verbosity_flag = "-vvvv" if logger.threshold == logger.DEBUG else "-v"
    if verbosity_flag:
        cmd.append(verbosity_flag)
    cmd.extend(as_list(project.get_property("sphinx_build_extra_args")))
    cmd.append(project.expand_path("$sphinx_config_path"))
    # With several builders (or when explicitly requested) each builder gets
    # its own output subdirectory.
    multiple_builders = len(as_list(project.get_property("sphinx_doc_builder"))) > 1
    if multiple_builders or project.get_property("sphinx_output_per_builder"):
        cmd.append(project.expand_path("$sphinx_output_dir", builder))
    else:
        cmd.append(project.expand_path("$sphinx_output_dir"))
    return cmd
def _prepare_tasks(self, tasks): if not len(tasks): if self.project.default_task: tasks += as_list(self.project.default_task) else: raise PyBuilderException("No default task given.") else: new_tasks = [task for task in tasks if task[0] not in ("+", "^") or task in ("+", "^")] append_tasks = [task[1:] for task in tasks if task[0] == "+" and task != "+"] remove_tasks = [task[1:] for task in tasks if task[0] == "^" and task != "^"] if len(new_tasks): del tasks[:] tasks.extend(new_tasks) tasks.extend(append_tasks) for task in remove_tasks: try: tasks.remove(task) except ValueError: pass else: del tasks[:] if self.project.default_task: tasks += as_list(self.project.default_task) tasks += append_tasks for task in remove_tasks: try: tasks.remove(task) except ValueError: pass return tasks
def collect_tasks_and_actions_and_initializers(self, project_module):
    """Scan a build module and register every task, action and initializer.

    Iterates all attributes of *project_module*; callables marked with the
    task/action/initializer marker attributes are registered with the
    execution manager. The registered name is taken from the NAME_ATTRIBUTE
    override when present, else the callable's ``__name__``.
    """
    for name in dir(project_module):
        candidate = getattr(project_module, name)
        # Prefer an explicit name override; fall back to __name__.
        if hasattr(candidate, NAME_ATTRIBUTE):
            name = getattr(candidate, NAME_ATTRIBUTE)
        elif hasattr(candidate, "__name__"):
            name = candidate.__name__
        description = getattr(candidate, DESCRIPTION_ATTRIBUTE) if hasattr(
            candidate, DESCRIPTION_ATTRIBUTE) else ""
        if hasattr(candidate, TASK_ATTRIBUTE) and getattr(candidate, TASK_ATTRIBUTE):
            dependencies = getattr(candidate, DEPENDS_ATTRIBUTE) if hasattr(candidate, DEPENDS_ATTRIBUTE) else None
            required_dependencies = []
            optional_dependencies = []
            if dependencies:
                dependencies = list(as_list(dependencies))
                for d in dependencies:
                    # ``optional`` wraps a dependency spec; unwrap by calling it.
                    if type(d) is optional:
                        d = as_list(d())
                        optional_dependencies += d
                    else:
                        required_dependencies.append(d)
            self.logger.debug("Found task '%s' with required dependencies %s and optional dependencies %s",
                              name, required_dependencies, optional_dependencies)
            self.execution_manager.register_task(
                Task(name, candidate, required_dependencies, description, optional_dependencies))
        elif hasattr(candidate, ACTION_ATTRIBUTE) and getattr(candidate, ACTION_ATTRIBUTE):
            before = getattr(candidate, BEFORE_ATTRIBUTE) if hasattr(
                candidate, BEFORE_ATTRIBUTE) else None
            after = getattr(candidate, AFTER_ATTRIBUTE) if hasattr(
                candidate, AFTER_ATTRIBUTE) else None
            only_once = False
            if hasattr(candidate, ONLY_ONCE_ATTRIBUTE):
                only_once = getattr(candidate, ONLY_ONCE_ATTRIBUTE)
            teardown = False
            if hasattr(candidate, TEARDOWN_ATTRIBUTE):
                teardown = getattr(candidate, TEARDOWN_ATTRIBUTE)
            self.logger.debug("Found action %s", name)
            self.execution_manager.register_action(
                Action(name, candidate, before, after, description, only_once, teardown))
        elif hasattr(candidate, INITIALIZER_ATTRIBUTE) and getattr(candidate, INITIALIZER_ATTRIBUTE):
            # Initializers may be restricted to specific environments.
            environments = []
            if hasattr(candidate, ENVIRONMENTS_ATTRIBUTE):
                environments = getattr(candidate, ENVIRONMENTS_ATTRIBUTE)
            self.execution_manager.register_initializer(
                Initializer(name, candidate, environments, description))
def build_pip_install_options(index_url=None, extra_index_url=None, upgrade=False, insecure_installs=None,
                              force_reinstall=False, target_dir=None, verbose=False, trusted_host=None,
                              constraint_file=None, eager_upgrade=False):
    """Translate keyword flags into a ``pip install`` option list.

    :return: list of command-line option tokens, possibly empty.
    """
    options = []
    if index_url:
        options.extend(("--index-url", index_url))
    if extra_index_url:
        for url in as_list(extra_index_url):
            options.extend(("--extra-index-url", url))
    if trusted_host:
        for host in as_list(trusted_host):
            options.extend(("--trusted-host", host))
    if upgrade:
        options.append("--upgrade")
        # NOTE(review): comparing pip_version with the string "9.0" relies on
        # pip_common.pip_version supporting ordered comparison against str —
        # confirm this holds for pip >= 10 (plain string compare would not).
        if pip_common.pip_version >= "9.0":
            options.append("--upgrade-strategy")
            options.append("eager" if eager_upgrade else "only-if-needed")
    if verbose:
        options.append("--verbose")
    if force_reinstall:
        options.append("--force-reinstall")
    if target_dir:
        options.extend(("-t", target_dir))
    if constraint_file:
        options.extend(("-c", constraint_file))
    if insecure_installs:
        for package_name in insecure_installs:
            options.extend(("--allow-unverified", package_name,
                            "--allow-external", package_name))
    return options
def pip_install(install_targets, index_url=None, extra_index_url=None, upgrade=False, insecure_installs=None,
                force_reinstall=False, target_dir=None, verbose=False, logger=None, outfile_name=None,
                error_file_name=None, env=None, cwd=None, trusted_host=None):
    """Run ``pip install`` for the given targets.

    Builds the full command line from the option flags, appends each
    install target, and delegates to :func:`execute_command`.

    :return: the exit code reported by ``execute_command``.
    """
    cmd = list(PIP_EXEC_STANZA)
    cmd.append("install")
    cmd.extend(build_pip_install_options(index_url, extra_index_url, upgrade, insecure_installs,
                                         force_reinstall, target_dir, verbose, trusted_host))
    for target in as_list(install_targets):
        cmd.extend(as_pip_install_target(target))
    # Default to the current process environment when none is supplied.
    if env is None:
        env = os.environ
    if logger:
        logger.debug("Invoking pip: %s", cmd)
    return execute_command(cmd, outfile_name=outfile_name, env=env, cwd=cwd,
                           error_file_name=error_file_name, shell=False)
def build_binary_distribution(project, logger):
    """Run every configured distutils command through ``setup.py``.

    Each command's output is captured in a per-command report file under
    ``$dir_reports/distutils``.

    :raises BuildFailedException: when a setup command exits non-zero.
    """
    reports_dir = project.expand_path("$dir_reports/distutils")
    if not os.path.exists(reports_dir):
        os.mkdir(reports_dir)
    setup_script = project.expand_path("$dir_dist/setup.py")
    logger.info("Building binary distribution in %s",
                project.expand_path("$dir_dist"))
    commands = as_list(project.get_property("distutils_commands"))
    for command in commands:
        logger.debug("Executing distutils command %s", command)
        # Strip path separators so the command name is a safe report file
        # name (consistent with the sibling implementation that sanitizes).
        report_path = os.path.join(reports_dir, command.replace("/", ""))
        with open(report_path, "w") as output_file:
            # Distinct name: the original rebound ``commands`` here,
            # shadowing the list being iterated.
            command_line = [sys.executable, setup_script]
            command_line.extend(command.split())
            process = subprocess.Popen(command_line,
                                       cwd=project.expand_path("$dir_dist"),
                                       stdout=output_file,
                                       stderr=output_file,
                                       shell=False)
            return_code = process.wait()
            if return_code != 0:
                raise BuildFailedException(
                    "Error while executing setup command %s", command)
def upload(project, logger):
    """Register the project with the configured repository, then upload.

    Honors the ``distutils_upload_repository``, ``distutils_upload_sign``
    and ``distutils_upload_sign_identity`` properties.
    """
    repository = project.get_property("distutils_upload_repository")
    repo_opts = ["-r", repository] if repository else []
    sign_requested = project.get_property("distutils_upload_sign")
    sign_opts = []
    if sign_requested:
        sign_opts.append("--sign")
        sign_identity = project.get_property("distutils_upload_sign_identity")
        if sign_identity:
            sign_opts.extend(["--identity", sign_identity])
    # Unfortunately, distutils/setuptools doesn't throw error if register fails
    # but upload command will fail if project will not be registered
    logger.info("Registering project %s-%s%s", project.name, project.version,
                (" into repository '%s'" % repository) if repository else "")
    execute_distutils(project, logger, [["register"] + repo_opts], False)
    logger.info("Uploading project %s-%s%s%s%s", project.name, project.version,
                (" to repository '%s'" % repository) if repository else "",
                get_dist_version_string(project, " as version %s"),
                (" signing%s" % (" with %s" % sign_identity if sign_identity else "")) if sign_requested else "")
    command_options = project.get_property("distutils_command_options")
    upload_cmds = []
    for cmd in as_list(project.get_property("distutils_commands")):
        upload_cmds.append(build_command_with_options(cmd, command_options)
                           + ["upload"] + repo_opts + sign_opts)
    execute_distutils(project, logger, upload_cmds, True)
def build_binary_distribution(project, logger):
    """Execute all configured distutils commands to build distributions."""
    logger.info("Building binary distribution in %s",
                project.expand_path("$dir_dist"))
    command_options = project.get_property("distutils_command_options")
    cmd_lines = []
    for cmd in as_list(project.get_property("distutils_commands")):
        cmd_lines.append(build_command_with_options(cmd, command_options))
    execute_distutils(project, logger, cmd_lines, True)
def build_execution_plan(self, task_names):
    """Build an ordered execution plan for the requested tasks.

    Collects all transitive task dependencies, verifies the dependency
    graph is acyclic, then enqueues each requested task.

    :raises CircularTaskDependencyException: on a dependency cycle.
    :return: list of tasks in execution order.
    """
    self.assert_dependencies_resolved()
    plan = []
    # Map every reachable task to the names of its direct dependencies.
    edges = {
        task.name: [dep.name for dep in task.dependencies]
        for task in self.collect_all_transitive_tasks(as_list(task_names))
    }
    try:
        Graph(edges).assert_no_cycles_present()
    except GraphHasCycles as cycles:
        raise CircularTaskDependencyException(str(cycles))
    for requested in as_list(task_names):
        self._enqueue_task(plan, requested)
    return plan
def build_execution_plan(self, task_names):
    """Build an ordered execution plan by enqueuing each requested task."""
    self.assert_dependencies_resolved()
    plan = []
    for task_name in as_list(task_names):
        self.enqueue_task(plan, task_name)
    return plan
def execute_tool_on_source_files(project, name, command_and_arguments, logger=None, include_test_sources=False,
                                 include_scripts=False, include_dirs_only=False):
    """Run an external tool over the project's source files (or dirs).

    :param name: tool name; used for the report file and the
        ``<name>_verbose_output`` property lookup.
    :return: tuple ``(execution_result, report_file_path)`` where
        ``execution_result`` is the value returned by ``execute_command``.
    """
    if include_dirs_only:
        files = discover_affected_dirs(include_test_sources, include_scripts, project)
    else:
        files = discover_affected_files(include_test_sources, include_scripts, project)
    # list(files) instead of the identity comprehension [f for f in files].
    command = as_list(command_and_arguments) + list(files)
    report_file = project.expand_path("$dir_reports/{0}".format(name))
    execution_result = execute_command(command, report_file), report_file
    # Only read the report back when it will actually be logged; the
    # original re-assigned report_file from the tuple (a no-op) and read
    # the file unconditionally.
    if project.get_property(name + "_verbose_output") and logger:
        log_report(logger, name, read_file(report_file))
    return execution_result
def add_task_dependency(names, depends_on, optional):
    """Record that each task in *names* depends on *depends_on*.

    Dependencies are accumulated in the module-level
    ``injected_task_dependencies`` mapping of task name -> list of
    :class:`TaskDependency`.

    :param names: task name(s) or task callable(s)
    :param depends_on: the dependency target
    :param optional: whether the dependency is optional
    """
    for name in as_list(names):
        # Non-string entries are task callables; resolve them to their
        # canonical name. (``basestring`` implies Python 2 compatibility —
        # presumably aliased for Python 3 elsewhere; confirm.)
        if not isinstance(name, basestring):
            name = normalize_candidate_name(name)
        if name not in injected_task_dependencies:
            injected_task_dependencies[name] = list()
        injected_task_dependencies[name].append(TaskDependency(depends_on, optional))
def get_sphinx_apidoc_command(project, reactor):
    """Assemble the ``sphinx.apidoc`` command line.

    Enables ``--implicit-namespaces`` only when both the venv Python
    (>= 3.3) and the installed sphinx (>= 1.5) support it.
    """
    implicit_namespaces = False
    try:
        import sphinx
        python_ok = reactor.pybuilder_venv.version[:2] >= (3, 3)
        sphinx_ok = sphinx.version_info[:2] >= (1, 5)
        if python_ok and sphinx_ok:
            implicit_namespaces = True
    except ImportError:
        # Without sphinx installed we simply skip the flag.
        pass
    cmd = ["sphinx.apidoc", "-H", project.get_property("sphinx_project_name")]
    if implicit_namespaces:
        cmd.append("--implicit-namespaces")
    cmd.extend(as_list(project.get_property("sphinx_apidoc_extra_args")))
    cmd.extend(["-o",
                project.expand_path(*SPHINX_PYB_RUNTIME_APIDOC_DIR),
                project.expand_path("$dir_source_main_python")])
    return cmd
def build_binary_distribution(project, logger):
    """Run every configured distutils command through ``setup.py``.

    Each command's output is captured in a per-command report file under
    ``$dir_reports/distutils`` (path separators stripped from the name).

    :raises BuildFailedException: when a setup command exits non-zero.
    """
    reports_dir = project.expand_path("$dir_reports/distutils")
    if not os.path.exists(reports_dir):
        os.mkdir(reports_dir)
    setup_script = project.expand_path("$dir_dist/setup.py")
    logger.info("Building binary distribution in %s",
                project.expand_path("$dir_dist"))
    commands = as_list(project.get_property("distutils_commands"))
    for command in commands:
        logger.debug("Executing distutils command %s", command)
        report_path = os.path.join(reports_dir, command.replace("/", ""))
        with open(report_path, "w") as output_file:
            # Distinct name: the original rebound ``commands`` here,
            # shadowing the list being iterated.
            command_line = [sys.executable, setup_script]
            command_line.extend(command.split())
            process = subprocess.Popen(command_line,
                                       cwd=project.expand_path("$dir_dist"),
                                       stdout=output_file,
                                       stderr=output_file,
                                       shell=False)
            return_code = process.wait()
            if return_code != 0:
                raise BuildFailedException(
                    "Error while executing setup command %s", command)
def do_decoration(decorated):
    """Mark *decorated* as an initializer, recording target environments.

    Renamed the parameter from ``callable`` to avoid shadowing the
    builtin of the same name; decorator targets are passed positionally,
    so callers are unaffected.
    """
    setattr(decorated, INITIALIZER_ATTRIBUTE, True)
    if "environments" in additional_arguments:
        setattr(decorated, ENVIRONMENTS_ATTRIBUTE,
                as_list(additional_arguments["environments"]))
    return decorated
def build_entry_points_string(project):
    """Render the project's entry points as a setup.py dict literal.

    ``distutils_console_scripts`` and ``distutils_entry_points`` are
    mutually exclusive; console scripts are folded into the entry-point
    mapping under the 'console_scripts' group.

    :raises BuildFailedException: when both properties are set.
    :return: formatted string, '{}' when there are no entry points.
    """
    console_scripts = project.get_property('distutils_console_scripts')
    entry_points = project.get_property('distutils_entry_points')
    if console_scripts is not None and entry_points is not None:
        raise BuildFailedException(
            "'distutils_console_scripts' cannot be combined with 'distutils_entry_points'"
        )
    if entry_points is None:
        entry_points = dict()
    if console_scripts is not None:
        entry_points['console_scripts'] = console_scripts
    if len(entry_points) == 0:
        return '{}'
    indent = 8
    entries = []
    for key in sorted(entry_points.keys()):
        value = build_string_from_array(as_list(entry_points[key]), indent + 8)
        entries.append(" " * (indent + 4) + "'%s': %s" % (key, value) + ",\n")
    result = "{\n" + "".join(entries)
    # Drop the trailing comma from the last entry.
    result = result[:-2] + "\n"
    result += (" " * indent) + "}"
    return result
def upload(project, logger):
    """Register the project with the configured repository, then upload.

    Honors the ``distutils_upload_repository``, ``distutils_upload_sign``
    and ``distutils_upload_sign_identity`` properties.
    """
    repository = project.get_property("distutils_upload_repository")
    repo_opts = ["-r", repository] if repository else []
    sign_requested = project.get_property("distutils_upload_sign")
    sign_opts = []
    if sign_requested:
        sign_opts.append("--sign")
        sign_identity = project.get_property("distutils_upload_sign_identity")
        if sign_identity:
            sign_opts.extend(["--identity", sign_identity])
    # Unfortunately, distutils/setuptools doesn't throw error if register fails
    # but upload command will fail if project will not be registered
    logger.info("Registering project %s-%s%s", project.name, project.version,
                (" into repository '%s'" % repository) if repository else "")
    execute_distutils(project, logger, [["register"] + repo_opts], False)
    logger.info("Uploading project %s-%s%s%s%s", project.name, project.version,
                (" to repository '%s'" % repository) if repository else "",
                get_dist_version_string(project, " as version %s"),
                (" signing%s" % (" with %s" % sign_identity if sign_identity else "")) if sign_requested else "")
    command_options = project.get_property("distutils_command_options")
    upload_cmds = []
    for cmd in as_list(project.get_property("distutils_commands")):
        upload_cmds.append(build_command_with_options(cmd, command_options)
                           + ["upload"] + repo_opts + sign_opts)
    execute_distutils(project, logger, upload_cmds, True)
def build_entry_points_string(project):
    """Render the project's entry points as a setup.py dict literal.

    ``distutils_console_scripts`` and ``distutils_entry_points`` are
    mutually exclusive; console scripts are folded into the mapping
    under the 'console_scripts' group.

    :raises BuildFailedException: when both properties are set.
    :return: formatted string, '{}' when there are no entry points.
    """
    console_scripts = project.get_property('distutils_console_scripts')
    entry_points = project.get_property('distutils_entry_points')
    if console_scripts is not None and entry_points is not None:
        raise BuildFailedException("'distutils_console_scripts' cannot be combined with 'distutils_entry_points'")
    if entry_points is None:
        entry_points = dict()
    if console_scripts is not None:
        entry_points['console_scripts'] = console_scripts
    if len(entry_points) == 0:
        return '{}'
    indent = 8
    lines = []
    for key in sorted(entry_points.keys()):
        rendered = build_string_from_array(as_list(entry_points[key]), indent + 8)
        lines.append(" " * (indent + 4) + "'%s': %s" % (key, rendered) + ",\n")
    result = "{\n" + "".join(lines)
    # Strip the trailing comma from the final entry.
    result = result[:-2] + "\n"
    result += (" " * indent) + "}"
    return result
def pip_install(install_targets, index_url=None, extra_index_url=None, upgrade=False, insecure_installs=None,
                force_reinstall=False, target_dir=None, verbose=False, trusted_host=None, constraint_file=None,
                eager_upgrade=False, logger=None, outfile_name=None, error_file_name=None, env=None, cwd=None):
    """Run ``pip install`` for the given targets.

    Builds the full command line from the option flags, appends each
    install target, and delegates to :func:`execute_command`.

    :return: the exit code reported by ``execute_command``.
    """
    cmd = list(PIP_EXEC_STANZA) + ["install"]
    cmd += build_pip_install_options(index_url=index_url,
                                     extra_index_url=extra_index_url,
                                     upgrade=upgrade,
                                     insecure_installs=insecure_installs,
                                     force_reinstall=force_reinstall,
                                     target_dir=target_dir,
                                     verbose=verbose,
                                     trusted_host=trusted_host,
                                     constraint_file=constraint_file,
                                     eager_upgrade=eager_upgrade)
    for target in as_list(install_targets):
        cmd += as_pip_install_target(target)
    # Default to the current process environment when none is supplied.
    effective_env = os.environ if env is None else env
    if logger:
        logger.debug("Invoking pip: %s", cmd)
    return execute_command(cmd, outfile_name=outfile_name, env=effective_env, cwd=cwd,
                           error_file_name=error_file_name, shell=False)
def build(self, tasks=None, environments=None):
    """Run a full build: initializers, validation, then the task plan.

    :param tasks: task name(s) to execute; falls back to the project's
        default task when empty.
    :param environments: environment names activating matching initializers.
    :raises PyBuilderException: when no tasks and no default task exist.
    :return: a BuildSummary for the executed plan.
    """
    if not tasks:
        tasks = []
    if not environments:
        environments = []
    # Record this reactor as the globally current one.
    Reactor._current_instance = self
    if environments:
        self.logger.info(
            "Activated environments: %s", ", ".join(environments))
    # Initializers run before validation so they can set properties.
    self.execution_manager.execute_initializers(
        environments, logger=self.logger, project=self.project)
    self.log_project_properties()
    self.validate_project()
    tasks = as_list(tasks)
    if not len(tasks):
        if self.project.default_task:
            tasks += as_list(self.project.default_task)
        else:
            raise PyBuilderException("No default task given.")
    execution_plan = self.execution_manager.build_execution_plan(tasks)
    self.logger.debug("Execution plan is %s", ", ".join(
        [task.name for task in execution_plan]))
    self.logger.info(
        "Building %s version %s", self.project.name, self.project.version)
    self.logger.info("Executing build in %s", self.project.basedir)
    if len(tasks) == 1:
        self.logger.info("Going to execute task %s", tasks[0])
    else:
        list_of_tasks = ", ".join(tasks)
        self.logger.info("Going to execute tasks: %s", list_of_tasks)
    task_execution_summaries = self.execution_manager.execute_execution_plan(
        execution_plan, logger=self.logger, project=self.project, reactor=self)
    return BuildSummary(self.project, task_execution_summaries)
def build_pip_install_options(index_url=None, extra_index_url=None, upgrade=False, insecure_installs=None,
                              force_reinstall=False, target_dir=None, verbose=False, trusted_host=None):
    """Translate keyword flags into a ``pip install`` option list.

    :return: list of command-line option tokens, possibly empty.
    """
    options = []
    if index_url:
        options += ["--index-url", index_url]
    if extra_index_url:
        for url in as_list(extra_index_url):
            options += ["--extra-index-url", url]
    if trusted_host:
        for host in as_list(trusted_host):
            options += ["--trusted-host", host]
    # Simple boolean flags.
    for flag, enabled in (("--upgrade", upgrade),
                          ("--verbose", verbose),
                          ("--force-reinstall", force_reinstall)):
        if enabled:
            options.append(flag)
    if target_dir:
        options += ["-t", target_dir]
    # Newer pips reject unverified/external packages by default; opt back in.
    if _pip_disallows_insecure_packages_by_default() and insecure_installs:
        for package_name in insecure_installs:
            options += ["--allow-unverified", package_name,
                        "--allow-external", package_name]
    return options
def as_task_name_list(mixed):
    """Normalize task references (callables or names) to a list of names."""
    names = []
    for entry in as_list(mixed):
        # Functions contribute their __name__; everything else is stringified.
        name = entry.__name__ if isinstance(entry, types.FunctionType) else str(entry)
        names.append(name)
    return names
def as_task_name_list(mixed):
    """Normalize task references (callables or names) to a list of names."""
    return [item.__name__ if isinstance(item, types.FunctionType) else str(item)
            for item in as_list(mixed)]
def _filter_dependencies(logger, project, dependencies, entry_paths, ignore_installed):
    """Split dependencies into those needing installation and constraints.

    :param entry_paths: search paths for already-installed packages
    :param ignore_installed: when true, install everything regardless of
        what is already present.
    :return: tuple ``(dependencies_to_install, installed_packages,
        dependency_constraints)``.
    """
    dependencies = as_list(dependencies)
    installed_packages = get_packages_info(entry_paths)
    dependencies_to_install = []
    dependency_constraints = []
    for dependency in dependencies:
        logger.debug("Inspecting package %s", dependency)
        if ignore_installed:
            logger.debug("Package %s will be installed because existing installation will be ignored", dependency)
            dependencies_to_install.append(dependency)
            continue
        if dependency.declaration_only:
            # Declared for metadata purposes only; never installed.
            logger.info("Package %s is declaration-only and will not be installed", dependency)
            continue
        if isinstance(dependency, RequirementsFile):
            # Always add requirement file-based dependencies
            logger.debug("Package %s is a requirement file and will be updated", dependency)
            dependencies_to_install.append(dependency)
            continue
        elif isinstance(dependency, Dependency):
            if dependency.version:
                dependency_constraints.append(dependency)
                logger.debug("Package %s is added to the list of installation constraints", dependency)
            if dependency.url:
                # Always add dependency that is url-based
                logger.debug("Package %s is URL-based and will be updated", dependency)
                dependencies_to_install.append(dependency)
                continue
        if should_update_package(dependency.version) and not getattr(dependency, "version_not_a_spec", False):
            # Always add dependency that has a version specifier indicating desire to always update
            logger.debug("Package %s has a non-exact version specifier and will be updated", dependency)
            dependencies_to_install.append(dependency)
            continue
        dependency_name = dependency.name.lower()
        if dependency_name not in installed_packages:
            # If dependency not installed at all then install it
            logger.debug("Package %s is not installed and will be installed", dependency)
            dependencies_to_install.append(dependency)
            continue
        if dependency.version and not version_satisfies_spec(dependency.version,
                                                             installed_packages[dependency_name].version):
            # If version is specified and version constraint is not satisfied
            logger.debug("Package '%s' is not satisfied by installed dependency version '%s' and will be installed"
                         % (dependency, installed_packages[dependency_name].version))
            dependencies_to_install.append(dependency)
            continue
        logger.debug("Package '%s' is already up-to-date and will be skipped" % dependency)
    return dependencies_to_install, installed_packages, dependency_constraints
def build_command_with_options(command, distutils_command_options=None): commands = [command] if distutils_command_options: try: command_options = as_list(distutils_command_options[command]) commands.extend(command_options) except KeyError: pass return commands
def build(self, tasks=None, environments=None):
    """Plan and execute the requested tasks, returning the result."""
    tasks = as_list(tasks) if tasks else []
    environments = environments if environments else []
    plan = self.create_execution_plan(tasks, environments)
    return self.build_execution_plan(tasks, plan)
def build(self, tasks=None, environments=None):
    """Plan and execute the requested tasks.

    :return: the result of executing the plan. (The sibling ``build``
        implementations in this codebase return this value; the original
        here computed it and discarded it.)
    """
    if not tasks:
        tasks = []
    else:
        tasks = as_list(tasks)
    if not environments:
        environments = []
    execution_plan = self.create_execution_plan(tasks, environments)
    return self.build_execution_plan(tasks, execution_plan)
def pip_install(install_targets, python_env, index_url=None, extra_index_url=None, upgrade=False,
                insecure_installs=None, force_reinstall=False, target_dir=None, verbose=False, trusted_host=None,
                constraint_file=None, eager_upgrade=False, ignore_installed=False, prefix_dir=None, logger=None,
                outfile_name=None, error_file_name=None, env=None, cwd=None):
    """Run ``pip install`` inside the given Python environment.

    :param python_env: environment providing the interpreter, environ
        and command execution.
    :return: the result of ``python_env.execute_command``.
    """
    pip_command_line = list()
    pip_command_line.extend(python_env.executable + PIP_MODULE_STANZA)
    pip_command_line.append("install")
    pip_command_line.extend(
        build_pip_install_options(
            index_url=index_url,
            extra_index_url=extra_index_url,
            upgrade=upgrade,
            insecure_installs=insecure_installs,
            force_reinstall=force_reinstall,
            target_dir=target_dir,
            verbose=verbose,
            trusted_host=trusted_host,
            constraint_file=constraint_file,
            eager_upgrade=eager_upgrade,
            ignore_installed=ignore_installed,
            prefix_dir=prefix_dir,
        ))
    for install_target in as_list(install_targets):
        pip_command_line.extend(as_pip_install_target(install_target))
    # Copy before updating: the original updated python_env.environ in
    # place, leaking caller-supplied overrides into the shared environment.
    # (If environ already returns a fresh dict per access, the copy is a
    # harmless no-op — TODO confirm against PythonEnv.)
    env_environ = dict(python_env.environ)
    if env is not None:
        env_environ.update(env)
    if logger:
        logger.debug("Invoking PIP: '%s'", _log_cmd_line(*pip_command_line))
    return python_env.execute_command(pip_command_line,
                                      outfile_name=outfile_name,
                                      error_file_name=error_file_name,
                                      env=env_environ,
                                      cwd=cwd,
                                      shell=False,
                                      no_path_search=True)
def execute_tool_on_modules(project, name, command_and_arguments, extend_pythonpath=True):
    """Run an external tool over the project's discovered modules.

    :param extend_pythonpath: when true, expose the source dir via
        PYTHONPATH in the child process environment.
    :return: tuple ``(execution_result, report_file_path)``.
    """
    source_dir = project.expand_path("$dir_source_main_python")
    modules = discover_modules(source_dir)
    command = as_list(command_and_arguments) + modules
    report_file = project.expand_path("$dir_reports/%s" % name)
    env = os.environ
    if extend_pythonpath:
        # Copy before mutating: the original assigned into os.environ
        # directly, leaking PYTHONPATH into the build process itself and
        # every later subprocess.
        env = dict(os.environ)
        env["PYTHONPATH"] = source_dir
    return execute_command(command, report_file, env=env), report_file
def _filter_dependencies(logger, project, dependencies):
    """Decide which dependencies actually need to be (re)installed.

    :return: tuple ``(dependencies_to_install, installed_packages)`` where
        ``installed_packages`` maps lowercase package name to installed
        version.
    """
    dependencies = as_list(dependencies)
    installed_packages = get_package_version(dependencies)
    dependencies_to_install = []
    for dependency in dependencies:
        logger.debug("Inspecting dependency '%s'" % dependency)
        if isinstance(dependency, RequirementsFile):
            # Always add requirement file-based dependencies
            logger.debug(
                "Dependency '%s' is a requirement file and will be included" % dependency)
            dependencies_to_install.append(dependency)
            continue
        elif isinstance(dependency, Dependency):
            if dependency.url:
                # Always add dependency that is url-based
                logger.debug(
                    "Dependency '%s' is URL-based and will be included" % dependency)
                dependencies_to_install.append(dependency)
                continue
        if should_update_package(dependency.version) and not getattr(
                dependency, "version_not_a_spec", False):
            # Always add dependency that has a version specifier indicating desire to always update
            logger.debug(
                "Dependency '%s' has a non-exact version specifier and will be included" % dependency)
            dependencies_to_install.append(dependency)
            continue
        dependency_name = dependency.name.lower()
        if dependency_name not in installed_packages:
            # If dependency not installed at all then install it
            logger.debug(
                "Dependency '%s' is not installed and will be included" % dependency)
            dependencies_to_install.append(dependency)
            continue
        if dependency.version and not version_satisfies_spec(
                dependency.version, installed_packages[dependency_name]):
            # If version is specified and version constraint is not satisfied
            logger.debug(
                "Dependency '%s' is not satisfied by installed dependency version '%s' and will be included"
                % (dependency, installed_packages[dependency_name]))
            dependencies_to_install.append(dependency)
            continue
        logger.debug(
            "Dependency '%s' is already up-to-date and will be skipped" % dependency)
    return dependencies_to_install, installed_packages
def upload(project, logger):
    """Upload distributions to the configured repository (no signing)."""
    repository = project.get_property("distutils_upload_repository")
    repo_opts = ["-r", repository] if repository else []
    logger.info("Uploading project %s-%s%s%s", project.name, project.version,
                (" to repository '%s'" % repository) if repository else "",
                get_dist_version_string(project, " as version %s"))
    command_options = project.get_property("distutils_command_options")
    cmd_lines = []
    for cmd in as_list(project.get_property("distutils_commands")):
        cmd_lines.append(build_command_with_options(cmd, command_options)
                         + ["upload"] + repo_opts)
    execute_distutils(project, logger, cmd_lines, True)
def _prepare_tasks(self, tasks): if not len(tasks): if self.project.default_task: tasks += as_list(self.project.default_task) else: raise PyBuilderException("No default task given.") else: new_tasks = [ task for task in tasks if task[0] not in ("+", "^") or task in ("+", "^") ] append_tasks = [ task[1:] for task in tasks if task[0] == "+" and task != "+" ] remove_tasks = [ task[1:] for task in tasks if task[0] == "^" and task != "^" ] if len(new_tasks): del tasks[:] tasks.extend(new_tasks) tasks.extend(append_tasks) for task in remove_tasks: try: tasks.remove(task) except ValueError: pass else: del tasks[:] if self.project.default_task: tasks += as_list(self.project.default_task) tasks += append_tasks for task in remove_tasks: try: tasks.remove(task) except ValueError: pass return tasks
def build_pip_install_options(index_url=None, extra_index_url=None, upgrade=False, insecure_installs=None,
                              force_reinstall=False, target_dir=None, verbose=False, trusted_host=None):
    """Translate keyword flags into a ``pip install`` option list.

    :return: list of command-line option tokens, possibly empty.
    """
    options = []
    if index_url:
        options.extend(("--index-url", index_url))
    if extra_index_url:
        for extra_url in as_list(extra_index_url):
            options.extend(("--extra-index-url", extra_url))
    if trusted_host:
        for hostname in as_list(trusted_host):
            options.extend(("--trusted-host", hostname))
    if upgrade:
        options.append("--upgrade")
    if verbose:
        options.append("--verbose")
    if force_reinstall:
        options.append("--force-reinstall")
    if target_dir:
        options.extend(("-t", target_dir))
    # Newer pips reject unverified/external packages by default; opt back in.
    if _pip_disallows_insecure_packages_by_default() and insecure_installs:
        for package_name in insecure_installs:
            options.extend(("--allow-unverified", package_name,
                            "--allow-external", package_name))
    return options
def build(self, tasks=None, environments=None):
    """Plan and execute the requested tasks, then run finalizers.

    :return: the execution summary of the plan.
    """
    tasks = as_list(tasks) if tasks else []
    environments = environments if environments else []
    plan = self.create_execution_plan(tasks, environments)
    summary = self.build_execution_plan(tasks, plan)
    # Finalizers run after the plan regardless of which tasks executed.
    self.execution_manager.execute_finalizers(environments,
                                              logger=self.logger,
                                              project=self.project,
                                              reactor=self)
    return summary
def _filter_dependencies(logger, project, dependencies):
    """Decide which dependencies to (re)install and collect constraints.

    :return: tuple ``(dependencies_to_install, installed_packages,
        dependency_constraints)`` where ``installed_packages`` maps
        lowercase name to installed version and constraints are versioned
        Dependency objects.
    """
    dependencies = as_list(dependencies)
    installed_packages = pip_utils.get_package_version(dependencies)
    dependencies_to_install = []
    dependency_constraints = []
    for dependency in dependencies:
        logger.debug("Inspecting dependency '%s'" % dependency)
        if isinstance(dependency, RequirementsFile):
            # Always add requirement file-based dependencies
            logger.debug("Dependency '%s' is a requirement file and will be included" % dependency)
            dependencies_to_install.append(dependency)
            continue
        elif isinstance(dependency, Dependency):
            if dependency.version:
                dependency_constraints.append(dependency)
                logger.debug(
                    "Dependency '%s' is added to the list of installation constraints" % dependency)
            if dependency.url:
                # Always add dependency that is url-based
                logger.debug("Dependency '%s' is URL-based and will be included" % dependency)
                dependencies_to_install.append(dependency)
                continue
        if pip_utils.should_update_package(dependency.version) \
                and not getattr(dependency, "version_not_a_spec", False):
            # Always add dependency that has a version specifier indicating desire to always update
            logger.debug("Dependency '%s' has a non-exact version specifier and will be included" % dependency)
            dependencies_to_install.append(dependency)
            continue
        dependency_name = dependency.name.lower()
        if dependency_name not in installed_packages:
            # If dependency not installed at all then install it
            logger.debug("Dependency '%s' is not installed and will be included" % dependency)
            dependencies_to_install.append(dependency)
            continue
        if dependency.version \
                and not pip_utils.version_satisfies_spec(dependency.version, installed_packages[dependency_name]):
            # If version is specified and version constraint is not satisfied
            logger.debug("Dependency '%s' is not satisfied by installed dependency version '%s' and will be included"
                         % (dependency, installed_packages[dependency_name]))
            dependencies_to_install.append(dependency)
            continue
        logger.debug("Dependency '%s' is already up-to-date and will be skipped" % dependency)
    return dependencies_to_install, installed_packages, dependency_constraints
def create_venvs(logger, project, reactor):
    """Create (or refresh) every configured virtual environment.

    Populates default dependency sets for the "build" and "test" venvs,
    creates each venv named in ``venv_names``, registers it in the
    reactor's python environment registry and installs its dependencies.
    """
    log_dir = project.expand_path("$dir_install_logs")
    logger.debug("Creating log directory '%s'", log_dir)
    mkdir(log_dir)
    venv_dependencies_map = project.get_property("venv_dependencies")
    # Defaults: "build" gets build + runtime deps, "test" runtime deps only.
    if "build" not in venv_dependencies_map:
        venv_dependencies_map["build"] = as_list(
            project.build_dependencies) + as_list(project.dependencies)
    if "test" not in venv_dependencies_map:
        venv_dependencies_map["test"] = as_list(project.dependencies)
    per = reactor.python_env_registry
    system_env = per["system"]
    # PyPy venvs are always rebuilt from scratch.
    clear = project.get_property("refresh_venvs") or system_env.is_pypy
    for venv_name in project.get_property("venv_names"):
        venv_dir = project.expand_path("$dir_target/venv", venv_name,
                                       system_env.versioned_dir_name)
        logger.info("Creating target '%s' VEnv in '%s'%s",
                    venv_name, venv_dir, " (refreshing)" if clear else "")
        per[venv_name] = current_env = PythonEnv(
            venv_dir, reactor).create_venv(with_pip=True,
                                           symlinks=system_env.venv_symlinks,
                                           clear=clear,
                                           offline=project.offline)
        venv_dependencies = venv_dependencies_map.get(venv_name)
        if venv_dependencies:
            install_log_path = project.expand_path(
                "$dir_install_logs", "venv_%s_install_logs" % venv_name)
            constraints_file_name = project.get_property(
                "install_dependencies_constraints")
            current_env.install_dependencies(
                venv_dependencies,
                install_log_path=install_log_path,
                local_mapping={},
                constraints_file_name=constraints_file_name)
def get_package_version(mixed, logger=None, entry_paths=None):
    """Look up installed versions for the given dependency specs.

    Requirement files and URL-based dependencies are skipped (their
    version cannot be queried by name).

    :return: dict mapping lowercase package name to installed version.
    """
    def _query_name(pkg):
        # Only name-addressable packages can be queried.
        if isinstance(pkg, RequirementsFile):
            return None
        if isinstance(pkg, Dependency):
            return None if pkg.url else pkg.name
        return pkg

    entry_paths = as_list(entry_paths) if entry_paths is not None else None
    package_query = []
    for candidate in as_list(mixed):
        normalized = _query_name(candidate)
        if normalized:
            package_query.append(normalized)
    ws = WorkingSet(entry_paths)
    results = list(search_packages_info(package_query, ws))
    return {info['name'].lower(): info['version'] for info in results}
def get_sphinx_build_command(project, logger, builder):
    """Assemble the sphinx-build invocation for *builder* from project properties."""
    cmd = ["sphinx", "-b", builder]

    # Debug-level logging wins over the plain "verbose" property.
    wants_verbose = project.get_property("verbose")
    if logger.threshold == logger.DEBUG:
        cmd.append("-vvvv")
    elif wants_verbose:
        cmd.append("-v")

    cmd.extend(as_list(project.get_property("sphinx_build_extra_args")))
    cmd.append(project.expand_path("$sphinx_config_path"))

    # With several builders configured (or when explicitly requested),
    # each builder writes to its own subdirectory of the output dir.
    multiple_builders = len(as_list(project.get_property("sphinx_doc_builder"))) > 1
    if multiple_builders or project.get_property("sphinx_output_per_builder"):
        cmd.append(project.expand_path("$sphinx_output_dir", builder))
    else:
        cmd.append(project.expand_path("$sphinx_output_dir"))
    return cmd
def get_package_version(mixed, logger=None):
    """Return ``{lower-cased name: version}`` for the given dependency spec(s).

    Requirements files and URL-based dependencies are skipped since they
    cannot be looked up by package name.
    """
    def _name_of(candidate):
        if isinstance(candidate, RequirementsFile):
            return None
        if isinstance(candidate, Dependency):
            return None if candidate.url else candidate.name
        return candidate

    names = [name for name in map(_name_of, as_list(mixed)) if name]
    pip_working_set_init()
    return {info["name"].lower(): info["version"]
            for info in search_packages_info(names)}
def as_pip_install_target(mixed):
    """Translate the given dependency spec(s) into pip install arguments."""
    arguments = []
    for target in as_list(mixed):
        if isinstance(target, RequirementsFile):
            # Requirements files are passed via pip's -r option.
            arguments += ["-r", target.name]
        elif isinstance(target, Dependency):
            if target.url:
                arguments.append(target.url)
            else:
                # Named dependency with its version constraint appended.
                arguments.append("%s%s" % (target.name,
                                           build_dependency_version_string(target)))
        else:
            arguments.append(str(target))
    return arguments
def get_package_version(mixed, logger=None):
    """Return ``{lower-cased name: version}`` for every queryable dependency.

    Entries that are requirements files or URL-based dependencies are
    ignored — they have no package name that can be looked up.
    """
    names = []
    for candidate in as_list(mixed):
        if isinstance(candidate, RequirementsFile):
            continue  # no single package name to query
        if isinstance(candidate, Dependency):
            name = None if candidate.url else candidate.name
        else:
            name = candidate
        if name:
            names.append(name)

    pip_common.pip_working_set_init()
    return {info["name"].lower(): info["version"]
            for info in pip_common.search_packages_info(names)}
def get_packages_info(entry_paths=None):
    """Collect details of the distributions installed on *entry_paths*.

    Returns a dict mapping each distribution's canonical name to a
    ``_PackageInfo`` carrying its name, version, location and the project
    names of its direct requirements.
    """
    search_paths = as_list(entry_paths) if entry_paths is not None else None
    installed = {}
    for dist in WorkingSet(search_paths):
        info = _PackageInfo(canonicalize_name(dist.project_name),
                            dist.version,
                            dist.location,
                            [requirement.project_name for requirement in dist.requires()])
        installed[info.name] = info
    return installed
def as_pip_install_target(mixed):
    """Expand the given dependency spec(s) into pip install arguments."""
    def _target_args(target):
        # Requirements files go through -r; URL deps are passed verbatim;
        # named deps get their version constraint appended.
        if isinstance(target, RequirementsFile):
            return ["-r", target.name]
        if isinstance(target, Dependency):
            if target.url:
                return [target.url]
            return ["{0}{1}".format(target.name,
                                    build_dependency_version_string(target))]
        return [str(target)]

    arguments = []
    for target in as_list(mixed):
        arguments.extend(_target_args(target))
    return arguments
def install_plugin(self, reactor, plugin_defs):
    """Install (or update) the given plugin definition(s) into the PyBuilder venv.

    Installation failures are logged as warnings instead of raised, so a
    broken plugin does not abort the build outright.
    """
    batch = []
    for definition in as_list(plugin_defs):
        self._check_plugin_def_type(definition)
        display_name = str(definition)
        self.logger.info("Installing or updating plugin %r", display_name)
        batch.append(definition.dependency)
    try:
        reactor.pybuilder_venv.install_dependencies(batch, package_type="plugin")
    except BuildFailedException as e:
        self.logger.warn(e.message)
def execute_tool_on_source_files(project, name, command_and_arguments,
                                 logger=None, include_test_sources=False):
    """Run an external tool over the project's (and optionally test) source files.

    The tool's output is captured in ``$dir_reports/<name>``; when the
    ``<name>_verbose_output`` property is set and a logger is given, the
    captured report is also logged.

    Returns a ``(exit_code, report_file)`` tuple.
    """
    files = discover_affected_files(include_test_sources, project)
    # list(files) instead of a copy-comprehension; also accepts generators.
    command = as_list(command_and_arguments) + list(files)

    report_file = project.expand_path("$dir_reports/{0}".format(name))
    execution_result = execute_command(command, report_file), report_file

    # Only read the report back when it will actually be logged.
    if project.get_property(name + "_verbose_output") and logger:
        report_lines = read_file(report_file)
        log_report(logger, name, report_lines)

    return execution_result
def sphinx_generate(project, logger):
    """Runs sphinx-build against rst sources for the given project.

    Recreates the PyBuilder sphinx runtime directory from scratch, writes
    the generated runtime config and apidocs into it, then invokes one
    sphinx-build per configured builder.
    """
    sphinx_pyb_dir = project.expand_path(*SPHINX_PYB_RUNTIME_DIR)
    if exists(sphinx_pyb_dir):
        # Start from a clean slate so stale apidoc/config files never leak
        # into the current build.
        logger.debug("Removing %s", sphinx_pyb_dir)
        rmtree(sphinx_pyb_dir)
    logger.debug("Creating %s", sphinx_pyb_dir)
    mkdir(sphinx_pyb_dir)

    generate_sphinx_pyb_runtime_config(project, logger)
    generate_sphinx_apidocs(project, logger)

    # One sphinx-build run per configured builder (html, pdf, ...).
    builders = as_list(project.get_property("sphinx_doc_builder"))
    for builder in builders:
        build_command = get_sphinx_build_command(project, logger, builder)
        run_sphinx_build(build_command, "sphinx_%s" % builder, logger, project, builder=builder)
def create_execution_plan(self, tasks, environments):
    """Resolve the requested *tasks* into a concrete execution plan.

    Runs the registered initializers for the given environments, logs the
    project properties and validates the project first. When no tasks were
    requested, falls back to the project's default task (mutating *tasks*
    in place) or raises ``PyBuilderException`` if there is none.
    """
    Reactor._current_instance = self

    if environments:
        self.logger.info("Activated environments: %s", ", ".join(environments))

    self.execution_manager.execute_initializers(environments, logger=self.logger, project=self.project)
    self.log_project_properties()
    self.validate_project()

    if not len(tasks):
        if self.project.default_task:
            # += mutates the caller-supplied list so the caller also sees
            # the defaulted tasks.
            tasks += as_list(self.project.default_task)
        else:
            raise PyBuilderException("No default task given.")

    return self.execution_manager.build_execution_plan(tasks)
def pip_install(install_targets, index_url=None, extra_index_url=None, upgrade=False,
                insecure_installs=None, force_reinstall=False, target_dir=None,
                verbose=False, logger=None, outfile_name=None, error_file_name=None,
                env=None, cwd=None):
    """Invoke ``pip install`` for the given target(s) and return the execution result."""
    command = [PIP_EXECUTABLE, "install"]
    command.extend(build_pip_install_options(index_url,
                                             extra_index_url,
                                             upgrade,
                                             insecure_installs,
                                             force_reinstall,
                                             target_dir,
                                             verbose))
    for install_target in as_list(install_targets):
        command.extend(as_pip_install_target(install_target))

    if logger:
        logger.debug("Invoking pip: %s", command)

    # shell=False: the command is passed as an argv list, never through a shell.
    return execute_command(command,
                           outfile_name=outfile_name,
                           env=env,
                           cwd=cwd,
                           error_file_name=error_file_name,
                           shell=False)
def get_sphinx_apidoc_command(project):
    """Build the sphinx-apidoc command line from project properties."""
    try:
        import sphinx
        # --implicit-namespaces (PEP 420 namespace packages) requires both
        # Python >= 3.3 and sphinx >= 1.5.
        implicit_namespaces = (sys.version_info[:2] >= (3, 3)
                               and sphinx.version_info[:2] >= (1, 5))
    except ImportError:
        implicit_namespaces = False

    command = ["sphinx.apidoc", "-H", project.get_property("sphinx_project_name")]
    if implicit_namespaces:
        command.append("--implicit-namespaces")
    command += as_list(project.get_property("sphinx_apidoc_extra_args"))
    command += ["-o",
                project.expand_path(*SPHINX_PYB_RUNTIME_APIDOC_DIR),
                project.expand_path("$dir_source_main_python")]
    return command
def upload(project, logger):
    """Upload the built distributions via distutils' "upload" command.

    Repository and signing behavior is controlled by the
    ``distutils_upload_*`` project properties; each configured distutils
    command gets its own "... upload" invocation.
    """
    repository = project.get_property("distutils_upload_repository")
    repository_args = ["-r", repository] if repository else []

    upload_sign = project.get_property("distutils_upload_sign")
    upload_sign_args = []
    if upload_sign:
        upload_sign_args = ["--sign"]
        sign_identity = project.get_property("distutils_upload_sign_identity")
        if sign_identity:
            upload_sign_args += ["--identity", sign_identity]

    # NOTE: sign_identity is only bound when upload_sign is truthy, which is
    # exactly when the conditional below evaluates the signing suffix.
    logger.info("Uploading project %s-%s%s%s%s",
                project.name,
                project.version,
                (" to repository '{0!s}'".format(repository)) if repository else "",
                get_dist_version_string(project, " as version %s"),
                (" signing{0!s}".format(
                    " with {0!s}".format(sign_identity) if sign_identity else ""
                )) if upload_sign else "")

    upload_cmd_line = []
    for cmd in as_list(project.get_property("distutils_commands")):
        cmd_line = build_command_with_options(cmd, project.get_property("distutils_command_options"))
        upload_cmd_line.append(cmd_line + ["upload"] + repository_args + upload_sign_args)
    execute_distutils(project, logger, upload_cmd_line, True)