def _handle_dirty(self, part, step, dirty_report):
    """Clean a dirty ``step`` of ``part`` so it can run again, or abort.

    Steps not listed in _STEPS_TO_AUTOMATICALLY_CLEAN_IF_DIRTY are never
    cleaned automatically; instead a RuntimeError explains what appears to
    have changed (taken from ``dirty_report``) and how to clean manually.

    :raises RuntimeError: if the step must be cleaned manually, or if
        already-built dependents would be invalidated by cleaning the
        stage step.
    """
    if step not in _STEPS_TO_AUTOMATICALLY_CLEAN_IF_DIRTY:
        message_components = [
            'The {!r} step of {!r} is out of date:\n'.format(
                step, part.name)
        ]
        if dirty_report.dirty_properties:
            humanized_properties = formatting_utils.humanize_list(
                dirty_report.dirty_properties, 'and')
            pluralized_connection = formatting_utils.pluralize(
                dirty_report.dirty_properties, 'property appears',
                'properties appear')
            message_components.append(
                'The {} part {} to have changed.\n'.format(
                    humanized_properties, pluralized_connection))
        if dirty_report.dirty_project_options:
            humanized_options = formatting_utils.humanize_list(
                dirty_report.dirty_project_options, 'and')
            pluralized_connection = formatting_utils.pluralize(
                dirty_report.dirty_project_options, 'option appears',
                'options appear')
            message_components.append(
                'The {} project {} to have changed.\n'.format(
                    humanized_options, pluralized_connection))
        message_components.append(
            "In order to continue, please clean that part's {0!r} step "
            "by running: snapcraft clean {1} -s {0}\n".format(
                step, part.name))
        raise RuntimeError(''.join(message_components))

    staged_state = self.config.get_project_state('stage')
    primed_state = self.config.get_project_state('prime')

    # We need to clean this step, but if it involves cleaning the stage
    # step and it has dependents that have been built, we need to ask for
    # them to first be cleaned (at least back to the build step).
    index = common.COMMAND_ORDER.index(step)
    dependents = self.parts_config.get_dependents(part.name)
    if (index <= common.COMMAND_ORDER.index('stage') and
            not part.is_clean('stage') and dependents):
        for dependent in self.config.all_parts:
            if (dependent.name in dependents and
                    not dependent.is_clean('build')):
                humanized_parts = formatting_utils.humanize_list(
                    dependents, 'and')
                pluralized_depends = formatting_utils.pluralize(
                    dependents, "depends", "depend")
                raise RuntimeError(
                    'The {0!r} step for {1!r} needs to be run again, but '
                    '{2} {3} upon it. Please clean the build '
                    'step of {2} first.'.format(
                        step, part.name, humanized_parts,
                        pluralized_depends))

    part.clean(staged_state, primed_state, step, '(out of date)')
def _handle_dirty(self, part, step, dirty_report):
    """Clean a dirty ``step`` of ``part`` so it can run again, or abort.

    Steps not listed in _STEPS_TO_AUTOMATICALLY_CLEAN_IF_DIRTY are never
    cleaned automatically; instead a RuntimeError explains what appears to
    have changed (taken from ``dirty_report``) and how to clean manually.

    :raises RuntimeError: if the step must be cleaned manually, or if
        already-built dependents would be invalidated by cleaning the
        stage step.
    """
    if step not in _STEPS_TO_AUTOMATICALLY_CLEAN_IF_DIRTY:
        message_components = [
            'The {!r} step of {!r} is out of date:\n\n'.format(
                step, part.name)]
        if dirty_report.dirty_properties:
            humanized_properties = formatting_utils.humanize_list(
                dirty_report.dirty_properties, 'and')
            pluralized_connection = formatting_utils.pluralize(
                dirty_report.dirty_properties, 'property appears',
                'properties appear')
            message_components.append(
                'The {} part {} to have changed.\n'.format(
                    humanized_properties, pluralized_connection))
        if dirty_report.dirty_project_options:
            humanized_options = formatting_utils.humanize_list(
                dirty_report.dirty_project_options, 'and')
            pluralized_connection = formatting_utils.pluralize(
                dirty_report.dirty_project_options, 'option appears',
                'options appear')
            message_components.append(
                'The {} project {} to have changed.\n'.format(
                    humanized_options, pluralized_connection))
        message_components.append(
            "\nIn order to continue, please clean that part's {0!r} step "
            "by running: snapcraft clean {1} -s {0}\n".format(
                step, part.name))
        raise RuntimeError(''.join(message_components))

    staged_state = self.config.get_project_state('stage')
    primed_state = self.config.get_project_state('prime')

    # We need to clean this step, but if it involves cleaning the stage
    # step and it has dependents that have been built, we need to ask for
    # them to first be cleaned (at least back to the build step).
    index = common.COMMAND_ORDER.index(step)
    dependents = self.parts_config.get_dependents(part.name)
    if (index <= common.COMMAND_ORDER.index('stage') and
            not part.is_clean('stage') and dependents):
        for dependent in self.config.all_parts:
            if (dependent.name in dependents and
                    not dependent.is_clean('build')):
                humanized_parts = formatting_utils.humanize_list(
                    dependents, 'and')
                pluralized_depends = formatting_utils.pluralize(
                    dependents, "depends", "depend")
                raise RuntimeError(
                    'The {0!r} step for {1!r} needs to be run again, but '
                    '{2} {3} upon it. Please clean the build '
                    'step of {2} first.'.format(
                        step, part.name, humanized_parts,
                        pluralized_depends))

    part.clean(staged_state, primed_state, step, '(out of date)')
def __init__(
    self, *, step, part, dirty_report=None, outdated_report=None, dependents=None
):
    """Assemble the combined report for a dirty/outdated step error.

    :param step: the step that is dirty or outdated.
    :param part: name of the part whose step is affected.
    :param dirty_report: optional report of why the step is dirty.
    :param outdated_report: optional report of why the step is outdated.
    :param dependents: optional collection of part names depending on
        ``part``; if given they are included in the clean advice.
    """
    messages = []
    if dirty_report:
        messages.append(dirty_report.get_report())
    if outdated_report:
        messages.append(outdated_report.get_report())
    if dependents:
        humanized_dependents = formatting_utils.humanize_list(dependents, "and")
        pluralized_dependents = formatting_utils.pluralize(
            dependents, "depends", "depend"
        )
        messages.append(
            "The {0!r} step for {1!r} needs to be run again, "
            "but {2} {3} on it.\n".format(
                step.name, part, humanized_dependents, pluralized_dependents
            )
        )
        # Dependents must be cleaned too, so report them all (sorted).
        parts_names = ["{!s}".format(d) for d in sorted(dependents)]
    else:
        parts_names = [part]
    super().__init__(
        step=step,
        part=part,
        report="".join(messages),
        parts_names=" ".join(parts_names),
    )
def push(snap_file, release):
    """Push <snap-file> to the store.

    By passing --release with a comma separated list of channels the snap
    would be released to the selected channels if the store review passes
    for this <snap-file>.

    This operation will block until the store finishes processing this
    <snap-file>.

    If --release is used, the channel map will be displayed after the
    operation takes place.

    \b
    Examples:
        snapcraft push my-snap_0.1_amd64.snap
        snapcraft push my-snap_0.2_amd64.snap --release edge
        snapcraft push my-snap_0.3_amd64.snap --release candidate,beta
    """
    click.echo('Pushing {}'.format(os.path.basename(snap_file)))
    channel_list = []
    if release:
        channel_list = release.split(',')
        # Tell the user up front which channels a successful push will
        # attempt to release to.
        click.echo('After pushing, an attempt will be made to release to {}'
                   ''.format(
                       formatting_utils.humanize_list(channel_list, 'and')))

    snapcraft.push(snap_file, channel_list)
def push(snap_file, release):
    """Push <snap-file> to the store.

    By passing --release with a comma separated list of channels the snap
    would be released to the selected channels if the store review passes
    for this <snap-file>.

    This operation will block until the store finishes processing this
    <snap-file>.

    If --release is used, the channel map will be displayed after the
    operation takes place.

    \b
    Examples:
        snapcraft push my-snap_0.1_amd64.snap
        snapcraft push my-snap_0.2_amd64.snap --release edge
        snapcraft push my-snap_0.3_amd64.snap --release candidate,beta
    """
    click.echo("Pushing {}".format(os.path.basename(snap_file)))
    channel_list = []
    if release:
        channel_list = release.split(",")
        # Tell the user up front which channels a successful push will
        # attempt to release to.
        click.echo(
            "After pushing, an attempt will be made to release to {}"
            "".format(formatting_utils.humanize_list(channel_list, "and"))
        )

    snapcraft.push(snap_file, channel_list)
def __init__(self, *, force_provider: str = None) -> None:
    """Instantiate a BuildEnvironmentConfig.

    Resolution order: an explicit ``force_provider``, then "host" when
    running inside a docker instance, then the SNAPCRAFT_BUILD_ENVIRONMENT
    environment variable (defaulting to "multipass").

    :param str force_provider: ignore the hints from the environment and
        use the specified provider.
    :raises errors.SnapcraftEnvironmentError: if the resolved provider is
        not one of the supported ones.
    """
    if force_provider:
        build_provider = force_provider
    elif common.is_docker_instance():
        # Inside docker we build directly on the (container) host.
        build_provider = "host"
    else:
        build_provider = os.environ.get("SNAPCRAFT_BUILD_ENVIRONMENT", "multipass")

    valid_providers = ["host", "multipass", "managed-host", "lxd"]
    if build_provider not in valid_providers:
        raise errors.SnapcraftEnvironmentError(
            "The snapcraft build environment must be one of: {}.".format(
                humanize_list(items=valid_providers, conjunction="or")
            )
        )

    # Convenience flags for the chosen provider.
    self.provider = build_provider
    self.is_host = build_provider == "host"
    self.is_multipass = build_provider == "multipass"
    # Bug fix: this line previously re-assigned ``is_multipass`` (clobbering
    # the flag above) and left ``is_lxd`` undefined.
    self.is_lxd = build_provider == "lxd"
    self.is_managed_host = build_provider == "managed-host"
def _rerun_step(self, *, step: steps.Step, part, progress, hint='',
                prerequisite_step: steps.Step=steps.STAGE):
    """Clean ``step`` of ``part`` and run it again from scratch.

    Parts that depend on the cleaned steps are marked dirty so they will
    re-run as well; a warning names them at the end.

    :param step: the step to clean and re-run.
    :param part: the part whose step is re-run.
    :param progress: progress message passed through to the step runner.
    :param hint: extra hint text passed through to the step runner.
    :param prerequisite_step: the step dependencies must have reached.
    """
    staged_state = self.config.get_project_state(steps.STAGE)
    primed_state = self.config.get_project_state(steps.PRIME)

    # We need to clean this step, but if it involves cleaning any steps
    # upon which other parts depend, those parts need to be marked as dirty
    # so they can run again, taking advantage of the new dependency.
    dirty_parts = mark_dependents_dirty(part.name, step, self.config)

    # First clean the step, then run it again
    part.clean(staged_state, primed_state, step)

    # Forget any bookkeeping for this step and all later steps so they are
    # treated as never having run.
    for current_step in [step] + step.next_steps():
        self._dirty_reports[part.name][current_step] = None
        self._steps_run[part.name].discard(current_step)

    self._run_step(
        step=step, part=part, progress=progress, hint=hint,
        prerequisite_step=prerequisite_step)

    if dirty_parts:
        logger.warning(
            'The following {} now out of date: {}'.format(
                formatting_utils.pluralize(
                    dirty_parts, 'part is', 'parts are'),
                formatting_utils.humanize_list(dirty_parts, 'and')))
def push(snap_file, release):
    """Push <snap-file> to the store.

    By passing --release with a comma separated list of channels the snap
    would be released to the selected channels if the store review passes
    for this <snap-file>.

    This operation will block until the store finishes processing this
    <snap-file>.

    If --release is used, the channel map will be displayed after the
    operation takes place.

    \b
    Examples:
        snapcraft push my-snap_0.1_amd64.snap
        snapcraft push my-snap_0.2_amd64.snap --release edge
        snapcraft push my-snap_0.3_amd64.snap --release candidate,beta
    """
    click.echo("Preparing to push {!r}.".format(os.path.basename(snap_file)))
    if release:
        channel_list = release.split(",")
        click.echo(
            "After pushing, the resulting snap revision will be released to "
            "{} when it passes the Snap Store review."
            "".format(formatting_utils.humanize_list(channel_list, "and")))
    else:
        # No channels requested: push only, release nothing.
        channel_list = None

    # Run the local review checks before uploading anything.
    review_snap(snap_file=snap_file)
    snapcraft.push(snap_file, channel_list)
def _handle_dirty(self, part, step):
    """Automatically clean a dirty ``step`` of ``part``, or abort.

    :raises RuntimeError: if the step must be cleaned manually, or if
        built dependents would be invalidated by cleaning it.
    """
    if step not in _STEPS_TO_AUTOMATICALLY_CLEAN_IF_DIRTY:
        raise RuntimeError(
            'The {0!r} step of {1!r} is out of date. Please clean that '
            "part's {0!r} step in order to rebuild".format(
                step, part.name))

    staged_state = self.config.get_project_state('stage')
    primed_state = self.config.get_project_state('prime')

    # We need to clean this step, but if it involves cleaning the stage
    # step and it has dependents that have been built, we need to ask for
    # them to first be cleaned (at least back to the build step).
    index = common.COMMAND_ORDER.index(step)
    dependents = self.parts_config.get_dependents(part.name)
    if (index <= common.COMMAND_ORDER.index('stage') and
            not part.is_clean('stage') and dependents):
        for dependent in self.config.all_parts:
            if (dependent.name in dependents and
                    not dependent.is_clean('build')):
                humanized_parts = formatting_utils.humanize_list(
                    dependents, 'and')
                raise RuntimeError(
                    'The {0!r} step for {1!r} needs to be run again, but '
                    '{2} depend{3} upon it. Please clean the build '
                    # Append 's' only for a single dependent: "X depends".
                    'step of {2} first.'.format(
                        step, part.name, humanized_parts,
                        's' if len(dependents) == 1 else ''))

    part.clean(staged_state, primed_state, step, '(out of date)')
def _handle_dirty(self, part, step):
    """Automatically clean a dirty ``step`` of ``part``, or abort.

    :raises RuntimeError: if the step must be cleaned manually, or if
        built dependents would be invalidated by cleaning it.
    """
    if step not in _STEPS_TO_AUTOMATICALLY_CLEAN_IF_DIRTY:
        raise RuntimeError(
            'The {0!r} step of {1!r} is out of date. Please clean that '
            "part's {0!r} step in order to rebuild".format(
                step, part.name))

    staged_state = self.config.get_project_state('stage')
    primed_state = self.config.get_project_state('prime')

    # We need to clean this step, but if it involves cleaning the stage
    # step and it has dependents that have been built, we need to ask for
    # them to first be cleaned (at least back to the build step).
    index = common.COMMAND_ORDER.index(step)
    dependents = self.parts_config.get_dependents(part.name)
    if (index <= common.COMMAND_ORDER.index('stage') and
            not part.is_clean('stage') and dependents):
        for dependent in self.config.all_parts:
            if (dependent.name in dependents and
                    not dependent.is_clean('build')):
                humanized_parts = formatting_utils.humanize_list(
                    dependents, 'and')
                raise RuntimeError(
                    'The {0!r} step for {1!r} needs to be run again, but '
                    '{2} depend{3} upon it. Please clean the build '
                    # Append 's' only for a single dependent: "X depends".
                    'step of {2} first.'.format(
                        step, part.name, humanized_parts,
                        's' if len(dependents) == 1 else ''))

    part.clean(staged_state, primed_state, step, '(out of date)')
def __init__(self, *, step, part, dirty_report=None, outdated_report=None,
             dependents=None):
    """Assemble the combined report for a dirty/outdated step error.

    :param step: the step that is dirty or outdated.
    :param part: name of the part whose step is affected.
    :param dirty_report: optional report of why the step is dirty.
    :param outdated_report: optional report of why the step is outdated.
    :param dependents: optional collection of part names depending on
        ``part``; if given they are included in the clean advice.
    """
    messages = []
    if dirty_report:
        messages.append(dirty_report.get_report())
    if outdated_report:
        messages.append(outdated_report.get_report())
    if dependents:
        humanized_dependents = formatting_utils.humanize_list(
            dependents, "and")
        pluralized_dependents = formatting_utils.pluralize(
            dependents, "depends", "depend")
        messages.append("The {0!r} step for {1!r} needs to be run again, "
                        "but {2} {3} on it.\n".format(
                            step.name, part, humanized_dependents,
                            pluralized_dependents))
        # Dependents must be cleaned too, so report them all (sorted).
        parts_names = ["{!s}".format(d) for d in sorted(dependents)]
    else:
        parts_names = [part]
    super().__init__(
        step=step,
        part=part,
        report="".join(messages),
        parts_names=" ".join(parts_names),
    )
def __init__(self, step: steps.Step, other_step: steps.Step,
             keys: List[str]) -> None:
    """Record the conflicting keys and pass a readable form to the base.

    :param step: the step in conflict.
    :param other_step: the step it conflicts with.
    :param keys: the keys that differ between the two steps.
    """
    self.keys = keys
    readable_keys = formatting_utils.humanize_list(keys, 'and')
    super().__init__(
        step=step, other_step=other_step, humanized_keys=readable_keys)
def get_report(self) -> str:
    """Get verbose report.

    :return: Report why the part is dirty.
    :rtype: str
    """
    messages = []  # type: List[str]

    if self.dirty_properties:
        humanized_properties = formatting_utils.humanize_list(
            self.dirty_properties, "and"
        )
        pluralized_connection = formatting_utils.pluralize(
            self.dirty_properties, "property appears", "properties appear"
        )
        messages.append(
            "The {} part {} to have changed.\n".format(
                humanized_properties, pluralized_connection
            )
        )

    if self.dirty_project_options:
        humanized_options = formatting_utils.humanize_list(
            self.dirty_project_options, "and"
        )
        pluralized_connection = formatting_utils.pluralize(
            self.dirty_project_options, "option appears", "options appear"
        )
        messages.append(
            "The {} project {} to have changed.\n".format(
                humanized_options, pluralized_connection
            )
        )

    if self.changed_dependencies:
        # Each changed dependency is referenced by its part name.
        dependencies = [d.part_name for d in self.changed_dependencies]
        messages.append(
            "{} changed: {}\n".format(
                formatting_utils.pluralize(
                    dependencies, "A dependency has", "Some dependencies have"
                ),
                formatting_utils.humanize_list(dependencies, "and"),
            )
        )

    return "".join(messages)
def __init__(self, *, base: str, version: str, valid_versions: Sequence[str]) -> None:
    """Store the unsupported base/version pair for the base error message.

    :param base: the base in use.
    :param version: the version that is not supported.
    :param valid_versions: versions that would be accepted instead.
    """
    humanized_versions = formatting_utils.humanize_list(
        valid_versions, conjunction="or"
    )
    super().__init__(base=base, version=version, valid_versions=humanized_versions)
def _handle_macaroon_permission_required(response_json):
    """Explain a macaroon channel-permission error, if that is what it is.

    :param response_json: decoded error payload from the store.
    :return: a human-readable message, or the empty string when the payload
        is not a channel-permission error.
    """
    has_context = "permission" in response_json and "channels" in response_json
    if has_context and response_json["permission"] == "channel":
        blocked_channels = response_json["channels"]
        return "Lacking permission to release to channel(s) {}".format(
            formatting_utils.humanize_list(blocked_channels, "and"))
    return ""
def _handle_macaroon_permission_required(response_json):
    """Explain a macaroon channel-permission error, if that is what it is.

    :param response_json: decoded error payload from the store.
    :return: a human-readable message, or the empty string when the payload
        is not a channel-permission error.
    """
    if 'permission' not in response_json or 'channels' not in response_json:
        return ''
    if response_json['permission'] != 'channel':
        return ''
    blocked_channels = formatting_utils.humanize_list(
        response_json['channels'], 'and')
    return 'Lacking permission to release to channel(s) {}'.format(
        blocked_channels)
def test_two_items(self):
    """A two-item list is sorted before being joined with the conjunction."""
    output = formatting_utils.humanize_list(["foo", "bar"], "and")
    self.assertThat(
        output,
        Equals("'bar' and 'foo'"),
        "Expected 'bar' before 'foo' due to sorting",
    )
def __init__(self, *, step, part, dirty_properties=None,
             dirty_project_options=None, changed_dependencies=None,
             dependents=None):
    """Compose the human-readable report for a dirty step error.

    :param step: the step that is dirty.
    :param part: name of the part whose step is dirty.
    :param dirty_properties: part properties that appear to have changed.
    :param dirty_project_options: project options that appear changed.
    :param changed_dependencies: dicts (with a 'name' key) describing
        dependencies that changed.
    :param dependents: part names that depend on ``part``.
    """
    messages = []
    if dirty_properties:
        humanized_properties = formatting_utils.humanize_list(
            dirty_properties, 'and')
        pluralized_connection = formatting_utils.pluralize(
            dirty_properties, 'property appears', 'properties appear')
        messages.append('The {} part {} to have changed.\n'.format(
            humanized_properties, pluralized_connection))

    if dirty_project_options:
        humanized_options = formatting_utils.humanize_list(
            dirty_project_options, 'and')
        pluralized_connection = formatting_utils.pluralize(
            dirty_project_options, 'option appears', 'options appear')
        messages.append('The {} project {} to have changed.\n'.format(
            humanized_options, pluralized_connection))

    if changed_dependencies:
        dependencies = [d['name'] for d in changed_dependencies]
        messages.append('{} changed: {}\n'.format(
            formatting_utils.pluralize(dependencies, 'A dependency has',
                                       'Some dependencies have'),
            formatting_utils.humanize_list(dependencies, 'and')))

    if dependents:
        humanized_dependents = formatting_utils.humanize_list(
            dependents, 'and')
        pluralized_dependents = formatting_utils.pluralize(
            dependents, "depends", "depend")
        messages.append('The {0!r} step for {1!r} needs to be run again, '
                        'but {2} {3} on it.\n'.format(
                            step.name, part, humanized_dependents,
                            pluralized_dependents))
        # Dependents must be cleaned too, so report them all (sorted).
        parts_names = ['{!s}'.format(d) for d in sorted(dependents)]
    else:
        parts_names = [part]

    super().__init__(step=step, part=part, report=''.join(messages),
                     parts_names=' '.join(parts_names))
def __init__(
    self, step: steps.Step, other_step: steps.Step, keys: List[str]
) -> None:
    """Record the conflicting keys and pass a readable form to the base.

    :param step: the step in conflict.
    :param other_step: the step it conflicts with.
    :param keys: the keys that differ between the two steps.
    """
    self.keys = keys
    readable_keys = formatting_utils.humanize_list(keys, "and")
    super().__init__(
        step=step,
        other_step=other_step,
        humanized_keys=readable_keys,
    )
def _handle_macaroon_permission_required(response_json):
    """Explain a macaroon channel-permission error, if that is what it is.

    :param response_json: decoded error payload from the store.
    :return: a human-readable message, or the empty string when the payload
        is not a channel-permission error.
    """
    relevant = "permission" in response_json and "channels" in response_json
    if not relevant or response_json["permission"] != "channel":
        return ""
    blocked = formatting_utils.humanize_list(response_json["channels"], "and")
    return "Lacking permission to release to channel(s) {}".format(blocked)
def __init__(
    self,
    *,
    snap_name: str,
    channel: channels.Channel,
    channel_outliers: List[status.SnapStatusChannelDetails],
) -> None:
    """Report the architectures whose channel status diverges.

    :param snap_name: name of the snap.
    :param channel: the channel being inspected.
    :param channel_outliers: per-architecture channel details that differ.
    """
    outlier_arches = [details.arch for details in channel_outliers]
    arches = formatting_utils.humanize_list(outlier_arches, "and")
    super().__init__(snap_name=snap_name, channel=channel, arches=arches)
def _monitor_build(lp: LaunchpadClient) -> None:
    """Watch a remote Launchpad build until it completes, then clean up.

    :param lp: client for the Launchpad instance running the build.
    """
    targets = humanize_list(lp.architectures, "and", "{}")
    echo.info(
        "Building snap package for {}. This may take some time to finish.".format(
            targets
        )
    )

    lp.monitor_build()

    echo.info("Build complete.")
    lp.cleanup()
def _determine_cause(error): messages = [] # error.validator_value may contain a custom validation error message. # If so, use it instead of the garbage message jsonschema gives us. with contextlib.suppress(TypeError, KeyError): messages.append( error.validator_value["validation-failure"].format(error)) # The schema itself may have a custom validation error message. If so, # use it as well. with contextlib.suppress(AttributeError, TypeError, KeyError): key = error if (error.schema.get("type") == "object" and error.validator == "additionalProperties"): key = list(error.instance.keys())[0] messages.append(error.schema["validation-failure"].format(key)) # anyOf failures might have usable context... try to improve them a bit if error.validator == "anyOf": contextual_messages = OrderedDict() # type: Dict[str, str] for contextual_error in error.context: key = contextual_error.schema_path.popleft() if key not in contextual_messages: contextual_messages[key] = [] message = contextual_error.message if message: # Sure it starts lower-case (not all messages do) contextual_messages[key].append(message[0].lower() + message[1:]) oneOf_messages = [] # type: List[str] for key, value in contextual_messages.items(): oneOf_messages.append( formatting_utils.humanize_list(value, "and", "{}")) messages.append( formatting_utils.humanize_list(oneOf_messages, "or", "{}")) return " ".join(messages)
def __init__(
    self, *, default="host", additional_providers: List[str] = None
) -> None:
    """Instantiate a BuildEnvironmentConfig.

    :param str default: the default provider to use among the list of valid
                        ones.
    :param str additional_providers: Additional providers allowed in the
                                     environment.
    :raises errors.SnapcraftEnvironmentError: on invalid or conflicting
        environment settings.
    """
    valid_providers = ["host", "lxd"]
    if additional_providers is not None:
        valid_providers.extend(additional_providers)

    # SNAPCRAFT_CONTAINER_BUILDS is deprecated in favor of
    # SNAPCRAFT_BUILD_ENVIRONMENT=lxd; honor it with a warning.
    use_lxd = None
    container_builds = os.environ.get("SNAPCRAFT_CONTAINER_BUILDS")
    if container_builds:
        echo.warning(
            "The flag SNAPCRAFT_CONTAINER_BUILDS has been deprecated. "
            "Use SNAPCRAFT_BUILD_ENVIRONMENT=lxd instead."
        )
        try:
            use_lxd = util.strtobool(container_builds)
        except ValueError:
            raise errors.SnapcraftEnvironmentError(
                "The experimental feature of using non-local LXD remotes "
                "with SNAPCRAFT_CONTAINER_BUILDS has been dropped."
            )

    build_provider = os.environ.get("SNAPCRAFT_BUILD_ENVIRONMENT")
    # The two environment variables must not be combined.
    if build_provider and use_lxd:
        raise errors.SnapcraftEnvironmentError(
            "SNAPCRAFT_BUILD_ENVIRONMENT and SNAPCRAFT_CONTAINER_BUILDS "
            "cannot be used together.\n"
            "Given that SNAPCRAFT_CONTAINER_BUILDS is deprecated, "
            "unset that variable from the environment and try again."
        )

    if use_lxd:
        build_provider = "lxd"
    elif not build_provider:
        build_provider = default
    elif build_provider not in valid_providers:
        raise errors.SnapcraftEnvironmentError(
            "SNAPCRAFT_BUILD_ENVIRONMENT must be one of: {}.".format(
                humanize_list(items=valid_providers, conjunction="or")
            )
        )

    # Convenience flags for the chosen provider.
    self.provider = build_provider
    self.is_host = build_provider == "host"
    self.is_lxd = build_provider == "lxd"
def _adopt_info(config_data: Dict[str, Any],
                extracted_metadata: _metadata.ExtractedMetadata):
    """Adopt extracted metadata into ``config_data``, warning on overlap.

    Keys already present in the YAML take precedence over adopted
    metadata; any such overlap is logged.
    """
    ignored_keys = _adopt_keys(config_data, extracted_metadata)
    if ignored_keys:
        logger.warning(
            'The {keys} {plural_property} {plural_is} specified in adopted '
            'info as well as the YAML: taking the {plural_property} from the '
            'YAML'.format(
                keys=formatting_utils.humanize_list(list(ignored_keys), 'and'),
                plural_property=formatting_utils.pluralize(
                    ignored_keys, 'property', 'properties'),
                plural_is=formatting_utils.pluralize(ignored_keys, 'is',
                                                     'are')))
def _determine_cause(error): messages = [] # error.validator_value may contain a custom validation error message. # If so, use it instead of the garbage message jsonschema gives us. with contextlib.suppress(TypeError, KeyError): messages.append(error.validator_value["validation-failure"].format(error)) # The schema itself may have a custom validation error message. If so, # use it as well. with contextlib.suppress(AttributeError, TypeError, KeyError): key = error if ( error.schema.get("type") == "object" and error.validator == "additionalProperties" ): key = list(error.instance.keys())[0] messages.append(error.schema["validation-failure"].format(key)) # anyOf failures might have usable context... try to improve them a bit if error.validator == "anyOf": contextual_messages = OrderedDict() # type: Dict[str, str] for contextual_error in error.context: key = contextual_error.schema_path.popleft() if key not in contextual_messages: contextual_messages[key] = [] message = contextual_error.message if message: # Sure it starts lower-case (not all messages do) contextual_messages[key].append(message[0].lower() + message[1:]) oneOf_messages = [] # type: List[str] for key, value in contextual_messages.items(): oneOf_messages.append(formatting_utils.humanize_list(value, "and", "{}")) messages.append(formatting_utils.humanize_list(oneOf_messages, "or", "{}")) return " ".join(messages)
def __init__(self, *, step, part, dirty_properties=None,
             dirty_project_options=None, dependents=None):
    """Compose the human-readable report for a dirty step error.

    :param step: the step that is dirty.
    :param part: name of the part whose step is dirty.
    :param dirty_properties: part properties that appear to have changed.
    :param dirty_project_options: project options that appear changed.
    :param dependents: part names that depend on ``part``.
    """
    messages = []
    if dirty_properties:
        humanized_properties = formatting_utils.humanize_list(
            dirty_properties, 'and')
        pluralized_connection = formatting_utils.pluralize(
            dirty_properties, 'property appears', 'properties appear')
        messages.append(
            'The {} part {} to have changed.\n'.format(
                humanized_properties, pluralized_connection))

    if dirty_project_options:
        humanized_options = formatting_utils.humanize_list(
            dirty_project_options, 'and')
        pluralized_connection = formatting_utils.pluralize(
            dirty_project_options, 'option appears', 'options appear')
        messages.append(
            'The {} project {} to have changed.\n'.format(
                humanized_options, pluralized_connection))

    if dependents:
        humanized_dependents = formatting_utils.humanize_list(
            dependents, 'and')
        pluralized_dependents = formatting_utils.pluralize(
            dependents, "depends", "depend")
        messages.append('The {0!r} step for {1!r} needs to be run again, '
                        'but {2} {3} on it.\n'.format(
                            step, part, humanized_dependents,
                            pluralized_dependents))
        # Dependents must be cleaned too, so report them all (sorted).
        parts_names = ['{!s}'.format(d) for d in sorted(dependents)]
    else:
        parts_names = [part]

    super().__init__(step=step, part=part, report=''.join(messages),
                     parts_names=' '.join(parts_names))
def __init__(self, *, default="host",
             additional_providers: List[str] = None) -> None:
    """Instantiate a BuildEnvironmentConfig.

    :param str default: the default provider to use among the list of valid
                        ones.
    :param str additional_providers: Additional providers allowed in the
                                     environment.
    :raises errors.SnapcraftEnvironmentError: on invalid or conflicting
        environment settings.
    """
    valid_providers = ["host", "multipass", "lxd", "managed-host"]
    if additional_providers is not None:
        valid_providers.extend(additional_providers)

    # SNAPCRAFT_CONTAINER_BUILDS is deprecated in favor of
    # SNAPCRAFT_BUILD_ENVIRONMENT=lxd; honor it with a warning.
    use_lxd = None
    container_builds = os.environ.get("SNAPCRAFT_CONTAINER_BUILDS")
    if container_builds:
        echo.warning(
            "The flag SNAPCRAFT_CONTAINER_BUILDS has been deprecated. "
            "Use SNAPCRAFT_BUILD_ENVIRONMENT=lxd instead.")
        try:
            use_lxd = util.strtobool(container_builds)
        except ValueError:
            raise errors.SnapcraftEnvironmentError(
                "The experimental feature of using non-local LXD remotes "
                "with SNAPCRAFT_CONTAINER_BUILDS has been dropped.")

    build_provider = os.environ.get("SNAPCRAFT_BUILD_ENVIRONMENT")
    # The two environment variables must not be combined.
    if build_provider and use_lxd:
        raise errors.SnapcraftEnvironmentError(
            "SNAPCRAFT_BUILD_ENVIRONMENT and SNAPCRAFT_CONTAINER_BUILDS "
            "cannot be used together.\n"
            "Given that SNAPCRAFT_CONTAINER_BUILDS is deprecated, "
            "unset that variable from the environment and try again.")

    if use_lxd:
        build_provider = "lxd"
    elif not build_provider:
        build_provider = default
    elif build_provider not in valid_providers:
        raise errors.SnapcraftEnvironmentError(
            "SNAPCRAFT_BUILD_ENVIRONMENT must be one of: {}.".format(
                humanize_list(items=valid_providers, conjunction="or")))

    # Convenience flags for the chosen provider.
    self.provider = build_provider
    self.is_host = build_provider == "host"
    self.is_lxd = build_provider == "lxd"
    self.is_multipass = build_provider == "multipass"
    self.is_managed_host = build_provider == "managed-host"
def _adopt_info(config_data: Dict[str, Any],
                extracted_metadata: _metadata.ExtractedMetadata):
    """Adopt extracted metadata into ``config_data``, warning on overlap.

    Keys already present in the YAML take precedence over adopted
    metadata; any such overlap is logged.
    """
    ignored_keys = _adopt_keys(config_data, extracted_metadata)
    if ignored_keys:
        logger.warning(
            "The {keys} {plural_property} {plural_is} specified in adopted "
            "info as well as the YAML: taking the {plural_property} from the "
            "YAML".format(
                keys=formatting_utils.humanize_list(list(ignored_keys), "and"),
                plural_property=formatting_utils.pluralize(
                    ignored_keys, "property", "properties"),
                plural_is=formatting_utils.pluralize(ignored_keys, "is",
                                                     "are"),
            ))
def _determine_cause(error): messages = [] # error.validator_value may contain a custom validation error message. # If so, use it instead of the garbage message jsonschema gives us. with contextlib.suppress(TypeError, KeyError): messages.append( error.validator_value['validation-failure'].format(error)) # The schema itself may have a custom validation error message. If so, # use it as well. with contextlib.suppress(AttributeError, TypeError, KeyError): key = error if (error.schema.get('type') == 'object' and error.validator == 'additionalProperties'): key = list(error.instance.keys())[0] messages.append(error.schema['validation-failure'].format(key)) # anyOf failures might have usable context... try to improve them a bit if error.validator == 'anyOf': contextual_messages = OrderedDict() # type: Dict[str, str] for contextual_error in error.context: key = contextual_error.schema_path.popleft() if key not in contextual_messages: contextual_messages[key] = [] contextual_messages[key].append(contextual_error.message) oneOf_messages = [] # type: List[str] for key, value in contextual_messages.items(): oneOf_messages.append( formatting_utils.humanize_list(value, 'and', '{}')) messages.append( formatting_utils.humanize_list(oneOf_messages, 'or', '{}')) return ' '.join(messages)
def __init__(self, name, options, project):
    """Set up Catkin plugin state and validate the selected ROS distro.

    :raises RuntimeError: if source-space resolves to the workspace root,
        or the requested rosdistro is unsupported.
    """
    super().__init__(name, options, project)
    self.build_packages.extend(["libc6-dev", "make", "python-pip"])
    self.__pip = None

    # roslib is the base requiremet to actually create a workspace with
    # setup.sh and the necessary hooks.
    self.stage_packages.append("ros-{}-roslib".format(
        self.options.rosdistro))

    # Get a unique set of packages
    self.catkin_packages = None
    if options.catkin_packages is not None:
        self.catkin_packages = set(options.catkin_packages)
    self._rosdep_path = os.path.join(self.partdir, "rosdep")
    self._compilers_path = os.path.join(self.partdir, "compilers")
    self._catkin_path = os.path.join(self.partdir, "catkin")
    self._wstool_path = os.path.join(self.partdir, "wstool")

    # The path created via the `source` key (or a combination of `source`
    # and `source-subdir` keys) needs to point to a valid Catkin workspace
    # containing another subdirectory called the "source space." By
    # default, this is a directory named "src," but it can be remapped via
    # the `source-space` key. It's important that the source space is not
    # the root of the Catkin workspace, since Catkin won't work that way
    # and it'll create a circular link that causes rosdep to hang.
    if self.options.source_subdir:
        self._ros_package_path = os.path.join(self.sourcedir,
                                              self.options.source_subdir,
                                              self.options.source_space)
    else:
        self._ros_package_path = os.path.join(self.sourcedir,
                                              self.options.source_space)

    if os.path.abspath(self.sourcedir) == os.path.abspath(
            self._ros_package_path):
        raise RuntimeError(
            "source-space cannot be the root of the Catkin workspace")

    # Validate selected ROS distro
    if self.options.rosdistro not in _ROS_RELEASE_MAP:
        raise RuntimeError(
            "Unsupported rosdistro: {!r}. The supported ROS distributions "
            "are {}".format(
                self.options.rosdistro,
                formatting_utils.humanize_list(_ROS_RELEASE_MAP.keys(),
                                               "and"),
            ))
def __init__(self, name, options, project):
    """Set up Catkin plugin state and validate the selected ROS distro.

    :raises RuntimeError: if source-space resolves to the workspace root,
        or the requested rosdistro is unsupported.
    """
    super().__init__(name, options, project)
    self.build_packages.extend(["libc6-dev", "make", "python-pip"])
    self.__pip = None

    # roslib is the base requiremet to actually create a workspace with
    # setup.sh and the necessary hooks.
    self.stage_packages.append("ros-{}-roslib".format(self.options.rosdistro))

    # Get a unique set of packages
    self.catkin_packages = None
    if options.catkin_packages is not None:
        self.catkin_packages = set(options.catkin_packages)
    self._rosdep_path = os.path.join(self.partdir, "rosdep")
    self._compilers_path = os.path.join(self.partdir, "compilers")
    self._catkin_path = os.path.join(self.partdir, "catkin")
    self._wstool_path = os.path.join(self.partdir, "wstool")

    # The path created via the `source` key (or a combination of `source`
    # and `source-subdir` keys) needs to point to a valid Catkin workspace
    # containing another subdirectory called the "source space." By
    # default, this is a directory named "src," but it can be remapped via
    # the `source-space` key. It's important that the source space is not
    # the root of the Catkin workspace, since Catkin won't work that way
    # and it'll create a circular link that causes rosdep to hang.
    if self.options.source_subdir:
        self._ros_package_path = os.path.join(
            self.sourcedir, self.options.source_subdir, self.options.source_space
        )
    else:
        self._ros_package_path = os.path.join(
            self.sourcedir, self.options.source_space
        )

    if os.path.abspath(self.sourcedir) == os.path.abspath(self._ros_package_path):
        raise RuntimeError(
            "source-space cannot be the root of the Catkin workspace"
        )

    # Validate selected ROS distro
    if self.options.rosdistro not in _ROS_RELEASE_MAP:
        raise RuntimeError(
            "Unsupported rosdistro: {!r}. The supported ROS distributions "
            "are {}".format(
                self.options.rosdistro,
                formatting_utils.humanize_list(_ROS_RELEASE_MAP.keys(), "and"),
            )
        )
def _interpret_anyOf(error):
    """Interpret a validation error caused by the anyOf validator.

    Returns:
        A string containing a (hopefully) helpful validation error
        message. It may be empty.
    """
    try:
        usages = [validator["usage"] for validator in error.validator_value]
    except (TypeError, KeyError):
        # No usable "usage" hints in the subschemas; nothing to report.
        return ""

    return "must be one of {}".format(formatting_utils.humanize_list(usages, "or"))
def _adopt_info(
    config_data: Dict[str, Any], extracted_metadata: _metadata.ExtractedMetadata
):
    """Adopt extracted metadata into ``config_data``, warning on overlap.

    Keys already present in the YAML take precedence over adopted
    metadata; any such overlap is logged.
    """
    ignored_keys = _adopt_keys(config_data, extracted_metadata)
    if ignored_keys:
        logger.warning(
            "The {keys} {plural_property} {plural_is} specified in adopted "
            "info as well as the YAML: taking the {plural_property} from the "
            "YAML".format(
                keys=formatting_utils.humanize_list(list(ignored_keys), "and"),
                plural_property=formatting_utils.pluralize(
                    ignored_keys, "property", "properties"
                ),
                plural_is=formatting_utils.pluralize(ignored_keys, "is", "are"),
            )
        )
def _interpret_anyOf(error):
    """Interpret a validation error caused by the anyOf validator.

    Returns:
        A string containing a (hopefully) helpful validation error
        message. It may be empty.
    """
    collected_usages = []
    try:
        for sub_validator in error.validator_value:
            collected_usages.append(sub_validator["usage"])
    except (TypeError, KeyError):
        # validator_value wasn't iterable, or a validator lacked a usage.
        return ""

    humanized = formatting_utils.humanize_list(collected_usages, "or")
    return "must be one of {}".format(humanized)
def get_summary(self) -> str:
    """Get summarized report.

    :return: Short summary of why the step is dirty.
    :rtype: str
    """
    causes = []

    # A prerequisite step ran more recently than this one.
    if self.previous_step_modified:
        causes.append("{!r} step".format(self.previous_step_modified.name))

    # The part's source changed since this step last ran.
    if self.source_updated:
        causes.append("source")

    return "{} changed".format(formatting_utils.humanize_list(causes, "and", "{}"))
def _verify_dependents_will_be_cleaned(part_name, clean_part_names, step, config):
    """Raise if parts depending on part_name aren't also being cleaned.

    :param str part_name: Name of the part being cleaned.
    :param clean_part_names: Names of every part slated for cleaning.
    :param step: The step being cleaned.
    :param config: Project config providing parts and dependency lookups.
    :raises RuntimeError: If a dependent is dirty but not being cleaned.
    """
    # Get the name of the parts that depend upon this one.
    dependents = config.parts.get_dependents(part_name)

    # If every dependent is already going to be cleaned, nothing to check.
    if dependents.issubset(clean_part_names):
        return

    for candidate in config.all_parts:
        if candidate.name not in dependents or candidate.is_clean(step):
            continue
        humanized_parts = formatting_utils.humanize_list(
            dependents, 'and')
        raise RuntimeError(
            'Requested clean of {!r} but {} depend{} upon it. Please '
            "add each to the clean command if that's what you "
            'intended.'.format(part_name, humanized_parts,
                               's' if len(dependents) == 1 else ''))
def apply_extensions(yaml_data: Dict[str, Any]) -> Dict[str, Any]:
    """Apply all extensions.

    :param dict yaml_data: Loaded, unprocessed snapcraft.yaml
    :returns: Modified snapcraft.yaml data with extensions applied
    """
    # Work on a deep copy so the caller's dict is never modified.
    yaml_data = copy.deepcopy(yaml_data)
    base = yaml_data.get("base")

    applied: Set[str] = set()
    declared_globally = yaml_data.get("extensions", [])
    _validate_extension_format(declared_globally)

    for app_name, app_definition in yaml_data.get("apps", dict()).items():
        names_for_app = app_definition.get("extensions")
        _validate_extension_format(names_for_app)

        # Apps without their own extension list inherit the global one.
        if names_for_app is None:
            names_for_app = declared_globally

        for extension_name in names_for_app:
            extension = _load_extension(base, extension_name, yaml_data)
            _apply_extension(yaml_data, app_name, extension_name, extension)

        # Track what was applied so unused declarations can be reported.
        applied.update(names_for_app)

        # The spec has been consumed; drop it from the app definition.
        with contextlib.suppress(KeyError):
            del yaml_data["apps"][app_name]["extensions"]

    # Likewise drop the now-consumed global declaration.
    with contextlib.suppress(KeyError):
        del yaml_data["extensions"]

    unused = set(declared_globally) - applied
    if unused:
        logger.warning(
            "The following extensions are declared, but not used: {}".format(
                formatting_utils.humanize_list(unused, "and")))

    return yaml_data
def warn_ld_library_paths(self) -> None: root_ld_library_path = self._snap_meta.environment.get("LD_LIBRARY_PATH") # Dictionary of app names with LD_LIBRARY_PATH in their environment. app_environment: Dict[str, str] = dict() for app_name, app_props in self._config_data.get("apps", dict()).items(): with contextlib.suppress(KeyError): app_environment[app_name] = app_props["environment"]["LD_LIBRARY_PATH"] if root_ld_library_path is None and not app_environment: return ld_library_path_empty: Set[str] = set() if root_ld_library_path is None and app_environment: ld_library_path_empty = { name for name, ld_env in app_environment.items() if "$LD_LIBRARY_PATH" in ld_env or "${LD_LIBRARY_PATH}" in ld_env } elif ( root_ld_library_path is not None and "LD_LIBRARY_PATH" in root_ld_library_path ): ld_library_path_empty = {"."} _EMPTY_LD_LIBRARY_PATH_ITEM_PATTERN = re.compile("^:|::|:$") for name, ld_env in app_environment.items(): if _EMPTY_LD_LIBRARY_PATH_ITEM_PATTERN.findall(ld_env): ld_library_path_empty.add(name) if ( root_ld_library_path is not None and _EMPTY_LD_LIBRARY_PATH_ITEM_PATTERN.findall(root_ld_library_path) ): ld_library_path_empty.add(".") if ld_library_path_empty: logger.warning( "CVE-2020-27348: A potentially empty LD_LIBRARY_PATH has been set for environment " "in {}. " "The current working directory will be added to the library path if empty. " "This can cause unexpected libraries to be loaded.".format( formatting_utils.humanize_list(sorted(ld_library_path_empty), "and") ) )
def upload(snap_file, release):
    """Upload <snap-file> to the store.

    By passing --release with a comma separated list of channels the snap
    would be released to the selected channels if the store review passes
    for this <snap-file>.

    This operation will block until the store finishes processing this
    <snap-file>.

    If --release is used, the channel map will be displayed after the
    operation takes place.

    \b
    Examples:
        snapcraft upload my-snap_0.1_amd64.snap
        snapcraft upload my-snap_0.2_amd64.snap --release edge
        snapcraft upload my-snap_0.3_amd64.snap --release candidate,beta
    """
    # NOTE: the docstring above is click's user-facing help text.
    click.echo("Preparing to upload {!r}.".format(os.path.basename(snap_file)))

    # --release takes a comma-separated channel list; None means no release.
    channel_list = release.split(",") if release else None
    if channel_list:
        click.echo(
            "After uploading, the resulting snap revision will be released to "
            "{} when it passes the Snap Store review."
            "".format(formatting_utils.humanize_list(channel_list, "and"))
        )

    # Run the local review before spending time on the actual upload.
    review_snap(snap_file=snap_file)
    snap_name, snap_revision = snapcraft.upload(snap_file, channel_list)

    echo.info("Revision {!r} of {!r} created.".format(snap_revision, snap_name))

    if channel_list:
        # Show where the new revision landed across channels/architectures.
        snap_channel_map = StoreClientCLI().get_snap_channel_map(
            snap_name=snap_name
        )
        revision_architectures = snap_channel_map.get_revision(
            snap_revision
        ).architectures
        click.echo(
            get_tabulated_channel_map(
                snap_channel_map,
                architectures=revision_architectures,
            )
        )
def _verify_dependents_will_be_cleaned(part_name, clean_part_names, step, config):
    """Raise if parts depending on part_name aren't also being cleaned.

    :param str part_name: Name of the part being cleaned.
    :param clean_part_names: Names of every part slated for cleaning.
    :param step: The step being cleaned.
    :param config: Project config providing parts and dependency lookups.
    :raises errors.SnapcraftEnvironmentError: If a dependent is dirty but
        not being cleaned.
    """
    # Get the name of the parts that depend upon this one.
    dependents = config.parts.get_dependents(part_name)

    # All dependents already covered by this clean? Then we're fine.
    if dependents.issubset(clean_part_names):
        return

    for other_part in config.all_parts:
        is_dirty_dependent = (
            other_part.name in dependents and not other_part.is_clean(step)
        )
        if is_dirty_dependent:
            humanized_parts = formatting_utils.humanize_list(
                dependents, 'and')
            raise errors.SnapcraftEnvironmentError(
                'Requested clean of {!r} but {} depend{} upon it. Please '
                "add each to the clean command if that's what you "
                'intended.'.format(part_name, humanized_parts,
                                   's' if len(dependents) == 1 else ''))
def _clean_parts(part_names, step, config, staged_state, primed_state):
    """Clean each named part back to the given step, warning on fallout.

    :param part_names: Names of the parts to clean.
    :param step: Step to clean back to; defaults to the first step.
    :param config: Project config.
    :param staged_state: Saved state of the stage step.
    :param primed_state: Saved state of the prime step.
    """
    # Default to the very first step when none was specified.
    step = step or steps.next_step(None)

    for part_name in part_names:
        dirtied = _clean_part(part_name, step, config, staged_state,
                              primed_state)

        # Anything this clean dirtied that isn't itself being cleaned
        # deserves a heads-up.
        left_dirty = dirtied.difference(part_names)
        if left_dirty:
            logger.warning(
                'Cleaned {!r}, which makes the following {} out of date: '
                '{}'.format(
                    part_name,
                    formatting_utils.pluralize(left_dirty, 'part', 'parts'),
                    formatting_utils.humanize_list(left_dirty, 'and')))
def get_summary(self) -> str:
    """Get summarized report.

    :return: Short summary of why the part is dirty.
    :rtype: str
    """
    # Each category: (items, generic label, specific single-item label).
    categories = [
        (
            self.dirty_properties,
            "properties",
            lambda items: "{!r} property".format(next(iter(items))),
        ),
        (
            self.dirty_project_options,
            "options",
            lambda items: "{!r} option".format(next(iter(items))),
        ),
        (
            self.changed_dependencies,
            "dependencies",
            lambda items: "{!r}".format(next(iter(items)).part_name),
        ),
    ]

    active = [entry for entry in categories if entry[0]]

    reasons = []
    for items, generic_label, specific_label in active:
        # Be specific only when this is the sole reason with a single item.
        if len(active) > 1 or len(items) > 1:
            reasons.append(generic_label)
        else:
            reasons.append(specific_label(items))

    return "{} changed".format(formatting_utils.humanize_list(reasons, "and", "{}"))
def _verify_dependents_will_be_cleaned(part_name, clean_part_names, step, config):
    """Warn when cleaning part_name requires also cleaning its dependents.

    :param str part_name: Name of the part being cleaned.
    :param clean_part_names: Names of every part slated for cleaning.
    :param step: The step being cleaned.
    :param config: Project config providing parts and dependency lookups.
    """
    # Get the name of the parts that depend upon this one.
    dependents = config.parts.get_dependents(part_name)

    # Nothing to report if every dependent is already queued for cleaning.
    if dependents.issubset(clean_part_names):
        return

    # Collect only the dependents that actually need cleaning, i.e. those
    # not already clean for this step. (The previous implementation
    # appended the cleaned part's own name here instead of the dependent's.)
    additional_dependents = [
        part.name
        for part in config.all_parts
        if part.name in dependents and not part.is_clean(step)
    ]

    if additional_dependents:
        # Emit a single warning listing just the parts that need cleaning.
        # (Previously this warned once per dirty dependent, each time
        # listing every dependent whether dirty or not.)
        humanized_parts = formatting_utils.humanize_list(
            additional_dependents, 'and')
        logger.warning(
            'Requested clean of {!r} which requires also cleaning '
            'the part{} {}'.format(
                part_name,
                '' if len(additional_dependents) == 1 else 's',
                humanized_parts))
def _clean_parts(part_names, step, config, staged_state, primed_state):
    """Clean each named part back to the given step, warning on fallout.

    :param part_names: Names of the parts to clean.
    :param step: Step to clean back to; defaults to the first step.
    :param config: Project config.
    :param staged_state: Saved state of the stage step.
    :param primed_state: Saved state of the prime step.
    """
    # Default to the very first step when none was specified.
    step = step or steps.next_step(None)

    for part_name in part_names:
        dirtied = _clean_part(part_name, step, config, staged_state,
                              primed_state)

        # _clean_part returns part objects; compare by name.
        dirtied_names = {part.name for part in dirtied}
        left_dirty = dirtied_names.difference(part_names)
        if left_dirty:
            logger.warning(
                "Cleaned {!r}, which makes the following {} out of date: "
                "{}".format(
                    part_name,
                    formatting_utils.pluralize(
                        left_dirty, "part", "parts"
                    ),
                    formatting_utils.humanize_list(left_dirty, "and"),
                )
            )
def __init__(self, name, options, project):
    """Set up the Catkin plugin and validate its options.

    :param str name: Name of the part.
    :param options: Part schema options.
    :param project: The snapcraft project.
    :raises RuntimeError: If the source space is the root of the Catkin
        workspace, or if the requested rosdistro is unsupported.
    """
    super().__init__(name, options, project)
    self.build_packages.extend(['gcc', 'libc6-dev', 'make'])
    self.stage_packages.extend(['gcc', 'g++'])

    # Deduplicate the requested packages.
    self.catkin_packages = set(options.catkin_packages)
    self._rosdep_path = os.path.join(self.partdir, 'rosdep')

    # The path created via the `source` key (or a combination of `source`
    # and `source-subdir` keys) needs to point to a valid Catkin workspace
    # containing another subdirectory called the "source space." By
    # default, this is a directory named "src," but it can be remapped via
    # the `source-space` key. It's important that the source space is not
    # the root of the Catkin workspace, since Catkin won't work that way
    # and it'll create a circular link that causes rosdep to hang.
    components = [self.sourcedir]
    if self.options.source_subdir:
        components.append(self.options.source_subdir)
    components.append(self.options.source_space)
    self._ros_package_path = os.path.join(*components)

    if os.path.abspath(self.sourcedir) == os.path.abspath(
            self._ros_package_path):
        raise RuntimeError(
            'source-space cannot be the root of the Catkin workspace')

    # Validate selected ROS distro.
    if self.options.rosdistro not in _ROS_RELEASE_MAP:
        raise RuntimeError(
            'Unsupported rosdistro: {!r}. The supported ROS distributions '
            'are {}'.format(
                self.options.rosdistro,
                formatting_utils.humanize_list(
                    _ROS_RELEASE_MAP.keys(), 'and')))
def test_no_items(self):
    """An empty list humanizes to the empty string."""
    self.assertThat(
        formatting_utils.humanize_list([], 'and'),
        Equals(''))
def __init__(self, keys: List[str]) -> None:
    """Initialize the error with a humanized rendering of the keys.

    :param keys: The offending keys to report.
    """
    humanized_keys = formatting_utils.humanize_list(keys, "and")
    super().__init__(keys=humanized_keys)
def test_three_items(self):
    """Three items are sorted and Oxford-comma joined with 'and'.

    Uses assertThat/Equals for consistency with the sibling tests
    (the original used assertEqual here, alone among these cases).
    """
    items = ['foo', 'bar', 'baz']
    output = formatting_utils.humanize_list(items, 'and')
    self.assertThat(output, Equals("'bar', 'baz', and 'foo'"))
def test_two_items(self):
    """Two items are sorted and joined with the bare conjunction."""
    result = formatting_utils.humanize_list(['foo', 'bar'], 'and')
    self.assertThat(
        result,
        Equals("'bar' and 'foo'"),
        "Expected 'bar' before 'foo' due to sorting")
def test_four_items(self):
    """Four items are sorted and Oxford-comma joined with 'and'."""
    result = formatting_utils.humanize_list(
        ['foo', 'bar', 'baz', 'qux'], 'and')
    self.assertThat(result, Equals("'bar', 'baz', 'foo', and 'qux'"))
def test_one_item(self):
    """A single item is simply quoted, with no conjunction."""
    result = formatting_utils.humanize_list(['foo'], 'and')
    self.assertThat(result, Equals("'foo'"))
def test_another_conjunction(self):
    """The conjunction argument ('or') is honored in the join."""
    result = formatting_utils.humanize_list(
        ['foo', 'bar', 'baz', 'qux'], 'or')
    self.assertThat(result, Equals("'bar', 'baz', 'foo', or 'qux'"))