Code Example #1
    def __init__(
        self, stacks: List[Stack], use_raw_codeuri: bool = False, ignore_code_extraction_warnings: bool = False
    ) -> None:
        """
        Initialize the class with SAM template data. The SAM template passed to this provider is assumed
        to be valid, normalized and a dictionary. It should be normalized by running all pre-processing
        before passing to this class. The process of normalization will remove structures like ``Globals``, resolve
        intrinsic functions etc.
        This class does not perform any syntactic validation of the template.

        After the class is initialized, any changes to the ``template_dict`` will not be reflected in here.
        You need to explicitly update the class with new template, if necessary.

        :param list stacks: List of stacks that functions are extracted from
        :param bool use_raw_codeuri: Do not adjust codeuri based on the template path; use the raw URI.
            Note(xinhol): use_raw_codeuri is temporary to fix a bug, and will be removed for a permanent solution.
        :param bool ignore_code_extraction_warnings: Suppress log warnings emitted while extracting code from resources
        """

        self.stacks = stacks

        for stack in stacks:
            LOG.debug("%d resources found in the stack %s", len(stack.resources), stack.stack_path)

        # Store a map of function full_path to function information for quick reference
        self.functions = SamFunctionProvider._extract_functions(
            self.stacks, use_raw_codeuri, ignore_code_extraction_warnings
        )

        self._deprecated_runtimes = {"nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"}
        self._colored = Colored()
Code Example #2
    def __init__(self,
                 template_dict,
                 parameter_overrides=None,
                 ignore_code_extraction_warnings=False):
        """
        Initialize the class with SAM template data. The SAM template passed to this provider is assumed
        to be valid, normalized and a dictionary. It should be normalized by running all pre-processing
        before passing to this class. The process of normalization will remove structures like ``Globals``, resolve
        intrinsic functions etc.
        This class does not perform any syntactic validation of the template.

        After the class is initialized, any changes to the ``template_dict`` will not be reflected in here.
        You need to explicitly update the class with new template, if necessary.

        :param dict template_dict: SAM Template as a dictionary
        :param dict parameter_overrides: Optional dictionary of values for SAM template parameters to be
            substituted within the template
        :param bool ignore_code_extraction_warnings: Suppress log warnings emitted while extracting code from resources
        """

        self.template_dict = SamFunctionProvider.get_template(
            template_dict, parameter_overrides)
        self.ignore_code_extraction_warnings = ignore_code_extraction_warnings
        self.resources = self.template_dict.get("Resources", {})

        LOG.debug("%d resources found in the template", len(self.resources))

        # Store a map of function name to function information for quick reference
        self.functions = self._extract_functions(
            self.resources, self.ignore_code_extraction_warnings)

        self._deprecated_runtimes = {
            "nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"
        }
        self._colored = Colored()
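For illustration, here is a minimal, already-normalized template dictionary of the shape this constructor expects. The logical ID, handler, and paths are made up for this sketch; they are not taken from the examples above.

sample_template_dict = {
    "Resources": {
        "HelloWorldFunction": {  # hypothetical logical ID
            "Type": "AWS::Serverless::Function",
            "Properties": {
                "CodeUri": "hello_world/",
                "Handler": "app.lambda_handler",
                "Runtime": "python3.8",
            },
        }
    }
}
# Passing this dictionary to the constructor above would make _extract_functions
# record one entry keyed by the logical ID "HelloWorldFunction".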
Code Example #3
 def __init__(
     self,
     template_file,
     stack_name,
     s3_bucket,
     s3_prefix,
     region=None,
     profile=None,
     confirm_changeset=None,
     capabilities=None,
     parameter_overrides=None,
     save_to_config=True,
     config_section=None,
 ):
     self.template_file = template_file
     self.stack_name = stack_name
     self.s3_bucket = s3_bucket
     self.s3_prefix = s3_prefix
     self.region = region
     self.profile = profile
     self.confirm_changeset = confirm_changeset
     self.capabilities = (capabilities, )
     self.parameter_overrides = parameter_overrides
     self.save_to_config = save_to_config
     self.config_section = config_section
     self.guided_stack_name = None
     self.guided_s3_bucket = None
     self.guided_s3_prefix = None
     self.guided_region = None
     self.guided_profile = None
     self._capabilities = None
     self._parameter_overrides = None
     self.start_bold = "\033[1m"
     self.end_bold = "\033[0m"
     self.color = Colored()
Code Example #4
    def test_various_decorations(self, decoration_name, ansi_prefix):
        expected = ansi_prefix + self.msg + "\x1b[0m"

        with_color = Colored()
        without_color = Colored(colorize=False)

        self.assertEqual(expected, getattr(with_color, decoration_name)(self.msg))
        self.assertEqual(self.msg, getattr(without_color, decoration_name)(self.msg))
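For reference, here is a minimal, self-contained sketch of the behaviour this test exercises: each decoration method wraps the message in an ANSI prefix plus the reset code "\x1b[0m", and colorize=False returns the message untouched. This is a hypothetical stand-in, not the actual samcli.lib.utils.colors.Colored implementation.

class MiniColored:
    """Hypothetical stand-in for Colored, matching only what the test above asserts."""

    _decorations = {
        "red": "\x1b[31m",
        "green": "\x1b[32m",
        "yellow": "\x1b[33m",
        "bold": "\x1b[1m",
    }
    _reset = "\x1b[0m"

    def __init__(self, colorize=True):
        self.colorize = colorize

    def __getattr__(self, decoration_name):
        prefix = self._decorations.get(decoration_name)
        if prefix is None:
            raise AttributeError(decoration_name)

        def decorate(msg):
            # With colorize=False, return the message unchanged, as the test asserts.
            return f"{prefix}{msg}{self._reset}" if self.colorize else msg

        return decorate


print(MiniColored().yellow("WARNING: deprecated runtime"))
print(MiniColored(colorize=False).yellow("plain text"))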
Code Example #5
def guided_deploy(
    stack_name, s3_bucket, region, profile, confirm_changeset, parameter_override_keys, parameter_overrides
):
    default_stack_name = stack_name or "sam-app"
    default_region = region or "us-east-1"
    default_capabilities = ("CAPABILITY_IAM",)
    input_capabilities = None

    color = Colored()
    start_bold = "\033[1m"
    end_bold = "\033[0m"

    click.echo(
        color.yellow("\n\tSetting default arguments for 'sam deploy'\n\t=========================================")
    )

    stack_name = click.prompt(f"\t{start_bold}Stack Name{end_bold}", default=default_stack_name, type=click.STRING)
    s3_prefix = stack_name
    region = click.prompt(f"\t{start_bold}AWS Region{end_bold}", default=default_region, type=click.STRING)
    input_parameter_overrides = prompt_parameters(parameter_override_keys, start_bold, end_bold)

    click.secho("\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy")
    confirm_changeset = click.confirm(
        f"\t{start_bold}Confirm changes before deploy{end_bold}", default=confirm_changeset
    )
    click.secho("\t#SAM needs permission to be able to create roles to connect to the resources in your template")
    capabilities_confirm = click.confirm(f"\t{start_bold}Allow SAM CLI IAM role creation{end_bold}", default=True)

    if not capabilities_confirm:
        input_capabilities = click.prompt(
            f"\t{start_bold}Capabilities{end_bold}",
            default=default_capabilities[0],
            type=FuncParamType(func=_space_separated_list_func_type),
        )

    save_to_config = click.confirm(f"\t{start_bold}Save arguments to samconfig.toml{end_bold}", default=True)

    s3_bucket = manage_stack(profile=profile, region=region)
    click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}")
    click.echo("\t\tA different default S3 bucket can be set in samconfig.toml")

    return (
        stack_name,
        s3_bucket,
        s3_prefix,
        region,
        profile,
        confirm_changeset,
        input_capabilities if input_capabilities else default_capabilities,
        input_parameter_overrides if input_parameter_overrides else parameter_overrides,
        save_to_config,
    )
Code Example #6
    def __init__(
        self,
        resources_to_build,
        build_dir,
        base_dir,
        is_building_specific_resource=False,
        manifest_path_override=None,
        container_manager=None,
        parallel=False,
        mode=None,
    ):
        """
        Initialize the class

        Parameters
        ----------
        resources_to_build : Iterator
            Iterator that can vend out the resources available in the SAM template to be built

        build_dir : str
            Path to the directory where we will be storing built artifacts

        base_dir : str
            Path to a folder. Use this folder as the root to resolve relative source code paths against

        is_building_specific_resource : boolean
            Whether customer requested to build a specific resource alone in isolation,
            by specifying function_identifier to the build command.
            Ex: sam build MyServerlessFunction

        container_manager : samcli.local.docker.manager.ContainerManager
            Optional. If provided, we will attempt to build inside a Docker Container

        parallel : bool
            Optional. Set to True to build each function in parallel to improve performance

        mode : str
            Optional, name of the build mode to use ex: 'debug'
        """
        self._resources_to_build = resources_to_build
        self._build_dir = build_dir
        self._base_dir = base_dir
        self._manifest_path_override = manifest_path_override
        self._is_building_specific_resource = is_building_specific_resource

        self._container_manager = container_manager
        self._parallel = parallel
        self._mode = mode

        self._deprecated_runtimes = {"nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"}
        self._colored = Colored()
Code Example #7
 def __init__(
     self,
     template_file,
     stack_name,
     s3_bucket,
     image_repository,
     image_repositories,
     s3_prefix,
     region=None,
     profile=None,
     confirm_changeset=None,
     capabilities=None,
     signing_profiles=None,
     parameter_overrides=None,
     save_to_config=True,
     config_section=None,
     config_env=None,
     config_file=None,
 ):
     self.template_file = template_file
     self.stack_name = stack_name
     self.s3_bucket = s3_bucket
     self.image_repository = image_repository
     self.image_repositories = image_repositories
     self.s3_prefix = s3_prefix
     self.region = region
     self.profile = profile
     self.confirm_changeset = confirm_changeset
     self.capabilities = (capabilities, )
     self.parameter_overrides_from_cmdline = parameter_overrides
     self.save_to_config = save_to_config
     self.config_section = config_section
     self.config_env = config_env
     self.config_file = config_file
     self.guided_stack_name = None
     self.guided_s3_bucket = None
     self.guided_image_repository = None
     self.guided_image_repositories = None
     self.guided_s3_prefix = None
     self.guided_region = None
     self.guided_profile = None
     self.signing_profiles = signing_profiles
     self._capabilities = None
     self._parameter_overrides = None
     self.start_bold = "\033[1m"
     self.end_bold = "\033[0m"
     self.color = Colored()
     self.function_provider = None
Code Example #8
    def colored(self):
        """
        Instance of Colored object to colorize strings

        Returns
        -------
        samcli.commands.utils.colors.Colored
        """
        # No colors if we are writing output to a file
        return Colored(colorize=self._must_print_colors)
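The property above only shows that a _must_print_colors flag is consulted; how that flag is computed is not shown. One plausible (hypothetical) way to derive such a flag is to check whether stdout is an interactive terminal rather than a redirected file:

import sys

def must_print_colors(stream=sys.stdout):
    # Hypothetical helper: colorize only when writing to an interactive terminal,
    # not when output is redirected to a file or pipe.
    return hasattr(stream, "isatty") and stream.isatty()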
Code Example #9
def _deprecate_notification(runtime):
    from samcli.lib.utils.colors import Colored

    deprecated_runtimes = {"dotnetcore1.0", "dotnetcore2.0"}
    if runtime in deprecated_runtimes:
        message = (
            f"WARNING: {runtime} is no longer supported by AWS Lambda, please update to a newer supported runtime. SAM CLI "
            f"will drop support for all deprecated runtimes {deprecated_runtimes} on May 1st. "
            f"See issue: https://github.com/awslabs/aws-sam-cli/issues/1934 for more details."
        )
        LOG.warning(Colored().yellow(message))
Code Example #10
 def __init__(self):
     self._color = Colored()
     self.changeset_color_map = {
         "Add": "green",
         "Modify": "yellow",
         "Remove": "red"
     }
     self.status_color_map = {
         "CREATE_COMPLETE": "green",
         "CREATE_FAILED": "red",
         "CREATE_IN_PROGRESS": "yellow",
         "DELETE_COMPLETE": "green",
         "DELETE_FAILED": "red",
         "DELETE_IN_PROGRESS": "red",
         "REVIEW_IN_PROGRESS": "yellow",
         "ROLLBACK_COMPLETE": "red",
         "ROLLBACK_IN_PROGRESS": "red",
         "UPDATE_COMPLETE": "green",
         "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS": "yellow",
         "UPDATE_IN_PROGRESS": "yellow",
         "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS": "red",
         "UPDATE_ROLLBACK_FAILED": "red",
         "UPDATE_ROLLBACK_IN_PROGRESS": "red",
     }
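The maps above pair change-set actions and stack statuses with the names of Colored methods. A likely way to apply them is to dispatch on the method name with getattr; the fallback to "yellow" here is an assumption made for this sketch, not necessarily what the real deploy formatter does.

def colorize_status(color, status_color_map, status):
    # Hypothetical dispatch: look up the Colored method ("green"/"yellow"/"red")
    # by name and apply it to the status text.
    method_name = status_color_map.get(status, "yellow")
    return getattr(color, method_name)(status)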
Code Example #11
class SamFunctionProvider(SamBaseProvider):
    """
    Fetches and returns Lambda Functions from a SAM Template. The SAM template passed to this provider is assumed
    to be valid, normalized and a dictionary.

    It may or may not contain a function.
    """

    def __init__(
        self, stacks: List[Stack], use_raw_codeuri: bool = False, ignore_code_extraction_warnings: bool = False
    ) -> None:
        """
        Initialize the class with SAM template data. The SAM template passed to this provider is assumed
        to be valid, normalized and a dictionary. It should be normalized by running all pre-processing
        before passing to this class. The process of normalization will remove structures like ``Globals``, resolve
        intrinsic functions etc.
        This class does not perform any syntactic validation of the template.

        After the class is initialized, any changes to the ``template_dict`` will not be reflected in here.
        You need to explicitly update the class with new template, if necessary.

        :param list stacks: List of stacks that functions are extracted from
        :param bool use_raw_codeuri: Do not adjust codeuri based on the template path; use the raw URI.
            Note(xinhol): use_raw_codeuri is temporary to fix a bug, and will be removed for a permanent solution.
        :param bool ignore_code_extraction_warnings: Suppress log warnings emitted while extracting code from resources
        """

        self.stacks = stacks

        for stack in stacks:
            LOG.debug("%d resources found in the stack %s", len(stack.resources), stack.stack_path)

        # Store a map of function full_path to function information for quick reference
        self.functions = SamFunctionProvider._extract_functions(
            self.stacks, use_raw_codeuri, ignore_code_extraction_warnings
        )

        self._deprecated_runtimes = {"nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"}
        self._colored = Colored()

    def get(self, name: str) -> Optional[Function]:
        """
        Returns the function with the given name or LogicalId. Every SAM resource has a LogicalId, but it may
        also have a function name. This method searches by LogicalId and returns the function that matches.
        If the function is in a nested stack, "name" can be prefixed with the stack path to avoid ambiguity.
        For example, if a function named "FunctionA" is located in StackN, a nested stack of the root stack,
        either "StackN/FunctionA" or "FunctionA" can be used.

        :param string name: Name of the function
        :return Function: namedtuple containing the Function information if the function is found.
                          None, if the function is not found
        :raises ValueError: If name is not given
        """

        if not name:
            raise ValueError("Function name is required")

        # support lookup by full_path
        if name in self.functions:
            return self.functions.get(name)

        for f in self.get_all():
            if name in (f.name, f.functionname):
                self._deprecate_notification(f.runtime)
                return f

        return None

    def _deprecate_notification(self, runtime: Optional[str]) -> None:
        if runtime in self._deprecated_runtimes:
            message = (
                f"WARNING: {runtime} is no longer supported by AWS Lambda, "
                "please update to a newer supported runtime. SAM CLI "
                f"will drop support for all deprecated runtimes {self._deprecated_runtimes} on May 1st. "
                "See issue: https://github.com/awslabs/aws-sam-cli/issues/1934 for more details."
            )
            LOG.warning(self._colored.yellow(message))

    def get_all(self) -> Iterator[Function]:
        """
        Yields all the Lambda functions available in the SAM Template.

        :yields Function: namedtuple containing the function information
        """

        for _, function in self.functions.items():
            yield function

    @staticmethod
    def _extract_functions(
        stacks: List[Stack], use_raw_codeuri: bool = False, ignore_code_extraction_warnings: bool = False
    ) -> Dict[str, Function]:
        """
        Extracts and returns function information from the given dictionary of SAM/CloudFormation resources. This
        method supports functions defined with AWS::Serverless::Function and AWS::Lambda::Function

        :param stacks: List of SAM/CloudFormation stacks to extract functions from
        :param bool use_raw_codeuri: Do not adjust codeuri based on the template path; use the raw URI.
        :param bool ignore_code_extraction_warnings: suppress log statements on code extraction from resources.
        :return dict(string : samcli.commands.local.lib.provider.Function): Dictionary of function full_path to the
            Function configuration object
        """

        result: Dict[str, Function] = {}  # a dict with full_path as key and extracted function as value
        for stack in stacks:
            for name, resource in stack.resources.items():

                resource_type = resource.get("Type")
                resource_properties = resource.get("Properties", {})
                resource_metadata = resource.get("Metadata", None)
                # Add extra metadata information to properties under a separate field.
                if resource_metadata:
                    resource_properties["Metadata"] = resource_metadata

                if resource_type == SamFunctionProvider.SERVERLESS_FUNCTION:
                    layers = SamFunctionProvider._parse_layer_info(
                        stack,
                        resource_properties.get("Layers", []),
                        use_raw_codeuri,
                        ignore_code_extraction_warnings=ignore_code_extraction_warnings,
                    )
                    function = SamFunctionProvider._convert_sam_function_resource(
                        stack,
                        name,
                        resource_properties,
                        layers,
                        use_raw_codeuri,
                        ignore_code_extraction_warnings=ignore_code_extraction_warnings,
                    )
                    result[function.full_path] = function

                elif resource_type == SamFunctionProvider.LAMBDA_FUNCTION:
                    layers = SamFunctionProvider._parse_layer_info(
                        stack,
                        resource_properties.get("Layers", []),
                        use_raw_codeuri,
                        ignore_code_extraction_warnings=ignore_code_extraction_warnings,
                    )
                    function = SamFunctionProvider._convert_lambda_function_resource(
                        stack, name, resource_properties, layers, use_raw_codeuri
                    )
                    result[function.full_path] = function

                # We don't care about other resource types. Just ignore them

        return result

    @staticmethod
    def _convert_sam_function_resource(
        stack: Stack,
        name: str,
        resource_properties: Dict,
        layers: List[LayerVersion],
        use_raw_codeuri: bool = False,
        ignore_code_extraction_warnings: bool = False,
    ) -> Function:
        """
        Converts an AWS::Serverless::Function resource to a Function configuration usable by the provider.

        Parameters
        ----------
        name str
            LogicalID of the resource. NOTE: This is *not* the function name because not all functions declare a name
        resource_properties dict
            Properties of this resource
        layers List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function.

        Returns
        -------
        samcli.commands.local.lib.provider.Function
            Function configuration
        """
        codeuri: Optional[str] = SamFunctionProvider.DEFAULT_CODEURI
        inlinecode = resource_properties.get("InlineCode")
        imageuri = None
        packagetype = resource_properties.get("PackageType", ZIP)
        if packagetype == ZIP:
            if inlinecode:
                LOG.debug("Found Serverless function with name='%s' and InlineCode", name)
                codeuri = None
            else:
                codeuri = SamFunctionProvider._extract_sam_function_codeuri(
                    name,
                    resource_properties,
                    "CodeUri",
                    ignore_code_extraction_warnings=ignore_code_extraction_warnings,
                )
                LOG.debug("Found Serverless function with name='%s' and CodeUri='%s'", name, codeuri)
        elif packagetype == IMAGE:
            imageuri = SamFunctionProvider._extract_sam_function_imageuri(resource_properties, "ImageUri")
            LOG.debug("Found Serverless function with name='%s' and ImageUri='%s'", name, imageuri)

        return SamFunctionProvider._build_function_configuration(
            stack, name, codeuri, resource_properties, layers, inlinecode, imageuri, use_raw_codeuri
        )

    @staticmethod
    def _convert_lambda_function_resource(
        stack: Stack, name: str, resource_properties: Dict, layers: List[LayerVersion], use_raw_codeuri: bool = False
    ) -> Function:
        """
        Converts an AWS::Lambda::Function resource to a Function configuration usable by the provider.

        Parameters
        ----------
        name str
            LogicalID of the resource. NOTE: This is *not* the function name because not all functions declare a name
        resource_properties dict
            Properties of this resource
        layers List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function.
        use_raw_codeuri
            Do not adjust codeuri based on the template path; use the raw URI.

        Returns
        -------
        samcli.commands.local.lib.provider.Function
            Function configuration
        """

        # CodeUri is set to "." in order to get code locally from current directory. AWS::Lambda::Function's ``Code``
        # property does not support specifying a local path
        codeuri: Optional[str] = SamFunctionProvider.DEFAULT_CODEURI
        inlinecode = None
        imageuri = None
        packagetype = resource_properties.get("PackageType", ZIP)
        if packagetype == ZIP:
            if (
                "Code" in resource_properties
                and isinstance(resource_properties["Code"], dict)
                and resource_properties["Code"].get("ZipFile")
            ):
                inlinecode = resource_properties["Code"]["ZipFile"]
                LOG.debug("Found Lambda function with name='%s' and Code ZipFile", name)
                codeuri = None
            else:
                codeuri = SamFunctionProvider._extract_lambda_function_code(resource_properties, "Code")
                LOG.debug("Found Lambda function with name='%s' and CodeUri='%s'", name, codeuri)
        elif packagetype == IMAGE:
            imageuri = SamFunctionProvider._extract_lambda_function_imageuri(resource_properties, "Code")
            LOG.debug("Found Lambda function with name='%s' and Imageuri='%s'", name, imageuri)

        return SamFunctionProvider._build_function_configuration(
            stack, name, codeuri, resource_properties, layers, inlinecode, imageuri, use_raw_codeuri
        )

    @staticmethod
    def _build_function_configuration(
        stack: Stack,
        name: str,
        codeuri: Optional[str],
        resource_properties: Dict,
        layers: List,
        inlinecode: Optional[str],
        imageuri: Optional[str],
        use_raw_codeuri: bool = False,
    ) -> Function:
        """
        Builds a Function configuration usable by the provider.

        Parameters
        ----------
        name str
            LogicalID of the resource. NOTE: This is *not* the function name because not all functions declare a name
        codeuri str
            Representing the local code path
        resource_properties dict
            Properties of this resource
        layers List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function.
        use_raw_codeuri
            Do not adjust codeuri based on the template path; use the raw URI.

        Returns
        -------
        samcli.commands.local.lib.provider.Function
            Function configuration
        """
        metadata = resource_properties.get("Metadata", None)
        if metadata and "DockerContext" in metadata and not use_raw_codeuri:
            LOG.debug(
                "--base-dir is presented not, adjusting uri %s relative to %s",
                metadata["DockerContext"],
                stack.location,
            )
            metadata["DockerContext"] = SamLocalStackProvider.normalize_resource_path(
                stack.location, metadata["DockerContext"]
            )

        if codeuri and not use_raw_codeuri:
            LOG.debug("--base-dir is presented not, adjusting uri %s relative to %s", codeuri, stack.location)
            codeuri = SamLocalStackProvider.normalize_resource_path(stack.location, codeuri)

        return Function(
            stack_path=stack.stack_path,
            name=name,
            functionname=resource_properties.get("FunctionName", name),
            packagetype=resource_properties.get("PackageType", ZIP),
            runtime=resource_properties.get("Runtime"),
            memory=resource_properties.get("MemorySize"),
            timeout=resource_properties.get("Timeout"),
            handler=resource_properties.get("Handler"),
            codeuri=codeuri,
            imageuri=imageuri if imageuri else resource_properties.get("ImageUri"),
            imageconfig=resource_properties.get("ImageConfig"),
            environment=resource_properties.get("Environment"),
            rolearn=resource_properties.get("Role"),
            events=resource_properties.get("Events"),
            layers=layers,
            metadata=metadata,
            inlinecode=inlinecode,
            codesign_config_arn=resource_properties.get("CodeSigningConfigArn", None),
        )

    @staticmethod
    def _parse_layer_info(
        stack: Stack,
        list_of_layers: List[Any],
        use_raw_codeuri: bool = False,
        ignore_code_extraction_warnings: bool = False,
    ) -> List[LayerVersion]:
        """
        Creates a list of Layer objects that are represented by the resources and the list of layers

        Parameters
        ----------
        stack : Stack
            The stack the layer is defined in
        list_of_layers : List[Any]
            List of layers that are defined within the Layers Property on a function,
            layer can be defined as string or Dict, in case customers define it in other types, use "Any" here.
        use_raw_codeuri : bool
            Do not adjust codeuri based on the template path; use the raw URI.
        ignore_code_extraction_warnings : bool
            Whether to print a warning when the codeuri is not a local path

        Returns
        -------
        List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function. The order
            of the layers does not change.

            I.E: list_of_layers = ["layer1", "layer2"] the return would be [Layer("layer1"), Layer("layer2")]
        """
        layers = []
        for layer in list_of_layers:
            if layer == "arn:aws:lambda:::awslayer:AmazonLinux1803":
                LOG.debug("Skipped arn:aws:lambda:::awslayer:AmazonLinux1803 as the containers are AmazonLinux1803")
                continue

            if layer == "arn:aws:lambda:::awslayer:AmazonLinux1703":
                raise InvalidLayerVersionArn(
                    "Building and invoking locally only supports AmazonLinux1803. See "
                    "https://aws.amazon.com/blogs/compute/upcoming-updates-to-the-aws-lambda-execution-environment/ "
                    "for more detials."
                )  # noqa: E501

            # If the layer is a string, assume it is the arn
            if isinstance(layer, str):
                layers.append(
                    LayerVersion(
                        layer,
                        None,
                        stack_path=stack.stack_path,
                    )
                )
                continue

            # In the list of layers that is defined within a template, you can reference a LayerVersion resource.
            # When running locally, we need to follow that Ref so we can extract the local path to the layer code.
            if isinstance(layer, dict) and layer.get("Ref"):
                layer_logical_id = cast(str, layer.get("Ref"))
                layer_resource = stack.resources.get(layer_logical_id)
                if not layer_resource or layer_resource.get("Type", "") not in (
                    SamFunctionProvider.SERVERLESS_LAYER,
                    SamFunctionProvider.LAMBDA_LAYER,
                ):
                    raise InvalidLayerReference()

                layer_properties = layer_resource.get("Properties", {})
                resource_type = layer_resource.get("Type")
                compatible_runtimes = layer_properties.get("CompatibleRuntimes")
                codeuri: Optional[str] = None

                if resource_type == SamFunctionProvider.LAMBDA_LAYER:
                    codeuri = SamFunctionProvider._extract_lambda_function_code(layer_properties, "Content")

                if resource_type == SamFunctionProvider.SERVERLESS_LAYER:
                    codeuri = SamFunctionProvider._extract_sam_function_codeuri(
                        layer_logical_id, layer_properties, "ContentUri", ignore_code_extraction_warnings
                    )

                if codeuri and not use_raw_codeuri:
                    LOG.debug("--base-dir is presented not, adjusting uri %s relative to %s", codeuri, stack.location)
                    codeuri = SamLocalStackProvider.normalize_resource_path(stack.location, codeuri)

                layers.append(
                    LayerVersion(
                        layer_logical_id,
                        codeuri,
                        compatible_runtimes,
                        layer_resource.get("Metadata", None),
                        stack_path=stack.stack_path,
                    )
                )

        return layers

    def get_resources_by_stack_path(self, stack_path: str) -> Dict:
        candidates = [stack.resources for stack in self.stacks if stack.stack_path == stack_path]
        if not candidates:
            raise RuntimeError(f"Cannot find resources with stack_path = {stack_path}")
        return candidates[0]
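To make the lookup order implemented by get() above concrete, here is a small standalone illustration using plain dictionaries in place of Function namedtuples; the stack path and names are hypothetical.

functions = {
    "StackN/FunctionA": {"name": "FunctionA", "functionname": "my-function-a"},
}

def lookup(name):
    if name in functions:                 # 1. exact full_path match
        return functions[name]
    for func in functions.values():       # 2. fall back to LogicalId / FunctionName
        if name in (func["name"], func["functionname"]):
            return func
    return None

assert lookup("StackN/FunctionA") is lookup("FunctionA") is lookup("my-function-a")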
Code Example #12
class GuidedContext:
    def __init__(
        self,
        template_file,
        stack_name,
        s3_bucket,
        image_repository,
        image_repositories,
        s3_prefix,
        region=None,
        profile=None,
        confirm_changeset=None,
        capabilities=None,
        signing_profiles=None,
        parameter_overrides=None,
        save_to_config=True,
        config_section=None,
        config_env=None,
        config_file=None,
    ):
        self.template_file = template_file
        self.stack_name = stack_name
        self.s3_bucket = s3_bucket
        self.image_repository = image_repository
        self.image_repositories = image_repositories
        self.s3_prefix = s3_prefix
        self.region = region
        self.profile = profile
        self.confirm_changeset = confirm_changeset
        self.capabilities = (capabilities, )
        self.parameter_overrides_from_cmdline = parameter_overrides
        self.save_to_config = save_to_config
        self.config_section = config_section
        self.config_env = config_env
        self.config_file = config_file
        self.guided_stack_name = None
        self.guided_s3_bucket = None
        self.guided_image_repository = None
        self.guided_image_repositories = None
        self.guided_s3_prefix = None
        self.guided_region = None
        self.guided_profile = None
        self.signing_profiles = signing_profiles
        self._capabilities = None
        self._parameter_overrides = None
        self.start_bold = "\033[1m"
        self.end_bold = "\033[0m"
        self.color = Colored()
        self.function_provider = None

    @property
    def guided_capabilities(self):
        return self._capabilities

    @property
    def guided_parameter_overrides(self):
        return self._parameter_overrides

    # pylint: disable=too-many-statements
    def guided_prompts(self, parameter_override_keys):
        default_stack_name = self.stack_name or "sam-app"
        default_region = self.region or get_session().get_config_variable(
            "region") or "us-east-1"
        default_capabilities = self.capabilities[0] or ("CAPABILITY_IAM", )
        default_config_env = self.config_env or DEFAULT_ENV
        default_config_file = self.config_file or DEFAULT_CONFIG_FILE_NAME
        input_capabilities = None
        config_env = None
        config_file = None

        click.echo(
            self.color.yellow(
                "\n\tSetting default arguments for 'sam deploy'\n\t========================================="
            ))

        stack_name = prompt(f"\t{self.start_bold}Stack Name{self.end_bold}",
                            default=default_stack_name,
                            type=click.STRING)
        region = prompt(f"\t{self.start_bold}AWS Region{self.end_bold}",
                        default=default_region,
                        type=click.STRING)
        input_parameter_overrides = self.prompt_parameters(
            parameter_override_keys, self.parameter_overrides_from_cmdline,
            self.start_bold, self.end_bold)
        stacks = SamLocalStackProvider.get_stacks(
            self.template_file,
            parameter_overrides=sanitize_parameter_overrides(
                input_parameter_overrides))
        image_repositories = self.prompt_image_repository(stacks)

        click.secho(
            "\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"
        )
        confirm_changeset = confirm(
            f"\t{self.start_bold}Confirm changes before deploy{self.end_bold}",
            default=self.confirm_changeset)
        click.secho(
            "\t#SAM needs permission to be able to create roles to connect to the resources in your template"
        )
        capabilities_confirm = confirm(
            f"\t{self.start_bold}Allow SAM CLI IAM role creation{self.end_bold}",
            default=True)

        if not capabilities_confirm:
            input_capabilities = prompt(
                f"\t{self.start_bold}Capabilities{self.end_bold}",
                default=list(default_capabilities),
                type=FuncParamType(func=_space_separated_list_func_type),
            )

        self.prompt_authorization(stacks)
        self.prompt_code_signing_settings(stacks)

        save_to_config = confirm(
            f"\t{self.start_bold}Save arguments to configuration file{self.end_bold}",
            default=True)
        if save_to_config:
            config_file = prompt(
                f"\t{self.start_bold}SAM configuration file{self.end_bold}",
                default=default_config_file,
                type=click.STRING,
            )
            config_env = prompt(
                f"\t{self.start_bold}SAM configuration environment{self.end_bold}",
                default=default_config_env,
                type=click.STRING,
            )

        s3_bucket = manage_stack(profile=self.profile, region=region)
        click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}")
        click.echo(
            "\t\tA different default S3 bucket can be set in samconfig.toml")

        self.guided_stack_name = stack_name
        self.guided_s3_bucket = s3_bucket
        self.guided_image_repositories = image_repositories
        self.guided_s3_prefix = stack_name
        self.guided_region = region
        self.guided_profile = self.profile
        self._capabilities = input_capabilities if input_capabilities else default_capabilities
        self._parameter_overrides = (input_parameter_overrides
                                     if input_parameter_overrides else
                                     self.parameter_overrides_from_cmdline)
        self.save_to_config = save_to_config
        self.config_env = config_env if config_env else default_config_env
        self.config_file = config_file if config_file else default_config_file
        self.confirm_changeset = confirm_changeset

    def prompt_authorization(self, stacks: List[Stack]):
        auth_required_per_resource = auth_per_resource(stacks)

        for resource, authorization_required in auth_required_per_resource:
            if not authorization_required:
                auth_confirm = confirm(
                    f"\t{self.start_bold}{resource} may not have authorization defined, Is this okay?{self.end_bold}",
                    default=False,
                )
                if not auth_confirm:
                    raise GuidedDeployFailedError(
                        msg="Security Constraints Not Satisfied!")

    def prompt_code_signing_settings(self, stacks: List[Stack]):
        (functions_with_code_sign,
         layers_with_code_sign) = signer_config_per_function(stacks)

        # if no function or layer definition found with code signing, skip it
        if not functions_with_code_sign and not layers_with_code_sign:
            LOG.debug(
                "No function or layer definition found with code sign config, skipping"
            )
            return

        click.echo(
            "\n\t#Found code signing configurations in your function definitions"
        )
        sign_functions = confirm(
            f"\t{self.start_bold}Do you want to sign your code?{self.end_bold}",
            default=True,
        )

        if not sign_functions:
            LOG.debug(
                "User skipped code signing, continuing rest of the process")
            self.signing_profiles = None
            return

        if not self.signing_profiles:
            self.signing_profiles = {}

        click.echo(
            "\t#Please provide signing profile details for the following functions & layers"
        )

        for function_name in functions_with_code_sign:
            (profile_name,
             profile_owner) = extract_profile_name_and_owner_from_existing(
                 function_name, self.signing_profiles)

            click.echo(
                f"\t#Signing profile details for function '{function_name}'")
            profile_name = prompt_profile_name(profile_name, self.start_bold,
                                               self.end_bold)
            profile_owner = prompt_profile_owner(profile_owner,
                                                 self.start_bold,
                                                 self.end_bold)
            self.signing_profiles[function_name] = {
                "profile_name": profile_name,
                "profile_owner": profile_owner
            }
            self.signing_profiles[function_name][
                "profile_owner"] = "" if not profile_owner else profile_owner

        for layer_name, functions_use_this_layer in layers_with_code_sign.items(
        ):
            (profile_name,
             profile_owner) = extract_profile_name_and_owner_from_existing(
                 layer_name, self.signing_profiles)
            click.echo(
                f"\t#Signing profile details for layer '{layer_name}', "
                f"which is used by functions {functions_use_this_layer}")
            profile_name = prompt_profile_name(profile_name, self.start_bold,
                                               self.end_bold)
            profile_owner = prompt_profile_owner(profile_owner,
                                                 self.start_bold,
                                                 self.end_bold)
            self.signing_profiles[layer_name] = {
                "profile_name": profile_name,
                "profile_owner": profile_owner
            }
            self.signing_profiles[layer_name][
                "profile_owner"] = "" if not profile_owner else profile_owner

        LOG.debug("Signing profile names and owners %s", self.signing_profiles)

    def prompt_parameters(self, parameter_override_from_template,
                          parameter_override_from_cmdline, start_bold,
                          end_bold):
        _prompted_param_overrides = {}
        if parameter_override_from_template:
            for parameter_key, parameter_properties in parameter_override_from_template.items(
            ):
                no_echo = parameter_properties.get("NoEcho", False)
                if no_echo:
                    parameter = prompt(
                        f"\t{start_bold}Parameter {parameter_key}{end_bold}",
                        type=click.STRING,
                        hide_input=True)
                    _prompted_param_overrides[parameter_key] = {
                        "Value": parameter,
                        "Hidden": True
                    }
                else:
                    parameter = prompt(
                        f"\t{start_bold}Parameter {parameter_key}{end_bold}",
                        default=_prompted_param_overrides.get(
                            parameter_key,
                            self._get_parameter_value(
                                parameter_key, parameter_properties,
                                parameter_override_from_cmdline),
                        ),
                        type=click.STRING,
                    )
                    _prompted_param_overrides[parameter_key] = {
                        "Value": parameter,
                        "Hidden": False
                    }
        return _prompted_param_overrides

    def prompt_image_repository(self, stacks: List[Stack]):
        image_repositories = {}
        artifacts_format = get_template_artifacts_format(
            template_file=self.template_file)
        if IMAGE in artifacts_format:
            self.function_provider = SamFunctionProvider(
                stacks, ignore_code_extraction_warnings=True)
            function_resources = get_template_function_resource_ids(
                template_file=self.template_file, artifact=IMAGE)
            for resource_id in function_resources:
                image_repositories[resource_id] = prompt(
                    f"\t{self.start_bold}Image Repository for {resource_id}{self.end_bold}",
                    default=self.image_repositories.get(resource_id, "")
                    if isinstance(self.image_repositories, dict) else ""
                    or self.image_repository,
                )
                if not is_ecr_url(image_repositories.get(resource_id)):
                    raise GuidedDeployFailedError(
                        f"Invalid Image Repository ECR URI: {image_repositories.get(resource_id)}"
                    )
            for resource_id, function_prop in self.function_provider.functions.items(
            ):
                if function_prop.packagetype == IMAGE:
                    image = function_prop.imageuri
                    try:
                        tag = tag_translation(image)
                    except NonLocalImageException:
                        pass
                    except NoImageFoundException as ex:
                        raise GuidedDeployFailedError(
                            "No images found to deploy, try running sam build"
                        ) from ex
                    else:
                        click.secho(
                            f"\t  {image} to be pushed to {image_repositories.get(resource_id)}:{tag}"
                        )
            click.secho(nl=True)

        return image_repositories

    def run(self):

        try:
            _parameter_override_keys = get_template_parameters(
                template_file=self.template_file)
        except ValueError as ex:
            LOG.debug("Failed to parse SAM template", exc_info=ex)
            raise GuidedDeployFailedError(str(ex)) from ex

        guided_config = GuidedConfig(template_file=self.template_file,
                                     section=self.config_section)
        guided_config.read_config_showcase(
            self.config_file or DEFAULT_CONFIG_FILE_NAME, )

        self.guided_prompts(_parameter_override_keys)

        if self.save_to_config:
            guided_config.save_config(
                self._parameter_overrides,
                self.config_env or DEFAULT_ENV,
                self.config_file or DEFAULT_CONFIG_FILE_NAME,
                stack_name=self.guided_stack_name,
                s3_bucket=self.guided_s3_bucket,
                s3_prefix=self.guided_s3_prefix,
                image_repositories=self.guided_image_repositories,
                region=self.guided_region,
                profile=self.guided_profile,
                confirm_changeset=self.confirm_changeset,
                capabilities=self._capabilities,
                signing_profiles=self.signing_profiles,
            )

    @staticmethod
    def _get_parameter_value(parameter_key: str, parameter_properties: Dict,
                             parameter_override_from_cmdline: Dict) -> Any:
        """
        This function provides the value of a parameter. If the command line/config file has an override
        whose key exists in the template file parameters, the overridden value is used.
        Otherwise, the parameter's default value from the template file is used.

        :param parameter_key: key of parameter
        :param parameter_properties: properties of that parameters from template file
        :param parameter_override_from_cmdline: parameter_override from command line/config file
        """
        if parameter_override_from_cmdline and parameter_override_from_cmdline.get(
                parameter_key, None):
            return parameter_override_from_cmdline[parameter_key]
        # Make sure the default is cast to a string.
        return str(parameter_properties.get("Default", ""))
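As a quick illustration of the precedence implemented by _get_parameter_value above, a standalone version behaves as follows (parameter names and values are made up):

def resolve_parameter(key, properties, overrides):
    # Command line/config file override wins; otherwise fall back to the
    # template default, cast to a string.
    if overrides and overrides.get(key):
        return overrides[key]
    return str(properties.get("Default", ""))

assert resolve_parameter("Stage", {"Default": "dev"}, {"Stage": "prod"}) == "prod"
assert resolve_parameter("Stage", {"Default": "dev"}, {}) == "dev"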
Code Example #13
class SamFunctionProvider(SamBaseProvider):
    """
    Fetches and returns Lambda Functions from a SAM Template. The SAM template passed to this provider is assumed
    to be valid, normalized and a dictionary.

    It may or may not contain a function.
    """

    def __init__(self, template_dict, parameter_overrides=None, ignore_code_extraction_warnings=False):
        """
        Initialize the class with SAM template data. The SAM template passed to this provider is assumed
        to be valid, normalized and a dictionary. It should be normalized by running all pre-processing
        before passing to this class. The process of normalization will remove structures like ``Globals``, resolve
        intrinsic functions etc.
        This class does not perform any syntactic validation of the template.

        After the class is initialized, any changes to the ``template_dict`` will not be reflected in here.
        You need to explicitly update the class with new template, if necessary.

        :param dict template_dict: SAM Template as a dictionary
        :param dict parameter_overrides: Optional dictionary of values for SAM template parameters to be
            substituted within the template
        :param bool ignore_code_extraction_warnings: Suppress log warnings emitted while extracting code from resources
        """

        self.template_dict = SamFunctionProvider.get_template(template_dict, parameter_overrides)
        self.ignore_code_extraction_warnings = ignore_code_extraction_warnings
        self.resources = self.template_dict.get("Resources", {})

        LOG.debug("%d resources found in the template", len(self.resources))

        # Store a map of function name to function information for quick reference
        self.functions = self._extract_functions(self.resources, self.ignore_code_extraction_warnings)

        self._deprecated_runtimes = {"nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"}
        self._colored = Colored()

    def get(self, name):
        """
        Returns the function with the given name or LogicalId. Every SAM resource has a LogicalId, but it may
        also have a function name. This method searches by LogicalId and returns the function that matches.

        :param string name: Name of the function
        :return Function: namedtuple containing the Function information if the function is found.
                          None, if the function is not found
        :raises ValueError: If name is not given
        """

        if not name:
            raise ValueError("Function name is required")

        for f in self.get_all():
            if f.name == name:
                self._deprecate_notification(f.runtime)
                return f

            if f.functionname == name:
                self._deprecate_notification(f.runtime)
                return f

        return None

    def _deprecate_notification(self, runtime):
        if runtime in self._deprecated_runtimes:
            message = (
                f"WARNING: {runtime} is no longer supported by AWS Lambda, please update to a newer supported runtime. SAM CLI "
                f"will drop support for all deprecated runtimes {self._deprecated_runtimes} on May 1st. "
                f"See issue: https://github.com/awslabs/aws-sam-cli/issues/1934 for more details."
            )
            LOG.warning(self._colored.yellow(message))

    def get_all(self):
        """
        Yields all the Lambda functions available in the SAM Template.

        :yields Function: namedtuple containing the function information
        """

        for _, function in self.functions.items():
            yield function

    @staticmethod
    def _extract_functions(resources, ignore_code_extraction_warnings=False):
        """
        Extracts and returns function information from the given dictionary of SAM/CloudFormation resources. This
        method supports functions defined with AWS::Serverless::Function and AWS::Lambda::Function

        :param dict resources: Dictionary of SAM/CloudFormation resources
        :param bool ignore_code_extraction_warnings: suppress log statements on code extraction from resources.
        :return dict(string : samcli.commands.local.lib.provider.Function): Dictionary of function LogicalId to the
            Function configuration object
        """

        result = {}

        for name, resource in resources.items():

            resource_type = resource.get("Type")
            resource_properties = resource.get("Properties", {})
            resource_metadata = resource.get("Metadata", None)
            # Add extra metadata information to properties under a separate field.
            if resource_metadata:
                resource_properties["Metadata"] = resource_metadata

            if resource_type == SamFunctionProvider.SERVERLESS_FUNCTION:
                layers = SamFunctionProvider._parse_layer_info(
                    resource_properties.get("Layers", []),
                    resources,
                    ignore_code_extraction_warnings=ignore_code_extraction_warnings,
                )
                result[name] = SamFunctionProvider._convert_sam_function_resource(
                    name, resource_properties, layers, ignore_code_extraction_warnings=ignore_code_extraction_warnings
                )

            elif resource_type == SamFunctionProvider.LAMBDA_FUNCTION:
                layers = SamFunctionProvider._parse_layer_info(
                    resource_properties.get("Layers", []),
                    resources,
                    ignore_code_extraction_warnings=ignore_code_extraction_warnings,
                )
                result[name] = SamFunctionProvider._convert_lambda_function_resource(name, resource_properties, layers)

            # We don't care about other resource types. Just ignore them

        return result

    @staticmethod
    def _convert_sam_function_resource(name, resource_properties, layers, ignore_code_extraction_warnings=False):
        """
        Converts an AWS::Serverless::Function resource to a Function configuration usable by the provider.

        Parameters
        ----------
        name str
            LogicalID of the resource. NOTE: This is *not* the function name because not all functions declare a name
        resource_properties dict
            Properties of this resource
        layers List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function.

        Returns
        -------
        samcli.commands.local.lib.provider.Function
            Function configuration
        """

        codeuri = SamFunctionProvider.DEFAULT_CODEURI
        imageuri = None
        packagetype = resource_properties.get("PackageType", ZIP)
        if packagetype == ZIP:
            codeuri = SamFunctionProvider._extract_sam_function_codeuri(
                name, resource_properties, "CodeUri", ignore_code_extraction_warnings=ignore_code_extraction_warnings
            )
            LOG.debug("Found Serverless function with name='%s' and CodeUri='%s'", name, codeuri)
        elif packagetype == IMAGE:
            imageuri = SamFunctionProvider._extract_sam_function_imageuri(resource_properties, "ImageUri")
            LOG.debug("Found Serverless function with name='%s' and ImageUri='%s'", name, imageuri)

        return SamFunctionProvider._build_function_configuration(name, codeuri, resource_properties, layers, imageuri)

    @staticmethod
    def _convert_lambda_function_resource(name, resource_properties, layers):  # pylint: disable=invalid-name
        """
        Converts an AWS::Lambda::Function resource to a Function configuration usable by the provider.

        Parameters
        ----------
        name str
            LogicalID of the resource. NOTE: This is *not* the function name because not all functions declare a name
        resource_properties dict
            Properties of this resource
        layers List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function.

        Returns
        -------
        samcli.commands.local.lib.provider.Function
            Function configuration
        """

        # CodeUri is set to "." in order to get code locally from current directory. AWS::Lambda::Function's ``Code``
        # property does not support specifying a local path
        codeuri = SamFunctionProvider.DEFAULT_CODEURI
        imageuri = None
        packagetype = resource_properties.get("PackageType", ZIP)

        if packagetype == ZIP:
            codeuri = SamFunctionProvider._extract_lambda_function_code(resource_properties, "Code")
            LOG.debug("Found Lambda function with name='%s' and CodeUri='%s'", name, codeuri)
        elif packagetype == IMAGE:
            imageuri = SamFunctionProvider._extract_lambda_function_imageuri(resource_properties, "Code")
            LOG.debug("Found Lambda function with name='%s' and Imageuri='%s'", name, imageuri)

        return SamFunctionProvider._build_function_configuration(name, codeuri, resource_properties, layers, imageuri)

    @staticmethod
    def _build_function_configuration(name, codeuri, resource_properties, layers, imageuri=None):
        """
        Builds a Function configuration usable by the provider.

        Parameters
        ----------
        name str
            LogicalID of the resource. NOTE: This is *not* the function name because not all functions declare a name
        codeuri str
            Representing the local code path
        resource_properties dict
            Properties of this resource
        layers List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function.

        Returns
        -------
        samcli.commands.local.lib.provider.Function
            Function configuration
        """
        return Function(
            name=name,
            functionname=resource_properties.get("FunctionName", name),
            packagetype=resource_properties.get("PackageType", ZIP),
            runtime=resource_properties.get("Runtime"),
            memory=resource_properties.get("MemorySize"),
            timeout=resource_properties.get("Timeout"),
            handler=resource_properties.get("Handler"),
            codeuri=codeuri,
            imageuri=imageuri if imageuri else resource_properties.get("ImageUri"),
            imageconfig=resource_properties.get("ImageConfig"),
            environment=resource_properties.get("Environment"),
            rolearn=resource_properties.get("Role"),
            events=resource_properties.get("Events"),
            layers=layers,
            metadata=resource_properties.get("Metadata", None),
            codesign_config_arn=resource_properties.get("CodeSigningConfigArn", None),
        )

    @staticmethod
    def _parse_layer_info(list_of_layers, resources, ignore_code_extraction_warnings=False):
        """
        Creates a list of Layer objects that are represented by the resources and the list of layers

        Parameters
        ----------
        list_of_layers List(str)
            List of layers that are defined within the Layers Property on a function
        resources dict
            The Resources dictionary defined in a template
        ignore_code_extraction_warnings bool
            Optional. When True, warnings emitted while extracting layer code locations are ignored

        Returns
        -------
        List(samcli.commands.local.lib.provider.Layer)
            List of the Layer objects created from the template and layer list defined on the function. The order
            of the layers does not change.

            E.g.: if list_of_layers = ["layer1", "layer2"], the return would be [Layer("layer1"), Layer("layer2")]
        """
        layers = []
        for layer in list_of_layers:
            if layer == "arn:aws:lambda:::awslayer:AmazonLinux1803":
                LOG.debug("Skipped arn:aws:lambda:::awslayer:AmazonLinux1803 as the containers are AmazonLinux1803")
                continue

            if layer == "arn:aws:lambda:::awslayer:AmazonLinux1703":
                raise InvalidLayerVersionArn(
                    "Building and invoking locally only supports AmazonLinux1803. See "
                    "https://aws.amazon.com/blogs/compute/upcoming-updates-to-the-aws-lambda-execution-environment/ "
                    "for more details."
                )

            # If the layer is a string, assume it is the arn
            if isinstance(layer, str):
                layers.append(LayerVersion(layer, None))
                continue

            # In the list of layers that is defined within a template, you can reference a LayerVersion resource.
            # When running locally, we need to follow that Ref so we can extract the local path to the layer code.
            if isinstance(layer, dict) and layer.get("Ref"):
                layer_logical_id = layer.get("Ref")
                layer_resource = resources.get(layer_logical_id)
                if not layer_resource or layer_resource.get("Type", "") not in (
                    SamFunctionProvider.SERVERLESS_LAYER,
                    SamFunctionProvider.LAMBDA_LAYER,
                ):
                    raise InvalidLayerReference()

                layer_properties = layer_resource.get("Properties", {})
                resource_type = layer_resource.get("Type")
                compatible_runtimes = layer_properties.get("CompatibleRuntimes")
                codeuri = None

                if resource_type == SamFunctionProvider.LAMBDA_LAYER:
                    codeuri = SamFunctionProvider._extract_lambda_function_code(layer_properties, "Content")

                if resource_type == SamFunctionProvider.SERVERLESS_LAYER:
                    codeuri = SamFunctionProvider._extract_sam_function_codeuri(
                        layer_logical_id, layer_properties, "ContentUri", ignore_code_extraction_warnings
                    )

                layers.append(
                    LayerVersion(layer_logical_id, codeuri, compatible_runtimes, layer_resource.get("Metadata", None))
                )

        return layers
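
    # Usage sketch (illustrative, not part of the original source). The Layers
    # property on a function may mix plain ARN strings with {"Ref": ...} entries
    # that point at layer resources declared in the same template, for example:
    #
    #     layers = SamFunctionProvider._parse_layer_info(
    #         ["arn:aws:lambda:us-east-1:111122223333:layer:shared:1", {"Ref": "MyLayer"}],
    #         template_dict.get("Resources", {}),
    #     )
    #
    # The ARN entry becomes LayerVersion(arn, None); the Ref entry is resolved to
    # the local ContentUri/Content path of the referenced layer resource.
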
Code example #14
class ApplicationBuilder:
    """
    Class to build an entire application. Currently, this class builds Lambda functions only, but there is nothing
    that stops it from supporting other resource types. Building, in the context of Lambda functions, refers to
    converting source code into artifacts that can be run on AWS Lambda.
    """
    def __init__(self,
                 resources_to_build,
                 build_dir,
                 base_dir,
                 cache_dir,
                 cached=False,
                 is_building_specific_resource=False,
                 manifest_path_override=None,
                 container_manager=None,
                 parallel=False,
                 mode=None,
                 stream_writer=None,
                 docker_client=None):
        """
        Initialize the class

        Parameters
        ----------
        resources_to_build: Iterator
            Iterator that can vend out resources available in the SAM template

        build_dir : str
            Path to the directory where we will be storing built artifacts

        base_dir : str
            Path to a folder. Use this folder as the root to resolve relative source code paths against

        cache_dir : str
            Path to the directory where we will be caching built artifacts

        cached : bool
            Optional. Set to True to build each function with cache to improve performance

        is_building_specific_resource : boolean
            Whether customer requested to build a specific resource alone in isolation,
            by specifying function_identifier to the build command.
            Ex: sam build MyServerlessFunction

        container_manager : samcli.local.docker.manager.ContainerManager
            Optional. If provided, we will attempt to build inside a Docker Container

        parallel : bool
            Optional. Set to True to build each function in parallel to improve performance

        mode : str
            Optional, name of the build mode to use ex: 'debug'
        """
        self._resources_to_build = resources_to_build
        self._build_dir = build_dir
        self._base_dir = base_dir
        self._cache_dir = cache_dir
        self._cached = cached
        self._manifest_path_override = manifest_path_override
        self._is_building_specific_resource = is_building_specific_resource

        self._container_manager = container_manager
        self._parallel = parallel
        self._mode = mode
        self._stream_writer = stream_writer if stream_writer else StreamWriter(
            osutils.stderr())
        self._docker_client = docker_client if docker_client else docker.from_env(
        )

        self._deprecated_runtimes = {
            "nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"
        }
        self._colored = Colored()

    def build(self):
        """
        Build the entire application

        Returns
        -------
        dict
            Returns the path to where each resource was built as a map of resource's LogicalId to the path string
        """
        build_graph = self._get_build_graph()
        build_strategy = DefaultBuildStrategy(build_graph, self._build_dir,
                                              self._build_function,
                                              self._build_layer)

        if self._parallel:
            if self._cached:
                build_strategy = ParallelBuildStrategy(
                    build_graph,
                    CachedBuildStrategy(build_graph, build_strategy,
                                        self._base_dir, self._build_dir,
                                        self._cache_dir,
                                        self._is_building_specific_resource))
            else:
                build_strategy = ParallelBuildStrategy(build_graph,
                                                       build_strategy)
        elif self._cached:
            build_strategy = CachedBuildStrategy(
                build_graph, build_strategy, self._base_dir, self._build_dir,
                self._cache_dir, self._is_building_specific_resource)

        return build_strategy.build()
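
        # Strategy composition above, summarized (illustrative):
        #   parallel and cached -> ParallelBuildStrategy(CachedBuildStrategy(DefaultBuildStrategy))
        #   parallel only       -> ParallelBuildStrategy(DefaultBuildStrategy)
        #   cached only         -> CachedBuildStrategy(DefaultBuildStrategy)
        #   neither             -> DefaultBuildStrategy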

    def _get_build_graph(self):
        """
        Converts the list of functions and layers into a build graph, where we can iterate on each unique build
        definition and trigger its build
        :return: BuildGraph, which represents list of unique build definitions
        """
        build_graph = BuildGraph(self._build_dir)
        functions = self._resources_to_build.functions
        layers = self._resources_to_build.layers
        for function in functions:
            function_build_details = FunctionBuildDefinition(
                function.runtime, function.codeuri, function.packagetype,
                function.metadata)
            build_graph.put_function_build_definition(function_build_details,
                                                      function)

        for layer in layers:
            layer_build_details = LayerBuildDefinition(
                layer.name, layer.codeuri, layer.build_method,
                layer.compatible_runtimes)
            build_graph.put_layer_build_definition(layer_build_details, layer)

        build_graph.clean_redundant_definitions_and_update(
            not self._is_building_specific_resource)
        return build_graph

    def update_template(self, template_dict, original_template_path,
                        built_artifacts):
        """
        Given the path to built artifacts, update the template to point appropriate resource CodeUris to the artifacts
        folder

        Parameters
        ----------
        template_dict
        original_template_path : str
            Path where the template file will be written to

        built_artifacts : dict
            Map of LogicalId of a resource to the path where the built artifacts for this resource live

        Returns
        -------
        dict
            Updated template
        """

        original_dir = pathlib.Path(original_template_path).parent.resolve()

        for logical_id, resource in template_dict.get("Resources", {}).items():

            if logical_id not in built_artifacts:
                # this resource was not built. So skip it
                continue

            artifact_dir = pathlib.Path(built_artifacts[logical_id]).resolve()

            # Default path to absolute path of the artifact
            store_path = str(artifact_dir)

            # On Windows, if the template and artifacts are on two different drives, relpath will fail
            if original_dir.drive == artifact_dir.drive:
                # Artifacts are written relative to the template because it makes the template portable
                #   Ex: A CI/CD pipeline build stage could zip the output folder and pass to a
                #   package stage running on a different machine
                store_path = os.path.relpath(artifact_dir, original_dir)

            resource_type = resource.get("Type")
            properties = resource.setdefault("Properties", {})

            if resource_type == SamBaseProvider.SERVERLESS_FUNCTION and properties.get(
                    "PackageType", ZIP) == ZIP:
                properties["CodeUri"] = store_path

            if resource_type == SamBaseProvider.LAMBDA_FUNCTION and properties.get(
                    "PackageType", ZIP) == ZIP:
                properties["Code"] = store_path

            if resource_type in [
                    SamBaseProvider.SERVERLESS_LAYER,
                    SamBaseProvider.LAMBDA_LAYER
            ]:
                properties["ContentUri"] = store_path

            if resource_type == SamBaseProvider.LAMBDA_FUNCTION and properties.get(
                    "PackageType", ZIP) == IMAGE:
                properties["Code"] = built_artifacts[logical_id]

            if resource_type == SamBaseProvider.SERVERLESS_FUNCTION and properties.get(
                    "PackageType", ZIP) == IMAGE:
                properties["ImageUri"] = built_artifacts[logical_id]

        return template_dict
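
        # Illustrative effect (assumption): with built_artifacts =
        # {"HelloWorldFunction": "/abs/.aws-sam/build/HelloWorldFunction"} and the
        # template at /abs/template.yaml, a Zip-type AWS::Serverless::Function gets
        # Properties.CodeUri rewritten to ".aws-sam/build/HelloWorldFunction", while
        # an Image-type AWS::Serverless::Function gets Properties.ImageUri set to the
        # built image tag recorded in built_artifacts.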

    def _build_lambda_image(self, function_name, metadata):
        """
        Build a Lambda image

        Parameters
        ----------
        function_name str
            Name of the function (logical id or function name)
        metadata dict
            Dictionary representing the Metadata attached to the Resource in the template

        Returns
        -------
        str
            The full tag (org/repo:tag) of the image that was built
        """

        LOG.info("Building image for %s function", function_name)

        dockerfile = metadata.get("Dockerfile")
        docker_context = metadata.get("DockerContext")
        # Have a default tag if not present.
        tag = metadata.get("DockerTag", "latest")
        docker_tag = f"{function_name.lower()}:{tag}"
        docker_build_args = metadata.get("DockerBuildArgs", {})
        if not isinstance(docker_build_args, dict):
            raise DockerBuildFailed(
                "DockerBuildArgs needs to be a dictionary!")

        docker_context_dir = pathlib.Path(self._base_dir,
                                          docker_context).resolve()
        if not is_docker_reachable(self._docker_client):
            raise DockerConnectionError(
                msg=f"Building image for {function_name} requires Docker. Is Docker running?"
            )

        if os.environ.get("SAM_BUILD_MODE") and isinstance(
                docker_build_args, dict):
            docker_build_args["SAM_BUILD_MODE"] = os.environ.get(
                "SAM_BUILD_MODE")
            docker_tag = "-".join(
                [docker_tag, docker_build_args["SAM_BUILD_MODE"]])

        if isinstance(docker_build_args, dict):
            LOG.info("Setting DockerBuildArgs: %s for %s function",
                     docker_build_args, function_name)

        build_logs = self._docker_client.api.build(
            path=str(docker_context_dir),
            dockerfile=dockerfile,
            tag=docker_tag,
            buildargs=docker_build_args,
            decode=True)

        # The Docker-py low-level API streams logs back, but an exception raised by the API only surfaces when the
        # generator is consumed. So we need to wrap access to build_logs in a try/except.
        try:
            self._stream_lambda_image_build_logs(build_logs, function_name)
        except docker.errors.APIError as e:
            if e.is_server_error and "Cannot locate specified Dockerfile" in e.explanation:
                raise DockerfileOutSideOfContext(e.explanation) from e

            # Not sure what else can be raised that we should be catching, but re-raising for now
            raise

        return docker_tag
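
        # Illustrative Metadata consumed above (assumption, mirroring the keys read
        # by this method):
        #
        #     Metadata:
        #       Dockerfile: Dockerfile
        #       DockerContext: ./src
        #       DockerTag: v1
        #       DockerBuildArgs:
        #         STAGE: prod
        #
        # For a function named "HelloWorld" this builds and returns the tag
        # "helloworld:v1" (or "helloworld:v1-debug" when SAM_BUILD_MODE=debug).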

    def _stream_lambda_image_build_logs(self, build_logs, function_name):
        """
        Stream logs to the console from a Lambda image build.

        Parameters
        ----------
        build_logs generator
            A generator for the build output.
        function_name str
            Name of the function that is being built

        Returns
        -------
        None
        """
        for log in build_logs:
            if log:
                log_stream = log.get("stream")
                error_stream = log.get("error")

                if error_stream:
                    raise DockerBuildFailed(
                        f"{function_name} failed to build: {error_stream}")

                if log_stream:
                    self._stream_writer.write(str.encode(log_stream))
                    self._stream_writer.flush()

    def _build_layer(self, layer_name, codeuri, specified_workflow,
                     compatible_runtimes):
        # Create the arguments to pass to the builder
        # Code is always relative to the given base directory.
        code_dir = str(pathlib.Path(self._base_dir, codeuri).resolve())

        config = get_workflow_config(None, code_dir, self._base_dir,
                                     specified_workflow)
        subfolder = get_layer_subfolder(specified_workflow)

        # artifacts directory will be created by the builder
        artifacts_dir = str(
            pathlib.Path(self._build_dir, layer_name, subfolder))

        with osutils.mkdir_temp() as scratch_dir:
            manifest_path = self._manifest_path_override or os.path.join(
                code_dir, config.manifest_name)

            # By default prefer to build in-process for speed
            build_runtime = specified_workflow
            build_method = self._build_function_in_process
            if self._container_manager:
                build_method = self._build_function_on_container
                if config.language == "provided":
                    LOG.warning(
                        "For container layer build, first compatible runtime is chosen as build target for container."
                    )
                    # Only set to this value when the specified workflow is makefile, which results in config.language being "provided"
                    build_runtime = compatible_runtimes[0]
            options = ApplicationBuilder._get_build_options(
                layer_name, config.language, None)

            build_method(config, code_dir, artifacts_dir, scratch_dir,
                         manifest_path, build_runtime, options)
            # Not including subfolder in return so that we copy subfolder, instead of copying artifacts inside it.
            return str(pathlib.Path(self._build_dir, layer_name))

    def _build_function(self,
                        function_name,
                        codeuri,
                        packagetype,
                        runtime,
                        handler,
                        artifacts_dir,
                        metadata=None):  # pylint: disable=R1710
        """
        Given the function information, this method will build the Lambda function. Depending on the configuration
        it will either build the function in process or by spinning up a Docker container.

        Parameters
        ----------
        function_name : str
            Name or LogicalId of the function

        codeuri : str
            Path to where the code lives

        packagetype : str
            Package type of the function, either ZIP or IMAGE

        runtime : str
            AWS Lambda function runtime

        handler : str
            Handler of the AWS Lambda function

        artifacts_dir : str
            Path to where the function will be built into

        metadata : dict
            AWS Lambda function metadata

        Returns
        -------
        str
            Path to the location where built artifacts are available
        """
        if packagetype == IMAGE:
            return self._build_lambda_image(function_name=function_name,
                                            metadata=metadata)
        if packagetype == ZIP:
            if runtime in self._deprecated_runtimes:
                message = (
                    f"WARNING: {runtime} is no longer supported by AWS Lambda, please update to a newer supported runtime. SAM CLI "
                    f"will drop support for all deprecated runtimes {self._deprecated_runtimes} on May 1st. "
                    f"See issue: https://github.com/awslabs/aws-sam-cli/issues/1934 for more details."
                )
                LOG.warning(self._colored.yellow(message))

            # Create the arguments to pass to the builder
            # Code is always relative to the given base directory.
            code_dir = str(pathlib.Path(self._base_dir, codeuri).resolve())

            # Determine if there was a build workflow that was specified directly in the template.
            specified_build_workflow = metadata.get("BuildMethod",
                                                    None) if metadata else None

            config = get_workflow_config(
                runtime,
                code_dir,
                self._base_dir,
                specified_workflow=specified_build_workflow)

            with osutils.mkdir_temp() as scratch_dir:
                manifest_path = self._manifest_path_override or os.path.join(
                    code_dir, config.manifest_name)

                # By default prefer to build in-process for speed
                build_method = self._build_function_in_process
                if self._container_manager:
                    build_method = self._build_function_on_container

                options = ApplicationBuilder._get_build_options(
                    function_name, config.language, handler)

                return build_method(config, code_dir, artifacts_dir,
                                    scratch_dir, manifest_path, runtime,
                                    options)

    @staticmethod
    def _get_build_options(function_name, language, handler):
        """
        Parameters
        ----------
        function_name str
            current function resource name
        language str
            language of the runtime
        handler str
            Handler value of the Lambda Function Resource
        Returns
        -------
        dict
            Dictionary that represents the options to pass to the builder workflow or None if options are not needed
        """

        _build_options = {
            "go": {
                "artifact_executable_name": handler
            },
            "provided": {
                "build_logical_id": function_name
            }
        }
        return _build_options.get(language, None)
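
        # For example (illustrative): _get_build_options("MyFunc", "go", "main")
        # returns {"artifact_executable_name": "main"}; for a language such as
        # "python" it returns None because no extra options are needed.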

    def _build_function_in_process(self, config, source_dir, artifacts_dir,
                                   scratch_dir, manifest_path, runtime,
                                   options):

        builder = LambdaBuilder(
            language=config.language,
            dependency_manager=config.dependency_manager,
            application_framework=config.application_framework,
        )

        runtime = runtime.replace(".al2", "")

        try:
            builder.build(
                source_dir,
                artifacts_dir,
                scratch_dir,
                manifest_path,
                runtime=runtime,
                executable_search_paths=config.executable_search_paths,
                mode=self._mode,
                options=options,
            )
        except LambdaBuilderError as ex:
            raise BuildError(wrapped_from=ex.__class__.__name__,
                             msg=str(ex)) from ex

        return artifacts_dir

    def _build_function_on_container(
        self,  # pylint: disable=too-many-locals
        config,
        source_dir,
        artifacts_dir,
        scratch_dir,
        manifest_path,
        runtime,
        options,
    ):

        if not self._container_manager.is_docker_reachable:
            raise BuildInsideContainerError(
                "Docker is unreachable. Docker needs to be running to build inside a container."
            )

        container_build_supported, reason = supports_build_in_container(config)
        if not container_build_supported:
            raise ContainerBuildNotSupported(reason)

        # If we are printing debug logs in SAM CLI, the builder library should also print debug logs
        log_level = LOG.getEffectiveLevel()

        container = LambdaBuildContainer(
            lambda_builders_protocol_version,
            config.language,
            config.dependency_manager,
            config.application_framework,
            source_dir,
            manifest_path,
            runtime,
            log_level=log_level,
            optimizations=None,
            options=options,
            executable_search_paths=config.executable_search_paths,
            mode=self._mode,
        )

        try:
            try:
                self._container_manager.run(container)
            except docker.errors.APIError as ex:
                if "executable file not found in $PATH" in str(ex):
                    raise UnsupportedBuilderLibraryVersionError(
                        container.image,
                        "{} executable not found in container".format(
                            container.executable_name)) from ex

            # Container's output provides status of whether the build succeeded or failed
            # stdout contains the result of JSON-RPC call
            stdout_stream = io.BytesIO()
            # stderr contains logs printed by the builder. Stream it directly to terminal
            stderr_stream = osutils.stderr()
            container.wait_for_logs(stdout=stdout_stream, stderr=stderr_stream)

            stdout_data = stdout_stream.getvalue().decode("utf-8")
            LOG.debug("Build inside container returned response %s",
                      stdout_data)

            response = self._parse_builder_response(stdout_data,
                                                    container.image)

            # Request is successful. Now copy the artifacts back to the host
            LOG.debug(
                "Build inside container was successful. Copying artifacts from container to host"
            )

            # "/." is a Docker thing that instructions the copy command to download contents of the folder only
            result_dir_in_container = response["result"]["artifacts_dir"] + "/."
            container.copy(result_dir_in_container, artifacts_dir)
        finally:
            self._container_manager.stop(container)

        LOG.debug("Build inside container succeeded")
        return artifacts_dir

    @staticmethod
    def _parse_builder_response(stdout_data, image_name):

        try:
            response = json.loads(stdout_data)
        except Exception:
            # Invalid JSON is produced as an output only when the builder process crashed for some reason.
            # Report this as a crash
            LOG.debug("Builder crashed")
            raise

        if "error" in response:
            error = response.get("error", {})
            err_code = error.get("code")
            msg = error.get("message")

            if 400 <= err_code < 500:
                # Like HTTP 4xx - customer error
                raise BuildInsideContainerError(msg)

            if err_code == 505:
                # Like HTTP 505 error code: Version of the protocol is not supported
                # In this case, this error means that the Builder Library within the container is
                # not compatible with the version of the protocol that this SAM CLI installation supports.
                # This can happen when customers have a newer container image or an older SAM CLI version.
                # https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/505
                raise UnsupportedBuilderLibraryVersionError(image_name, msg)

            if err_code == -32601:
                # Default JSON Rpc Code for Method Unavailable https://www.jsonrpc.org/specification
                # This can happen if customers are using an incompatible version of builder library within the
                # container
                LOG.debug(
                    "Builder library does not support the supplied method")
                raise UnsupportedBuilderLibraryVersionError(image_name, msg)

            LOG.debug("Builder crashed")
            raise ValueError(msg)

        return response
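
        # Illustrative builder responses handled above (assumption):
        #   {"result": {"artifacts_dir": "/tmp/samcli/artifacts"}}  -> returned as-is
        #   {"error": {"code": 400, "message": "bad manifest"}}     -> BuildInsideContainerError
        #   {"error": {"code": 505, "message": "..."}}              -> UnsupportedBuilderLibraryVersionError
        #   {"error": {"code": -32601, "message": "..."}}           -> UnsupportedBuilderLibraryVersionError
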
Code example #15
File: app_builder.py  Project: zwerginz/aws-sam-cli
class ApplicationBuilder:
    """
    Class to build an entire application. Currently, this class builds Lambda functions only, but there is nothing
    that stops it from supporting other resource types. Building, in the context of Lambda functions, refers to
    converting source code into artifacts that can be run on AWS Lambda.
    """
    def __init__(self,
                 resources_to_build,
                 build_dir,
                 base_dir,
                 manifest_path_override=None,
                 container_manager=None,
                 parallel=False,
                 mode=None):
        """
        Initialize the class

        Parameters
        ----------
        resources_to_build: Iterator
            Iterator that can vend out resources available in the SAM template

        build_dir : str
            Path to the directory where we will be storing built artifacts

        base_dir : str
            Path to a folder. Use this folder as the root to resolve relative source code paths against

        container_manager : samcli.local.docker.manager.ContainerManager
            Optional. If provided, we will attempt to build inside a Docker Container

        parallel : bool
            Optional. Set to True to build each function in parallel to improve performance

        mode : str
            Optional, name of the build mode to use ex: 'debug'
        """
        self._resources_to_build = resources_to_build
        self._build_dir = build_dir
        self._base_dir = base_dir
        self._manifest_path_override = manifest_path_override

        self._container_manager = container_manager
        self._parallel = parallel
        self._mode = mode

        self._deprecated_runtimes = {
            "nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"
        }
        self._colored = Colored()

    def build(self):
        """
        Build the entire application

        Returns
        -------
        dict
            Returns the path to where each resource was built as a map of resource's LogicalId to the path string
        """

        result = {}

        for function in self._resources_to_build.functions:
            LOG.info("Building function '%s'", function.name)
            result[function.name] = self._build_function(
                function.name, function.codeuri, function.runtime,
                function.handler, function.metadata)
        for layer in self._resources_to_build.layers:
            LOG.info("Building layer '%s'", layer.name)
            if layer.build_method is None:
                raise MissingBuildMethodException(
                    f"Layer {layer.name} cannot be build without BuildMethod. Please provide BuildMethod in Metadata."
                )
            result[layer.name] = self._build_layer(layer.name, layer.codeuri,
                                                   layer.build_method,
                                                   layer.compatible_runtimes)

        return result

    def update_template(self, template_dict, original_template_path,
                        built_artifacts):
        """
        Given the path to built artifacts, update the template to point appropriate resource CodeUris to the artifacts
        folder

        Parameters
        ----------
        template_dict
        original_template_path : str
            Path where the template file will be written to

        built_artifacts : dict
            Map of LogicalId of a resource to the path where the built artifacts for this resource live

        Returns
        -------
        dict
            Updated template
        """

        original_dir = os.path.dirname(original_template_path)

        for logical_id, resource in template_dict.get("Resources", {}).items():

            if logical_id not in built_artifacts:
                # this resource was not built. So skip it
                continue

            # Artifacts are written relative to the template because it makes the template portable
            #   Ex: A CI/CD pipeline build stage could zip the output folder and pass to a
            #   package stage running on a different machine
            artifact_relative_path = os.path.relpath(
                built_artifacts[logical_id], original_dir)

            resource_type = resource.get("Type")
            properties = resource.setdefault("Properties", {})
            if resource_type == SamBaseProvider.SERVERLESS_FUNCTION:
                properties["CodeUri"] = artifact_relative_path

            if resource_type == SamBaseProvider.LAMBDA_FUNCTION:
                properties["Code"] = artifact_relative_path

            if resource_type in [
                    SamBaseProvider.SERVERLESS_LAYER,
                    SamBaseProvider.LAMBDA_LAYER
            ]:
                properties["ContentUri"] = artifact_relative_path

        return template_dict

    def _build_layer(self, layer_name, codeuri, specified_workflow,
                     compatible_runtimes):
        # Create the arguments to pass to the builder
        # Code is always relative to the given base directory.
        code_dir = str(pathlib.Path(self._base_dir, codeuri).resolve())

        config = get_workflow_config(None, code_dir, self._base_dir,
                                     specified_workflow)
        subfolder = get_layer_subfolder(specified_workflow)

        # artifacts directory will be created by the builder
        artifacts_dir = str(
            pathlib.Path(self._build_dir, layer_name, subfolder))

        with osutils.mkdir_temp() as scratch_dir:
            manifest_path = self._manifest_path_override or os.path.join(
                code_dir, config.manifest_name)

            # By default prefer to build in-process for speed
            build_runtime = specified_workflow
            build_method = self._build_function_in_process
            if self._container_manager:
                build_method = self._build_function_on_container
                if config.language == "provided":
                    LOG.warning(
                        "For container layer build, first compatible runtime is chosen as build target for container."
                    )
                    # Only set to this value when the specified workflow is makefile, which results in config.language being "provided"
                    build_runtime = compatible_runtimes[0]
            options = ApplicationBuilder._get_build_options(
                layer_name, config.language, None)

            build_method(config, code_dir, artifacts_dir, scratch_dir,
                         manifest_path, build_runtime, options)
            # Not including subfolder in return so that we copy subfolder, instead of copying artifacts inside it.
            return str(pathlib.Path(self._build_dir, layer_name))

    def _build_function(self,
                        function_name,
                        codeuri,
                        runtime,
                        handler,
                        metadata=None):
        """
        Given the function information, this method will build the Lambda function. Depending on the configuration
        it will either build the function in process or by spinning up a Docker container.

        Parameters
        ----------
        function_name : str
            Name or LogicalId of the function

        codeuri : str
            Path to where the code lives

        runtime : str
            AWS Lambda function runtime

        metadata : dict
            AWS Lambda function metadata

        Returns
        -------
        str
            Path to the location where built artifacts are available
        """

        if runtime in self._deprecated_runtimes:
            message = f"WARNING: {runtime} is no longer supported by AWS Lambda, please update to a newer supported runtime. SAM CLI " \
                      f"will drop support for all deprecated runtimes {self._deprecated_runtimes} on May 1st. " \
                      f"See issue: https://github.com/awslabs/aws-sam-cli/issues/1934 for more details."
            LOG.warning(self._colored.yellow(message))

        # Create the arguments to pass to the builder
        # Code is always relative to the given base directory.
        code_dir = str(pathlib.Path(self._base_dir, codeuri).resolve())

        # Determine if there was a build workflow that was specified directly in the template.
        specified_build_workflow = metadata.get("BuildMethod",
                                                None) if metadata else None

        config = get_workflow_config(
            runtime,
            code_dir,
            self._base_dir,
            specified_workflow=specified_build_workflow)

        # artifacts directory will be created by the builder
        artifacts_dir = str(pathlib.Path(self._build_dir, function_name))

        with osutils.mkdir_temp() as scratch_dir:
            manifest_path = self._manifest_path_override or os.path.join(
                code_dir, config.manifest_name)

            # By default prefer to build in-process for speed
            build_method = self._build_function_in_process
            if self._container_manager:
                build_method = self._build_function_on_container

            options = ApplicationBuilder._get_build_options(
                function_name, config.language, handler)

            return build_method(config, code_dir, artifacts_dir, scratch_dir,
                                manifest_path, runtime, options)

    @staticmethod
    def _get_build_options(function_name, language, handler):
        """
        Parameters
        ----------
        function_name str
            current function resource name
        language str
            language of the runtime
        handler str
            Handler value of the Lambda Function Resource
        Returns
        -------
        dict
            Dictionary that represents the options to pass to the builder workflow or None if options are not needed
        """

        _build_options = {
            'go': {
                'artifact_executable_name': handler
            },
            'provided': {
                'build_logical_id': function_name
            }
        }
        return _build_options.get(language, None)

    def _build_function_in_process(self, config, source_dir, artifacts_dir,
                                   scratch_dir, manifest_path, runtime,
                                   options):

        builder = LambdaBuilder(
            language=config.language,
            dependency_manager=config.dependency_manager,
            application_framework=config.application_framework)

        runtime = runtime.replace(".al2", "")

        try:
            builder.build(
                source_dir,
                artifacts_dir,
                scratch_dir,
                manifest_path,
                runtime=runtime,
                executable_search_paths=config.executable_search_paths,
                mode=self._mode,
                options=options)
        except LambdaBuilderError as ex:
            raise BuildError(wrapped_from=ex.__class__.__name__, msg=str(ex))

        return artifacts_dir

    def _build_function_on_container(
            self,  # pylint: disable=too-many-locals
            config,
            source_dir,
            artifacts_dir,
            scratch_dir,
            manifest_path,
            runtime,
            options):

        if not self._container_manager.is_docker_reachable:
            raise BuildInsideContainerError(
                "Docker is unreachable. Docker needs to be running to build inside a container."
            )

        container_build_supported, reason = supports_build_in_container(config)
        if not container_build_supported:
            raise ContainerBuildNotSupported(reason)

        # If we are printing debug logs in SAM CLI, the builder library should also print debug logs
        log_level = LOG.getEffectiveLevel()

        container = LambdaBuildContainer(
            lambda_builders_protocol_version,
            config.language,
            config.dependency_manager,
            config.application_framework,
            source_dir,
            manifest_path,
            runtime,
            log_level=log_level,
            optimizations=None,
            options=options,
            executable_search_paths=config.executable_search_paths,
            mode=self._mode)

        try:
            try:
                self._container_manager.run(container)
            except docker.errors.APIError as ex:
                if "executable file not found in $PATH" in str(ex):
                    raise UnsupportedBuilderLibraryVersionError(
                        container.image,
                        "{} executable not found in container".format(
                            container.executable_name))

            # Container's output provides status of whether the build succeeded or failed
            # stdout contains the result of JSON-RPC call
            stdout_stream = io.BytesIO()
            # stderr contains logs printed by the builder. Stream it directly to terminal
            stderr_stream = osutils.stderr()
            container.wait_for_logs(stdout=stdout_stream, stderr=stderr_stream)

            stdout_data = stdout_stream.getvalue().decode('utf-8')
            LOG.debug("Build inside container returned response %s",
                      stdout_data)

            response = self._parse_builder_response(stdout_data,
                                                    container.image)

            # Request is successful. Now copy the artifacts back to the host
            LOG.debug(
                "Build inside container was successful. Copying artifacts from container to host"
            )

            # "/." is a Docker thing that instructions the copy command to download contents of the folder only
            result_dir_in_container = response["result"]["artifacts_dir"] + "/."
            container.copy(result_dir_in_container, artifacts_dir)
        finally:
            self._container_manager.stop(container)

        LOG.debug("Build inside container succeeded")
        return artifacts_dir

    @staticmethod
    def _parse_builder_response(stdout_data, image_name):

        try:
            response = json.loads(stdout_data)
        except Exception:
            # Invalid JSON is produced as an output only when the builder process crashed for some reason.
            # Report this as a crash
            LOG.debug("Builder crashed")
            raise

        if "error" in response:
            error = response.get("error", {})
            err_code = error.get("code")
            msg = error.get("message")

            if 400 <= err_code < 500:
                # Like HTTP 4xx - customer error
                raise BuildInsideContainerError(msg)

            if err_code == 505:
                # Like HTTP 505 error code: Version of the protocol is not supported
                # In this case, this error means that the Builder Library within the container is
                # not compatible with the version of the protocol that this SAM CLI installation supports.
                # This can happen when customers have a newer container image or an older SAM CLI version.
                # https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/505
                raise UnsupportedBuilderLibraryVersionError(image_name, msg)

            if err_code == -32601:
                # Default JSON Rpc Code for Method Unavailable https://www.jsonrpc.org/specification
                # This can happen if customers are using an incompatible version of builder library within the
                # container
                LOG.debug(
                    "Builder library does not support the supplied method")
                raise UnsupportedBuilderLibraryVersionError(image_name, msg)

            LOG.debug("Builder crashed")
            raise ValueError(msg)

        return response
Code example #16
    def __init__(
        self,
        resources_to_build: ResourcesToBuildCollector,
        build_dir: str,
        base_dir: str,
        cache_dir: str,
        cached: bool = False,
        is_building_specific_resource: bool = False,
        manifest_path_override: Optional[str] = None,
        container_manager: Optional[ContainerManager] = None,
        parallel: bool = False,
        mode: Optional[str] = None,
        stream_writer: Optional[StreamWriter] = None,
        docker_client: Optional[docker.DockerClient] = None,
        container_env_var: Optional[Dict] = None,
        container_env_var_file: Optional[str] = None,
    ) -> None:
        """
        Initialize the class

        Parameters
        ----------
        resources_to_build: Iterator
            Iterator that can vend out resources available in the SAM template
        build_dir : str
            Path to the directory where we will be storing built artifacts
        base_dir : str
            Path to a folder. Use this folder as the root to resolve relative source code paths against
        cache_dir : str
            Path to the directory where we will be caching built artifacts
        cached : bool
            Optional. Set to True to build each function with cache to improve performance
        is_building_specific_resource : boolean
            Whether customer requested to build a specific resource alone in isolation,
            by specifying function_identifier to the build command.
            Ex: sam build MyServerlessFunction
        manifest_path_override : Optional[str]
            Optional path to manifest file to replace the default one
        container_manager : samcli.local.docker.manager.ContainerManager
            Optional. If provided, we will attempt to build inside a Docker Container
        parallel : bool
            Optional. Set to True to build each function in parallel to improve performance
        mode : str
            Optional, name of the build mode to use ex: 'debug'
        stream_writer : Optional[StreamWriter]
            An optional stream writer to accept stderr output
        docker_client : Optional[docker.DockerClient]
            An optional Docker client object to replace the default one loaded from env
        container_env_var : Optional[Dict]
            An optional dictionary of environment variables to pass to the container
        container_env_var_file : Optional[str]
            An optional path to file that contains environment variables to pass to the container
        """
        self._resources_to_build = resources_to_build
        self._build_dir = build_dir
        self._base_dir = base_dir
        self._cache_dir = cache_dir
        self._cached = cached
        self._manifest_path_override = manifest_path_override
        self._is_building_specific_resource = is_building_specific_resource

        self._container_manager = container_manager
        self._parallel = parallel
        self._mode = mode
        self._stream_writer = stream_writer if stream_writer else StreamWriter(
            osutils.stderr())
        self._docker_client = docker_client if docker_client else docker.from_env(
        )

        self._deprecated_runtimes = {
            "nodejs4.3", "nodejs6.10", "nodejs8.10", "dotnetcore2.0"
        }
        self._colored = Colored()
        self._container_env_var = container_env_var
        self._container_env_var_file = container_env_var_file
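
    # Illustrative construction (assumption; the ResourcesToBuildCollector instance
    # comes from the provider layer and is not shown in this excerpt):
    #
    #     builder = ApplicationBuilder(
    #         resources_to_build=collector,
    #         build_dir=".aws-sam/build",
    #         base_dir=".",
    #         cache_dir=".aws-sam/cache",
    #         cached=True,
    #         parallel=True,
    #     )
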
Code example #17
class GuidedContext:
    def __init__(
        self,
        template_file,
        stack_name,
        s3_bucket,
        s3_prefix,
        region=None,
        profile=None,
        confirm_changeset=None,
        capabilities=None,
        parameter_overrides=None,
        save_to_config=True,
        config_section=None,
    ):
        self.template_file = template_file
        self.stack_name = stack_name
        self.s3_bucket = s3_bucket
        self.s3_prefix = s3_prefix
        self.region = region
        self.profile = profile
        self.confirm_changeset = confirm_changeset
        self.capabilities = (capabilities, )
        self.parameter_overrides = parameter_overrides
        self.save_to_config = save_to_config
        self.config_section = config_section
        self.guided_stack_name = None
        self.guided_s3_bucket = None
        self.guided_s3_prefix = None
        self.guided_region = None
        self.guided_profile = None
        self._capabilities = None
        self._parameter_overrides = None
        self.start_bold = "\033[1m"
        self.end_bold = "\033[0m"
        self.color = Colored()

    @property
    def guided_capabilities(self):
        return self._capabilities

    @property
    def guided_parameter_overrides(self):
        return self._parameter_overrides

    def guided_prompts(self, parameter_override_keys):
        default_stack_name = self.stack_name or "sam-app"
        default_region = self.region or "us-east-1"
        default_capabilities = ("CAPABILITY_IAM", )
        input_capabilities = None

        click.echo(
            self.color.yellow(
                "\n\tSetting default arguments for 'sam deploy'\n\t========================================="
            ))

        stack_name = prompt(f"\t{self.start_bold}Stack Name{self.end_bold}",
                            default=default_stack_name,
                            type=click.STRING)
        region = prompt(f"\t{self.start_bold}AWS Region{self.end_bold}",
                        default=default_region,
                        type=click.STRING)
        input_parameter_overrides = self.prompt_parameters(
            parameter_override_keys, self.start_bold, self.end_bold)

        click.secho(
            "\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"
        )
        confirm_changeset = confirm(
            f"\t{self.start_bold}Confirm changes before deploy{self.end_bold}",
            default=self.confirm_changeset)
        click.secho(
            "\t#SAM needs permission to be able to create roles to connect to the resources in your template"
        )
        capabilities_confirm = confirm(
            f"\t{self.start_bold}Allow SAM CLI IAM role creation{self.end_bold}",
            default=True)

        if not capabilities_confirm:
            input_capabilities = prompt(
                f"\t{self.start_bold}Capabilities{self.end_bold}",
                default=list(default_capabilities),
                type=FuncParamType(func=_space_separated_list_func_type),
            )

        save_to_config = confirm(
            f"\t{self.start_bold}Save arguments to samconfig.toml{self.end_bold}",
            default=True)

        s3_bucket = manage_stack(profile=self.profile, region=region)
        click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}")
        click.echo(
            "\t\tA different default S3 bucket can be set in samconfig.toml")

        self.guided_stack_name = stack_name
        self.guided_s3_bucket = s3_bucket
        self.guided_s3_prefix = stack_name
        self.guided_region = region
        self.guided_profile = self.profile
        self._capabilities = input_capabilities if input_capabilities else default_capabilities
        self._parameter_overrides = input_parameter_overrides if input_parameter_overrides else self.parameter_overrides
        self.save_to_config = save_to_config
        self.confirm_changeset = confirm_changeset

    def prompt_parameters(self, parameter_override_keys, start_bold, end_bold):
        _prompted_param_overrides = {}
        if parameter_override_keys:
            for parameter_key, parameter_properties in parameter_override_keys.items(
            ):
                no_echo = parameter_properties.get("NoEcho", False)
                if no_echo:
                    parameter = prompt(
                        f"\t{start_bold}Parameter {parameter_key}{end_bold}",
                        type=click.STRING,
                        hide_input=True)
                    _prompted_param_overrides[parameter_key] = {
                        "Value": parameter,
                        "Hidden": True
                    }
                else:
                    # Make sure the default is cast to a string.
                    parameter = prompt(
                        f"\t{start_bold}Parameter {parameter_key}{end_bold}",
                        default=_prompted_param_overrides.get(
                            parameter_key,
                            str(parameter_properties.get("Default", ""))),
                        type=click.STRING,
                    )
                    _prompted_param_overrides[parameter_key] = {
                        "Value": parameter,
                        "Hidden": False
                    }
        return _prompted_param_overrides
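
        # Shape of the returned overrides (illustrative):
        #   {"TableName": {"Value": "my-table", "Hidden": False},
        #    "DbPassword": {"Value": "<entered secret>", "Hidden": True}}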

    def run(self):

        try:
            _parameter_override_keys = get_template_parameters(
                template_file=self.template_file)
        except ValueError as ex:
            LOG.debug("Failed to parse SAM template", exc_info=ex)
            raise GuidedDeployFailedError(str(ex))

        guided_config = GuidedConfig(template_file=self.template_file,
                                     section=self.config_section)
        guided_config.read_config_showcase()

        self.guided_prompts(_parameter_override_keys)

        if self.save_to_config:
            guided_config.save_config(
                self._parameter_overrides,
                stack_name=self.guided_stack_name,
                s3_bucket=self.guided_s3_bucket,
                s3_prefix=self.guided_s3_prefix,
                region=self.guided_region,
                profile=self.guided_profile,
                confirm_changeset=self.confirm_changeset,
                capabilities=self._capabilities,
            )
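
    # Typical driver (illustrative, not from the original source): the deploy
    # command constructs the context and calls run(), which prompts for the values
    # above and persists them when saving to config is confirmed.
    #
    #     guided_context = GuidedContext(
    #         template_file="template.yaml",
    #         stack_name=None,
    #         s3_bucket=None,
    #         s3_prefix=None,
    #         confirm_changeset=True,
    #     )
    #     guided_context.run()
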
Code example #18
class GuidedContext:
    def __init__(
        self,
        template_file,
        stack_name,
        s3_bucket,
        s3_prefix,
        region=None,
        profile=None,
        confirm_changeset=None,
        capabilities=None,
        parameter_overrides=None,
        save_to_config=True,
        config_section=None,
        config_env=None,
        config_file=None,
    ):
        self.template_file = template_file
        self.stack_name = stack_name
        self.s3_bucket = s3_bucket
        self.s3_prefix = s3_prefix
        self.region = region
        self.profile = profile
        self.confirm_changeset = confirm_changeset
        self.capabilities = (capabilities, )
        self.parameter_overrides_from_cmdline = parameter_overrides
        self.save_to_config = save_to_config
        self.config_section = config_section
        self.config_env = config_env
        self.config_file = config_file
        self.guided_stack_name = None
        self.guided_s3_bucket = None
        self.guided_s3_prefix = None
        self.guided_region = None
        self.guided_profile = None
        self._capabilities = None
        self._parameter_overrides = None
        self.start_bold = "\033[1m"
        self.end_bold = "\033[0m"
        self.color = Colored()

    @property
    def guided_capabilities(self):
        return self._capabilities

    @property
    def guided_parameter_overrides(self):
        return self._parameter_overrides

    # pylint: disable=too-many-statements
    def guided_prompts(self, parameter_override_keys):
        default_stack_name = self.stack_name or "sam-app"
        default_region = self.region or "us-east-1"
        default_capabilities = self.capabilities[0] or ("CAPABILITY_IAM", )
        default_config_env = self.config_env or DEFAULT_ENV
        default_config_file = self.config_file or DEFAULT_CONFIG_FILE_NAME
        input_capabilities = None
        config_env = None
        config_file = None

        click.echo(
            self.color.yellow(
                "\n\tSetting default arguments for 'sam deploy'\n\t========================================="
            ))

        stack_name = prompt(f"\t{self.start_bold}Stack Name{self.end_bold}",
                            default=default_stack_name,
                            type=click.STRING)
        region = prompt(f"\t{self.start_bold}AWS Region{self.end_bold}",
                        default=default_region,
                        type=click.STRING)
        input_parameter_overrides = self.prompt_parameters(
            parameter_override_keys, self.parameter_overrides_from_cmdline,
            self.start_bold, self.end_bold)

        click.secho(
            "\t#Shows you resources changes to be deployed and require a 'Y' to initiate deploy"
        )
        confirm_changeset = confirm(
            f"\t{self.start_bold}Confirm changes before deploy{self.end_bold}",
            default=self.confirm_changeset)
        click.secho(
            "\t#SAM needs permission to be able to create roles to connect to the resources in your template"
        )
        capabilities_confirm = confirm(
            f"\t{self.start_bold}Allow SAM CLI IAM role creation{self.end_bold}",
            default=True)

        if not capabilities_confirm:
            input_capabilities = prompt(
                f"\t{self.start_bold}Capabilities{self.end_bold}",
                default=list(default_capabilities),
                type=FuncParamType(func=_space_separated_list_func_type),
            )

        self.prompt_authorization(
            sanitize_parameter_overrides(input_parameter_overrides))

        save_to_config = confirm(
            f"\t{self.start_bold}Save arguments to configuration file{self.end_bold}",
            default=True)
        if save_to_config:
            config_file = prompt(
                f"\t{self.start_bold}SAM configuration file{self.end_bold}",
                default=default_config_file,
                type=click.STRING,
            )
            config_env = prompt(
                f"\t{self.start_bold}SAM configuration environment{self.end_bold}",
                default=default_config_env,
                type=click.STRING,
            )

        s3_bucket = manage_stack(profile=self.profile, region=region)
        click.echo(f"\n\t\tManaged S3 bucket: {s3_bucket}")
        click.echo(
            "\t\tA different default S3 bucket can be set in samconfig.toml")

        self.guided_stack_name = stack_name
        self.guided_s3_bucket = s3_bucket
        self.guided_s3_prefix = stack_name
        self.guided_region = region
        self.guided_profile = self.profile
        self._capabilities = input_capabilities if input_capabilities else default_capabilities
        self._parameter_overrides = (input_parameter_overrides
                                     if input_parameter_overrides else
                                     self.parameter_overrides_from_cmdline)
        self.save_to_config = save_to_config
        self.config_env = config_env if config_env else default_config_env
        self.config_file = config_file if config_file else default_config_file
        self.confirm_changeset = confirm_changeset

    def prompt_authorization(self, parameter_overrides):
        auth_required_per_resource = auth_per_resource(
            parameter_overrides, get_template_data(self.template_file))

        for resource, authorization_required in auth_required_per_resource:
            if not authorization_required:
                auth_confirm = confirm(
                    f"\t{self.start_bold}{resource} may not have authorization defined, Is this okay?{self.end_bold}",
                    default=False,
                )
                if not auth_confirm:
                    raise GuidedDeployFailedError(
                        msg="Security Constraints Not Satisfied!")

    def prompt_parameters(self, parameter_override_from_template,
                          parameter_override_from_cmdline, start_bold,
                          end_bold):
        _prompted_param_overrides = {}
        if parameter_override_from_template:
            for parameter_key, parameter_properties in parameter_override_from_template.items(
            ):
                no_echo = parameter_properties.get("NoEcho", False)
                if no_echo:
                    parameter = prompt(
                        f"\t{start_bold}Parameter {parameter_key}{end_bold}",
                        type=click.STRING,
                        hide_input=True)
                    _prompted_param_overrides[parameter_key] = {
                        "Value": parameter,
                        "Hidden": True
                    }
                else:
                    parameter = prompt(
                        f"\t{start_bold}Parameter {parameter_key}{end_bold}",
                        default=_prompted_param_overrides.get(
                            parameter_key,
                            self._get_parameter_value(
                                parameter_key, parameter_properties,
                                parameter_override_from_cmdline),
                        ),
                        type=click.STRING,
                    )
                    _prompted_param_overrides[parameter_key] = {
                        "Value": parameter,
                        "Hidden": False
                    }
        return _prompted_param_overrides

    def run(self):

        try:
            _parameter_override_keys = get_template_parameters(
                template_file=self.template_file)
        except ValueError as ex:
            LOG.debug("Failed to parse SAM template", exc_info=ex)
            raise GuidedDeployFailedError(str(ex))

        guided_config = GuidedConfig(template_file=self.template_file,
                                     section=self.config_section)
        guided_config.read_config_showcase(
            self.config_file or DEFAULT_CONFIG_FILE_NAME)

        self.guided_prompts(_parameter_override_keys)

        if self.save_to_config:
            guided_config.save_config(
                self._parameter_overrides,
                self.config_env or DEFAULT_ENV,
                self.config_file or DEFAULT_CONFIG_FILE_NAME,
                stack_name=self.guided_stack_name,
                s3_bucket=self.guided_s3_bucket,
                s3_prefix=self.guided_s3_prefix,
                region=self.guided_region,
                profile=self.guided_profile,
                confirm_changeset=self.confirm_changeset,
                capabilities=self._capabilities,
            )

    def _get_parameter_value(self, parameter_key, parameter_properties,
                             parameter_override_from_cmdline):
        """
        This function provide the value of a parameter. If the command line/config file have "override_parameter"
        whose key exist in the template file parameters, it will use the corresponding value.
        Otherwise, it will use its default value in template file.

        :param parameter_key: key of parameter
        :param parameter_properties: properties of that parameters from template file
        :param parameter_override_from_cmdline: parameter_override from command line/config file
        """
        if parameter_override_from_cmdline and parameter_override_from_cmdline.get(
                parameter_key, None):
            return parameter_override_from_cmdline[parameter_key]
        # Make sure the default is cast to a string.
        return str(parameter_properties.get("Default", ""))
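The lookup order documented in _get_parameter_value above (a command line/config file override wins, otherwise the template default is used, cast to a string) can be illustrated with a small standalone sketch. The helper name resolve_parameter_value and the sample parameter names and values below are hypothetical and are not part of the extracted SAM CLI code.

def resolve_parameter_value(parameter_key, parameter_properties, overrides):
    # Prefer an explicit override from the command line/config file when present.
    if overrides and overrides.get(parameter_key):
        return overrides[parameter_key]
    # Otherwise fall back to the template default, cast to a string.
    return str(parameter_properties.get("Default", ""))


if __name__ == "__main__":
    stage_properties = {"Default": "dev"}  # hypothetical template parameter definition
    # Override supplied -> the override value is returned.
    print(resolve_parameter_value("Stage", stage_properties, {"Stage": "prod"}))  # prod
    # No override -> the template default is returned.
    print(resolve_parameter_value("Stage", stage_properties, {}))  # dev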