Example 1
    def test_missing_artifact(self):
        event = self.lambda_event(
            [self.artifact_dict("Input", "missing")],
            self.artifact_dict("Output", "key"))

        with LogCapture() as l:
            codepipeline_handler(event, None)

        self.assertIn(
            "Unable to download input artifact 'Input' (s3://" +
            self.bucket_name + "/missing):", str(l))
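
The `lambda_event` and `artifact_dict` helpers used throughout these examples are not shown here. Below is a minimal sketch of what they plausibly build, reconstructed from the fields the tests index into later (`inputArtifacts`, `location`, `actionConfiguration.configuration.UserParameters`) and the documented CodePipeline job event shape; the job id and the snake_case-to-CamelCase parameter mapping are assumptions.

    # Hypothetical reconstructions of the fixture helpers (assumes
    # `import json` at module level); not the test suite's actual code.
    def artifact_dict(self, name, key):
        # One inputArtifacts/outputArtifacts entry: an S3-backed artifact.
        return {
            "name": name,
            "location": {
                "type": "S3",
                "s3Location": {
                    "bucketName": self.bucket_name,
                    "objectKey": key,
                },
            },
        }

    def lambda_event(self, input_artifacts, output_artifact, **user_params):
        # The envelope CodePipeline passes to a Lambda invoke action. The
        # snake_case kwargs are assumed to map to CamelCase user parameters
        # (template_document -> TemplateDocument), matching the error
        # messages asserted in these tests.
        params = {
            "".join(word.capitalize() for word in key.split("_")): value
            for key, value in user_params.items()
            if value is not None
        }
        return {
            "CodePipeline.job": {
                "id": "00000000-0000-0000-0000-000000000000",
                "data": {
                    "inputArtifacts": input_artifacts,
                    "outputArtifacts": [output_artifact],
                    "actionConfiguration": {
                        "configuration": {
                            "UserParameters": json.dumps(params),
                        },
                    },
                },
            },
        }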
Example 2
    def test_invalid_format(self):
        event = self.lambda_event(
            [],
            self.artifact_dict("Output", "key"),
            format="qwerty")

        with LogCapture() as l:
            codepipeline_handler(event, None)

        self.assertIn(
            "Invalid output format 'qwerty': valid types are 'json' and 'yaml'",
            str(l))
Example 3
    def test_unknown_artifact_parameter(self):
        event = self.lambda_event(
            [self.artifact_dict("Input", "missing")],
            self.artifact_dict("Output", "key"),
            template_document="Foo::bar")

        with LogCapture() as l:
            codepipeline_handler(event, None)

        self.assertIn(
            "Invalid value for TemplateDocument: unknown input artifact Foo",
            str(l))
Example 4
    def test_bad_artifact_filename(self):
        event = self.lambda_event(
            [self.artifact_dict("Input", "missing")],
            self.artifact_dict("Output", "key"),
            template_document="qwertyuiop")

        with LogCapture() as l:
            codepipeline_handler(event, None)

        self.assertIn(
            "Invalid value for TemplateDocument: expected input_artifact::"
            "filename: qwertyuiop", str(l))
Example 5
    def test_unknown_artifacttype(self):
        event = self.lambda_event(
            [self.artifact_dict("Input", "missing")],
            self.artifact_dict("Output", "key"))

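        # Force an unsupported artifact location type to exercise the
        # error path.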
        event["CodePipeline.job"]["data"]["inputArtifacts"][0]["location"]\
            ["type"] = "ftp"

        with LogCapture() as l:
            codepipeline_handler(event, None)

        self.assertIn("Can't handle input artifact type ftp", str(l))
Example 6
    def test_bad_userparams(self):
        event = self.lambda_event([], self.artifact_dict("Output", "key"))
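        # Syntactically invalid JSON: the handler should surface the
        # parser's error message.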
        event["CodePipeline.job"]["data"]["actionConfiguration"]\
            ["configuration"]["UserParameters"] = "{])}"

        with LogCapture() as l:
            codepipeline_handler(event, None)

        self.assertIn("Expecting property name", str(l))

        event = self.lambda_event([], self.artifact_dict("Output", "key"))
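        # Well-formed JSON, but an array rather than the required object.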
        event["CodePipeline.job"]["data"]["actionConfiguration"]\
            ["configuration"]["UserParameters"] = "[]"

        with LogCapture() as l:
            codepipeline_handler(event, None)

        self.assertIn("Expected a JSON object for user parameters", str(l))
Example 7
    def run_doc(self, filename):
        s3 = self.boto3.resource("s3", region_name="us-west-2")
        bucket = s3.Bucket(self.bucket_name)
        bucket.create()

        filename = dirname(__file__) + "/lambda/" + filename

        with open(filename, "r") as fd:
            log.info("Running Lambda test on %s" % filename)
            doc = yaml_load(fd)
            input_artifacts = []

            for artifact in doc["InputArtifacts"]:
                name = artifact["Name"]
                contents = {}

                for member_name, data in iteritems(artifact.get("Files", {})):
                    if isinstance(data, (list, dict)):
                        data = yaml_dump(data)

                    contents[member_name] = data

                input_artifacts.append(
                    self.create_input_artifact(name, contents))

            output_artifact = doc["OutputArtifact"]

        output_artifact_name = output_artifact.get("Name", "Output")
        output_artifact_key = "%s/%s/%s.zip" % (
            self.pipeline_name, output_artifact_name, random_keyname())
        output_artifact_dict = self.artifact_dict(
            output_artifact_name, output_artifact_key)

        event = self.lambda_event(
            input_artifacts=input_artifacts,
            output_artifact=output_artifact_dict,
            template_document=doc.get("TemplateDocument"),
            resource_documents=doc.get("ResourceDocuments"),
            default_input_filename=doc.get("DefaultInputFilename"),
            local_tags=doc.get("LocalTags"))

        log.info("Invoking Lambda codepipeline_handler")
        with captured_output() as (out, err):
            with LogCapture() as l:
                codepipeline_handler(event, None)

        # Redisplay the log records
        for record in l.records:
            getLogger(record.name).handle(record)

        log.info("Lambda codepipeline_handler done")


        expected_errors = doc.get("ExpectedErrors")
        if expected_errors:
            # Search the captured logs and stderr for each expected message.
            err_text = str(l) + "\n" + err.getvalue()

            if isinstance(expected_errors, string_types):
                self.assertIn(expected_errors, err_text)
            else:
                for expected in expected_errors:
                    self.assertIn(expected, err_text)
        else:
            output_filename, expected_content = next(
                iteritems(output_artifact["Files"]))
            expected_content = yaml_load(expected_content)

            result_obj = s3.Object(self.bucket_name, output_artifact_key)
            result_zip = result_obj.get()["Body"].read()

            with ZipFile(BytesIO(result_zip), "r") as zf:
                with zf.open(output_filename, "r") as fd:
                    result = yaml_load(fd)

        self.assertEqual(result, expected_content)
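
run_doc drives the full handler from a YAML fixture on disk. Judging from the keys it reads (`InputArtifacts`, `OutputArtifact`, `TemplateDocument`, `ResourceDocuments`, `DefaultInputFilename`, `LocalTags`, `ExpectedErrors`), a minimal success-path fixture could look like the following; the file names and template contents are invented for illustration:

# Hypothetical fixture for run_doc; the key names come from the
# doc[...] reads above, everything else is illustrative.
InputArtifacts:
  - Name: Input
    Files:
      # Dict/list values are serialized with yaml_dump before being
      # zipped into the input artifact.
      template.yml:
        Resources:
          Bucket:
            Type: "AWS::S3::Bucket"
TemplateDocument: Input::template.yml
OutputArtifact:
  Name: Output
  Files:
    # The expected content is a YAML string; both it and the produced
    # file are parsed before comparison, so formatting is irrelevant.
    template.json: |
      Resources:
        Bucket:
          Type: "AWS::S3::Bucket"
# Failure-path fixtures instead set ExpectedErrors to a single string
# or a list of substrings to look for in the captured logs:
# ExpectedErrors:
#   - "Invalid output format"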