def check_server_groups_endpoint(self, kind, image, has_lb=True):
        name = self.TEST_APP + "-" + kind
        account = self.bindings["SPINNAKER_KUBERNETES_V2_ACCOUNT"]
        builder = HttpContractBuilder(self.agent)
        lb_pred = (jp.LIST_MATCHES([
            jp.STR_EQ("service {}-service".format(self.TEST_APP))
        ]) if has_lb else jp.LIST_EQ([]))
        (builder.new_clause_builder(
            "Has recorded a server group for the deployed manifest",
            retryable_for_secs=120,
        ).get_url_path("/applications/{}/serverGroups".format(
            self.TEST_APP)).EXPECT(
                ov_factory.value_list_contains(
                    jp.DICT_MATCHES({
                        "name":
                        jp.STR_SUBSTR(name),
                        "cluster":
                        jp.STR_EQ(kind + " " + name),
                        "account":
                        jp.STR_EQ(account),
                        "cloudProvider":
                        jp.STR_EQ("kubernetes"),
                        "buildInfo":
                        jp.DICT_MATCHES({
                            "images":
                            jp.LIST_MATCHES([jp.STR_EQ(image)]),
                        }),
                        "loadBalancers":
                        lb_pred,
                    }))))

        return st.OperationContract(
            NoOpOperation("Has recorded a server group"),
            contract=builder.build())
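The scenario methods in these examples build and return a citest st.OperationContract; they do not run anything themselves. Below is a minimal sketch of how such a contract is typically driven, under the assumption that the surrounding suite uses citest's st.AgentTestCase harness and its run_test_case method; the test-class name, the scenario wiring, and the kind/image arguments are hypothetical.

import citest.service_testing as st


class KubeV2EndpointTest(st.AgentTestCase):  # hypothetical test class
    """Sketch: drives one of the scenario methods shown in these examples."""

    def test_check_server_groups_endpoint(self):
        # self.scenario is assumed to be bound by the suite's setup code.
        # 'deployment' and 'library/nginx' are placeholder kind/image values.
        self.run_test_case(
            self.scenario.check_server_groups_endpoint(
                'deployment', 'library/nginx'))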
Example #2
    def check_server_groups_endpoint(self, kind, image, has_lb=True):
        name = self.TEST_APP + '-' + kind
        account = self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT']
        builder = HttpContractBuilder(self.agent)
        lb_pred = (jp.LIST_MATCHES([
            jp.STR_EQ('service {}-service'.format(self.TEST_APP))
        ]) if has_lb else jp.LIST_EQ([]))
        (builder.new_clause_builder(
            'Has recorded a server group for the deployed manifest',
            retryable_for_secs=120).get_url_path(
                '/applications/{}/serverGroups'.format(self.TEST_APP)).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            'name':
                            jp.STR_SUBSTR(name),
                            'cluster':
                            jp.STR_EQ(kind + ' ' + name),
                            'account':
                            jp.STR_EQ(account),
                            'cloudProvider':
                            jp.STR_EQ('kubernetes'),
                            'buildInfo':
                            jp.DICT_MATCHES({
                                'images':
                                jp.LIST_MATCHES([jp.STR_EQ(image)]),
                            }),
                            'loadBalancers':
                            lb_pred,
                        }))))

        return st.OperationContract(
            NoOpOperation('Has recorded a server group'),
            contract=builder.build())
Example #3
    def test_observation_failure_ok(self):
        error_text = 'the error'
        context = ExecutionContext()

        observation = jc.Observation()
        error = ValueError(error_text)
        observation.add_error(error)

        ex_pred = jp.ExceptionMatchesPredicate(ValueError, error_text)
        ex_result = ex_pred(context, error)
        ex_observation_predicate_result = jc.ObservationPredicateResult(
            True, observation, jp.LIST_MATCHES([ex_pred]),
            jp.LIST_MATCHES([ex_pred])(context, [error]))

        expect_failure = jc.ObservationVerifyResult(
            valid=True,
            observation=observation,
            good_results=[ex_observation_predicate_result],
            bad_results=[],
            failed_constraints=[])

        builder = jc.ValueObservationVerifierBuilder(title='Test For Error')
        builder.EXPECT(jc.ObservationErrorPredicate(jp.LIST_MATCHES([ex_pred
                                                                     ])))
        verifier = builder.build()

        self.assertEqual(expect_failure, verifier(context, observation))
    def test_list_match_simple_bad(self):
        context = ExecutionContext()
        source = [1]
        want = [jp.NUM_NE(1)]
        result = jp.LIST_MATCHES(want)(context, source)

        expect = (jp.SequencedPredicateResultBuilder(
            jp.LIST_MATCHES(want)).append_result(
                jp.MapPredicate(jp.NUM_NE(1))(context, source)).build(False))

        self.assertFalse(result)
        self.assertEqual(expect, result)
    def register_load_balancer_instances(self):
        """Creates test registering the first two instances with a load balancer.

           Assumes that create_instances test has been run to add
           the instances. Note by design these were in two different zones
           but same region as required by the API this is testing.

           Assumes that upsert_load_balancer has been run to
           create the load balancer itself.
        Returns:
          st.OperationContract
        """
        # pylint: disable=bad-continuation
        payload = self.agent.type_to_payload(
            "registerInstancesWithGoogleLoadBalancerDescription",
            {
                "loadBalancerNames": [self.__use_lb_name],
                "instanceIds": self.use_instance_names[:2],
                "region": self.bindings["TEST_GCE_REGION"],
                "credentials": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
            },
        )

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            "Instances in Target Pool",
            retryable_for_secs=15).list_resource("targetPools").EXPECT(
                ov_factory.value_list_contains(
                    jp.DICT_MATCHES({
                        "name":
                        jp.STR_SUBSTR(self.__use_lb_tp_name),
                        "instances":
                        jp.LIST_MATCHES([
                            jp.STR_SUBSTR(self.use_instance_names[0]),
                            jp.STR_SUBSTR(self.use_instance_names[1]),
                        ]),
                    }))).AND(
                        ov_factory.value_list_excludes(
                            jp.DICT_MATCHES({
                                "name":
                                jp.STR_SUBSTR(self.__use_lb_tp_name),
                                "instances":
                                jp.LIST_MATCHES([
                                    jp.STR_SUBSTR(self.use_instance_names[2])
                                ]),
                            }))))

        return st.OperationContract(
            self.new_post_operation(title="register_load_balancer_instances",
                                    data=payload,
                                    path="ops"),
            contract=builder.build(),
        )
  def delete_server_group(self):
    """Creates OperationContract for deleteServerGroup.

    To verify the operation, we just check that the AWS Auto Scaling Group
    is no longer visible on AWS (or is in the process of terminating).
    """
    bindings = self.bindings

    group_name = '{app}-{stack}-v000'.format(
        app=self.TEST_APP, stack=bindings['TEST_STACK'])

    payload = self.agent.make_json_payload_from_kwargs(
        job=[{
            'cloudProvider': 'aws',
            'type': 'destroyServerGroup',
            'serverGroupName': group_name,
            'asgName': group_name,
            'region': bindings['TEST_AWS_REGION'],
            'regions': [bindings['TEST_AWS_REGION']],
            'credentials': bindings['SPINNAKER_AWS_ACCOUNT'],
            'user': '******'
        }],
        application=self.TEST_APP,
        description='DestroyServerGroup: ' + group_name)

    builder = aws.AwsPythonContractBuilder(self.aws_observer)
    (builder.new_clause_builder('Auto Scaling Group Removed')
     .call_method(
         self.autoscaling_client.describe_auto_scaling_groups,
         AutoScalingGroupNames=[group_name])
     .EXPECT(
         ov_factory.error_list_contains(
             jp.ExceptionMatchesPredicate(
                   (BotoCoreError, ClientError), 'AutoScalingGroupNotFound')))
     .OR(
         ov_factory.value_list_path_contains(
             'AutoScalingGroups',
           jp.LIST_MATCHES([])))
     .OR(
         ov_factory.value_list_path_contains(
             'AutoScalingGroups',
             jp.LIST_MATCHES([
                 jp.DICT_MATCHES({'Status': jp.STR_SUBSTR('Delete'),
                                  'MaxSize': jp.NUM_EQ(0)})])))
     )

    return st.OperationContract(
        self.new_post_operation(
            title='delete_server_group', data=payload, path='tasks'),
        contract=builder.build())
Example #7
    def check_detailed_clusters_endpoint(self, kind):
        name = kind + ' ' + self.TEST_APP + '-' + kind
        url_name = name.replace(' ', '%20')
        account = self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT']
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has recorded a cluster for the deployed manifest',
            retryable_for_secs=120).get_url_path(
                '/applications/{app}/clusters/{account}/{name}'.format(
                    app=self.TEST_APP, account=account, name=url_name)).EXPECT(
                        ov_factory.value_list_contains(
                            jp.DICT_MATCHES({
                                'accountName':
                                jp.STR_EQ(account),
                                'name':
                                jp.STR_EQ(name),
                                'serverGroups':
                                jp.LIST_MATCHES([
                                    jp.DICT_MATCHES({
                                        'account':
                                        jp.STR_EQ(account),
                                    })
                                ]),
                            }))))

        return st.OperationContract(NoOpOperation('Has recorded a cluster'),
                                    contract=builder.build())
Example #8
    def save_delete_daemonset_pipeline(self):
        delete_stage = self.make_delete_stage_daemonset()
        job = dict(appConfig={},
                   keepWaitingPipelines='false',
                   application=self.TEST_APP,
                   name='daemonset-delete-pipeline',
                   lastModifiedBy='anonymous',
                   limitConcurrent='true',
                   parallel='true',
                   stages=[delete_stage])
        payload = self.agent.make_json_payload_from_kwargs(**job)
        expect_match = {
            key: jp.EQUIVALENT(value)
            for key, value in job.items()
        }
        expect_match['stages'] = jp.LIST_MATCHES([
            jp.DICT_MATCHES({
                key: jp.EQUIVALENT(value)
                for key, value in delete_stage.items()
            })
        ])

        builder = st.HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has Pipeline', retryable_for_secs=15).get_url_path(
                'applications/{app}/pipelineConfigs'.format(
                    app=self.TEST_APP)).contains_match(expect_match))
        return st.OperationContract(self.new_post_operation(
            title='create_delete daemonset',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus),
                                    contract=builder.build())
    def upsert_google_server_group_tags(self):
        # pylint: disable=bad-continuation
        server_group_name = 'katotest-server-group'
        payload = self.agent.type_to_payload(
            'upsertGoogleServerGroupTagsDescription', {
                'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
                'zone': self.bindings['TEST_GCE_ZONE'],
                'serverGroupName': 'katotest-server-group',
                'tags': ['test-tag-1', 'test-tag-2']
            })

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            'Server Group Tags Added').inspect_resource(
                'instanceGroupManagers', server_group_name).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            'name':
                            jp.STR_SUBSTR(server_group_name),
                            jp.build_path('tags', 'items'):
                            jp.LIST_MATCHES(['test-tag-1', 'test-tag-2'])
                        }))))

        return st.OperationContract(self.new_post_operation(
            title='upsert_server_group_tags', data=payload, path='ops'),
                                    contract=builder.build())
    def enable_server_group(self):
        job = [{
            'cloudProvider': 'gce',
            'asgName': self.__server_group_name,
            'serverGroupName': self.__server_group_name,
            'region': self.TEST_REGION,
            'zone': self.TEST_ZONE,
            'type': 'enableServerGroup',
            'regions': [self.TEST_REGION],
            'zones': [self.TEST_ZONE],
            'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
            'user': '******'
        }]

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            'Server Group Enabled', retryable_for_secs=90).list_resource(
                'instanceGroupManagers').contains_match({
                    'baseInstanceName':
                    jp.STR_SUBSTR(self.__server_group_name),
                    'targetPools':
                    jp.LIST_MATCHES([jp.STR_SUBSTR('https')])
                }))

        payload = self.agent.make_json_payload_from_kwargs(
            job=job,
            description='Server Group Test - enable server group',
            application=self.TEST_APP)

        return st.OperationContract(self.new_post_operation(
            title='enable_server_group', data=payload, path=self.__path),
                                    contract=builder.build())
 def deployment_configmap_mounted_predicate(self, configmap_name):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES(
             {
                 "spec": jp.DICT_MATCHES(
                     {
                         "template": jp.DICT_MATCHES(
                             {
                                 "spec": jp.DICT_MATCHES(
                                     {
                                         "volumes": jp.LIST_MATCHES(
                                             [
                                                 jp.DICT_MATCHES(
                                                     {
                                                         "configMap": jp.DICT_MATCHES(
                                                             {
                                                                 "name": jp.STR_SUBSTR(
                                                                     configmap_name
                                                                 )
                                                             }
                                                         )
                                                     }
                                                 )
                                             ]
                                         )
                                     }
                                 )
                             }
                         )
                     }
                 ),
                 "status": jp.DICT_MATCHES({"availableReplicas": jp.NUM_GE(1)}),
             }
         )
     )
Example #12
    def upsert_google_server_group_tags(self):
        # pylint: disable=bad-continuation
        server_group_name = "katotest-server-group"
        payload = self.agent.type_to_payload(
            "upsertGoogleServerGroupTagsDescription",
            {
                "credentials": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
                "zone": self.bindings["TEST_GCE_ZONE"],
                "serverGroupName": "katotest-server-group",
                "tags": ["test-tag-1", "test-tag-2"],
            },
        )

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            "Server Group Tags Added").inspect_resource(
                "instanceGroupManagers", server_group_name).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            "name":
                            jp.STR_SUBSTR(server_group_name),
                            jp.build_path("tags", "items"):
                            jp.LIST_MATCHES(["test-tag-1", "test-tag-2"]),
                        }))))

        return st.OperationContract(
            self.new_post_operation(title="upsert_server_group_tags",
                                    data=payload,
                                    path="ops"),
            contract=builder.build(),
        )
Example #13
    def submit_pipeline_contract(self, name, stages):
        s = self.scenario
        job = {
            'keepWaitingPipelines': 'false',
            'application': s.TEST_APP,
            'name': name,
            'lastModifiedBy': 'anonymous',
            'limitConcurrent': 'true',
            'parallel': 'true',
            'stages': stages,
        }
        payload = s.agent.make_json_payload_from_kwargs(**job)
        expect_match = {
            key: jp.EQUIVALENT(value)
            for key, value in job.items()
        }
        expect_match['stages'] = jp.LIST_MATCHES([
            jp.DICT_MATCHES(
                {key: jp.EQUIVALENT(value)
                 for key, value in stage.items()}) for stage in stages
        ])

        builder = st.HttpContractBuilder(s.agent)
        (builder.new_clause_builder(
            'Has Pipeline', retryable_for_secs=15).get_url_path(
                'applications/{app}/pipelineConfigs'.format(
                    app=s.TEST_APP)).contains_match(expect_match))
        return st.OperationContract(s.new_post_operation(
            title='save_pipeline_operation',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus),
                                    contract=builder.build())
    def check_detailed_clusters_endpoint(self, kind):
        name = kind + " " + self.TEST_APP + "-" + kind
        url_name = name.replace(" ", "%20")
        account = self.bindings["SPINNAKER_KUBERNETES_V2_ACCOUNT"]
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            "Has recorded a cluster for the deployed manifest",
            retryable_for_secs=120,
        ).get_url_path("/applications/{app}/clusters/{account}/{name}".format(
            app=self.TEST_APP, account=account, name=url_name)).EXPECT(
                ov_factory.value_list_contains(
                    jp.DICT_MATCHES({
                        "accountName":
                        jp.STR_EQ(account),
                        "name":
                        jp.STR_EQ(name),
                        "serverGroups":
                        jp.LIST_MATCHES([
                            jp.DICT_MATCHES({
                                "account": jp.STR_EQ(account),
                            })
                        ]),
                    }))))

        return st.OperationContract(NoOpOperation("Has recorded a cluster"),
                                    contract=builder.build())
    def check_load_balancers_endpoint(self, kind):
        name = kind + " " + self.TEST_APP + "-" + kind
        account = self.bindings["SPINNAKER_KUBERNETES_V2_ACCOUNT"]
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            "Has recorded a load balancer",
            retryable_for_secs=120).get_url_path(
                "/applications/{}/loadBalancers".format(self.TEST_APP)).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            "name":
                            jp.STR_EQ(name),
                            "kind":
                            jp.STR_EQ(kind),
                            "account":
                            jp.STR_EQ(account),
                            "cloudProvider":
                            jp.STR_EQ("kubernetes"),
                            "serverGroups":
                            jp.LIST_MATCHES([
                                jp.DICT_MATCHES({
                                    "account":
                                    jp.STR_EQ(account),
                                    "name":
                                    jp.STR_SUBSTR(self.TEST_APP),
                                }),
                            ]),
                        }))))

        return st.OperationContract(
            NoOpOperation("Has recorded a load balancer"),
            contract=builder.build())
Example #16
    def check_load_balancers_endpoint(self, kind):
        name = kind + ' ' + self.TEST_APP + '-' + kind
        account = self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT']
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has recorded a load balancer',
            retryable_for_secs=120).get_url_path(
                '/applications/{}/loadBalancers'.format(self.TEST_APP)).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            'name':
                            jp.STR_EQ(name),
                            'kind':
                            jp.STR_EQ(kind),
                            'account':
                            jp.STR_EQ(account),
                            'cloudProvider':
                            jp.STR_EQ('kubernetes'),
                            'serverGroups':
                            jp.LIST_MATCHES([
                                jp.DICT_MATCHES({
                                    'account':
                                    jp.STR_EQ(account),
                                    'name':
                                    jp.STR_SUBSTR(self.TEST_APP),
                                }),
                            ]),
                        }))))

        return st.OperationContract(
            NoOpOperation('Has recorded a load balancer'),
            contract=builder.build())
Example #17
 def deployment_configmap_mounted_predicate(self, configmap_name):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             'spec':
             jp.DICT_MATCHES({
                 'template':
                 jp.DICT_MATCHES({
                     'spec':
                     jp.DICT_MATCHES({
                         'volumes':
                         jp.LIST_MATCHES([
                             jp.DICT_MATCHES({
                                 'configMap':
                                 jp.DICT_MATCHES({
                                     'name':
                                     jp.STR_SUBSTR(configmap_name)
                                 })
                             })
                         ])
                     })
                 })
             }),
             'status':
             jp.DICT_MATCHES({'availableReplicas': jp.NUM_GE(1)})
         }))
  def create_deploy_pipeline(self):
    name = 'GcsToGaePubsubDeploy'
    self.pipeline_id = name

    pipeline_spec = self.make_pipeline_spec(name)
    payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)

    pipeline_config_path = 'applications/{app}/pipelineConfigs'.format(app=self.TEST_APP)
    builder = st.HttpContractBuilder(self.agent)
    (builder.new_clause_builder('Has Pipeline', retryable_for_secs=15)
     .get_url_path(pipeline_config_path)
     .EXPECT(
       jp.LIST_MATCHES([self.make_dict_matcher(pipeline_spec)])))

    # Need to query Gate for the id of the pipeline we just created...
    def create_pipeline_id_extractor(_ignored, context):
      pipeline_config_resp = self.agent.get(pipeline_config_path)
      pipeline_config_list = json.JSONDecoder().decode(pipeline_config_resp.output)
      found = next((x for x in pipeline_config_list if x['name'] == self.pipeline_id), None)
      if found is not None:
        context['pipelineId'] = found['id']
        # I don't know how to reference this later, so I'm saving it in self for now.
        self.__pipeline_id = found['id']
        logging.info('Created pipeline config with id: %s', context['pipelineId'])

    return st.OperationContract(
      self.new_post_operation(
        title='create_gcs_gae_pubsub_pipeline', data=payload, path='pipelines',
        status_class=st.SynchronousHttpOperationStatus),
      contract=builder.build(),
      status_extractor=create_pipeline_id_extractor)
 def deployment_image_predicate(self, image):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES(
             {
                 "spec": jp.DICT_MATCHES(
                     {
                         "template": jp.DICT_MATCHES(
                             {
                                 "spec": jp.DICT_MATCHES(
                                     {
                                         "containers": jp.LIST_MATCHES(
                                             [
                                                 jp.DICT_MATCHES(
                                                     {"image": jp.STR_EQ(image)}
                                                 )
                                             ]
                                         )
                                     }
                                 )
                             }
                         )
                     }
                 ),
                 "status": jp.DICT_MATCHES({"availableReplicas": jp.NUM_GE(1)}),
             }
         )
     )
Example #20
    def register_load_balancer_instances(self):
        """Creates test registering the first two instances with a load balancer.

       Assumes that create_instances test has been run to add
       the instances. Note by design these were in two different zones
       but same region as required by the API this is testing.

       Assumes that upsert_load_balancer has been run to
       create the load balancer itself.
    Returns:
      st.OperationContract
    """
        # pylint: disable=bad-continuation
        payload = self.agent.type_to_payload(
            'registerInstancesWithGoogleLoadBalancerDescription', {
                'loadBalancerNames': [self.__use_lb_name],
                'instanceIds': self.use_instance_names[:2],
                'region': self.bindings['TEST_GCE_REGION'],
                'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT']
            })

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            'Instances in Target Pool',
            retryable_for_secs=15).list_resource('targetPools').contains_match(
                {
                    'name':
                    jp.STR_SUBSTR(self.__use_lb_tp_name),
                    'instances':
                    jp.LIST_MATCHES([
                        jp.STR_SUBSTR(self.use_instance_names[0]),
                        jp.STR_SUBSTR(self.use_instance_names[1])
                    ])
                }).excludes_match({
                    'name':
                    jp.STR_SUBSTR(self.__use_lb_tp_name),
                    'instances':
                    jp.LIST_MATCHES(
                        [jp.STR_SUBSTR(self.use_instance_names[2])])
                }))

        return st.OperationContract(
            self.new_post_operation(title='register_load_balancer_instances',
                                    data=payload,
                                    path='ops'),
            contract=builder.build())
Example #21
    def test_list_match_unique_ok(self):
        context = ExecutionContext()
        source = [1, 2]
        want = [jp.NUM_EQ(1)]
        match_pred = jp.LIST_MATCHES(want, unique=True)
        result = match_pred(context, source)

        expect = (jp.SequencedPredicateResultBuilder(match_pred).append_result(
            jp.MapPredicate(jp.NUM_EQ(1))(context, source)).build(True))

        self.assertTrue(result)
        self.assertEqual(expect, result)
    def check_clusters_endpoint(self, kind):
        name = kind + ' ' + self.TEST_APP + '-' + kind
        account = self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT']
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has recorded a cluster for the deployed manifest').get_url_path(
                '/applications/{}/clusters'.format(self.TEST_APP)).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            account:
                            jp.LIST_MATCHES([jp.STR_EQ(name)]),
                        }))))

        return st.OperationContract(NoOpOperation('Has recorded a cluster'),
                                    contract=builder.build())
Example #23
    def test_list_match_strict_bad(self):
        context = ExecutionContext()
        source = [1, 2]
        want = [jp.NUM_NE(2)]
        match_pred = jp.LIST_MATCHES(want, strict=True)
        result = match_pred(context, source)

        expect = (jp.SequencedPredicateResultBuilder(match_pred).append_result(
            jp.MapPredicate(jp.NUM_NE(2))(context, source)).append_result(
                jp.UnexpectedPathError(source=source,
                                       target_path='[1]',
                                       path_value=jp.PathValue(
                                           '[1]', 2))).build(False))

        self.assertFalse(result)
        self.assertEqual(expect, result)
 def __deployment_image_predicate(self, image):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             'spec':
             jp.DICT_MATCHES({
                 'template':
                 jp.DICT_MATCHES({
                     'spec':
                     jp.DICT_MATCHES({
                         'containers':
                         jp.LIST_MATCHES(
                             [jp.DICT_MATCHES({'image': jp.STR_EQ(image)})])
                     })
                 })
             })
         }))
Example #25
    def create_find_image_pipeline(self):
        name = 'findImagePipeline'
        self.pipeline_id = name
        smoke_stage = self.make_smoke_stage()
        deploy_stage = self.make_deploy_stage(imageSource='FINDIMAGE',
                                              requisiteStages=['FINDIMAGE'])

        pipeline_spec = dict(name=name,
                             stages=[smoke_stage, deploy_stage],
                             triggers=[],
                             application=self.TEST_APP,
                             stageCounter=2,
                             parallel=True,
                             limitConcurrent=True,
                             executionEngine='v2',
                             appConfig={},
                             index=0)

        payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)
        expect_match = {
            key: jp.EQUIVALENT(value)
            for key, value in pipeline_spec.items()
        }
        expect_match['stages'] = jp.LIST_MATCHES([
            jp.DICT_MATCHES({
                key: jp.EQUIVALENT(value)
                for key, value in smoke_stage.items()
            }),
            jp.DICT_MATCHES({
                key: jp.EQUIVALENT(value)
                for key, value in deploy_stage.items()
            })
        ])

        builder = st.HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has Pipeline', retryable_for_secs=5).get_url_path(
                'applications/{app}/pipelineConfigs'.format(
                    app=self.TEST_APP)).contains_match(expect_match))

        return st.OperationContract(self.new_post_operation(
            title='create_find_image_pipeline',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus),
                                    contract=builder.build())
Example #26
    def test_observation_strict_vs_nonstrict(self):
        aA = jp.PathEqPredicate('a', 'A')
        bB = jp.PathEqPredicate('b', 'B')

        unstrict_object_list = [_NUMBER_DICT, _LETTER_DICT, _MIXED_DICT]
        unstrict_observation = jc.Observation()
        unstrict_observation.add_all_objects(unstrict_object_list)

        strict_object_list = [_LETTER_DICT, {'a': 'A', 'b': 'B', 'x': 'X'}]
        strict_observation = jc.Observation()
        strict_observation.add_all_objects(strict_object_list)

        none_object_list = [_NUMBER_DICT, _MIXED_DICT]
        none_observation = jc.Observation()
        none_observation.add_all_objects(none_object_list)

        # pylint: disable=bad-whitespace
        test_cases = [
            #  Name      jc.Observation        Strict,  Unstrict
            #---------------------------------------------------
            ('Strict', strict_observation, True, True),
            ('Unstrict', unstrict_observation, False, True),
            ('None', none_observation, False, False)
        ]

        # For each of the cases, test it with strict and non-strict verification.
        context = ExecutionContext()
        for test in test_cases:
            name = test[0]
            observation = test[1]

            # For this case, check it strict (2) and unstrict (3).
            for index in [2, 3]:
                test_strict = index == 2
                expected = test[index]
                aA_bB = jp.LIST_MATCHES([aA, bB], strict=test_strict)
                verifier = (jc.ValueObservationVerifierBuilder(
                    'verifier').EXPECT(aA_bB).build())

                verify_result = verifier(context, observation)
                try:
                    self.assertEqual(expected, bool(verify_result))
                except Exception as e:
                    print('*** FAILED case={0}:\n{1}'.format(name, e))
                    print('GOT {0}'.format(verify_result))
                    raise
    def make_dict_matcher(self, want):
        spec = {}
        for key, value in want.items():
            if isinstance(value, dict):
                spec[key] = self.make_dict_matcher(value)
            elif isinstance(value, list):
                list_spec = []
                for elem in value:
                    if isinstance(elem, dict):
                        list_spec.append(self.make_dict_matcher(elem))
                    else:
                        list_spec.append(jp.CONTAINS(elem))
                spec[key] = jp.LIST_MATCHES(list_spec)
            else:
                spec[key] = jp.CONTAINS(value)

        return jp.DICT_MATCHES(spec)
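For reference, a minimal sketch of the matcher that make_dict_matcher builds for a small spec, evaluated with the same predicate(context, source) calling convention used by the LIST_MATCHES tests above. The import paths and the sample pipeline data are assumptions, not taken from the source.

import citest.json_predicate as jp
from citest.base import ExecutionContext

# make_dict_matcher({'name': 'my-pipeline',
#                    'stages': [{'type': 'deployManifest'}]})
# would produce a predicate equivalent to:
matcher = jp.DICT_MATCHES({
    'name': jp.CONTAINS('my-pipeline'),
    'stages': jp.LIST_MATCHES([
        jp.DICT_MATCHES({'type': jp.CONTAINS('deployManifest')})
    ]),
})

context = ExecutionContext()
observed = {'name': 'my-pipeline',
            'stages': [{'type': 'deployManifest', 'account': 'k8s-account'}]}
result = matcher(context, observed)
assert result  # truthy: every key in the spec is satisfied by the observed value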
Example #28
    def test_clause_failure(self):
        context = ExecutionContext()
        observation = jc.Observation()
        observation.add_object('B')
        fake_observer = FakeObserver(observation)

        eq_A = jp.LIST_MATCHES([jp.STR_EQ('A')])
        verifier = jc.ValueObservationVerifierBuilder('Has A').EXPECT(
            eq_A).build()

        clause = jc.ContractClause('TestClause', fake_observer, verifier)

        expect_result = jc.contract.ContractClauseVerifyResult(
            False, clause, verifier(context, observation))
        result = clause.verify(context)
        self.assertEqual(expect_result, result)
        self.assertFalse(result)
Example #29
    def deregister_load_balancer_instances(self):
        """Creates a test unregistering instances from load balancer.

        Returns:
          st.OperationContract
        """
        # pylint: disable=bad-continuation
        payload = self.agent.type_to_payload(
            "deregisterInstancesFromGoogleLoadBalancerDescription",
            {
                "loadBalancerNames": [self.__use_lb_name],
                "instanceIds": self.use_instance_names[:2],
                "region": self.bindings["TEST_GCE_REGION"],
                "credentials": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
            },
        )

        # NOTE(ewiseblatt): 20150530
        # This displays an error that the 'instances' field doesn't exist.
        # That's because it was removed once all the instances were gone.
        # I don't have a way to express that the field itself is optional,
        # just the record. Leaving it as is because displaying this type of
        # error is usually helpful for development.
        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            "Instances not in Target Pool",
            retryable_for_secs=30).list_resource(
                "targetPools", region=self.bindings["TEST_GCE_REGION"]).EXPECT(
                    ov_factory.value_list_excludes(
                        jp.DICT_MATCHES({
                            "name":
                            jp.STR_SUBSTR(self.__use_lb_tp_name),
                            "instances":
                            jp.LIST_MATCHES([
                                jp.STR_SUBSTR(self.use_instance_names[0]),
                                jp.STR_SUBSTR(self.use_instance_names[1]),
                            ]),
                        }))))

        return st.OperationContract(
            self.new_post_operation(title="deregister_load_balancer_instances",
                                    data=payload,
                                    path="ops"),
            contract=builder.build(),
        )
Example #30
 def deployment_image_predicate(self, image):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             'spec':
             jp.DICT_MATCHES({
                 'template':
                 jp.DICT_MATCHES({
                     'spec':
                     jp.DICT_MATCHES({
                         'containers':
                         jp.LIST_MATCHES(
                             [jp.DICT_MATCHES({'image': jp.STR_EQ(image)})])
                     })
                 })
             }),
             'status':
             jp.DICT_MATCHES({'availableReplicas': jp.NUM_GE(1)})
         }))