def check_load_balancers_endpoint(self, kind):
        name = kind + " " + self.TEST_APP + "-" + kind
        account = self.bindings["SPINNAKER_KUBERNETES_V2_ACCOUNT"]
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            "Has recorded a load balancer",
            retryable_for_secs=120).get_url_path(
                "/applications/{}/loadBalancers".format(self.TEST_APP)).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            "name":
                            jp.STR_EQ(name),
                            "kind":
                            jp.STR_EQ(kind),
                            "account":
                            jp.STR_EQ(account),
                            "cloudProvider":
                            jp.STR_EQ("kubernetes"),
                            "serverGroups":
                            jp.LIST_MATCHES([
                                jp.DICT_MATCHES({
                                    "account":
                                    jp.STR_EQ(account),
                                    "name":
                                    jp.STR_SUBSTR(self.TEST_APP),
                                }),
                            ]),
                        }))))

        return st.OperationContract(
            NoOpOperation("Has recorded a load balancer"),
            contract=builder.build())
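# A minimal, hypothetical sketch (fabricated data, not part of the scenario
# above): the EXPECT clause boils down to evaluating a jp.DICT_MATCHES
# predicate against each load balancer entry returned by the endpoint. Using
# the ExecutionContext + predicate-call pattern from the test_dict_match_*
# cases further down, and assuming the same jp / ExecutionContext imports as
# the other snippets on this page:
context = ExecutionContext()
sample_entry = {                      # fabricated endpoint payload
    "name": "service myapp-service",
    "kind": "service",
    "account": "my-k8s-account",
    "cloudProvider": "kubernetes",
}
pred = jp.DICT_MATCHES({
    "name": jp.STR_EQ("service myapp-service"),
    "kind": jp.STR_EQ("service"),
    "cloudProvider": jp.STR_EQ("kubernetes"),
})
result = pred(context, sample_entry)
assert bool(result)  # only the listed keys are constrained; extra keys are ignored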
    def check_detailed_clusters_endpoint(self, kind):
        name = kind + ' ' + self.TEST_APP + '-' + kind
        url_name = name.replace(' ', '%20')
        account = self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT']
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has recorded a cluster for the deployed manifest',
            retryable_for_secs=120).get_url_path(
                '/applications/{app}/clusters/{account}/{name}'.format(
                    app=self.TEST_APP, account=account, name=url_name)).EXPECT(
                        ov_factory.value_list_contains(
                            jp.DICT_MATCHES({
                                'accountName':
                                jp.STR_EQ(account),
                                'name':
                                jp.STR_EQ(name),
                                'serverGroups':
                                jp.LIST_MATCHES([
                                    jp.DICT_MATCHES({
                                        'account':
                                        jp.STR_EQ(account),
                                    })
                                ]),
                            }))))

        return st.OperationContract(NoOpOperation('Has recorded a cluster'),
                                    contract=builder.build())
    def scale_manifest(self):
        """Creates OperationContract for scaleManifest.

    To verify the operation, we just check that the deployment has changed size
    """
        bindings = self.bindings
        name = self.TEST_APP + '-deployment'
        payload = self.agent.make_json_payload_from_kwargs(
            job=[{
                'cloudProvider': 'kubernetes',
                'account': bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT'],
                'manifestName': 'deployment ' + name,
                'location': self.TEST_NAMESPACE,
                'type': 'scaleManifest',
                'user': '******',
                'replicas': 2
            }],
            description='Deploy manifest',
            application=self.TEST_APP)

        builder = kube.KubeContractBuilder(self.kube_v2_observer)
        (builder.new_clause_builder(
            'Deployment scaled', retryable_for_secs=15).get_resources(
                'deploy',
                extra_args=[name, '--namespace', self.TEST_NAMESPACE]).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            'spec':
                            jp.DICT_MATCHES({'replicas': jp.NUM_EQ(2)})
                        }))))
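        # For reference (hypothetical observation, not produced by this test):
        # the clause above is satisfied once the deployment read back by the
        # kube_v2_observer contains, e.g. {'spec': {'replicas': 2, ...}};
        # DICT_MATCHES only constrains the listed keys and ignores the rest.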

        return st.OperationContract(self.new_post_operation(
            title='scale_manifest', data=payload, path='tasks'),
                                    contract=builder.build())
    def check_load_balancers_endpoint(self, kind):
        name = kind + ' ' + self.TEST_APP + '-' + kind
        account = self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT']
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has recorded a load balancer',
            retryable_for_secs=120).get_url_path(
                '/applications/{}/loadBalancers'.format(self.TEST_APP)).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            'name':
                            jp.STR_EQ(name),
                            'kind':
                            jp.STR_EQ(kind),
                            'account':
                            jp.STR_EQ(account),
                            'cloudProvider':
                            jp.STR_EQ('kubernetes'),
                            'serverGroups':
                            jp.LIST_MATCHES([
                                jp.DICT_MATCHES({
                                    'account':
                                    jp.STR_EQ(account),
                                    'name':
                                    jp.STR_SUBSTR(self.TEST_APP),
                                }),
                            ]),
                        }))))

        return st.OperationContract(
            NoOpOperation('Has recorded a load balancer'),
            contract=builder.build())
    def check_server_groups_endpoint(self, kind, image, has_lb=True):
        name = self.TEST_APP + "-" + kind
        account = self.bindings["SPINNAKER_KUBERNETES_V2_ACCOUNT"]
        builder = HttpContractBuilder(self.agent)
        lb_pred = (jp.LIST_MATCHES([
            jp.STR_EQ("service {}-service".format(self.TEST_APP))
        ]) if has_lb else jp.LIST_EQ([]))
        (builder.new_clause_builder(
            "Has recorded a server group for the deployed manifest",
            retryable_for_secs=120,
        ).get_url_path("/applications/{}/serverGroups".format(
            self.TEST_APP)).EXPECT(
                ov_factory.value_list_contains(
                    jp.DICT_MATCHES({
                        "name":
                        jp.STR_SUBSTR(name),
                        "cluster":
                        jp.STR_EQ(kind + " " + name),
                        "account":
                        jp.STR_EQ(account),
                        "cloudProvider":
                        jp.STR_EQ("kubernetes"),
                        "buildInfo":
                        jp.DICT_MATCHES({
                            "images":
                            jp.LIST_MATCHES([jp.STR_EQ(image)]),
                        }),
                        "loadBalancers":
                        lb_pred,
                    }))))

        return st.OperationContract(
            NoOpOperation("Has recorded a server group"),
            contract=builder.build())
    def check_server_groups_endpoint(self, kind, image, has_lb=True):
        name = self.TEST_APP + '-' + kind
        account = self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT']
        builder = HttpContractBuilder(self.agent)
        lb_pred = (jp.LIST_MATCHES([
            jp.STR_EQ('service {}-service'.format(self.TEST_APP))
        ]) if has_lb else jp.LIST_EQ([]))
        (builder.new_clause_builder(
            'Has recorded a server group for the deployed manifest',
            retryable_for_secs=120).get_url_path(
                '/applications/{}/serverGroups'.format(self.TEST_APP)).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            'name':
                            jp.STR_SUBSTR(name),
                            'cluster':
                            jp.STR_EQ(kind + ' ' + name),
                            'account':
                            jp.STR_EQ(account),
                            'cloudProvider':
                            jp.STR_EQ('kubernetes'),
                            'buildInfo':
                            jp.DICT_MATCHES({
                                'images':
                                jp.LIST_MATCHES([jp.STR_EQ(image)]),
                            }),
                            'loadBalancers':
                            lb_pred,
                        }))))

        return st.OperationContract(
            NoOpOperation('Has recorded a server group'),
            contract=builder.build())
 def service_selector_predicate(self, key, value):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             'spec':
             jp.DICT_MATCHES(
                 {'selector': jp.DICT_MATCHES({key: jp.STR_EQ(value)})}),
         }))
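 # A hypothetical sketch (clause name and 'service' resource kind assumed, not
 # taken from any scenario above) of how a selector predicate like this might
 # be consumed, following the get_resources/EXPECT pattern of scale_manifest:
 def check_service_selector(self, service_name, key, value):
     builder = kube.KubeContractBuilder(self.kube_v2_observer)
     (builder.new_clause_builder(
         'Service selector set', retryable_for_secs=15).get_resources(
             'service',
             extra_args=[service_name, '--namespace',
                         self.TEST_NAMESPACE]).EXPECT(
                 self.service_selector_predicate(key, value)))
     return st.OperationContract(
         NoOpOperation('Service selector set'), contract=builder.build())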
    def test_dict_match_multi_bad(self):
        context = ExecutionContext()
        source = {'a': 'testing', 'n': 10}
        want = {'n': jp.NUM_NE(10), 'a': jp.STR_SUBSTR('test')}
        result = jp.DICT_MATCHES(want)(context, source)

        expect = (jp.KeyedPredicateResultBuilder(
            jp.DICT_MATCHES(want)).add_result(
                'a',
                jp.PathPredicateResultBuilder(
                    source=source,
                    pred=jp.STR_SUBSTR('test')).add_result_candidate(
                        path_value=jp.PathValue('a', 'testing'),
                        final_result=jp.STR_SUBSTR('test')(
                            context, 'testing')).build(True)).add_result(
                                'n',
                                jp.PathPredicateResultBuilder(
                                    source=source,
                                    pred=jp.NUM_NE(10)).add_result_candidate(
                                        jp.PathValue('n', 10),
                                        jp.NUM_NE(10)(
                                            context,
                                            10)).build(False)).build(False))

        self.assertFalse(result)
        self.assertEqual(expect, result)
    def check_detailed_clusters_endpoint(self, kind):
        name = kind + " " + self.TEST_APP + "-" + kind
        url_name = name.replace(" ", "%20")
        account = self.bindings["SPINNAKER_KUBERNETES_V2_ACCOUNT"]
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            "Has recorded a cluster for the deployed manifest",
            retryable_for_secs=120,
        ).get_url_path("/applications/{app}/clusters/{account}/{name}".format(
            app=self.TEST_APP, account=account, name=url_name)).EXPECT(
                ov_factory.value_list_contains(
                    jp.DICT_MATCHES({
                        "accountName":
                        jp.STR_EQ(account),
                        "name":
                        jp.STR_EQ(name),
                        "serverGroups":
                        jp.LIST_MATCHES([
                            jp.DICT_MATCHES({
                                "account": jp.STR_EQ(account),
                            })
                        ]),
                    }))))

        return st.OperationContract(NoOpOperation("Has recorded a cluster"),
                                    contract=builder.build())
    def delete_app(self):
        contract = jc.Contract()

        app_url_path = "/".join(["/v2/applications", self.TEST_APP])
        f50_builder = st.http_observer.HttpContractBuilder(self.agent)
        # The application should be unlisted immediately (assuming 1 replica).
        # However, given GCS rate limiting on updating the timestamp file,
        # there is a race condition in which the filesystem timestamp was rate
        # limited from updating AND a scheduled update is in progress that saw
        # the application just before the delete, so the application gets
        # restored. Because the timestamp was not updated, this observer will
        # read from the cache thinking it is fresh. We need the extra time to
        # allow the retry on the timestamp update to write out to GCS.
        (f50_builder.new_clause_builder(
            "Unlists Application",
            retryable_for_secs=8).get_url_path("/v2/applications").EXPECT(
                ov_factory.value_list_path_excludes(
                    "name", jp.STR_SUBSTR(self.TEST_APP.upper()))))
        (f50_builder.new_clause_builder("Deletes Application").get_url_path(
            app_url_path).EXPECT(
                ov_factory.error_list_contains(
                    st.HttpAgentErrorPredicate(
                        st.HttpResponsePredicate(http_code=404)))))

        (f50_builder.new_clause_builder(
            "History Retains Application", retryable_for_secs=5).get_url_path(
                "/v2/applications/{app}/history".format(
                    app=self.TEST_APP)).EXPECT(
                        ov_factory.value_list_matches([
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in self.app_history[0].items()
                            }),
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in self.app_history[1].items()
                            }),
                        ])))

        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            "Deleted File", retryable_for_secs=5).list_bucket(
                self.BUCKET, "/".join([self.BASE_PATH,
                                       "applications"])).EXPECT(
                                           ov_factory.value_list_path_excludes(
                                               "name",
                                               jp.STR_SUBSTR(self.TEST_APP))))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        return st.OperationContract(
            self.new_delete_operation(title="delete_app",
                                      data=None,
                                      path=app_url_path),
            contract=contract,
        )
    def trigger_bake_and_RB_deploy_pipeline(self):
        """Create OperationContract that manually trigger the bake and deploy pipeline
        This create a new server group below the given load balancer and destroy the eldest one

        To verify the operation, we check that the spinnaker server groups
        for the given load balancer was created in correct size.
        """

        pipeline_id = self.bake_pipeline_id
        payload = self.agent.make_json_payload_from_kwargs(
            job=[{
                "dryRun": False,
                "type": "manual",
                "user": "******"
            }],
            description="Test - begin bake and deploy: {pl}".format(
                pl=pipeline_id),
            application=self.TEST_APP)

        # Assuming the max number of remaining ASGs is 2.
        builder = az.AzContractBuilder(self.az_observer)
        (builder.new_clause_builder(
            "Has Virtual Machine Scale Set",
            retryable_for_secs=30).collect_resources(
                az_resource='vmss',
                command='list',
                args=[
                    '--resource-group',
                    '{app}-{rg}'.format(app=self.TEST_APP,
                                        rg=self.__rg_location)
                ]).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            "name":
                            jp.STR_EQ(
                                '{lb}-v002'.format(lb=self.__full_lb_name))
                        }))).EXPECT(
                            ov_factory.value_list_contains(
                                jp.DICT_MATCHES({
                                    "name":
                                    jp.STR_EQ('{lb}-v001'.format(
                                        lb=self.__full_lb_name))
                                }))).EXPECT(
                                    ov_factory.value_list_excludes(
                                        jp.DICT_MATCHES({
                                            "name":
                                            jp.STR_EQ('{lb}-v000'.format(
                                                lb=self.__full_lb_name))
                                        }))))

        return st.OperationContract(
            self.new_post_operation(
                title='bake and deploy',
                data=payload,
                # TODO: cannot use v2 url: pipelines/v2/{app}/{pl}
                path='pipelines/{app}/{pl}'.format(app=self.TEST_APP,
                                                   pl=pipeline_id),
                max_wait_secs=7200),
            contract=builder.build())
  def create_app(self):
    payload = self.agent.make_json_payload_from_object(self.initial_app_spec)
    expect = dict(self.initial_app_spec)
    expect['name'] = self.initial_app_spec['name'].upper()
    expect['lastModifiedBy'] = 'anonymous'

    contract = jc.Contract()

    # Note that curiously the updated timestamp is not adjusted in the storage
    # file.
    gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
    (gcs_builder.new_clause_builder('Created Google Cloud Storage File',
                                    retryable_for_secs=5)
     .list_bucket(self.BUCKET, '/'.join([self.BASE_PATH, 'applications']))
     .EXPECT(ov_factory.value_list_path_contains(
         'name', jp.STR_SUBSTR(self.TEST_APP))))
    (gcs_builder.new_clause_builder('Wrote File Content')
     .retrieve_content(self.BUCKET,
                       '/'.join([self.BASE_PATH, 'applications', self.TEST_APP,
                                 'specification.json']),
                       transform=json.JSONDecoder().decode)
     .EXPECT(ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             key: jp.EQUIVALENT(value)
             for key, value in expect.items()}))))
    for clause in gcs_builder.build().clauses:
      contract.add_clause(clause)

    # The update timestamp is determined by the server,
    # and we don't know what that is, so let's ignore it
    # and assume the unit tests verify it is properly updated.
    expect = dict(expect)
    del expect['updateTs']
    self.app_history.insert(0, expect)
    f50_builder = st.http_observer.HttpContractBuilder(self.agent)

    # These clauses are querying the Front50 http server directly
    # to verify that it returns the application we added.
    # We already verified the data was stored on GCS, but while we
    # are here we will verify that it is also being returned when queried.
    (f50_builder.new_clause_builder('Lists Application')
     .get_url_path('/v2/applications')
     .EXPECT(ov_factory.value_list_path_contains(
         'name', jp.STR_SUBSTR(self.TEST_APP.upper()))))
    (f50_builder.new_clause_builder('Returns Application')
     .get_url_path('/v2/applications')
     .EXPECT(ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             key: jp.EQUIVALENT(value)
             for key, value in self.app_history[0].items()}))))
    for clause in f50_builder.build().clauses:
      contract.add_clause(clause)

    path = '/v2/applications'
    return st.OperationContract(
        self.new_post_operation(
            title='create_app', data=payload, path=path),
        contract=contract)
    def register_load_balancer_instances(self):
        """Creates test registering the first two instances with a load balancer.

           Assumes that the create_instances test has been run to add
           the instances. Note that, by design, these are in two different
           zones but the same region, as required by the API being tested.

           Assumes that upsert_load_balancer has been run to
           create the load balancer itself.
        Returns:
          st.OperationContract
        """
        # pylint: disable=bad-continuation
        payload = self.agent.type_to_payload(
            "registerInstancesWithGoogleLoadBalancerDescription",
            {
                "loadBalancerNames": [self.__use_lb_name],
                "instanceIds": self.use_instance_names[:2],
                "region": self.bindings["TEST_GCE_REGION"],
                "credentials": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
            },
        )

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            "Instances in Target Pool",
            retryable_for_secs=15).list_resource("targetPools").EXPECT(
                ov_factory.value_list_contains(
                    jp.DICT_MATCHES({
                        "name":
                        jp.STR_SUBSTR(self.__use_lb_tp_name),
                        "instances":
                        jp.LIST_MATCHES([
                            jp.STR_SUBSTR(self.use_instance_names[0]),
                            jp.STR_SUBSTR(self.use_instance_names[1]),
                        ]),
                    }))).AND(
                        ov_factory.value_list_excludes(
                            jp.DICT_MATCHES({
                                "name":
                                jp.STR_SUBSTR(self.__use_lb_tp_name),
                                "instances":
                                jp.LIST_MATCHES([
                                    jp.STR_SUBSTR(self.use_instance_names[2])
                                ]),
                            }))))
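        # Both verifiers must hold for the clause to pass: some observed target
        # pool entry lists the first two instances, AND no entry for this target
        # pool lists the third instance (which was deliberately not registered).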

        return st.OperationContract(
            self.new_post_operation(title="register_load_balancer_instances",
                                    data=payload,
                                    path="ops"),
            contract=builder.build(),
        )
  def patch_manifest(self):
    """Creates OperationContract for patchManifest.

    To verify the operation, we just check that the deployment was patched with
    the expected label.
    """
    bindings = self.bindings
    name = self.TEST_APP + '-deployment'
    test_label = 'patchedLabel'
    payload = self.agent.make_json_payload_from_kwargs(
        job=[{
          'account': bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT'],
          'cloudProvider': 'kubernetes',
          'kind': 'deployment',
          'location': self.TEST_NAMESPACE,
          'manifestName': 'deployment ' + name,
          'type': 'patchManifest',
          'user': '******',
          'source': 'text',
          'patchBody': {
            'metadata': {
              'labels': {
                'testLabel': test_label,
              }
            }
          },
          'options': {
            'mergeStrategy': 'strategic',
            'record': True
          }
        }],
        description='Patch manifest',
        application=self.TEST_APP)

    builder = kube.KubeContractBuilder(self.kube_v2_observer)
    (builder.new_clause_builder('Deployment patched',
                                retryable_for_secs=15)
     .get_resources(
        'deploy',
        extra_args=[name, '--namespace', self.TEST_NAMESPACE])
     .EXPECT(ov_factory.value_list_contains(jp.DICT_MATCHES({
      'metadata': jp.DICT_MATCHES({
        'labels': jp.DICT_MATCHES({
          'testLabel': jp.STR_EQ(test_label)
        })
      })
    }))))
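    # Roughly the effect being verified (hypothetical kubectl equivalent, not
    # issued by this test):
    #   kubectl patch deployment <name> --type strategic \
    #       -p '{"metadata": {"labels": {"testLabel": "patchedLabel"}}}'
    # The clause passes once the observed deployment carries that label.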

    return st.OperationContract(
        self.new_post_operation(
            title='patch_manifest', data=payload, path='tasks'),
        contract=builder.build())
    def test_dict_match_simple_ok(self):
        context = ExecutionContext()
        source = {'n': 10}
        pred = jp.NUM_LE(20)
        want = {'n': pred}
        result = jp.DICT_MATCHES(want)(context, source)
        expect = (jp.KeyedPredicateResultBuilder(
            jp.DICT_MATCHES(want)).add_result(
                'n',
                self._match_dict_attribute_result(context, pred, 'n',
                                                  source)).build(True))

        self.assertTrue(result)
        self.assertEqual(expect, result)
    def test_dict_match_missing_path(self):
        context = ExecutionContext()
        source = {'n': 10}
        want = {'missing': jp.NUM_EQ(10)}
        result = jp.DICT_MATCHES(want)(context, source)

        expect = (jp.KeyedPredicateResultBuilder(
            jp.DICT_MATCHES(want)).add_result(
                'missing',
                jp.MissingPathError(source=source,
                                    target_path='missing')).build(False))

        self.assertFalse(result)
        self.assertEqual(expect, result)
    def test_dict_match_missing_path(self):
        context = ExecutionContext()
        source = {'n': 10}
        pred = jp.NUM_EQ(10)
        want = {'missing': pred}
        result = jp.DICT_MATCHES(want)(context, source)

        expect = (jp.KeyedPredicateResultBuilder(
            jp.DICT_MATCHES(want)).add_result(
                'missing',
                self._match_dict_attribute_result(context, pred, 'missing',
                                                  source)).build(False))

        self.assertFalse(result)
        self.assertEqual(expect, result)
    def save_delete_daemonset_pipeline(self):
        delete_stage = self.make_delete_stage_daemonset()
        job = dict(appConfig={},
                   keepWaitingPipelines='false',
                   application=self.TEST_APP,
                   name='daemonset-delete-pipeline',
                   lastModifiedBy='anonymous',
                   limitConcurrent='true',
                   parallel='true',
                   stages=[delete_stage])
        payload = self.agent.make_json_payload_from_kwargs(**job)
        expect_match = {
            key: jp.EQUIVALENT(value)
            for key, value in job.items()
        }
        expect_match['stages'] = jp.LIST_MATCHES([
            jp.DICT_MATCHES({
                key: jp.EQUIVALENT(value)
                for key, value in delete_stage.items()
            })
        ])

        builder = st.HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has Pipeline', retryable_for_secs=15).get_url_path(
                'applications/{app}/pipelineConfigs'.format(
                    app=self.TEST_APP)).contains_match(expect_match))
        return st.OperationContract(self.new_post_operation(
            title='create_delete daemonset',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus),
                                    contract=builder.build())
    def submit_pipeline_contract(self, name, stages):
        s = self.scenario
        job = {
            'keepWaitingPipelines': 'false',
            'application': s.TEST_APP,
            'name': name,
            'lastModifiedBy': 'anonymous',
            'limitConcurrent': 'true',
            'parallel': 'true',
            'stages': stages,
        }
        payload = s.agent.make_json_payload_from_kwargs(**job)
        expect_match = {
            key: jp.EQUIVALENT(value)
            for key, value in job.items()
        }
        expect_match['stages'] = jp.LIST_MATCHES([
            jp.DICT_MATCHES(
                {key: jp.EQUIVALENT(value)
                 for key, value in stage.items()}) for stage in stages
        ])

        builder = st.HttpContractBuilder(s.agent)
        (builder.new_clause_builder(
            'Has Pipeline', retryable_for_secs=15).get_url_path(
                'applications/{app}/pipelineConfigs'.format(
                    app=s.TEST_APP)).contains_match(expect_match))
        return st.OperationContract(s.new_post_operation(
            title='save_pipeline_operation',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus),
                                    contract=builder.build())
  def run_deploy_upsert_load_balancer_pipeline(self):
    url_path = 'pipelines/{0}/{1}'.format(self.TEST_APP, self.pipeline_id)

    previous_group_name = frigga.Naming.server_group(
        app=self.TEST_APP,
        stack=self.TEST_STACK,
        version='v000')

    deployed_group_name = frigga.Naming.server_group(
        app=self.TEST_APP,
        stack=self.TEST_STACK,
        version='v001')

    payload = self.agent.make_json_payload_from_kwargs(
        type='manual',
        user='******')

    builder = gcp.GcpContractBuilder(self.appengine_observer)
    (builder.new_clause_builder('Service Modified', retryable_for_secs=60)
     .inspect_resource('apps.services',
                       self.__lb_name,
                       appsId=self.__gcp_project)
     .EXPECT(
         ov_factory.value_list_path_contains(
             jp.build_path('split', 'allocations'),
             jp.DICT_MATCHES({previous_group_name: jp.NUM_EQ(0.9),
                              deployed_group_name: jp.NUM_EQ(0.1)}))))
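    # The pipeline's upsert stage is expected to split traffic 90/10 between
    # the previous server group (v000) and the newly deployed one (v001).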

    return st.OperationContract(
        self.new_post_operation(
            title='run_deploy_upsert_load_balancer_pipeline',
            data=payload, path=url_path),
        builder.build())
    def upsert_google_server_group_tags(self):
        # pylint: disable=bad-continuation
        server_group_name = "katotest-server-group"
        payload = self.agent.type_to_payload(
            "upsertGoogleServerGroupTagsDescription",
            {
                "credentials": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
                "zone": self.bindings["TEST_GCE_ZONE"],
                "serverGroupName": "katotest-server-group",
                "tags": ["test-tag-1", "test-tag-2"],
            },
        )

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            "Server Group Tags Added").inspect_resource(
                "instanceGroupManagers", server_group_name).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            "name":
                            jp.STR_SUBSTR(server_group_name),
                            jp.build_path("tags", "items"):
                            jp.LIST_MATCHES(["test-tag-1", "test-tag-2"]),
                        }))))

        return st.OperationContract(
            self.new_post_operation(title="upsert_server_group_tags",
                                    data=payload,
                                    path="ops"),
            contract=builder.build(),
        )
    def upsert_google_server_group_tags(self):
        # pylint: disable=bad-continuation
        server_group_name = 'katotest-server-group'
        payload = self.agent.type_to_payload(
            'upsertGoogleServerGroupTagsDescription', {
                'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
                'zone': self.bindings['TEST_GCE_ZONE'],
                'serverGroupName': 'katotest-server-group',
                'tags': ['test-tag-1', 'test-tag-2']
            })

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        (builder.new_clause_builder(
            'Server Group Tags Added').inspect_resource(
                'instanceGroupManagers', server_group_name).EXPECT(
                    ov_factory.value_list_contains(
                        jp.DICT_MATCHES({
                            'name':
                            jp.STR_SUBSTR(server_group_name),
                            jp.build_path('tags', 'items'):
                            jp.LIST_MATCHES(['test-tag-1', 'test-tag-2'])
                        }))))

        return st.OperationContract(self.new_post_operation(
            title='upsert_server_group_tags', data=payload, path='ops'),
                                    contract=builder.build())
    def test_dict_match_simple_bad(self):
        context = ExecutionContext()
        source = {'n': 10}
        want = {'n': jp.NUM_NE(10)}
        result = jp.DICT_MATCHES(want)(context, source)

        expect = (jp.KeyedPredicateResultBuilder(
            jp.DICT_MATCHES(want)).add_result(
                'n',
                jp.PathPredicateResultBuilder(
                    source=source, pred=jp.NUM_NE(10)).add_result_candidate(
                        jp.PathValue('n', 10),
                        jp.NUM_NE(10)(context, 10)).build(False)).build(False))

        self.assertFalse(result)
        self.assertEqual(expect, result)
 def __deployment_image_predicate(self, image):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             'spec':
             jp.DICT_MATCHES({
                 'template':
                 jp.DICT_MATCHES({
                     'spec':
                     jp.DICT_MATCHES({
                         'containers':
                         jp.LIST_MATCHES(
                             [jp.DICT_MATCHES({'image': jp.STR_EQ(image)})])
                     })
                 })
             })
         }))
    def delete_app(self):
        contract = jc.Contract()

        app_url_path = '/'.join(['/v2/applications', self.TEST_APP])
        f50_builder = st.http_observer.HttpContractBuilder(self.agent)
        (f50_builder.new_clause_builder('Unlists Application').get_url_path(
            '/v2/applications').EXPECT(
                ov_factory.value_list_path_excludes(
                    'name', jp.STR_SUBSTR(self.TEST_APP.upper()))))
        (f50_builder.new_clause_builder('Deletes Application').get_url_path(
            app_url_path).EXPECT(
                ov_factory.error_list_contains(
                    st.HttpAgentErrorPredicate(
                        st.HttpResponsePredicate(http_code=404)))))

        (f50_builder.new_clause_builder(
            'History Retains Application', retryable_for_secs=5).get_url_path(
                '/v2/applications/{app}/history'.format(
                    app=self.TEST_APP)).EXPECT(
                        ov_factory.value_list_matches([
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in self.app_history[0].items()
                            }),
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in self.app_history[1].items()
                            })
                        ])))

        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            'Deleted File', retryable_for_secs=5).list_bucket(
                self.BUCKET, '/'.join([self.BASE_PATH,
                                       'applications'])).EXPECT(
                                           ov_factory.value_list_path_excludes(
                                               'name',
                                               jp.STR_SUBSTR(self.TEST_APP))))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        return st.OperationContract(self.new_delete_operation(
            title='delete_app', data=None, path=app_url_path),
                                    contract=contract)
    def create_find_image_pipeline(self):
        name = 'findImagePipeline'
        self.pipeline_id = name
        smoke_stage = self.make_smoke_stage()
        deploy_stage = self.make_deploy_stage(imageSource='FINDIMAGE',
                                              requisiteStages=['FINDIMAGE'])

        pipeline_spec = dict(name=name,
                             stages=[smoke_stage, deploy_stage],
                             triggers=[],
                             application=self.TEST_APP,
                             stageCounter=2,
                             parallel=True,
                             limitConcurrent=True,
                             executionEngine='v2',
                             appConfig={},
                             index=0)

        payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)
        expect_match = {
            key: jp.EQUIVALENT(value)
            for key, value in pipeline_spec.items()
        }
        expect_match['stages'] = jp.LIST_MATCHES([
            jp.DICT_MATCHES({
                key: jp.EQUIVALENT(value)
                for key, value in smoke_stage.items()
            }),
            jp.DICT_MATCHES({
                key: jp.EQUIVALENT(value)
                for key, value in deploy_stage.items()
            })
        ])

        builder = st.HttpContractBuilder(self.agent)
        (builder.new_clause_builder(
            'Has Pipeline', retryable_for_secs=5).get_url_path(
                'applications/{app}/pipelineConfigs'.format(
                    app=self.TEST_APP)).contains_match(expect_match))

        return st.OperationContract(self.new_post_operation(
            title='create_find_image_pipeline',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus),
                                    contract=builder.build())
 def deployment_configmap_mounted_predicate(self, configmap_name):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES(
             {
                 "spec": jp.DICT_MATCHES(
                     {
                         "template": jp.DICT_MATCHES(
                             {
                                 "spec": jp.DICT_MATCHES(
                                     {
                                         "volumes": jp.LIST_MATCHES(
                                             [
                                                 jp.DICT_MATCHES(
                                                     {
                                                         "configMap": jp.DICT_MATCHES(
                                                             {
                                                                 "name": jp.STR_SUBSTR(
                                                                     configmap_name
                                                                 )
                                                             }
                                                         )
                                                     }
                                                 )
                                             ]
                                         )
                                     }
                                 )
                             }
                         )
                     }
                 ),
                 "status": jp.DICT_MATCHES({"availableReplicas": jp.NUM_GE(1)}),
             }
         )
     )
 def deployment_configmap_mounted_predicate(self, configmap_name):
     return ov_factory.value_list_contains(
         jp.DICT_MATCHES({
             'spec':
             jp.DICT_MATCHES({
                 'template':
                 jp.DICT_MATCHES({
                     'spec':
                     jp.DICT_MATCHES({
                         'volumes':
                         jp.LIST_MATCHES([
                             jp.DICT_MATCHES({
                                 'configMap':
                                 jp.DICT_MATCHES({
                                     'name':
                                     jp.STR_SUBSTR(configmap_name)
                                 })
                             })
                         ])
                     })
                 })
             }),
             'status':
             jp.DICT_MATCHES({'availableReplicas': jp.NUM_GE(1)})
         }))
    def test_dict_match_multi_ok(self):
        context = ExecutionContext()
        source = {'a': 'testing', 'n': 10}
        n_pred = jp.NUM_LE(20)
        a_pred = jp.STR_SUBSTR('test')
        want = {'n': n_pred, 'a': a_pred}
        result = jp.DICT_MATCHES(want)(context, source)

        expect = (jp.KeyedPredicateResultBuilder(
            jp.DICT_MATCHES(want)).add_result(
                'n',
                self._match_dict_attribute_result(
                    context, n_pred, 'n', source)).add_result(
                        'a',
                        self._match_dict_attribute_result(
                            context, a_pred, 'a', source)).build(True))

        self.assertTrue(result)
        self.assertEqual(expect, result)
  def clone_server_group(self):
    job = [{
      'application': self.TEST_APP,
      'stack': self.TEST_STACK,
      'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
      'loadBalancers': [self.__lb_name],
      'targetSize': 1,
      'capacity': {
        'min': 1,
        'max': 1,
        'desired': 1
      },
      'zone': self.TEST_ZONE,
      'network': 'default',
      'instanceMetadata': {'load-balancer-names': self.__lb_name},
      'availabilityZones': {self.TEST_REGION: [self.TEST_ZONE]},
      'cloudProvider': 'gce',
      'source': {
        'account': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
        'region': self.TEST_REGION,
        'zone': self.TEST_ZONE,
        'serverGroupName': self.__server_group_name,
        'asgName': self.__server_group_name
      },
      'instanceType': 'f1-micro',
      'image': self.bindings['TEST_GCE_IMAGE_NAME'],
      'initialNumReplicas': 1,
      'type': 'cloneServerGroup',
      'account': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
      'user': '******'
    }]
    job[0].update(self.__mig_payload_extra)

    builder = gcp.GcpContractBuilder(self.gcp_observer)
    (builder.new_clause_builder(self.__mig_title + ' Cloned',
                                retryable_for_secs=90)
     .list_resource(self.__mig_manager_name, **self.__mig_manager_kwargs)
     .contains_path_value('baseInstanceName', self.__cloned_server_group_name))

    (builder.new_clause_builder('Instance template preserved', retryable_for_secs=150)
          .list_resource('instanceTemplates')
          .contains_path_pred('properties/metadata/items',
                              jp.DICT_MATCHES({
                                  'key': jp.EQUIVALENT(self.__custom_user_data_key),
                                  'value': jp.EQUIVALENT(self.__custom_user_data_value)})))

    payload = self.agent.make_json_payload_from_kwargs(
        job=job, description=self.__mig_title + ' Test - clone server group',
        application=self.TEST_APP)

    return st.OperationContract(
      self.new_post_operation(
          title='clone_server_group', data=payload, path=self.__path),
      contract=builder.build())