Beispiel #1
0
    def submit_pipeline_contract(self, name, stages):
        """Submit a pipeline save operation and verify Front50 stores it.

        Args:
          name: [string] The pipeline name to create.
          stages: [list of dict] Stage specifications for the pipeline.

        Returns:
          st.OperationContract posting the pipeline and checking that it
          appears among the application's pipeline configs.
        """
        scenario = self.scenario
        job = {
            'keepWaitingPipelines': 'false',
            'application': scenario.TEST_APP,
            'name': name,
            'lastModifiedBy': 'anonymous',
            'limitConcurrent': 'true',
            'parallel': 'true',
            'stages': stages,
        }
        payload = scenario.agent.make_json_payload_from_kwargs(**job)

        # Expect every top-level field back; stages are matched element-wise.
        expect_match = dict(
            (key, jp.EQUIVALENT(value)) for key, value in job.items())
        stage_matchers = []
        for stage in stages:
            stage_matchers.append(jp.DICT_MATCHES(
                dict((key, jp.EQUIVALENT(value))
                     for key, value in stage.items())))
        expect_match['stages'] = jp.LIST_MATCHES(stage_matchers)

        builder = st.HttpContractBuilder(scenario.agent)
        clause = builder.new_clause_builder(
            'Has Pipeline', retryable_for_secs=15)
        clause.get_url_path(
            'applications/{app}/pipelineConfigs'.format(
                app=scenario.TEST_APP)).contains_match(expect_match)

        operation = scenario.new_post_operation(
            title='save_pipeline_operation',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus)
        return st.OperationContract(operation, contract=builder.build())
Beispiel #2
0
  def delete_app(self):
    """Delete the test application and verify Front50 and GCS agree.

    Expects the app to vanish from the listing, the direct GET to fail
    (404 permitted), the history to retain the last two snapshots, and
    the GCS specification file to be removed.

    Returns:
      st.OperationContract
    """
    contract = jc.Contract()

    app_url_path = '/v2/applications/' + self.TEST_APP
    http_builder = st.http_observer.HttpContractBuilder(self.agent)
    (http_builder.new_clause_builder('Unlists Application')
     .get_url_path('/v2/applications')
     .excludes_path_value('name', self.TEST_APP.upper()))
    (http_builder.new_clause_builder('Deletes Application')
     .get_url_path(app_url_path, allow_http_error_status=404))

    def entry_matcher(index):
      # Match one historical snapshot field-for-field.
      return {key: jp.EQUIVALENT(value)
              for key, value in self.app_history[index].items()}

    (http_builder.new_clause_builder('History Retains Application',
                                     retryable_for_secs=5)
     .get_url_path('/v2/applications/%s/history' % self.TEST_APP)
     .contains_path_match('[0]', entry_matcher(0))
     .contains_path_match('[1]', entry_matcher(1)))
    for clause in http_builder.build().clauses:
      contract.add_clause(clause)

    gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
    (gcs_builder.new_clause_builder('Deleted File', retryable_for_secs=5)
     .list_bucket(self.BUCKET, self.BASE_PATH + '/applications')
     .excludes_path_value('name', self.TEST_APP))
    for clause in gcs_builder.build().clauses:
      contract.add_clause(clause)

    return st.OperationContract(
        self.new_delete_operation(
            title='delete_app', data=None, path=app_url_path),
        contract=contract)
Beispiel #3
0
    def save_delete_daemonset_pipeline(self):
        """Save a one-stage pipeline that deletes the daemonset.

        Returns:
          st.OperationContract posting the pipeline definition and
          verifying it appears among the application's pipeline configs.
        """
        delete_stage = self.make_delete_stage_daemonset()
        job = {
            'appConfig': {},
            'keepWaitingPipelines': 'false',
            'application': self.TEST_APP,
            'name': 'daemonset-delete-pipeline',
            'lastModifiedBy': 'anonymous',
            'limitConcurrent': 'true',
            'parallel': 'true',
            'stages': [delete_stage],
        }
        payload = self.agent.make_json_payload_from_kwargs(**job)

        # Expect every job field back; the single stage is matched
        # field-for-field inside a one-element list.
        expect_match = {}
        for key, value in job.items():
            expect_match[key] = jp.EQUIVALENT(value)
        stage_match = jp.DICT_MATCHES({
            key: jp.EQUIVALENT(value)
            for key, value in delete_stage.items()
        })
        expect_match['stages'] = jp.LIST_MATCHES([stage_match])

        builder = st.HttpContractBuilder(self.agent)
        (builder
         .new_clause_builder('Has Pipeline', retryable_for_secs=15)
         .get_url_path('applications/%s/pipelineConfigs' % self.TEST_APP)
         .contains_match(expect_match))
        operation = self.new_post_operation(
            title='create_delete daemonset',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus)
        return st.OperationContract(operation, contract=builder.build())
Beispiel #4
0
    def delete_app(self):
        """Creates OperationContract that deletes the app and verifies removal.

        Front50 should unlist the application and return 404 on a direct
        lookup, while the history endpoint still serves the two snapshots
        recorded earlier in self.app_history.  The GCS backing file should
        be gone as well.

        Returns:
          st.OperationContract
        """
        contract = jc.Contract()

        app_url_path = "/".join(["/v2/applications", self.TEST_APP])
        f50_builder = st.http_observer.HttpContractBuilder(self.agent)
        # The application should be unlisted immediately (assuming 1 replica)
        # However given GCS rate limiting on updating the timestamp file,
        # there is a race condition in which the filesystem timestamp
        # was rate limited from updating AND a scheduled update is in progress
        # where the application was seen just before the delete so gets restored
        # back. Because the timestamp was not updated, this observer will read
        # from the cache thinking it is fresh. We need the extra second to allow
        # for the retry on the timestamp update to write out to GCS.
        (f50_builder.new_clause_builder(
            "Unlists Application",
            retryable_for_secs=8).get_url_path("/v2/applications").EXPECT(
                ov_factory.value_list_path_excludes(
                    "name", jp.STR_SUBSTR(self.TEST_APP.upper()))))
        # A direct GET of the deleted application should now fail with 404.
        (f50_builder.new_clause_builder("Deletes Application").get_url_path(
            app_url_path).EXPECT(
                ov_factory.error_list_contains(
                    st.HttpAgentErrorPredicate(
                        st.HttpResponsePredicate(http_code=404)))))

        # History survives deletion: both previously recorded snapshots
        # should still be returned, matched field-for-field.
        (f50_builder.new_clause_builder(
            "History Retains Application", retryable_for_secs=5).get_url_path(
                "/v2/applications/{app}/history".format(
                    app=self.TEST_APP)).EXPECT(
                        ov_factory.value_list_matches([
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in self.app_history[0].items()
                            }),
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in self.app_history[1].items()
                            }),
                        ])))

        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        # The GCS specification file backing the application must be removed.
        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            "Deleted File", retryable_for_secs=5).list_bucket(
                self.BUCKET, "/".join([self.BASE_PATH,
                                       "applications"])).EXPECT(
                                           ov_factory.value_list_path_excludes(
                                               "name",
                                               jp.STR_SUBSTR(self.TEST_APP))))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        return st.OperationContract(
            self.new_delete_operation(title="delete_app",
                                      data=None,
                                      path=app_url_path),
            contract=contract,
        )
Beispiel #5
0
    def create_app(self):
        """Creates OperationContract that creates the app and verifies storage.

        Verifies the application specification was written to GCS and that
        Front50 returns the application when listed and when queried by
        name.  Side effect: prepends the expected entry (minus 'updateTs')
        to self.app_history.

        Returns:
          st.OperationContract
        """
        payload = self.agent.make_json_payload_from_object(
            self.initial_app_spec)
        expect = dict(self.initial_app_spec)
        expect['name'] = self.initial_app_spec['name'].upper()
        expect['lastModifiedBy'] = 'anonymous'

        contract = jc.Contract()

        # Note that curiously the updated timestamp is not adjusted in the
        # storage file.
        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            'Created Google Cloud Storage File',
            retryable_for_secs=5).list_bucket(
                self.BUCKET,
                '/'.join([self.BASE_PATH, 'applications'
                          ])).contains_path_value('name', self.TEST_APP))
        (gcs_builder.new_clause_builder('Wrote File Content').retrieve_content(
            self.BUCKET,
            '/'.join([
                self.BASE_PATH, 'applications', self.TEST_APP,
                'specification.json'
            ]),
            transform=json.JSONDecoder().decode).contains_match(
                {key: jp.EQUIVALENT(value)
                 for key, value in expect.items()}))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        # The update timestamp is determined by the server,
        # and we don't know what that is, so let's ignore it
        # and assume the unit tests verify it is properly updated.
        expect = dict(expect)
        del expect['updateTs']
        self.app_history.insert(0, expect)
        f50_builder = st.http_observer.HttpContractBuilder(self.agent)

        # These clauses are querying the Front50 http server directly
        # to verify that it returns the application we added.
        # We already verified the data was stored on GCS, but while we
        # are here we will verify that it is also being returned when queried.
        (f50_builder.new_clause_builder('Lists Application').get_url_path(
            '/default/applications').contains_path_value(
                'name', self.TEST_APP.upper()))
        (f50_builder.new_clause_builder('Returns Application').get_url_path(
            '/'.join(['/default/applications/name',
                      self.TEST_APP])).contains_match({
                          key: jp.EQUIVALENT(value)
                          for key, value in self.app_history[0].items()
                      }))
        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        path = '/'.join(['/default/applications/name', self.TEST_APP])
        return st.OperationContract(self.new_post_operation(title='create_app',
                                                            data=payload,
                                                            path=path),
                                    contract=contract)
Beispiel #6
0
    def create_pipeline(self):
        """Creates OperationContract that creates the pipeline and verifies it.

        Verifies the pipeline specification was written to GCS and that
        Front50 returns it from the global, per-application, and history
        endpoints.  Side effect: prepends the expected entry to
        self.pipeline_history.

        Returns:
          st.OperationContract
        """
        payload = self.agent.make_json_payload_from_object(
            self.initial_pipeline_spec)
        expect = dict(self.initial_pipeline_spec)
        expect['lastModifiedBy'] = 'anonymous'
        self.pipeline_history.insert(0, expect)

        contract = jc.Contract()

        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            'Created Google Cloud Storage File',
            retryable_for_secs=5).list_bucket(
                self.BUCKET,
                '/'.join([self.BASE_PATH, 'pipelines'])).contains_path_value(
                    'name', 'pipelines/{id}/specification.json'.format(
                        id=self.TEST_PIPELINE_ID)))
        (gcs_builder.new_clause_builder('Wrote File Content').retrieve_content(
            self.BUCKET,
            '/'.join([
                self.BASE_PATH, 'pipelines', self.TEST_PIPELINE_ID,
                'specification.json'
            ]),
            transform=json.JSONDecoder().decode).contains_match(
                {key: jp.EQUIVALENT(value)
                 for key, value in expect.items()}))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        f50_builder = st.http_observer.HttpContractBuilder(self.agent)

        # These clauses are querying the Front50 http server directly
        # to verify that it returns the pipeline we added.
        # We already verified the data was stored on GCS, but while we
        # are here we will verify that it is also being returned when queried.
        (f50_builder.new_clause_builder('Global Lists Pipeline').get_url_path(
            '/pipelines').contains_path_value('name', self.TEST_PIPELINE_NAME))
        (f50_builder.new_clause_builder('Application Lists Pipeline').
         get_url_path('/pipelines/{app}'.format(
             app=self.TEST_APP)).contains_path_value('name',
                                                     self.TEST_PIPELINE_NAME))
        (f50_builder.new_clause_builder('Returns Pipeline').get_url_path(
            '/pipelines/{id}/history'.format(
                id=self.TEST_PIPELINE_ID)).contains_path_match(
                    '[0]', {
                        key: jp.EQUIVALENT(value)
                        for key, value in self.pipeline_history[0].items()
                    }))
        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        path = '/pipelines'
        return st.OperationContract(self.new_post_operation(
            title='create_pipeline', data=payload, path=path),
                                    contract=contract)
  def clone_server_group(self):
    """Creates OperationContract to clone the server group.

    Verifies that the cloned managed instance group exists and that the
    cloned instance template preserved the custom user-data metadata.

    Returns:
      st.OperationContract
    """
    job = [{
        'application': self.TEST_APP,
        'stack': self.TEST_STACK,
        'credentials': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
        # The original payload listed 'loadBalancers' twice; duplicate
        # literal keys collapse to the last value, so one entry suffices.
        'loadBalancers': [self.__lb_name],
        'targetSize': 1,
        'capacity': {
            'min': 1,
            'max': 1,
            'desired': 1
        },
        'zone': self.TEST_ZONE,
        'network': 'default',
        'instanceMetadata': {'load-balancer-names': self.__lb_name},
        'availabilityZones': {self.TEST_REGION: [self.TEST_ZONE]},
        'cloudProvider': 'gce',
        'source': {
            'account': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
            'region': self.TEST_REGION,
            'zone': self.TEST_ZONE,
            'serverGroupName': self.__server_group_name,
            'asgName': self.__server_group_name
        },
        'instanceType': 'f1-micro',
        'image': self.bindings['TEST_GCE_IMAGE_NAME'],
        'initialNumReplicas': 1,
        'type': 'cloneServerGroup',
        'account': self.bindings['SPINNAKER_GOOGLE_ACCOUNT'],
        'user': '******'
    }]
    job[0].update(self.__mig_payload_extra)

    builder = gcp.GcpContractBuilder(self.gcp_observer)
    (builder.new_clause_builder(self.__mig_title + ' Cloned',
                                retryable_for_secs=90)
     .list_resource(self.__mig_manager_name, **self.__mig_manager_kwargs)
     .contains_path_value('baseInstanceName', self.__cloned_server_group_name))

    # The clone should carry over the custom metadata key/value pair.
    (builder.new_clause_builder('Instance template preserved',
                                retryable_for_secs=150)
     .list_resource('instanceTemplates')
     .contains_path_pred('properties/metadata/items',
                         jp.DICT_MATCHES({
                             'key': jp.EQUIVALENT(self.__custom_user_data_key),
                             'value': jp.EQUIVALENT(
                                 self.__custom_user_data_value)})))

    payload = self.agent.make_json_payload_from_kwargs(
        job=job, description=self.__mig_title + ' Test - clone server group',
        application=self.TEST_APP)

    return st.OperationContract(
        self.new_post_operation(
            title='clone_server_group', data=payload, path=self.__path),
        contract=builder.build())
  def old_delete_app(self):
    """Creates OperationContract that deletes a new Spinnaker Application and validates its deletion in Azure Storage.
    """

    payload = self.agent.make_json_payload_from_kwargs(
        job=[{
            'type': 'deleteApplication',
            'account': self.bindings['SPINNAKER_AZURE_ACCOUNT'],
            'application': {
                'name': self.bindings['TEST_APP']
            },
            'user': '******'
        }],
        description='Test - delete application {app}'.format(
            app=self.bindings['TEST_APP']),
        application=self.bindings['TEST_APP'])

    builder = az.AzContractBuilder(self.az_observer)
    # The metadata blob must no longer exist once the delete completes.
    # The clause was previously titled 'Application Created' (a copy/paste
    # from the create path) even though it verifies deletion.
    (builder.new_clause_builder(
        'Application Deleted', retryable_for_secs=30)
      .collect_resources(
          az_resource='storage',
          command='blob',
          args=['exists', '--container-name', 'front50',
          '--name', 'applications/'+self.bindings['TEST_APP']+'/application-metadata.json',
          '--account-name', self.bindings['azure_storage_account_name'],
          '--account-key', self.bindings['spinnaker_azure_storage_account_key']])
      .EXPECT(ov_factory.value_list_path_contains(
          'exists', jp.EQUIVALENT(False))))

    return st.OperationContract(
        self.new_post_operation(
            title='delete_app', data=payload,
            path='tasks'),
        contract=builder.build())
Beispiel #9
0
    def delete_app(self):
        """Delete the test application and verify all observers agree.

        The app should disappear from the listing and from direct lookup
        (404 expected), history should keep the prior two snapshots, and
        the GCS backing file should be removed.

        Returns:
          st.OperationContract
        """
        contract = jc.Contract()

        app_url_path = '/v2/applications/' + self.TEST_APP
        front50 = st.http_observer.HttpContractBuilder(self.agent)
        (front50.new_clause_builder('Unlists Application')
         .get_url_path('/v2/applications')
         .EXPECT(ov_factory.value_list_path_excludes(
             'name', jp.STR_SUBSTR(self.TEST_APP.upper()))))
        (front50.new_clause_builder('Deletes Application')
         .get_url_path(app_url_path)
         .EXPECT(ov_factory.error_list_contains(
             st.HttpAgentErrorPredicate(
                 st.HttpResponsePredicate(http_code=404)))))

        # Both previously recorded snapshots should still be in the history.
        history_matchers = [
            jp.DICT_MATCHES({key: jp.EQUIVALENT(value)
                             for key, value in entry.items()})
            for entry in (self.app_history[0], self.app_history[1])
        ]
        (front50.new_clause_builder('History Retains Application',
                                    retryable_for_secs=5)
         .get_url_path('/v2/applications/%s/history' % self.TEST_APP)
         .EXPECT(ov_factory.value_list_matches(history_matchers)))

        for clause in front50.build().clauses:
            contract.add_clause(clause)

        gcs = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs.new_clause_builder('Deleted File', retryable_for_secs=5)
         .list_bucket(self.BUCKET, self.BASE_PATH + '/applications')
         .EXPECT(ov_factory.value_list_path_excludes(
             'name', jp.STR_SUBSTR(self.TEST_APP))))
        for clause in gcs.build().clauses:
            contract.add_clause(clause)

        delete_operation = self.new_delete_operation(
            title='delete_app', data=None, path=app_url_path)
        return st.OperationContract(delete_operation, contract=contract)
Beispiel #10
0
    def create_find_image_pipeline(self):
        """Save a findImage + deploy pipeline and verify it is stored.

        Side effect: records the pipeline name in self.pipeline_id.

        Returns:
          st.OperationContract
        """
        name = 'findImagePipeline'
        self.pipeline_id = name
        smoke_stage = self.make_smoke_stage()
        deploy_stage = self.make_deploy_stage(imageSource='FINDIMAGE',
                                              requisiteStages=['FINDIMAGE'])

        pipeline_spec = {
            'name': name,
            'stages': [smoke_stage, deploy_stage],
            'triggers': [],
            'application': self.TEST_APP,
            'stageCounter': 2,
            'parallel': True,
            'limitConcurrent': True,
            'executionEngine': 'v2',
            'appConfig': {},
            'index': 0,
        }

        payload = self.agent.make_json_payload_from_kwargs(**pipeline_spec)

        # Expect every spec field back; each stage is matched in order.
        expect_match = {key: jp.EQUIVALENT(value)
                        for key, value in pipeline_spec.items()}
        expect_match['stages'] = jp.LIST_MATCHES([
            jp.DICT_MATCHES({key: jp.EQUIVALENT(value)
                             for key, value in stage.items()})
            for stage in (smoke_stage, deploy_stage)
        ])

        builder = st.HttpContractBuilder(self.agent)
        (builder
         .new_clause_builder('Has Pipeline', retryable_for_secs=5)
         .get_url_path('applications/%s/pipelineConfigs' % self.TEST_APP)
         .contains_match(expect_match))

        operation = self.new_post_operation(
            title='create_find_image_pipeline',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus)
        return st.OperationContract(operation, contract=builder.build())
Beispiel #11
0
  def test_b_put_dict(self):
    """Example writes a dict value then checks for parts of it."""
    key = self.make_key('MyDictKey')
    expect_value = {'a': 'A', 'b': 'B', 'c': 'C'}

    operation = http_agent.HttpPostOperation(
        title='Writing Key Value',
        data=json.JSONEncoder().encode(expect_value),
        path='/put/' + key)

    # Only a subset of the stored dict ('a' and 'b') is asserted on.
    builder = st.HttpContractBuilder(self.scenario.agent)
    clause = builder.new_clause_builder('Check Key Value')
    clause.get_url_path('/lookup/' + key).contains_match(
        {'a': jp.EQUIVALENT('A'), 'b': jp.EQUIVALENT('B')})

    self.run_test_case(st.OperationContract(operation, builder.build()))
Beispiel #12
0
    def save_deploy_manifest_pipeline(self, image):
        """Save a pipeline with one deployManifest stage and verify storage.

        Args:
          image: [string] Container image used by the deployment manifest.

        Returns:
          st.OperationContract
        """
        name = self.TEST_APP + '-deployment'
        stage = {
            'type': 'deployManifest',
            'cloudProvider': 'kubernetes',
            'moniker': {
                'app': self.TEST_APP
            },
            'account': self.bindings['SPINNAKER_KUBERNETES_V2_ACCOUNT'],
            'source': 'text',
            'manifests': [self.mf.deployment(name, image)],
        }
        job = {
            'keepWaitingPipelines': 'false',
            'application': self.TEST_APP,
            'name': 'deploy-manifest-pipeline',
            'lastModifiedBy': 'anonymous',
            'limitConcurrent': 'true',
            'parallel': 'true',
            'stages': [stage]
        }
        payload = self.agent.make_json_payload_from_kwargs(**job)

        # Every job field should round-trip; the lone stage is matched
        # field-for-field.
        expect_match = {key: jp.EQUIVALENT(value)
                        for key, value in job.items()}
        expect_match['stages'] = jp.LIST_MATCHES([jp.DICT_MATCHES(
            {key: jp.EQUIVALENT(value) for key, value in stage.items()})])

        builder = st.HttpContractBuilder(self.agent)
        (builder
         .new_clause_builder('Has Pipeline', retryable_for_secs=15)
         .get_url_path('applications/%s/pipelineConfigs' % self.TEST_APP)
         .contains_match(expect_match))
        operation = self.new_post_operation(
            title='save_deploy_manifest_operation',
            data=payload,
            path='pipelines',
            status_class=st.SynchronousHttpOperationStatus)
        return st.OperationContract(operation, contract=builder.build())
Beispiel #13
0
 def test_list_equivalent(self):
   """EQUIVALENT on lists tolerates reordering (list similarity)."""
   context = ExecutionContext()
   source = [{'a': 'A', 'b': 'B'}, {'one': 1, 'two': 2}]
   # The operand is the same list in reverse order; still equivalent.
   pred = jp.EQUIVALENT([source[1], source[0]])
   result = pred(context, source)
   expect = jp.PathValueResult(valid=True, source=source, target_path='',
                               path_value=PathValue('', source),
                               pred=jp.LIST_SIMILAR(pred.operand))
   self.assertEqual(expect, result)
Beispiel #14
0
  def test_c_put_array(self):
    """Example writes an array value then shows many ways to check values.

    Each clause below expresses a different predicate style against the
    same stored array: plain path values, predicate lists (AND within a
    single element), match expressions, and cardinality-bounded path
    predicates (min/max occurrence counts).
    """
    key = self.make_key('MyArrayKey')
    expect_value = [{'a': 1, 'b': 1}, 2, {'a': 3, 'b': 3}]

    operation = http_agent.HttpPostOperation(
        title='Writing Key Value',
        data=json.JSONEncoder().encode(expect_value),
        path='/put/' + key)
    # Examples of different ways to express things
    builder = st.HttpContractBuilder(self.scenario.agent)
    (builder.new_clause_builder('Contains a=1 and contains b=3')
     .get_url_path('/lookup/' + key)
     .contains_path_value('a', 1)
     .contains_path_value('b', 3))
    (builder.new_clause_builder('Contains (a=1 and b=1))')
     .get_url_path('/lookup/' + key)
     .contains_pred_list([jp.PathPredicate('a', jp.NUM_EQ(1)),
                          jp.PathPredicate('b', jp.NUM_EQ(1))]))
    (builder.new_clause_builder('Does not contain (a=1 and b=3)')
     .get_url_path('/lookup/' + key)
     .excludes_pred_list([jp.PathPredicate('a', jp.NUM_EQ(1)),
                          jp.PathPredicate('b', jp.NUM_EQ(3))]))
    (builder.new_clause_builder('Contains List')
     .get_url_path('/lookup/' + key)
     .contains_match([jp.EQUIVALENT(2),
                      jp.DICT_MATCHES({'a': jp.EQUIVALENT(3),
                                       'b': jp.DIFFERENT(1)})]))
    # min/max bound how many matching elements are required/allowed.
    (builder.new_clause_builder("Contains Multiple A's >= 0")
     .get_url_path('/lookup/' + key)
     .contains_path_pred('a', jp.NUM_GE(0), min=2))
    (builder.new_clause_builder("Contains only 1 A >= 2")
     .get_url_path('/lookup/' + key)
     .contains_path_pred('a', jp.NUM_GE(2), min=1, max=1))
    (builder.new_clause_builder("Contains no A >= 10")
     .get_url_path('/lookup/' + key)
     .excludes_path_pred('a', jp.NUM_GE(10)))

    contract = builder.build()

    test = st.OperationContract(operation, contract)
    self.run_test_case(test)
Beispiel #15
0
    def submit_pipeline_contract(self,
                                 name,
                                 stages,
                                 expectedArtifacts=None,
                                 user="******"):
        """Submit a pipeline save operation and verify Front50 stores it.

        Args:
          name: [string] The pipeline name to create.
          stages: [list of dict] Stage specifications for the pipeline.
          expectedArtifacts: [list of dict] Optional expected artifacts;
            defaults to an empty list.
          user: [string] Recorded as the pipeline's lastModifiedBy.

        Returns:
          st.OperationContract
        """
        # Avoid the shared-mutable-default-argument pitfall: the previous
        # default of [] would be shared across all calls.
        if expectedArtifacts is None:
            expectedArtifacts = []
        s = self.scenario
        job = {
            "keepWaitingPipelines": "false",
            "application": s.TEST_APP,
            "name": name,
            "lastModifiedBy": user,
            "limitConcurrent": "true",
            "parallel": "true",
            "stages": stages,
            "expectedArtifacts": expectedArtifacts,
        }
        payload = s.agent.make_json_payload_from_kwargs(**job)
        # Expect every job field back; stages are matched element-wise.
        expect_match = {
            key: jp.EQUIVALENT(value)
            for key, value in job.items()
        }
        expect_match["stages"] = jp.LIST_MATCHES([
            jp.DICT_MATCHES(
                {key: jp.EQUIVALENT(value)
                 for key, value in stage.items()}) for stage in stages
        ])

        builder = st.HttpContractBuilder(s.agent)
        (builder.new_clause_builder(
            "Has Pipeline", retryable_for_secs=15).get_url_path(
                "applications/{app}/pipelineConfigs".format(
                    app=s.TEST_APP)).contains_match(expect_match))
        return st.OperationContract(
            s.new_post_operation(
                title="save_pipeline_operation",
                data=payload,
                path="pipelines",
                status_class=st.SynchronousHttpOperationStatus,
            ),
            contract=builder.build(),
        )
Beispiel #16
0
  def test_list_equivalent_indirect(self):
    """Callable operand elements are resolved via the context first."""
    context = ExecutionContext(testB='B', second={'one': 1, 'two': 2})
    # 'b' and the second list element are lambdas resolved from context.
    source = [{'a': 'A', 'b': lambda x: x['testB']}, lambda x: x['second']]
    actual_source = [{'a': 'A', 'b': 'B'}, {'one': 1, 'two': 2}]

    pred = jp.EQUIVALENT(source)
    result = pred(context, actual_source)
    expect = jp.PathValueResult(
        valid=True, source=actual_source, target_path='',
        path_value=PathValue('', actual_source),
        pred=jp.LIST_SIMILAR(actual_source))
    self.assertEqual(expect, result)
Beispiel #17
0
    def list_available_images(self):
        """Creates a test that confirms expected available images.

        Collects the non-deprecated images visible to the observer's
        service account (including the standard image projects), then
        expects Spinnaker's /gce/images/find endpoint to return exactly
        that list for the configured primary account.

        Returns:
          st.OperationContract
        """
        logger = logging.getLogger(__name__)

        # Get the list of images available (to the service account we are using).
        context = citest.base.ExecutionContext()
        gcp_agent = self.gcp_observer
        JournalLogger.begin_context('Collecting expected available images')
        # Default the journal relation to ERROR so an exception in the
        # try-block is recorded as a failure; set VALID only on success.
        relation_context = 'ERROR'
        try:
            logger.debug('Looking up available images.')

            json_doc = gcp_agent.list_resource(context, 'images')
            for project in GCP_STANDARD_IMAGES.keys():
                logger.info('Looking for images from project=%s', project)
                found = gcp_agent.list_resource(context,
                                                'images',
                                                project=project)
                for image in found:
                    if not image.get('deprecated', None):
                        json_doc.append(image)

            # Produce the list of images that we expect to receive from spinnaker
            # (visible to the primary service account).
            spinnaker_account = self.agent.deployed_config.get(
                'providers.google.primaryCredentials.name')

            logger.debug('Configured with Spinnaker account "%s"',
                         spinnaker_account)
            expect_images = [{
                'account': spinnaker_account,
                'imageName': image['name']
            } for image in json_doc]
            expect_images = sorted(expect_images, key=lambda k: k['imageName'])
            relation_context = 'VALID'
        finally:
            JournalLogger.end_context(relation=relation_context)

        # pylint: disable=bad-continuation
        builder = HttpContractBuilder(self.agent)
        (builder.new_clause_builder('Has Expected Images').get_url_path(
            '/gce/images/find').add_constraint(
                jp.PathPredicate(jp.DONT_ENUMERATE_TERMINAL,
                                 jp.EQUIVALENT(expect_images))))

        return st.OperationContract(NoOpOperation('List Available Images'),
                                    contract=builder.build())
Beispiel #18
0
    def delete_pipeline(self):
        """Creates OperationContract deleting the pipeline and verifying it.

        The pipeline should disappear from the global and per-application
        listings while its history entry is retained, and the backing GCS
        file should be removed.

        Returns:
          st.OperationContract
        """
        contract = jc.Contract()

        app_url_path = "pipelines/{app}/{pipeline}".format(
            app=self.TEST_APP, pipeline=UrlQuote(self.TEST_PIPELINE_NAME))

        f50_builder = st.http_observer.HttpContractBuilder(self.agent)
        (f50_builder.new_clause_builder("Global Unlists Pipeline",
                                        retryable_for_secs=5).get_url_path(
                                            "/pipelines").excludes_path_value(
                                                "name",
                                                self.TEST_PIPELINE_NAME))
        (f50_builder.new_clause_builder(
            "Application Unlists Pipeline",
            retryable_for_secs=5).get_url_path("/pipelines/{app}".format(
                app=self.TEST_APP)).excludes_path_value(
                    "id", self.TEST_PIPELINE_ID))

        # History survives deletion; the snapshot we recorded earlier in
        # self.pipeline_history should still be returned first.
        (f50_builder.new_clause_builder(
            "History Retains Pipeline", retryable_for_secs=5).get_url_path(
                "/pipelines/{id}/history".format(
                    id=self.TEST_PIPELINE_ID)).contains_path_match(
                        "[0]",
                        {
                            key: jp.EQUIVALENT(value)
                            for key, value in self.pipeline_history[0].items()
                        },
                    ))
        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        # The GCS specification file for the pipeline must be removed.
        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            "Deleted File", retryable_for_secs=5).list_bucket(
                self.BUCKET,
                "/".join([self.BASE_PATH, "pipelines"
                          ])).excludes_path_value("name",
                                                  self.TEST_PIPELINE_ID))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        return st.OperationContract(
            self.new_delete_operation(title="delete_pipeline",
                                      data=None,
                                      path=app_url_path),
            contract=contract,
        )
Beispiel #19
0
    def old_create_app(self):
        """Creates OperationContract that creates a new Spinnaker Application
        and validates its creation in Azure Storage."""
        # NOTE(review): the default email and the "user" value below look like
        # scrubbed/masked placeholders; real runs presumably supply TEST_EMAIL
        # via bindings -- confirm against the original source.
        email = self.bindings.get("TEST_EMAIL", "*****@*****.**")
        # Submit a createApplication task through the gate agent.
        payload = self.agent.make_json_payload_from_kwargs(
            job=[{
                "type": "createApplication",
                "account": self.bindings["SPINNAKER_AZURE_ACCOUNT"],
                "application": {
                    "name": self.bindings["TEST_APP"],
                    "description": "Gate Testing Application for Azure",
                    "email": email,
                },
                "user": "******",
            }],
            description="Test - create application {app}".format(
                app=self.bindings["TEST_APP"]),
            application=self.bindings["TEST_APP"],
        )

        # Verify with the Azure CLI ("az storage blob exists") that front50
        # persisted the application metadata blob for the new application.
        builder = az.AzContractBuilder(self.az_observer)
        (builder.new_clause_builder(
            "Application Created", retryable_for_secs=30).collect_resources(
                az_resource="storage",
                command="blob",
                args=[
                    "exists",
                    "--container-name",
                    "front50",
                    "--name",
                    "applications/" + self.bindings["TEST_APP"] +
                    "/application-metadata.json",
                    "--account-name",
                    self.bindings["azure_storage_account_name"],
                    "--account-key",
                    self.bindings["spinnaker_azure_storage_account_key"],
                ],
            ).EXPECT(
                # The CLI returns {"exists": true} when the blob is present.
                ov_factory.value_list_path_contains("exists",
                                                    jp.EQUIVALENT(True))))

        return st.OperationContract(
            self.new_post_operation(title="create_app",
                                    data=payload,
                                    path="tasks"),
            contract=builder.build(),
        )
Beispiel #20
0
    def list_available_images(self):
        """Creates a test that confirms expected available images.

        Returns:
          st.OperationContract
        """
        logger = logging.getLogger(__name__)

        # Ask gcloud which images are visible, using the test service
        # account when one is configured.
        service_account = self.bindings.get('GCE_SERVICE_ACCOUNT', None)
        account_args = ['--account', service_account] if service_account else []
        logger.debug('Looking up available images.')
        cli_result = self.gce_observer.list_resources(
            'images', extra_args=account_args)
        if not cli_result.ok():
            raise RuntimeError('GCloud failed with: {0}'.format(
                str(cli_result)))
        image_list = json_module.JSONDecoder().decode(cli_result.output)

        # Build the list of images we expect spinnaker to return for its
        # primary service account, ordered by image name.
        spinnaker_account = self.agent.deployed_config.get(
            'providers.google.primaryCredentials.name')
        logger.debug('Configured with Spinnaker account "%s"',
                     spinnaker_account)
        expect_images = sorted(
            ({'account': spinnaker_account, 'imageName': image['name']}
             for image in image_list),
            key=lambda entry: entry['imageName'])

        # pylint: disable=bad-continuation
        builder = HttpContractBuilder(self.agent)
        has_images = builder.new_clause_builder('Has Expected Images')
        has_images.get_url_path('/gce/images/find').add_constraint(
            jp.PathPredicate(jp.DONT_ENUMERATE_TERMINAL,
                             jp.EQUIVALENT(expect_images)))

        return st.OperationContract(NoOpOperation('List Available Images'),
                                    contract=builder.build())
Beispiel #21
0
    def delete_pipeline(self):
        """Creates OperationContract that deletes the test pipeline.

        Verifies through Front50's HTTP API that the pipeline is no longer
        listed globally or under the application (while its change history
        is retained), and through GCS that the backing file was removed.
        """
        contract = jc.Contract()

        # URL-encode the pipeline name since it may contain reserved chars.
        # NOTE(review): urllib.quote is Python 2 only; Python 3 would need
        # urllib.parse.quote -- confirm the target runtime.
        app_url_path = 'pipelines/{app}/{pipeline}'.format(
            app=self.TEST_APP, pipeline=urllib.quote(self.TEST_PIPELINE_NAME))

        f50_builder = st.http_observer.HttpContractBuilder(self.agent)
        # The pipeline should vanish from the global pipeline listing...
        (f50_builder.new_clause_builder('Global Unlists Pipeline',
                                        retryable_for_secs=5).get_url_path(
                                            '/pipelines').excludes_path_value(
                                                'name',
                                                self.TEST_PIPELINE_NAME))
        # ...and from the per-application listing.
        (f50_builder.new_clause_builder(
            'Application Unlists Pipeline',
            retryable_for_secs=5).get_url_path('/pipelines/{app}'.format(
                app=self.TEST_APP)).excludes_path_value(
                    'id', self.TEST_PIPELINE_ID))

        # Deleting the pipeline should not erase its audit history; entry [0]
        # should still match the most recent recorded pipeline state.
        (f50_builder.new_clause_builder(
            'History Retains Pipeline', retryable_for_secs=5).get_url_path(
                '/pipelines/{id}/history'.format(
                    id=self.TEST_PIPELINE_ID)).contains_path_match(
                        '[0]', {
                            key: jp.EQUIVALENT(value)
                            for key, value in self.pipeline_history[0].items()
                        }))
        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        # The GCS file backing the pipeline should be gone.
        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            'Deleted File', retryable_for_secs=5).list_bucket(
                self.BUCKET,
                '/'.join([self.BASE_PATH, 'pipelines'
                          ])).excludes_path_value('name',
                                                  self.TEST_PIPELINE_ID))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        return st.OperationContract(self.new_delete_operation(
            title='delete_pipeline', data=None, path=app_url_path),
                                    contract=contract)
Beispiel #22
0
    def upsert_load_balancer(self):
        """Creates OperationContract for upsertLoadBalancer.

        Calls Spinnaker's upsertLoadBalancer with a configuration, then verifies
        that the expected resources and configurations are visible on AWS. See
        the contract builder for more info on what the expectations are.
        """
        # Derive a unique LB name for this test run and remember it so other
        # steps in this scenario can reference the same load balancer.
        detail_raw_name = "katotestlb" + self.test_id
        self.__use_lb_name = detail_raw_name

        bindings = self.bindings
        region = bindings["TEST_AWS_REGION"]
        # NOTE(review): assumes the region has 'a' and 'b' availability
        # zones -- verify for regions where that is not the case.
        avail_zones = [region + "a", region + "b"]

        # One listener forwarding external port 80 to instance port 7001.
        listener = {"Listener": {"InstancePort": 7001, "LoadBalancerPort": 80}}
        # ELB health check polling /healthcheck on the instance port, in the
        # AWS "PROTOCOL:port/path" target form.
        health_check = {
            "HealthyThreshold": 8,
            "UnhealthyThreshold": 3,
            "Interval": 123,
            "Timeout": 12,
            "Target":
            "HTTP:%d/healthcheck" % listener["Listener"]["InstancePort"],
        }

        # Build the Kato operation payload describing the load balancer.
        payload = self.agent.type_to_payload(
            "upsertAmazonLoadBalancerDescription",
            {
                "credentials":
                bindings["SPINNAKER_AWS_ACCOUNT"],
                "clusterName":
                bindings["TEST_APP"],
                "name":
                detail_raw_name,
                "availabilityZones": {
                    region: avail_zones
                },
                "listeners":
                [{
                    "internalProtocol": "HTTP",
                    "internalPort": listener["Listener"]["InstancePort"],
                    "externalProtocol": "HTTP",
                    "externalPort": listener["Listener"]["LoadBalancerPort"],
                }],
                "healthCheck":
                health_check["Target"],
                "healthTimeout":
                health_check["Timeout"],
                "healthInterval":
                health_check["Interval"],
                "healthyThreshold":
                health_check["HealthyThreshold"],
                "unhealthyThreshold":
                health_check["UnhealthyThreshold"],
            },
        )

        # Check directly against AWS (describe_load_balancers) that the LB
        # exists with the requested health check, zones, and listener.
        builder = aws.AwsPythonContractBuilder(self.aws_observer)
        (builder.new_clause_builder(
            "Load Balancer Added", retryable_for_secs=30).call_method(
                self.elb_client.describe_load_balancers,
                LoadBalancerNames=[self.__use_lb_name],
            ).EXPECT(
                ov_factory.value_list_path_contains(
                    "LoadBalancerDescriptions",
                    jp.LIST_MATCHES([
                        jp.DICT_MATCHES({
                            "HealthCheck":
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in health_check.items()
                            }),
                            "AvailabilityZones":
                            jp.LIST_MATCHES(
                                [jp.STR_SUBSTR(zone) for zone in avail_zones]),
                            "ListenerDescriptions/Listener":
                            jp.DICT_MATCHES({
                                key: jp.EQUIVALENT(value)
                                for key, value in listener["Listener"].items()
                            }),
                        })
                    ]),
                )))

        return st.OperationContract(
            self.new_post_operation(title="upsert_amazon_load_balancer",
                                    data=payload,
                                    path="ops"),
            contract=builder.build(),
        )
Beispiel #23
0
    def upsert_load_balancer(self):
        """Creates OperationContract for upsertLoadBalancer.

        Calls Spinnaker's upsertLoadBalancer with a configuration, then
        verifies that the expected resources and configurations are visible
        on AWS. See the contract builder for more info on what the
        expectations are.
        """
        # Unique LB name for this run, remembered for later reference.
        detail_raw_name = 'katotestlb' + self.test_id
        self.__use_lb_name = detail_raw_name

        bindings = self.bindings
        region = bindings['TEST_AWS_REGION']
        # NOTE(review): assumes the region has 'a' and 'b' availability zones.
        avail_zones = [region + 'a', region + 'b']

        # External port 80 forwards to instance port 7001; the health check
        # polls /healthcheck on the instance port ("PROTOCOL:port/path" form).
        listener = {'Listener': {'InstancePort': 7001, 'LoadBalancerPort': 80}}
        health_check = {
            'HealthyThreshold': 8,
            'UnhealthyThreshold': 3,
            'Interval': 123,
            'Timeout': 12,
            'Target':
            'HTTP:%d/healthcheck' % listener['Listener']['InstancePort']
        }

        # Kato operation payload describing the desired load balancer.
        payload = self.agent.type_to_payload(
            'upsertAmazonLoadBalancerDescription', {
                'credentials':
                bindings['SPINNAKER_AWS_ACCOUNT'],
                'clusterName':
                bindings['TEST_APP'],
                'name':
                detail_raw_name,
                'availabilityZones': {
                    region: avail_zones
                },
                'listeners':
                [{
                    'internalProtocol': 'HTTP',
                    'internalPort': listener['Listener']['InstancePort'],
                    'externalProtocol': 'HTTP',
                    'externalPort': listener['Listener']['LoadBalancerPort']
                }],
                'healthCheck':
                health_check['Target'],
                'healthTimeout':
                health_check['Timeout'],
                'healthInterval':
                health_check['Interval'],
                'healthyThreshold':
                health_check['HealthyThreshold'],
                'unhealthyThreshold':
                health_check['UnhealthyThreshold']
            })

        # Check AWS directly (describe_load_balancers) for the LB with the
        # requested health check, availability zones, and listener.
        builder = aws.AwsPythonContractBuilder(self.aws_observer)
        (builder.new_clause_builder(
            'Load Balancer Added', retryable_for_secs=30).call_method(
                self.elb_client.describe_load_balancers,
                LoadBalancerNames=[self.__use_lb_name]).EXPECT(
                    ov_factory.value_list_path_contains(
                        'LoadBalancerDescriptions',
                        jp.LIST_MATCHES([
                            jp.DICT_MATCHES({
                                'HealthCheck':
                                jp.DICT_MATCHES({
                                    key: jp.EQUIVALENT(value)
                                    for key, value in health_check.items()
                                }),
                                'AvailabilityZones':
                                jp.LIST_MATCHES([
                                    jp.STR_SUBSTR(zone) for zone in avail_zones
                                ]),
                                'ListenerDescriptions/Listener':
                                jp.DICT_MATCHES({
                                    key: jp.EQUIVALENT(value)
                                    for key, value in
                                    listener['Listener'].items()
                                })
                            })
                        ]))))

        return st.OperationContract(self.new_post_operation(
            title='upsert_amazon_load_balancer', data=payload, path='ops'),
                                    contract=builder.build())
Beispiel #24
0
    def clone_server_group(self):
        """Creates OperationContract that clones the current server group.

        Submits a "cloneServerGroup" task for the existing server group and
        verifies via the GCP API that the cloned managed instance group
        appears and that the custom user-data metadata is preserved on the
        resulting instance template.
        """
        job = [{
            "application": self.TEST_APP,
            "stack": self.TEST_STACK,
            "credentials": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
            # Fix: this key appeared twice in the original literal with the
            # same value; the redundant duplicate entry was removed.
            "loadBalancers": [self.__lb_name],
            "targetSize": 1,
            "capacity": {
                "min": 1,
                "max": 1,
                "desired": 1
            },
            "zone": self.TEST_ZONE,
            "network": "default",
            "instanceMetadata": {
                "load-balancer-names": self.__lb_name
            },
            "availabilityZones": {
                self.TEST_REGION: [self.TEST_ZONE]
            },
            "cloudProvider": "gce",
            "source": {
                "account": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
                "region": self.TEST_REGION,
                "zone": self.TEST_ZONE,
                "serverGroupName": self.__server_group_name,
                "asgName": self.__server_group_name,
            },
            "instanceType": "f1-micro",
            "image": self.bindings["TEST_GCE_IMAGE_NAME"],
            "initialNumReplicas": 1,
            "type": "cloneServerGroup",
            "account": self.bindings["SPINNAKER_GOOGLE_ACCOUNT"],
            "user": "******",
        }]
        # Merge any extra payload fields configured for this MIG variant.
        job[0].update(self.__mig_payload_extra)

        builder = gcp.GcpContractBuilder(self.gcp_observer)
        # The cloned group should appear among the managed instance groups.
        (builder.new_clause_builder(
            self.__mig_title + " Cloned", retryable_for_secs=90).list_resource(
                self.__mig_manager_name, **self.__mig_manager_kwargs).EXPECT(
                    ov_factory.value_list_path_contains(
                        "baseInstanceName",
                        jp.STR_SUBSTR(self.__cloned_server_group_name))))
        # Cloning must carry the custom user-data over to the new template.
        (builder.new_clause_builder(
            "Instance template preserved",
            retryable_for_secs=150).list_resource("instanceTemplates").EXPECT(
                ov_factory.value_list_path_contains(
                    "properties/metadata/items",
                    jp.LIST_MATCHES([
                        jp.DICT_MATCHES({
                            "key":
                            jp.EQUIVALENT(self.__custom_user_data_key),
                            "value":
                            jp.EQUIVALENT(self.__custom_user_data_value),
                        })
                    ]),
                )))

        payload = self.agent.make_json_payload_from_kwargs(
            job=job,
            description=self.__mig_title + " Test - clone server group",
            application=self.TEST_APP,
        )

        return st.OperationContract(
            self.new_post_operation(title="clone_server_group",
                                    data=payload,
                                    path=self.__path),
            contract=builder.build(),
        )
Beispiel #25
0
    def update_app(self):
        """Creates OperationContract that updates the test application.

        Patches the application with modified field values, then verifies
        that GCS holds the updated specification (with the expected number
        of object versions) and that Front50's history endpoint records
        both the new and the previous revision.
        """
        contract = jc.Contract()

        # Build an update spec from the original: keep the name, extend the
        # cloud providers, bump the timestamps, and prefix every other string
        # field with 'NEW_' so the change is observable.
        spec = {}
        for name, value in self.initial_app_spec.items():
            if name == 'name':
                spec[name] = value
            elif name == 'cloudProviders':
                spec[name] = value + ',kubernetes'
            elif name in ['updateTs', 'createTs']:
                spec[name] = str(int(value) + 1)
            # NOTE(review): basestring is Python 2 only; Python 3 would need
            # str here -- confirm the target runtime.
            elif isinstance(value, basestring):
                spec[name] = 'NEW_' + value
        payload = self.agent.make_json_payload_from_object(spec)
        expectUpdate = dict(spec)

        # The actual update is determined by front50.
        # The createTs we gave ignored.
        # As before, the name is upper-cased.
        del expectUpdate['updateTs']
        expectUpdate['createTs'] = self.initial_app_spec['createTs']
        expectUpdate['name'] = self.initial_app_spec['name'].upper()
        self.app_history.insert(0, expectUpdate)

        # TODO(ewiseblatt) 20160524:
        # Add clauses that observe Front50 to verify the history method works
        # and that the get method is the current version.
        # With bucket versioning enabled the old object version remains too.
        num_versions = 2 if self.versioning_enabled else 1
        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        (gcs_builder.new_clause_builder(
            'Google Cloud Storage Contains File',
            retryable_for_secs=5).list_bucket(
                self.BUCKET,
                '/'.join([self.BASE_PATH, 'applications', self.TEST_APP]),
                with_versions=True).contains_path_value('name',
                                                        self.TEST_APP,
                                                        min=num_versions,
                                                        max=num_versions))
        # The stored specification.json should reflect the update we sent.
        (gcs_builder.new_clause_builder(
            'Updated File Content', retryable_for_secs=5).retrieve_content(
                self.BUCKET,
                '/'.join([
                    self.BASE_PATH, 'applications', self.TEST_APP,
                    'specification.json'
                ]),
                transform=json.JSONDecoder().decode).contains_match({
                    key: jp.EQUIVALENT(value)
                    for key, value in expectUpdate.items()
                }))

        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        f50_builder = st.http_observer.HttpContractBuilder(self.agent)
        # History entry [0] should be this update; [1] the prior revision.
        (f50_builder.new_clause_builder(
            'History Records Changes').get_url_path(
                '/v2/applications/{app}/history'.format(
                    app=self.TEST_APP)).contains_path_match(
                        '[0]', {
                            key: jp.EQUIVALENT(value)
                            for key, value in self.app_history[0].items()
                        }).contains_path_match(
                            '[1]', {
                                key: jp.EQUIVALENT(value)
                                for key, value in self.app_history[1].items()
                            }))

        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        # TODO(ewiseblatt): 20160524
        # Add a mechanism here to check the previous version
        # so that we can verify version recovery as well.
        path = '/'.join(['/v2/applications', self.TEST_APP])
        return st.OperationContract(self.new_patch_operation(
            title='update_app', data=payload, path=path),
                                    contract=contract)
    def upsert_load_balancer(self, use_vpc):
        """Creates OperationContract for upsertLoadBalancer.

        Calls Spinnaker's upsertLoadBalancer with a configuration, then
        verifies that the expected resources and configurations are visible
        on AWS. See the contract builder for more info on what the
        expectations are.

        Args:
          use_vpc: [bool] if True configure a VPC otherwise dont.

        Raises:
          ValueError: if the configured TEST_AWS_VPC_ID has no matching
            subnet from which to determine the availability zone.
        """
        bindings = self.bindings
        context = citest.base.ExecutionContext()

        # We're assuming that the given region has 'A' and 'B' availability
        # zones. This seems conservative but might be brittle since we permit
        # any region.
        region = bindings['TEST_AWS_REGION']
        avail_zones = [region + 'a', region + 'b']
        load_balancer_name = self.lb_name

        if use_vpc:
            # TODO(ewiseblatt): 20160301
            # We're hardcoding the VPC here, but not sure which we really want.
            # I think this comes from the spinnaker.io installation instructions.
            # What's interesting about this is that it is a 10.* CidrBlock,
            # as opposed to the others, which are public IPs. All this is sensitive
            # as to where the TEST_AWS_VPC_ID came from so this is going to be
            # brittle. Ideally we only need to know the vpc_id and can figure the
            # rest out based on what we have available.
            subnet_type = 'internal (defaultvpc)'
            vpc_id = bindings['TEST_AWS_VPC_ID']

            # Not really sure how to determine this value in general.
            security_groups = ['default']

            # The resulting load balancer will only be available in the zone of
            # the subnet we are using. We'll figure that out by looking up the
            # subnet we want.
            subnet_details = self.aws_observer.get_resource_list(
                context,
                root_key='Subnets',
                aws_command='describe-subnets',
                aws_module='ec2',
                args=[
                    '--filters', 'Name=vpc-id,Values={vpc_id}'
                    ',Name=tag:Name,Values=defaultvpc.internal.{region}'.
                    format(vpc_id=vpc_id, region=region)
                ])
            try:
                expect_avail_zones = [subnet_details[0]['AvailabilityZone']]
            except (IndexError, KeyError):
                # Fix: an empty describe-subnets result raises IndexError on
                # subnet_details[0]; previously only KeyError was caught, so
                # an unknown VPC crashed with an unhandled exception instead
                # of producing this diagnostic.
                raise ValueError(
                    'vpc_id={0} appears to be unknown'.format(vpc_id))
        else:
            subnet_type = ""
            vpc_id = None
            security_groups = None
            expect_avail_zones = avail_zones

            # This will be a second load balancer not used in other tests.
            # Decorate the name so as not to confuse it.
            load_balancer_name += '-pub'

        # One HTTP listener forwarding port 80 to instance port 80; the
        # health check polls '/' on the instance port.
        listener = {'Listener': {'InstancePort': 80, 'LoadBalancerPort': 80}}
        health_check = {
            'HealthyThreshold': 8,
            'UnhealthyThreshold': 3,
            'Interval': 12,
            'Timeout': 6,
            'Target': 'HTTP:%d/' % listener['Listener']['InstancePort']
        }

        # Build the upsertLoadBalancer task payload.
        payload = self.agent.make_json_payload_from_kwargs(
            job=[{
                'type':
                'upsertLoadBalancer',
                'cloudProvider':
                'aws',
                # 'loadBalancerName': load_balancer_name,
                'credentials':
                bindings['SPINNAKER_AWS_ACCOUNT'],
                'name':
                load_balancer_name,
                'stack':
                bindings['TEST_STACK'],
                'detail':
                self.lb_detail,
                'region':
                bindings['TEST_AWS_REGION'],
                'availabilityZones': {
                    region: avail_zones
                },
                'regionZones':
                avail_zones,
                'listeners': [{
                    'internalProtocol':
                    'HTTP',
                    'internalPort':
                    listener['Listener']['InstancePort'],
                    'externalProtocol':
                    'HTTP',
                    'externalPort':
                    listener['Listener']['LoadBalancerPort']
                }],
                'healthCheck':
                health_check['Target'],
                'healthCheckProtocol':
                'HTTP',
                'healthCheckPort':
                listener['Listener']['LoadBalancerPort'],
                'healthCheckPath':
                '/',
                'healthTimeout':
                health_check['Timeout'],
                'healthInterval':
                health_check['Interval'],
                'healthyThreshold':
                health_check['HealthyThreshold'],
                'unhealthyThreshold':
                health_check['UnhealthyThreshold'],
                'user':
                '******',
                'usePreferredZones':
                True,
                'vpcId':
                vpc_id,
                'subnetType':
                subnet_type,
                # If I set security group to this then I get an error it is missing.
                # bindings['TEST_AWS_SECURITY_GROUP_ID']],
                'securityGroups':
                security_groups
            }],
            description='Create Load Balancer: ' + load_balancer_name,
            application=self.TEST_APP)

        # Verify via the AWS CLI that the LB exists with the requested
        # health check, availability zones, and listener.
        builder = aws.AwsContractBuilder(self.aws_observer)
        (builder.new_clause_builder(
            'Load Balancer Added', retryable_for_secs=10).collect_resources(
                aws_module='elb',
                command='describe-load-balancers',
                args=[
                    '--load-balancer-names', load_balancer_name
                ]).contains_path_match(
                    'LoadBalancerDescriptions', {
                        'HealthCheck':
                        jp.DICT_MATCHES({
                            key: jp.EQUIVALENT(value)
                            for key, value in health_check.items()
                        }),
                        'AvailabilityZones':
                        jp.LIST_SIMILAR(expect_avail_zones),
                        'ListenerDescriptions/Listener':
                        jp.DICT_MATCHES({
                            key: jp.NUM_EQ(value)
                            for key, value in listener['Listener'].items()
                        })
                    }))

        title_decorator = '_with_vpc' if use_vpc else '_without_vpc'
        return st.OperationContract(self.new_post_operation(
            title='upsert_load_balancer' + title_decorator,
            data=payload,
            path='tasks'),
                                    contract=builder.build())
Beispiel #27
0
    def create_app(self):
        """Creates OperationContract that adds the test application.

        Checks that the application specification lands in Google Cloud
        Storage and that Front50's HTTP API both lists and returns it.
        """
        payload = self.agent.make_json_payload_from_object(
            self.initial_app_spec)

        # Front50 upper-cases the application name and records the caller.
        # It also assigns updateTs itself, so we do not know its value here;
        # drop it and assume the unit tests verify it is properly updated.
        expect = dict(self.initial_app_spec)
        expect["name"] = self.initial_app_spec["name"].upper()
        expect["lastModifiedBy"] = "anonymous"
        del expect["updateTs"]
        self.app_history.insert(0, expect)

        contract = jc.Contract()
        f50_builder = st.http_observer.HttpContractBuilder(self.agent)

        gcs_builder = gcp.GcpStorageContractBuilder(self.gcs_observer)
        bucket_clause = gcs_builder.new_clause_builder(
            "Created Google Cloud Storage File", retryable_for_secs=5)
        bucket_clause.list_bucket(
            self.BUCKET,
            "/".join([self.BASE_PATH, "applications"])).EXPECT(
                ov_factory.value_list_path_contains(
                    "name", jp.STR_SUBSTR(self.TEST_APP)))

        spec_object_path = "/".join([
            self.BASE_PATH,
            "applications",
            self.TEST_APP,
            "specification.json",
        ])
        content_clause = gcs_builder.new_clause_builder("Wrote File Content")
        content_clause.retrieve_content(
            self.BUCKET,
            spec_object_path,
            transform=json.JSONDecoder().decode,
        ).EXPECT(
            ov_factory.value_list_contains(
                jp.DICT_MATCHES({
                    key: jp.EQUIVALENT(value)
                    for key, value in expect.items()
                })))
        for clause in gcs_builder.build().clauses:
            contract.add_clause(clause)

        # Query the Front50 server directly to confirm that it also serves
        # the application we just verified was stored on GCS.
        lists_clause = f50_builder.new_clause_builder("Lists Application")
        lists_clause.get_url_path("/v2/applications").EXPECT(
            ov_factory.value_list_path_contains(
                "name", jp.STR_SUBSTR(self.TEST_APP.upper())))
        returns_clause = f50_builder.new_clause_builder("Returns Application")
        returns_clause.get_url_path("/v2/applications").EXPECT(
            ov_factory.value_list_contains(
                jp.DICT_MATCHES({
                    key: jp.EQUIVALENT(value)
                    for key, value in self.app_history[0].items()
                })))
        for clause in f50_builder.build().clauses:
            contract.add_clause(clause)

        return st.OperationContract(
            self.new_post_operation(title="create_app",
                                    data=payload,
                                    path="/v2/applications"),
            contract=contract,
        )