def as_dict(self, db_model):
    base = super(Plan, self).as_dict(db_model)
    if self.artifacts is not wsme.Unset:
        base.update({'artifacts': [wjson.tojson(Artifact, art)
                                   for art in self.artifacts]})
    if self.services is not wsme.Unset:
        base.update({'services': [wjson.tojson(ServiceReference, ref)
                                  for ref in self.services]})
    return base

def as_dict(self, db_model):
    base = super(Plan, self).as_dict_from_keys(['name', 'description'])
    if self.artifacts is not wsme.Unset:
        base.update({'artifacts': [wjson.tojson(Artifact, art)
                                   for art in self.artifacts]})
    if self.services is not wsme.Unset:
        base.update({'services': [wjson.tojson(ServiceReference, ref)
                                  for ref in self.services]})
    if self.parameters is not wsme.Unset:
        base.update({'parameters': self.parameters})
    return base

def as_dict(self, db_model):
    base = super(Plan, self).as_dict(db_model)
    if self.artifacts is not wsme.Unset:
        base.update({'artifacts': [wjson.tojson(Artifact, art)
                                   for art in self.artifacts]})
    if self.services is not wsme.Unset:
        base.update({'services': [wjson.tojson(ServiceReference, ref)
                                  for ref in self.services]})
    if self.parameters is not wsme.Unset:
        base.update({'parameters': self.parameters})
    return base

def test_after_publishes_payload(self, mock_get_original_resource,
                                 mock_publish):
    n = NotificationHook()

    sample_original_task = TaskWmodel.from_db_model(
        Task(id=1, creator_id=1, title='Test', status='inprogress',
             story_id=1, project_id=1, assignee_id=1, priority='medium'))
    sample_modified_task = TaskWmodel.from_db_model(
        Task(id=1, creator_id=1, title='Test', status='merged',
             story_id=1, project_id=1, assignee_id=1, priority='medium'))
    sot_json = tojson(TaskWmodel, sample_original_task)
    smt_json = tojson(TaskWmodel, sample_modified_task)

    # Mocking state object to simulate a 'PUT' request for task
    # resource 1
    mock_state = Mock()
    mock_state.request.current_user_id = '1'
    mock_state.request.method = 'PUT'
    mock_state.request.headers = {'Referer': 'http://localhost/'}
    mock_state.request.query_string = ''
    mock_state.request.path = '/v1/tasks/1'
    mock_state.response.status_code = 200
    mock_state.old_entity_values = sot_json

    mock_get_original_resource.return_value = smt_json

    n.after(mock_state)

    mock_publish.assert_called_with(
        author_id=mock_state.request.current_user_id,
        method=mock_state.request.method,
        url=mock_state.request.headers['Referer'],
        path=mock_state.request.path,
        query_string=mock_state.request.query_string,
        status=mock_state.response.status_code,
        resource='task',
        resource_id='1',
        sub_resource=None,
        sub_resource_id=None,
        resource_before=sot_json,
        resource_after=smt_json)

def as_dict(self, db_model):
    base = super(Plan, self).as_dict_from_keys(['name', 'description'])
    if self.artifacts is not wsme.Unset:
        base.update({
            'artifacts': [wjson.tojson(Artifact, art)
                          for art in self.artifacts]
        })
    if self.services is not wsme.Unset:
        base.update({
            'services': [wjson.tojson(ServiceReference, ref)
                         for ref in self.services]
        })
    return base

def test_after_publishes_payload(self, mock_get_original_resource,
                                 mock_publish):
    n = NotificationHook()

    sample_original_task = TaskWmodel.from_db_model(Task(
        id=1, creator_id=1, title='Test', status='inprogress',
        story_id=1, project_id=1, assignee_id=1, priority='medium'))
    sample_modified_task = TaskWmodel.from_db_model(Task(
        id=1, creator_id=1, title='Test', status='merged',
        story_id=1, project_id=1, assignee_id=1, priority='medium'))
    sot_json = tojson(TaskWmodel, sample_original_task)
    smt_json = tojson(TaskWmodel, sample_modified_task)

    # Mocking state object to simulate a 'PUT' request for task
    # resource 1
    mock_state = Mock()
    mock_state.request.current_user_id = '1'
    mock_state.request.method = 'PUT'
    mock_state.request.headers = {'Referer': 'http://localhost/'}
    mock_state.request.query_string = ''
    mock_state.request.path = '/v1/tasks/1'
    mock_state.response.status_code = 200
    mock_state.old_entity_values = sot_json

    mock_get_original_resource.return_value = smt_json

    n.after(mock_state)

    mock_publish.assert_called_with(
        author_id=mock_state.request.current_user_id,
        method=mock_state.request.method,
        url=mock_state.request.headers['Referer'],
        path=mock_state.request.path,
        query_string=mock_state.request.query_string,
        status=mock_state.response.status_code,
        resource='task',
        resource_id='1',
        sub_resource=None,
        sub_resource_id=None,
        resource_before=sot_json,
        resource_after=smt_json)

def _to_dict(self, model_property_type):
    # Convert the model PropertyTypes dict to a JSON encoding
    db_property_type_dict = dict()
    db_property_type_dict['schema'] = json.tojson(
        PropertyType, model_property_type)
    db_property_type_dict['name'] = model_property_type.name
    return db_property_type_dict

def _to_dict(self, model_property_type):
    # Convert the model PropertyTypes dict to a JSON string
    json_data = tojson(PropertyType, model_property_type)
    db_property_type_dict = dict()
    db_property_type_dict['schema'] = json.dumps(json_data)
    db_property_type_dict['name'] = model_property_type.name
    return db_property_type_dict

def _to_property_dict(self, name, value):
    # Convert the model PropertyTypes dict to a JSON string
    json_data = tojson(PropertyType, value)
    db_property_type_dict = dict()
    db_property_type_dict['schema'] = json.dumps(json_data)
    db_property_type_dict['name'] = name
    return db_property_type_dict

def event_create(values):
    new_event = api_base.entity_create(models.TimeLineEvent, values)
    if new_event:
        if new_event.story_id is not None:
            stories_api.story_update_updated_at(new_event.story_id)
        # TODO(SotK): Update board and worklist updated_at when they get
        # events

    if CONF.enable_notifications:
        # Build the payload. Use of None is included to ensure that we don't
        # accidentally blow up the API call, but we don't anticipate it
        # happening.
        event_dict = tojson(TimeLineEvent,
                            TimeLineEvent.from_db_model(new_event))
        publish(author_id=request.current_user_id or None,
                method="POST",
                url=request.headers.get('Referer') or None,
                path=request.path or None,
                query_string=request.query_string or None,
                status=response.status_code or None,
                resource="timeline_event",
                resource_id=new_event.id or None,
                resource_after=event_dict or None)

    return new_event

def test_date_formatting():
    """ISO 8601 formatted dates with timezones are correctly translated
    to datetime instances and back"""
    d = TypeWithDate()
    d.when = datetime(2015, 2, 28, 1, 2, 3, tzinfo=UTC)
    j = {'when': '2015-02-28T01:02:03+00:00'}
    eq_(tojson(TypeWithDate, d), j)
    eq_(fromjson(TypeWithDate, j).when, d.when)

def encode_result(value, datatype, **options):
    jsondata = tojson(datatype, value)
    if options.get('nest_result', False):
        jsondata = {options.get('nested_result_attrname', 'result'): jsondata}
    if jsondata:
        return json.dumps(jsondata)
    else:
        return ''

def test_unset_attrs(self):
    class AType(object):
        attr = int

    wsme.types.register_type(AType)

    j = tojson(AType, AType())
    assert j == {}

def get_original_resource(self, resource, resource_id):
    """Given a resource name and ID, will load that resource and map it
    to a JSON object.
    """
    if (not resource or not resource_id
            or resource not in class_mappings.keys()):
        return None

    model_class, wmodel_class = class_mappings[resource]
    entity = api_base.entity_get(model_class, resource_id)
    if entity:
        return tojson(wmodel_class, wmodel_class.from_db_model(entity))
    else:
        # In the case of a DELETE, the entity will be returned as None
        return None

def get_original_resource(self, resource, resource_id):
    """Given a resource name and ID, will load that resource and map it
    to a JSON object.
    """
    if not resource or not resource_id or resource not in \
            class_mappings.keys():
        return None

    model_class, wmodel_class = class_mappings[resource]
    entity = api_base.entity_get(model_class, resource_id)
    if entity:
        return tojson(wmodel_class, wmodel_class.from_db_model(entity))
    else:
        # In the case of a DELETE, the entity will be returned as None
        return None

def index(self, response, result):
    params = dict(response.request.params)
    params.pop('marker', None)
    query = urlparse.urlencode(params)

    result.first = "/v2/metadefs/namespaces"
    result.schema = "/v2/schemas/metadefs/namespaces"
    if query:
        result.first = '%s?%s' % (result.first, query)

    if result.next:
        params['marker'] = result.next
        next_query = urlparse.urlencode(params)
        result.next = '/v2/metadefs/namespaces?%s' % next_query

    ns_json = json.tojson(Namespaces, result)
    response = self.__render(ns_json, response)

def event_create(values):
    new_event = api_base.entity_create(models.TimeLineEvent, values)

    if CONF.enable_notifications:
        # Build the payload. Use of None is included to ensure that we don't
        # accidentally blow up the API call, but we don't anticipate it
        # happening.
        event_dict = tojson(TimeLineEvent,
                            TimeLineEvent.from_db_model(new_event))
        publish(author_id=request.current_user_id or None,
                method="POST",
                path=request.path or None,
                status=response.status_code or None,
                resource="timeline_event",
                resource_id=new_event.id or None,
                resource_after=event_dict or None)

    return new_event

def _format_metadef_object_to_db(self, metadata_object): required_str = ",".join(metadata_object.required) if metadata_object.required else None # Convert the model PropertyTypes dict to a JSON string properties = metadata_object.properties db_schema = {} if properties: for k, v in properties.items(): json_data = json.tojson(PropertyType, v) db_schema[k] = json_data db_metadata_object = { "name": metadata_object.name, "required": required_str, "description": metadata_object.description, "json_schema": db_schema, } return db_metadata_object
def _format_metadef_object_to_db(self, metadata_object): required_str = (",".join(metadata_object.required) if metadata_object.required else None) # Convert the model PropertyTypes dict to a JSON string properties = metadata_object.properties db_schema = {} if properties: for k, v in properties.items(): json_data = json.tojson(PropertyType, v) db_schema[k] = json_data db_metadata_object = { 'name': metadata_object.name, 'required': required_str, 'description': metadata_object.description, 'json_schema': db_schema } return db_metadata_object
def index(self, response, result):
    metadata_tags_json = json.tojson(MetadefTags, result)
    body = jsonutils.dumps(metadata_tags_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def update(self, response, namespace):
    ns_json = json.tojson(Namespace, namespace)
    response = self.__render(ns_json, response, 200)

def show(self, response, result):
    resource_type_json = tojson(ResourceTypeAssociations, result)
    body = json.dumps(resource_type_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def _to_property_dict(self, name, value): # Convert the model PropertyTypes dict to a JSON string db_property_type_dict = dict() db_property_type_dict["schema"] = json.tojson(PropertyType, value) db_property_type_dict["name"] = name return db_property_type_dict
def test_array_tojson(self):
    assert tojson([int], None) is None
    assert tojson([int], []) == []
    assert tojson([str], ['1', '4']) == ['1', '4']

def test_bytes_tojson(self):
    assert tojson(wsme.types.bytes, None) is None
    assert tojson(wsme.types.bytes, b('ascii')) == u('ascii')

def create(self, response, namespace):
    ns_json = json.tojson(Namespace, namespace)
    response = self.__render(ns_json, response, 201)
    response.location = get_namespace_href(namespace)

def test_None_tojson(self):
    for dt in (datetime.date, datetime.time, datetime.datetime,
               decimal.Decimal):
        assert tojson(dt, None) is None

def post(self):
    """Create a new CAMP-style plan."""
    if not pecan.request.body or len(pecan.request.body) < 1:
        raise exception.BadRequest

    # check to make sure the request has the right Content-Type
    if (pecan.request.content_type is None or
            pecan.request.content_type != 'application/x-yaml'):
        raise exception.UnsupportedMediaType(
            name=pecan.request.content_type, method='POST')

    try:
        yaml_input_plan = yamlutils.load(pecan.request.body)
    except ValueError as excp:
        raise exception.BadRequest(reason='Plan is invalid. ' +
                                   six.text_type(excp))

    camp_version = yaml_input_plan.get('camp_version')
    if camp_version is None:
        raise exception.BadRequest(
            reason='camp_version attribute is missing from submitted Plan')
    elif camp_version != 'CAMP 1.1':
        raise exception.BadRequest(reason=UNSUP_VER_ERR % camp_version)

    # Use Solum's handler as the point of commonality. We can do this
    # because Solum stores plans in the DB in their JSON form.
    handler = (plan_handler.
               PlanHandler(pecan.request.security_context))

    model_plan = model.Plan(**yaml_input_plan)

    # Move any inline Service Specifications to the "services" section.
    # This avoids an issue where WSME can't properly handle multi-typed
    # attributes (e.g. 'fulfillment'). It also smoothes out the primary
    # difference between CAMP plans and Solum plans, namely that Solum
    # plans don't have inline Service Specifications.
    for art in model_plan.artifacts:
        if art.requirements != wsme.Unset:
            for req in art.requirements:
                if (req.fulfillment != wsme.Unset and
                        isinstance(req.fulfillment,
                                   model.ServiceSpecification)):
                    s_spec = req.fulfillment

                    # if the inline service spec doesn't have an id
                    # generate one
                    if s_spec.id == wsme.Unset:
                        s_spec.id = uuidutils.generate_uuid()

                    # move the inline service spec to the 'services'
                    # section
                    if model_plan.services == wsme.Unset:
                        model_plan.services = [s_spec]
                    else:
                        model_plan.services.append(s_spec)

                    # set the fulfillment to the service spec id
                    req.fulfillment = "id:%s" % s_spec.id

    db_obj = handler.create(clean_plan(wjson.tojson(model.Plan,
                                                    model_plan)))
    plan_dict = fluff_plan(db_obj.refined_content(), db_obj.uuid)

    pecan.response.status = 201
    pecan.response.location = plan_dict['uri']
    return plan_dict

def tojson(self):
    return tojson(type(self), self)

def create(self, response, result):
    resource_type_json = json.tojson(ResourceTypeAssociation, result)
    response.status_int = 201
    body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def create_tags(self, response, result):
    response.status_int = http.CREATED
    metadata_tags_json = json.tojson(MetadefTags, result)
    body = jsonutils.dumps(metadata_tags_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def test_dict_tojson(self):
    assert tojson({int: str}, None) is None
    assert tojson({int: str}, {5: '5'}) == {5: '5'}

def index(self, response, result): result.schema = "v2/schemas/metadefs/objects" metadata_objects_json = json.tojson(MetadefObjects, result) body = jsonutils.dumps(metadata_objects_json, ensure_ascii=False) response.unicode_body = six.text_type(body) response.content_type = 'application/json'
def show(self, response, result):
    property_type_json = json.tojson(PropertyType, result)
    body = jsonutils.dumps(property_type_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def _eq_token(token, attrs):
    eq_(tojson(types.JsonToken, token),
        tojson(types.JsonToken, types.JsonToken(**attrs)))

def index(self, response, result):
    resource_type_json = json.tojson(ResourceTypes, result)
    body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def show(self, response, namespace):
    ns_json = json.tojson(Namespace, namespace)
    response = self.__render(ns_json, response)

def _to_property_dict(self, name, value):
    # Convert the model PropertyTypes dict to a JSON string
    db_property_type_dict = dict()
    db_property_type_dict['schema'] = json.tojson(PropertyType, value)
    db_property_type_dict['name'] = name
    return db_property_type_dict

def index(self, response, result):
    property_type_json = json.tojson(PropertyTypes, result)
    body = jsonutils.dumps(property_type_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def show(self, response, metadata_object):
    metadata_object_json = json.tojson(MetadefObject, metadata_object)
    body = jsonutils.dumps(metadata_object_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'

def index(self, response, result):
    resource_type_json = tojson(ResourceTypes, result)
    body = json.dumps(resource_type_json, ensure_ascii=False)
    response.unicode_body = six.text_type(body)
    response.content_type = 'application/json'