def insert_object(self, data: (CmdbObject, dict)) -> int:
    """
    Insert new CMDB Object
    Args:
        data: init data as CmdbObject instance or raw dict
    Returns:
        Public ID of the new object in database
    Raises:
        ObjectManagerInsertError: if `data` cannot be converted to a CmdbObject
        ObjectInsertError: if the database insert fails
    """
    if isinstance(data, dict):
        try:
            new_object = CmdbObject(**data)
        except CMDBError as e:
            LOGGER.debug(
                f'Error while inserting object - error: {e.message}')
            raise ObjectManagerInsertError(e)
    elif isinstance(data, CmdbObject):
        new_object = data
    else:
        # Fail fast with a clear error instead of hitting an
        # UnboundLocalError on `new_object` below.
        raise ObjectManagerInsertError(
            'Wrong CmdbObject init format - expecting CmdbObject or dict')
    try:
        ack = self.dbm.insert(collection=CmdbObject.COLLECTION,
                              data=new_object.to_database())
        if self._event_queue:
            event = Event(
                "cmdb.core.object.added", {
                    "id": new_object.get_public_id(),
                    "type_id": new_object.get_type_id(),
                    "user_id": new_object.author_id
                })
            self._event_queue.put(event)
    except (CMDBError, PublicIDAlreadyExists) as e:
        raise ObjectInsertError(e)
    return ack
 def get_objects_by(self, sort='public_id', direction=-1, user: UserModel = None,
                    permission: AccessControlPermission = None, **requirements):
     """
     Get all objects matching `requirements`, filtered by the user's ACL.
     Args:
         sort: field to sort the result by
         direction: sort direction (e.g. -1 for descending)
         user: request user for the ACL check
         permission: ACL operation to verify against each object's type
         **requirements: filter criteria passed to the query
     Returns:
         list of accessible CmdbObject instances
     """
     ack = []
     objects = self._get_many(collection=CmdbObject.COLLECTION, sort=sort, direction=direction, **requirements)
     for obj in objects:
         object_ = CmdbObject(**obj)
         try:
             type_ = self._type_manager.get(object_.type_id)
             verify_access(type_, user, permission)
         except CMDBError:
             # user has no access to this object's type -> skip it
             continue
         # reuse the already-parsed instance instead of parsing the raw
         # document a second time
         ack.append(object_)
     return ack
 def get_all_objects(self):
     """Return every object in the collection, sorted by public_id."""
     raw_objects = self._get_many(collection=CmdbObject.COLLECTION,
                                  sort='public_id')
     return [CmdbObject(**raw_object) for raw_object in raw_objects]
    def sort_objects_by_field_value(self, value: str, order=-1, match=None):
        """Return CMDB objects sorted by the value of a named field.

        Builds an aggregation pipeline that extracts the entry named
        `value` from each document's `fields` array into a temporary
        `order` key and sorts on it.

        Args:
            value (str): name of the field to sort by
            order: ascending/descending sort, e.g. -1
            match (dict): optional $match stage to pre-filter documents
        Returns:
            list of CmdbObject sorted by the field value
        """
        pipeline = []
        if match:
            pipeline.append({'$match': match})
        field_filter = {
            "$filter": {
                "input": "$fields",
                "as": "fields",
                "cond": {"$eq": ["$$fields.name", value]}
            }
        }
        pipeline.append({"$addFields": {"order": field_filter}})
        pipeline.append({'$sort': {'order': order}})

        results = []
        # round-trip through bson json_util so extended-JSON values
        # (dates, object ids) survive the conversion
        for document in self.dbm.aggregate(CmdbObject.COLLECTION, pipeline):
            parsed = json.loads(json_util.dumps(document), object_hook=object_hook)
            results.append(CmdbObject(**parsed))
        return results
# Example #5
    def insert_object(self,
                      data: (CmdbObject, dict),
                      user: UserModel = None,
                      permission: AccessControlPermission = None) -> int:
        """
        Insert new CMDB Object
        Args:
            data: init data
            user: current user, to detect who triggered event
            permission: extended user acl rights
        Returns:
            Public ID of the new object in database
        Raises:
            ObjectManagerInsertError: wrong init format or parse failure
            AccessDeniedError: if the object's type is deactivated or ACL denies access
            ObjectInsertError: if the database insert fails
        """
        if isinstance(data, dict):
            try:
                new_object = CmdbObject(**data)
            except CMDBError as e:
                LOGGER.debug(
                    f'Error while inserting object - error: {e.message}')
                raise ObjectManagerInsertError(e)
        elif isinstance(data, CmdbObject):
            new_object = data
        else:
            # Without this branch `new_object` stays None and the
            # `new_object.type_id` access below raises AttributeError.
            raise ObjectManagerInsertError(
                'Wrong CmdbObject init format - expecting CmdbObject or dict')

        type_ = self._type_manager.get(new_object.type_id)
        if not type_.active:
            raise AccessDeniedError(
                f'Objects cannot be created because type `{type_.name}` is deactivated.'
            )

        verify_access(type_, user, permission)

        try:
            ack = self.dbm.insert(collection=CmdbObject.COLLECTION,
                                  data=new_object.__dict__)
            if self._event_queue:
                event = Event(
                    "cmdb.core.object.added", {
                        "id": new_object.get_public_id(),
                        "type_id": new_object.get_type_id(),
                        "user_id": new_object.author_id,
                        "event": 'insert'
                    })
                self._event_queue.put(event)
        except (CMDBError, PublicIDAlreadyExists) as e:
            raise ObjectInsertError(e)
        return ack
    def update_object(self,
                      data: (dict, CmdbObject),
                      user: UserModel = None,
                      permission: AccessControlPermission = None) -> str:
        """
        Update an existing object in the database.
        Args:
            data: new object state as CmdbObject or raw dict
            user: request user; recorded as editor and used for the event
            permission: ACL operation to verify against the object's type
        Returns:
            acknowledgment of the database update
        Raises:
            ObjectManagerUpdateError: if `data` is neither dict nor CmdbObject
        """
        if isinstance(data, dict):
            update_object = CmdbObject(**data)
        elif isinstance(data, CmdbObject):
            update_object = data
        else:
            raise ObjectManagerUpdateError(
                'Wrong CmdbObject init format - expecting CmdbObject or dict')
        update_object.last_edit_time = datetime.utcnow()
        if user:
            update_object.editor_id = user.public_id

        # ACL check: user must be allowed to touch objects of this type
        type_ = self._type_manager.get(update_object.type_id)
        verify_access(type_, user, permission)

        ack = self._update(collection=CmdbObject.COLLECTION,
                           public_id=update_object.get_public_id(),
                           data=update_object.__dict__)
        # create cmdb.core.object.updated event
        if self._event_queue and user:
            event = Event(
                "cmdb.core.object.updated", {
                    "id": update_object.get_public_id(),
                    "type_id": update_object.get_type_id(),
                    "user_id": user.get_public_id()
                })
            self._event_queue.put(event)
        return ack.acknowledged
 def get_objects_by(self, sort='public_id', direction=-1, **requirements):
     """Return all objects matching `requirements` as CmdbObject instances."""
     raw_results = self._get_many(collection=CmdbObject.COLLECTION,
                                  sort=sort,
                                  direction=direction,
                                  **requirements)
     return [CmdbObject(**raw_result) for raw_result in raw_results]
    def get_object(self, public_id: int, user: UserModel = None,
                   permission: AccessControlPermission = None) -> CmdbObject:
        """Fetch a single object by public_id and verify the user's access."""
        try:
            raw_document = self._get(
                collection=CmdbObject.COLLECTION,
                public_id=public_id)
            resource = CmdbObject(**raw_document)
        except Exception as err:
            raise ObjectManagerGetError(str(err))

        # ACL check against the object's type before handing it out
        type_ = self._type_manager.get(resource.type_id)
        verify_access(type_, user, permission)
        return resource
# Example #9
    def group_objects_by_value(self,
                               value: str,
                               match=None,
                               user: UserModel = None,
                               permission: AccessControlPermission = None):
        """Group objects by a document value via aggregation, ACL-filtered.

        Args:
            value (str): document key to group by
            match (dict): optional $match stage to pre-filter documents
            user (UserModel): request user
            permission (AccessControlPermission): ACL operation
        Returns:
            grouped result documents, ordered by descending group size,
            with inaccessible types dropped
        """
        pipeline = []
        if match:
            pipeline.append({'$match': match})
        pipeline.append({
            '$group': {
                '_id': '$' + value,
                'result': {'$first': '$$ROOT'},
                'count': {'$sum': 1},
            }
        })
        pipeline.append({'$sort': {'count': -1}})

        grouped = []
        for entry in self.dbm.aggregate(CmdbObject.COLLECTION, pipeline):
            # the first document of each group represents the group for
            # the ACL check
            representative = CmdbObject(**entry['result'])
            try:
                type_ = self._type_manager.get(representative.type_id)
                verify_access(type_, user, permission)
            except CMDBError:
                # no access to this type -> drop the whole group
                continue
            grouped.append(entry)
        return grouped
# Example #10
 def gen_server_object():
     """Build one fake server CmdbObject (type_id 5) with random field data."""
     return CmdbObject(**{
         "public_id": public_id_counter,
         "author_id": random.choice(user_list).get_public_id(),
         "type_id": 5,
         "views": self._faker.random_number(4),
         "version": "1.0.0",
         "last_edit_time": self._faker.date_time_between(start_date="-30d"),
         "active": self._faker.boolean(chance_of_getting_true=80),
         "creation_time": self._faker.date_time_between(start_date="-30d"),
         "status": True,
         "fields": [
             {"name": "hostname", "value": self._faker.hostname()},
             {"name": "ipv4", "value": self._faker.ipv4()},
             {"name": "ipv4_network_class", "value": self._faker.ipv4_network_class()},
             {"name": "ipv4_intranet", "value": self._faker.ipv4_private()},
             {"name": "ipv6", "value": self._faker.ipv6()},
         ],
     })
 def update_object(self, data: (dict, CmdbObject),
                   request_user: User) -> str:
     """
     Update an existing object in the database.
     Args:
         data: new object state as CmdbObject or raw dict
         request_user: user who triggered the update (used for the event)
     Returns:
         acknowledgment of the database update
     Raises:
         ObjectManagerUpdateError: if `data` is neither dict nor CmdbObject
     """
     if isinstance(data, dict):
         update_object = CmdbObject(**data)
     elif isinstance(data, CmdbObject):
         update_object = data
     else:
         raise ObjectManagerUpdateError(
             'Wrong CmdbObject init format - expecting CmdbObject or dict')
     # stamp the edit time before persisting
     update_object.last_edit_time = datetime.utcnow()
     ack = self._update(collection=CmdbObject.COLLECTION,
                        public_id=update_object.get_public_id(),
                        data=update_object.to_database())
     # create cmdb.core.object.updated event
     if self._event_queue and request_user:
         event = Event(
             "cmdb.core.object.updated", {
                 "id": update_object.get_public_id(),
                 "type_id": update_object.get_type_id(),
                 "user_id": request_user.get_public_id()
             })
         self._event_queue.put(event)
     return ack.acknowledged
# Example #12
    def update_object(self,
                      data: (dict, CmdbObject),
                      user: UserModel = None,
                      permission: AccessControlPermission = None) -> str:
        """
        Update an existing object in the database.
        Args:
            data: new object state as CmdbObject or raw dict
            user: request user; recorded as editor and used for the event
            permission: ACL operation to verify against the object's type
        Returns:
            acknowledgment of the database update
        Raises:
            ObjectManagerUpdateError: if `data` is neither dict nor CmdbObject
            AccessDeniedError: if the object's type is deactivated
        """
        if isinstance(data, dict):
            update_object = CmdbObject(**data)
        elif isinstance(data, CmdbObject):
            update_object = data
        else:
            raise ObjectManagerUpdateError(
                'Wrong CmdbObject init format - expecting CmdbObject or dict')
        update_object.last_edit_time = datetime.now(timezone.utc)
        if user:
            update_object.editor_id = user.public_id

        # deactivated types are read-only; reject the update outright
        type_ = self._type_manager.get(update_object.type_id)
        if not type_.active:
            raise AccessDeniedError(
                f'Objects cannot be updated because type `{type_.name}` is deactivated.'
            )
        verify_access(type_, user, permission)

        ack = self._update(collection=CmdbObject.COLLECTION,
                           public_id=update_object.get_public_id(),
                           data=update_object.__dict__)

        # notify listeners about the update
        if self._event_queue and user:
            event = Event(
                "cmdb.core.object.updated", {
                    "id": update_object.get_public_id(),
                    "type_id": update_object.get_type_id(),
                    "user_id": user.get_public_id(),
                    'event': 'update'
                })
            self._event_queue.put(event)
        return ack.acknowledged
# Example #13
    def aggregate(self,
                  pipeline: Pipeline,
                  request_user: UserModel = None,
                  limit: int = Search.DEFAULT_LIMIT,
                  skip: int = Search.DEFAULT_SKIP,
                  **kwargs) -> SearchResult[RenderResult]:
        """
        Use mongodb aggregation system with pipeline queries
        Args:
            pipeline (Pipeline): list of requirement pipes
            request_user (UserModel): user who started this search
            limit (int): max number of documents to return
            skip (int): number of documents to be skipped
            **kwargs: `resolve` (bool) to include referencing objects,
                `active` (bool) to restrict references to active objects
        Returns:
            SearchResult with generic list of RenderResults
        """
        # Insert skip and limit
        plb = PipelineBuilder(pipeline)

        # define search output
        stages: dict = {}
        active = kwargs.get('active', True)

        # Optionally extend the result with objects that reference the
        # matched objects through a field value.
        if kwargs.get('resolve', False):
            plb.add_pipe(
                plb.lookup_sub_(from_='framework.objects',
                                let_={'ref_id': '$public_id'},
                                pipeline_=[
                                    plb.match_({
                                        '$expr': {
                                            '$in':
                                            ['$$ref_id', '$fields.value']
                                        }
                                    })
                                ],
                                as_='refs'))
            # when `active` is set, referencing objects must be active too
            if active:
                active_pipe = [{
                    '$match': {
                        'active': {
                            "$eq": True
                        }
                    }
                }, {
                    '$match': {
                        '$expr': {
                            '$in': ['$$ref_id', '$fields.value']
                        }
                    }
                }]
            else:
                active_pipe = [{
                    '$match': {
                        '$expr': {
                            '$in': ['$$ref_id', '$fields.value']
                        }
                    }
                }]
            # facet: keep the matched roots and collect the referencing objects
            plb.add_pipe(
                plb.facet_({
                    'root': [{
                        '$replaceRoot': {
                            'newRoot': {
                                '$mergeObjects': ['$$ROOT']
                            }
                        }
                    }],
                    'references': [{
                        '$lookup': {
                            'from': 'framework.objects',
                            'let': {
                                'ref_id': '$public_id'
                            },
                            'pipeline': active_pipe,
                            'as': 'refs'
                        }
                    }, {
                        '$unwind': '$refs'
                    }, {
                        '$replaceRoot': {
                            'newRoot': '$refs'
                        }
                    }]
                }))
            # merge roots and references back into one flat document stream
            plb.add_pipe(
                plb.project_(specification={
                    'complete': {
                        '$concatArrays': ['$root', '$references']
                    }
                }))
            plb.add_pipe(plb.unwind_(path='$complete'))
            plb.add_pipe({'$replaceRoot': {'newRoot': '$complete'}})

        stages.update({'metadata': [PipelineBuilder.count_('total')]})
        stages.update({
            'data':
            [PipelineBuilder.skip_(skip),
             PipelineBuilder.limit_(limit)]
        })

        # group facet: per-type result counts (joined with the type label)
        group_stage: dict = {
            'group': [
                PipelineBuilder.lookup_(TypeModel.COLLECTION, 'type_id',
                                        'public_id', 'lookup_data'),
                PipelineBuilder.unwind_('$lookup_data'),
                PipelineBuilder.project_({
                    '_id': 0,
                    'type_id': 1,
                    'label': '$lookup_data.label'
                }),
                PipelineBuilder.group_('$$ROOT.type_id', {
                    'types': {
                        '$first': '$$ROOT'
                    },
                    'total': {
                        '$sum': 1
                    }
                }),
                PipelineBuilder.project_({
                    '_id': 0,
                    'searchText': '$types.label',
                    'searchForm': 'type',
                    'searchLabel': '$types.label',
                    'settings': {
                        'types': ['$types.type_id']
                    },
                    'total': 1
                }),
                PipelineBuilder.sort_('total', -1)
            ]
        }
        stages.update(group_stage)
        plb.add_pipe(PipelineBuilder.facet_(stages))

        # run the assembled pipeline; $facet yields a single result document
        raw_search_result = self.manager.aggregate(
            collection=CmdbObject.COLLECTION, pipeline=plb.pipeline)
        raw_search_result_list = list(raw_search_result)

        try:
            matches_regex = plb.get_regex_pipes_values()
        except Exception as err:
            LOGGER.error(f'Extract regex pipes: {err}')
            matches_regex = []

        if len(raw_search_result_list[0]['data']) > 0:
            raw_search_result_list_entry = raw_search_result_list[0]
            # parse result list
            pre_rendered_result_list = [
                CmdbObject(**raw_result)
                for raw_result in raw_search_result_list_entry['data']
            ]
            rendered_result_list = RenderList(
                pre_rendered_result_list,
                request_user,
                object_manager=self.manager).render_result_list()

            total_results = raw_search_result_list_entry['metadata'][0].get(
                'total', 0)
            group_result_list = raw_search_result_list[0]['group']

        else:
            rendered_result_list = []
            group_result_list = []
            total_results = 0
        # generate output
        search_result = SearchResult[RenderResult](
            results=rendered_result_list,
            total_results=total_results,
            groups=group_result_list,
            alive=raw_search_result.alive,
            matches_regex=matches_regex,
            limit=limit,
            skip=skip)
        return search_result
 def get_object(self, public_id: int):
     """
     Fetch a single object by its public_id.
     Args:
         public_id: unique public id of the object
     Returns:
         CmdbObject instance
     Raises:
         ObjectManagerGetError: if the object could not be fetched or parsed
     """
     try:
         return CmdbObject(**self._get(collection=CmdbObject.COLLECTION,
                                       public_id=public_id))
     except (CMDBError, Exception) as err:
         # Python 3 exceptions have no `.message` attribute; str(err)
         # works for CMDBError and builtin exceptions alike.
         raise ObjectManagerGetError(str(err))
# Example #15
 def gen_location_object():
     """Build one fake location CmdbObject (type_id 4) with random field data."""
     try:
         employee_id = random.choice(employee_list).get_public_id()
     except Exception:
         # no employees generated yet -> leave the reference empty
         employee_id = None
     return CmdbObject(**{
         "public_id": public_id_counter,
         "author_id": random.choice(user_list).get_public_id(),
         "type_id": 4,
         "views": self._faker.random_number(4),
         "version": "1.0.0",
         "last_edit_time": self._faker.date_time_between(start_date="-30d"),
         "active": self._faker.boolean(chance_of_getting_true=80),
         "creation_time": self._faker.date_time_between(start_date="-30d"),
         "status": True,
         "fields": [
             {"name": "naming", "value": self._faker.suffix()},
             {"name": "description", "value": self._faker.paragraph()},
             {"name": "entrance", "value": self._faker.paragraph()},
             {"name": "person_in_charge", "value": employee_id},
             {"name": "street", "value": self._faker.street_name()},
             {"name": "zip", "value": self._faker.zipcode()},
             {"name": "city", "value": self._faker.city()},
             {"name": "map-lang", "value": str(self._faker.latitude())},
             {"name": "map-long", "value": str(self._faker.longitude())},
         ],
     })
# Example #16
    def aggregate(self,
                  pipeline: Pipeline,
                  request_user: UserModel = None,
                  permission: AccessControlPermission = None,
                  limit: int = Search.DEFAULT_LIMIT,
                  skip: int = Search.DEFAULT_SKIP,
                  **kwargs) -> SearchResult[RenderResult]:
        """
        Use mongodb aggregation system with pipeline queries
        Args:
            pipeline (Pipeline): list of requirement pipes
            request_user (UserModel): user who started this search
            permission (AccessControlPermission) : Permission enum for possible ACL operations..
            limit (int): max number of documents to return
            skip (int): number of documents to be skipped
            **kwargs:
        Returns:
            SearchResult with generic list of RenderResults
        """
        # NOTE(review): `permission` is accepted but never applied in this
        # body — confirm ACL filtering happens in the supplied pipeline.

        # Insert skip and limit
        plb = SearchPipelineBuilder(pipeline)

        # define search output
        stages: dict = {}

        stages.update({'metadata': [SearchPipelineBuilder.count_('total')]})
        stages.update({
            'data': [
                SearchPipelineBuilder.skip_(skip),
                SearchPipelineBuilder.limit_(limit)
            ]
        })

        # group facet: per-type result counts (joined with the type label)
        group_stage: dict = {
            'group': [
                SearchPipelineBuilder.lookup_(TypeModel.COLLECTION, 'type_id',
                                              'public_id', 'lookup_data'),
                SearchPipelineBuilder.unwind_('$lookup_data'),
                SearchPipelineBuilder.project_({
                    '_id': 0,
                    'type_id': 1,
                    'label': '$lookup_data.label'
                }),
                SearchPipelineBuilder.group_('$$ROOT.type_id', {
                    'types': {
                        '$first': '$$ROOT'
                    },
                    'total': {
                        '$sum': 1
                    }
                }),
                SearchPipelineBuilder.project_({
                    '_id': 0,
                    'searchText': '$types.label',
                    'searchForm': 'type',
                    'searchLabel': '$types.label',
                    'settings': {
                        'types': ['$types.type_id']
                    },
                    'total': 1
                }),
                SearchPipelineBuilder.sort_('total', -1)
            ]
        }
        stages.update(group_stage)
        plb.add_pipe(SearchPipelineBuilder.facet_(stages))
        # run the assembled pipeline; $facet yields a single result document
        raw_search_result = self.manager.aggregate(
            collection=CmdbObject.COLLECTION, pipeline=plb.pipeline)
        raw_search_result_list = list(raw_search_result)

        try:
            matches_regex = plb.get_regex_pipes_values()
        except Exception as err:
            LOGGER.error(f'Extract regex pipes: {err}')
            matches_regex = []

        if len(raw_search_result_list[0]['data']) > 0:
            raw_search_result_list_entry = raw_search_result_list[0]
            # parse result list
            pre_rendered_result_list = [
                CmdbObject(**raw_result)
                for raw_result in raw_search_result_list_entry['data']
            ]
            rendered_result_list = RenderList(
                pre_rendered_result_list,
                request_user,
                database_manager=self.manager.dbm,
                object_manager=self.manager).render_result_list()

            total_results = raw_search_result_list_entry['metadata'][0].get(
                'total', 0)
            group_result_list = raw_search_result_list[0]['group']

        else:
            rendered_result_list = []
            group_result_list = []
            total_results = 0
        # generate output
        search_result = SearchResult[RenderResult](
            results=rendered_result_list,
            total_results=total_results,
            groups=group_result_list,
            alive=raw_search_result.alive,
            matches_regex=matches_regex,
            limit=limit,
            skip=skip)
        return search_result
# Example #17
    def aggregate(self,
                  pipeline: Pipeline,
                  request_user: User = None,
                  limit: int = Search.DEFAULT_LIMIT,
                  skip: int = Search.DEFAULT_SKIP,
                  **kwargs) -> SearchResult[RenderResult]:
        """
        Use mongodb aggregation system with pipeline queries
        Args:
            pipeline (Pipeline): list of requirement pipes
            request_user (User): user who started this search
            limit (int): max number of documents to return
            skip (int): number of documents to be skipped
            **kwargs:
        Returns:
            SearchResult with generic list of RenderResults
        """
        # Insert skip and limit
        plb = PipelineBuilder(pipeline)

        # define search output: total count plus the paged data window
        stages: dict = {
            'metadata': [PipelineBuilder.count_('total')],
            'data':
            [PipelineBuilder.skip_(skip),
             PipelineBuilder.limit_(limit)]
        }
        plb.add_pipe(PipelineBuilder.facet_(stages))

        # run the assembled pipeline; $facet yields a single result document
        raw_search_result = self.manager.aggregate(
            collection=CmdbObject.COLLECTION, pipeline=plb.pipeline)
        raw_search_result_list = list(raw_search_result)
        try:
            matches_regex = plb.get_regex_pipes_values()
        except Exception as err:
            LOGGER.error(f'Extract regex pipes: {err}')
            matches_regex = []

        if len(raw_search_result_list[0]['data']) > 0:
            raw_search_result_list_entry = raw_search_result_list[0]
            # parse result list
            pre_rendered_result_list = [
                CmdbObject(**raw_result)
                for raw_result in raw_search_result_list_entry['data']
            ]
            rendered_result_list = RenderList(
                pre_rendered_result_list,
                request_user,
                object_manager=self.manager).render_result_list()
            total_results = raw_search_result_list_entry['metadata'][0].get(
                'total', 0)
        else:
            rendered_result_list = []
            total_results = 0
        # generate output
        search_result = SearchResult[RenderResult](
            results=rendered_result_list,
            total_results=total_results,
            alive=raw_search_result.alive,
            matches_regex=matches_regex,
            limit=limit,
            skip=skip)
        return search_result
# Example #18
 def gen_router_object():
     """Build one fake router CmdbObject (type_id 3) with random field data."""
     try:
         location = random.choice(location_list).get_public_id()
     except Exception:
         # no locations generated yet -> leave the reference empty
         location = None
     return CmdbObject(**{
         "public_id": public_id_counter,
         "author_id": random.choice(user_list).get_public_id(),
         "type_id": 3,
         "views": self._faker.random_number(4),
         "version": "1.0.0",
         "last_edit_time": self._faker.date_time_between(start_date="-30d"),
         "active": self._faker.boolean(chance_of_getting_true=80),
         "creation_time": self._faker.date_time_between(start_date="-30d"),
         "status": True,
         "fields": [
             {"name": "management_ip", "value": self._faker.ipv4()},
             {"name": "hostname", "value": self._faker.hostname()},
             {"name": "monitoring", "value": self._faker.boolean()},
             {"name": "os", "value": self._faker.word()},
             {"name": "username", "value": self._faker.user_name()},
             {"name": "password",
              "value": self._security_manager.encrypt_aes(self._faker.password())},
             {"name": "address", "value": location},
             {"name": "building", "value": self._faker.word()},
             {"name": "room", "value": self._faker.word()},
             {"name": "rack", "value": self._faker.word()},
             {"name": "manufacturer",
              "value": self._faker.first_name() + self._faker.company_suffix()},
             {"name": "supplier",
              "value": self._faker.last_name() + self._faker.company_suffix()},
             {"name": "model", "value": self._faker.word()},
             {"name": "serial_number", "value": self._faker.random_number()},
             {"name": "software_version", "value": self._faker.random_number()},
         ],
     })
# Example #19
 def gen_leased_line_object():
     """Build one fake leased-line CmdbObject (type_id 1) with random field data."""
     try:
         location_id_a = random.choice(location_list).get_public_id()
         location_id_b = random.choice(location_list).get_public_id()
     except Exception:
         # no locations generated yet -> leave both endpoints empty
         location_id_a = None
         location_id_b = None
     return CmdbObject(**{
         "public_id": public_id_counter,
         "author_id": random.choice(user_list).get_public_id(),
         "type_id": 1,
         "views": self._faker.random_number(4),
         "version": "1.0.0",
         "last_edit_time": self._faker.date_time_between(start_date="-30d"),
         "active": self._faker.boolean(chance_of_getting_true=80),
         "creation_time": self._faker.date_time_between(start_date="-30d"),
         "status": True,
         "fields": [
             {"name": "state", "value": self._faker.boolean()},
             {"name": "product_name",
              "value": self._faker.last_name() + " " + self._faker.company_suffix()},
             {"name": "transfer_rate",
              "value": str(self._faker.random_int(max=1000)) + " Mbit/s"},
             {"name": "company_name_a",
              "value": self._faker.first_name() + " " + self._faker.company_suffix()},
             {"name": "street_a", "value": self._faker.street_name()},
             {"name": "zip_a", "value": self._faker.zipcode()},
             {"name": "city_a", "value": self._faker.city()},
             {"name": "location_details_a", "value": location_id_a},
             {"name": "company_name_b",
              "value": self._faker.first_name() + " " + self._faker.company_suffix()},
             {"name": "street_b", "value": self._faker.street_name()},
             {"name": "zip_b", "value": self._faker.zipcode()},
             {"name": "city_b", "value": self._faker.city()},
             {"name": "location_details_b", "value": location_id_b},
         ],
     })