def generate_object(self, entry: dict, *args, **kwargs) -> dict:
    """Create the native CMDB object from parsed content.

    Args:
        entry: Parsed import entry; may carry a 'fields' list of
            {'name': ..., 'value': ...} dicts.
        **kwargs: Must contain 'fields', the field definitions of the
            target type (each a dict with at least a 'name' key).

    Returns:
        dict: Object payload with type_id, fields, author_id, version
        and creation_time set.

    Raises:
        KeyError: If the required 'fields' kwarg is missing.
    """
    possible_fields: List[dict] = kwargs['fields']
    mapping: dict = self.config.get_mapping()
    working_object: dict = {
        'type_id': self.config.get_type_id(),
        'fields': [],
        'author_id': self.request_user.get_public_id(),
        'version': '1.0.0',
        'creation_time': datetime.datetime.utcnow()
    }
    # Apply every mapped property onto the working object.
    # `or []` guards against a mapping without a 'properties' key,
    # which would otherwise raise TypeError on iteration over None.
    for prop in mapping.get('properties') or []:
        working_object = self._map_element(prop, entry, working_object)
    # Only keep entry fields that exist on the target type; coerce
    # obvious boolean/date strings into native values first.
    for entry_field in entry.get('fields') or []:
        field_exists = next(
            (item for item in possible_fields
             if item["name"] == entry_field['name']),
            None)
        if field_exists:
            entry_field['value'] = ImproveObject.improve_boolean(entry_field['value'])
            entry_field['value'] = ImproveObject.improve_date(entry_field['value'])
            working_object['fields'].append(entry_field)
    return working_object
def generate_object(self, entry: dict, *args, **kwargs) -> dict:
    """Create the native CMDB object from a parsed CSV entry.

    Args:
        entry: Parsed CSV row as a dict keyed by column identifier.
        **kwargs: Must contain 'fields', the field definitions of the
            target type (each a dict with at least a 'name' key).

    Returns:
        dict: Object payload with active flag, type_id, mapped
        properties, validated fields, author_id, version and
        creation_time set.

    Raises:
        ImportRuntimeError: If the required 'fields' kwarg is missing.
    """
    try:
        possible_fields: List[dict] = kwargs['fields']
    except (KeyError, IndexError, ValueError) as err:
        raise ImportRuntimeError(CsvObjectImporter,
                                 f'[CSV] cant import objects: {err}')
    working_object: dict = {
        'active': True,
        'type_id': self.get_config().get_type_id(),
        'fields': [],
        'author_id': self.request_user.get_public_id(),
        'version': '1.0.0',
        'creation_time': datetime.datetime.utcnow()
    }
    current_mapping = self.get_config().get_mapping()
    property_entries: List[MapEntry] = current_mapping.get_entries_with_option(
        query={'type': 'property'})
    field_entries: List[MapEntry] = current_mapping.get_entries_with_option(
        query={'type': 'field'})

    # Insert properties
    for property_entry in property_entries:
        working_object.update({
            property_entry.get_name(): entry.get(property_entry.get_value())
        })

    # Improve insert object
    improve_object = ImproveObject(entry, field_entries, possible_fields)
    entry = improve_object.improve_entry()

    # Validate insert fields.
    # BUGFIX: the original checked `get_name() not in possible_fields`,
    # i.e. a *name string* against a *list of dicts* — always False, so
    # every field was skipped. Compare against the set of field names.
    possible_field_names = {field['name'] for field in possible_fields}
    for field_entry in field_entries:
        if field_entry.get_name() not in possible_field_names:
            continue
        working_object['fields'].append({
            'name': field_entry.get_name(),
            'value': entry.get(field_entry.get_value())
        })
    return working_object
def generate_object(self, entry: dict, *args, **kwargs) -> dict:
    """Create the native CMDB object from a parsed CSV entry,
    resolving foreign-key ('ref') mappings to existing objects.

    Args:
        entry: Parsed CSV row as a dict keyed by column identifier.
        **kwargs: Must contain 'fields', the field definitions of the
            target type (each a dict with at least a 'name' key).

    Returns:
        dict: Object payload with active flag, type_id, mapped
        properties, validated fields (including resolved refs),
        author_id, version and creation_time set.

    Raises:
        ImportRuntimeError: If the required 'fields' kwarg is missing.
    """
    try:
        possible_fields: List[dict] = kwargs['fields']
    except (KeyError, IndexError, ValueError) as err:
        raise ImportRuntimeError(CsvObjectImporter,
                                 f'[CSV] cant import objects: {err}')
    working_object: dict = {
        'active': True,
        'type_id': self.get_config().get_type_id(),
        'fields': [],
        'author_id': self.request_user.get_public_id(),
        'version': '1.0.0',
        'creation_time': datetime.datetime.utcnow()
    }
    current_mapping = self.get_config().get_mapping()
    property_entries: List[MapEntry] = current_mapping.get_entries_with_option(
        query={'type': 'property'})
    field_entries: List[MapEntry] = current_mapping.get_entries_with_option(
        query={'type': 'field'})
    foreign_entries: List[MapEntry] = current_mapping.get_entries_with_option(
        query={'type': 'ref'})

    # field/properties improvement
    improve_object = ImproveObject(entry, property_entries, field_entries,
                                   possible_fields)
    entry = improve_object.improve_entry()

    # Insert properties
    for property_entry in property_entries:
        working_object.update({
            property_entry.get_name(): entry.get(property_entry.get_value())
        })

    # Validate insert fields: only keep fields defined on the target type.
    for entry_field in field_entries:
        field_exists = next(
            (item for item in possible_fields
             if item["name"] == entry_field.get_name()),
            None)
        if field_exists:
            working_object['fields'].append({
                'name': entry_field.get_name(),
                'value': entry.get(entry_field.get_value())
            })

    # Resolve 'ref' mappings: look up the referenced object by matching
    # one of its field values and store its public_id.
    for foreign_entry in foreign_entries:
        LOGGER.debug(
            f'[CSV] search for object based on {foreign_entry.__dict__}')
        try:
            working_type_id = foreign_entry.get_option()['type_id']
        except (KeyError, IndexError):
            # Ref mapping without a target type — nothing to resolve.
            continue
        try:
            query: dict = {
                'type_id': working_type_id,
                'fields': {
                    '$elemMatch': {
                        '$and': [
                            {'name': foreign_entry.get_option()['ref_name']},
                            {'value': entry.get(foreign_entry.get_value())},
                        ]
                    }
                }
            }
            LOGGER.debug(f'[CSV] Ref query: {query}')
            founded_objects: List[CmdbObject] = \
                self.object_manager.get_objects_by(**query)
            LOGGER.debug(founded_objects)
            # Only an unambiguous (exactly one) match is usable as a ref.
            if len(founded_objects) != 1:
                continue
            working_object['fields'].append({
                'name': foreign_entry.get_name(),
                'value': founded_objects[0].get_public_id()
            })
        # Deliberate best-effort: a failed ref lookup skips this entry
        # instead of aborting the import. (`ObjectManagerGetError` is a
        # subclass of Exception, so one broad catch suffices.)
        except Exception as err:
            # BUGFIX: `err.message` does not exist on Python 3
            # exceptions and raised AttributeError inside the handler.
            LOGGER.error(f'[CSV] Error while loading ref object {err}')
            continue
    return working_object