Example #1
def link_medienart(easydb_context, data, search_result, name):
	# get the medienart id from the search result and set it in the object
	result_objects = get_json_value(search_result, "objects")
	if isinstance(result_objects, list) and len(result_objects) > 0:

		# there should only be one hit, but to be sure, iterate through the list of result objects and find the one with the correct name
		for k in range(len(result_objects)):

			# check if the name is correct and there is a valid id
			medienart_name = get_json_value(result_objects[k], "medienart.name")
			print medienart_name, type(medienart_name)
			if isinstance(medienart_name, unicode) and medienart_name == unicode(name):
				medienart_id = get_json_value(result_objects[k], "medienart._id")
				print medienart_id, type(medienart_id)
				if isinstance(medienart_id, int):

					# the medienart id is valid, add a linked object to the data
					data["medium"]["medienart"] = {
							"medienart": {
								"_id": medienart_id
							},
							"_objecttype": "medienart",
							"_mask": "_all_fields"
						}

					return data
	return data
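
A helper used in every example here, get_json_value, is not part of the listings. A minimal sketch of a dotted-path JSON lookup with the same call pattern is shown below; the name carries a _sketch suffix and the meaning of the optional third argument is an assumption, the real helper ships with the easydb server.

# Sketch (assumption): dotted-path lookup resembling the get_json_value helper used above.
# The semantics of the optional 'expected' flag are assumed, not taken from the examples.
def get_json_value_sketch(js, path, expected=False):
    current = js
    for part in path.split("."):
        if isinstance(current, dict) and part in current:
            current = current[part]
        elif expected:
            raise KeyError("missing key '%s' in path '%s'" % (part, path))
        else:
            return None
    return current

# usage: get_json_value_sketch({"medienart": {"_id": 7}}, "medienart._id") returns 7
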
Example #2
def link_medienart(easydb_context, logger, data, search_result, name):
    # get the medienart id from the search result and set it in the object
    result_objects = get_json_value(search_result, 'objects')
    if isinstance(result_objects, list) and len(result_objects) > 0:

        # there should only be one hit, but to be sure, iterate through the list of result objects and find the one with the correct name
        for k in range(len(result_objects)):

            # check if the name is correct and there is a valid id
            medienart_name = get_json_value(result_objects[k],
                                            'medienart.name')
            if isinstance(medienart_name,
                          unicode) and medienart_name == unicode(name):
                medienart_id = get_json_value(result_objects[k],
                                              'medienart._id')
                if isinstance(medienart_id, int):

                    # the medienart id is valid, add a linked object to the data
                    data['medium']['medienart'] = {
                        'medienart': {
                            '_id': medienart_id
                        },
                        '_objecttype': 'medienart',
                        '_mask': '_all_fields'
                    }
                    logger.debug('link object %d with medienart id %d' %
                                 (k, medienart_id))

                    return data
    return data
Example #3
def perform_search_easydb(easydb_context, query, logger = None):
	# get the user id from the current session
	user_id = None
	try:
		session = easydb_context.get_session()
		user_id = get_json_value(session, "user.user._id")
		if not isinstance(user_id, int):
			if logger is not None:
				logger.error("Could not get user id from session")
			return None
	except Exception as e:
		if logger is not None:
			logger.error("Could not get user id from session: %s" % e)
		return None

	# perform the search in the context of the current user
	search_result = easydb_context.search("user", user_id, query)
	if logger is not None:
		logger.debug("Search Result: %s" % json.dumps(search_result, indent = 4))

	return search_result
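
The pre_update hooks below call search_for_medienart, which is not included in these listings. A hedged sketch of how such a helper could build a query and hand it to perform_search_easydb follows; the query layout is an assumption modeled on the easydb search API and may need to be adjusted to the actual schema.

# Sketch (assumption): build a search query for a 'medienart' object by name and run it
# through perform_search_easydb. The query structure is assumed, not taken from the examples.
def search_for_medienart_sketch(easydb_context, name, logger=None):
    query = {
        "type": "object",
        "objecttypes": ["medienart"],
        "search": [{
            "type": "in",
            "fields": ["medienart.name"],
            "in": [name]
        }]
    }
    return perform_search_easydb(easydb_context, query, logger)
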
Example #4
def export_as_yml(easydb_context, parameters):
	logger = easydb_context.get_logger('example-plugin.export_as_yml')

	# get the exporter definition
	exporter = easydb_context.get_exporter()

	# check if the export definition fits to this plugin
	produce_options = get_json_value(exporter.getExport(), "export.produce_options", False)
	if str(get_json_value(produce_options, "plugin", False)) != "example_export":
		return

	# check if the produce option for exporting the YML with or without tags is a boolean, else set it to false
	with_tags = get_json_value(produce_options, "with_tags", False)
	if not isinstance(with_tags, bool):
		with_tags = False

	# load exported files (need to be exported as JSON files)
	export_dir = exporter.getFilesPath()
	logger.debug("Export Dir: %s" % export_dir)

	files = exporter.getFiles()
	if not isinstance(files, list) or len(files) < 1:
		logger.warn("No valid file list!")
		return

	# iterate over the definitions of the exported files and parse the json content
	for f in files:
		file_path = str(get_json_value(f, "path", False))
		if file_path.lower().endswith(".json"):
			# absolute path to the original file
			file_path = os.path.abspath(export_dir + "/" + file_path)

			# path of the new file
			file_name = str(f["path"].split(".")[0] + ".yml")

			logger.debug("Converting JSON file %s to YML" % file_path)

			try:
				# load and parse the json file
				with open(file_path, "r") as json_file:
					content = json.loads(json_file.read().decode('utf-8'))

				# convert the objects that are defined in a json array to YML and save it in a file next to the original file
				objects = get_json_value(content, "objects", False)
				if isinstance(objects, list) and len(objects) > 0:

					# save the file in the temporary folder and add it later to the exported files
					tmp_filename = os.path.abspath("%s/../tmp/objects.yml" % export_dir)

					with open(tmp_filename, "w") as yml_file:
						# define the final dict that will be converted to YML
						object_output = {
							"objects": objects
						}

						# depending on the produce options, export the YML with or without tags
						if with_tags:
							yaml.dump(object_output, yml_file, default_flow_style = False)
						else:
							yaml.safe_dump(object_output, yml_file, default_flow_style = False)

						yml_file.close()

						logger.debug("Saved objects as %s" % tmp_filename)

						# add the new YML file to the export so it can be opened or downloaded from the frontend
						exporter.addFile(tmp_filename, file_name)

						# remove the old JSON file
						exporter.removeFile(f["path"])

				else:
					logger.debug("Found no 'objects' array")
			except Exception as e:
				logger.warn("Could not convert JSON to YML: %s" % str(e))
Example #5
def pre_update(easydb_context, easydb_info):
	# get a logger
	logger = easydb_context.get_logger('example-plugin.pre_update')
	logger.info("pre_update was called")

	# get the object data
	data = get_json_value(easydb_info, "data")
	logger.debug("%d Objects" % len(data))

	# check the data, and if there is invalid data, throw an InvalidValueError
	for i in range(len(data)):

		# check if the objecttype is correct
		if data[i]["_objecttype"] != "medium":
			logger.debug("Ignoring object type %s" % data[i]["_objecttype"])
			continue

		# depending on the mask, check if mandatory fields are set and set the linked object medienart
		if data[i]["_mask"] == "medium_cd":
			logger.debug("Checking mandatory fields for 'CD'")
			spieldauer_min = get_json_value(data[i], "medium.spieldauer_min")

			# check if the fields are valid
			if spieldauer_min is None or not isinstance(spieldauer_min, int) or spieldauer_min <= 0:
				raise InvalidValueError("spieldauer_min", str(spieldauer_min), "integer > 0")

			# format the time to hh:mm:ss. the decimal number is defined as an integer, so divide the value by 100 to get seconds
			hours, remainder = divmod(int(float(spieldauer_min) / 100.0), 3600)
			minutes, seconds = divmod(remainder, 60)
			data[i]["medium"]["spieldauer"] = "%02d:%02d:%02d" % (hours, minutes, seconds)

			# set the linked object medienart with the value 'CD'
			# perform an elasticsearch request to get the id of the medienart object
			search_result = search_for_medienart(easydb_context, "CD")
			data[i] = link_medienart(easydb_context, data[i], search_result, "CD")

		elif data[i]["_mask"] == "medium_buch":
			logger.debug("Checking mandatory fields for 'Buch'")
			seitenzahl = get_json_value(data[i], "medium.seitenzahl")

			# check if the fields are valid
			if seitenzahl is None or not isinstance(seitenzahl, int) or seitenzahl <= 0:
				raise InvalidValueError("seitenzahl", str(seitenzahl), "integer > 0")

			# set the linked object medienart with the value 'Buch'
			# perform an elasticsearch request to get the id of the medienart object
			search_result = search_for_medienart(easydb_context, "Buch")
			data[i] = link_medienart(easydb_context, data[i], search_result, "Buch")

		# to avoid confusion with masks and read/write settings in masks, always use the _all_fields mask
		data[i]["_mask"] = "_all_fields"

		# generate a unique id for this object, if there is none (when the object was just created)
		if get_json_value(data[i], "medium.identifier") is None:
			new_id = str(generate_unique_id(easydb_context))
			logger.debug("Generating new ID for Object %d: %s" % (i, new_id))
			data[i]["medium"]["identifier"] = new_id


	# always return if no exception was thrown, so the server and frontend are not blocked
	print json.dumps(data, indent=4)
	return data
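
The duration handling above treats the stored integer as a decimal with two implied fraction digits, so dividing by 100 yields seconds before the divmod split into hours, minutes and seconds. A short worked example of exactly that formatting:

# Worked example of the hh:mm:ss formatting used in the pre_update hook above.
def format_spieldauer(spieldauer_min):
    seconds_total = int(float(spieldauer_min) / 100.0)
    hours, remainder = divmod(seconds_total, 3600)
    minutes, seconds = divmod(remainder, 60)
    return "%02d:%02d:%02d" % (hours, minutes, seconds)

# format_spieldauer(372000) returns "01:02:00" (3720 seconds)
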
Example #6
    def update(self, easydb_context, easydb_info):

        self.logger = easydb_context.get_logger(
            'base.custom_data_type_gazetteer')

        # get the object data
        data = get_json_value(easydb_info, 'data')
        if len(data) < 1:
            return []

        self.db_cursor = easydb_context.get_db_cursor()

        if not get_bool_from_baseconfig(self.db_cursor, 'system',
                                        'gazetteer_plugin_settings',
                                        'enabled'):
            self.logger.debug('automatic update not enabled')
            return data

        self.objecttype = get_string_from_baseconfig(
            self.db_cursor, 'system', 'gazetteer_plugin_settings',
            'objecttype')
        if self.objecttype is None:
            self.logger.debug(
                'automatic update enabled, but no objecttype selected')
            return data

        self.logger.debug('objecttype: %s' % self.objecttype)

        _dm = easydb_context.get_datamodel(show_easy_pool_link=True,
                                           show_is_hierarchical=True)
        self.objecttype_id = None
        self.easy_pool_link = None

        if not 'user' in _dm or not 'tables' in _dm['user']:
            self.logger.warn('invalid datamodel: user.tables not found')
            return data

        for _ot in _dm['user']['tables']:
            if 'name' in _ot and _ot['name'] == self.objecttype:
                if not ('is_hierarchical' in _ot
                        and _ot['is_hierarchical'] == True):
                    raise GazetteerError('objecttype.not_hierarchical',
                                         'objecttype: "%s"' % self.objecttype)

                self.easy_pool_link = 'easy_pool_link' in _ot and _ot[
                    'easy_pool_link'] == True
                try:
                    if 'table_id' in _ot:
                        self.objecttype_id = int(_ot['table_id'])
                        self.logger.debug('objecttype id: %s' %
                                          self.objecttype_id)
                except:
                    pass
                break

        if self.objecttype_id is None:
            self.logger.warn('objecttype %s not found in datamodel' %
                             self.objecttype)
            return data

        self.field_to = get_string_from_baseconfig(
            self.db_cursor, 'system', 'gazetteer_plugin_settings', 'field_to')
        if self.field_to is None:
            raise GazetteerError('field_to.not_set')

        if self.field_to.startswith(self.objecttype + "."):
            self.field_to = self.field_to[len(self.objecttype) + 1:]
        self.logger.debug('field_to: %s' % self.field_to)

        self.field_from = get_string_from_baseconfig(
            self.db_cursor, 'system', 'gazetteer_plugin_settings',
            'field_from')
        if self.field_from is not None:
            if self.field_from.startswith(self.objecttype + "."):
                self.field_from = self.field_from[len(self.objecttype) + 1:]
            self.logger.debug('field_from: %s' % self.field_from)
        else:
            self.logger.debug('field_from not set, will use field_to %s' %
                              self.field_to)

        self.script = "%s/../../build/scripts/gazetteer-update.js" % os.path.abspath(
            os.path.dirname(__file__))
        self.config = easydb_context.get_config()

        on_update = get_bool_from_baseconfig(self.db_cursor, 'system',
                                             'gazetteer_plugin_settings',
                                             'on_update')

        for i in range(len(data)):

            if not '_objecttype' in data[i]:
                self.logger.debug(
                    'could not find _objecttype in data[%s] -> skip' % i)
                continue

            if data[i]['_objecttype'] != self.objecttype:
                self.logger.debug('data[%s]["_objecttype"] != %s -> skip' %
                                  (i, self.objecttype))
                continue

            if not self.objecttype in data[i]:
                self.logger.debug('data[%s][%s] not found -> skip' %
                                  (i, self.objecttype))
                continue

            if not on_update:
                if not '_version' in data[i][self.objecttype]:
                    self.logger.debug(
                        'on_update is not enabled, but could not find _version in data[%s] -> skip'
                        % i)
                    continue

                if data[i][self.objecttype]['_version'] != 1:
                    self.logger.debug(
                        'on_update is not enabled and _version of data[%s] = %s -> no insert -> skip'
                        % (i, data[i][self.objecttype]['_version']))
                    continue

            _pool_id = None
            if self.easy_pool_link:
                _pool_id = get_json_value(
                    data[i], '%s._pool.pool._id' % self.objecttype)
                if _pool_id is None:
                    self.logger.debug(
                        'could not find _pool.pool._id in data[%s] -> skip' %
                        i)
                    continue
                self.logger.debug('pool id: %s' % _pool_id)

            _gazetteer_id = get_json_value(
                data[i], '%s.%s' %
                (self.objecttype,
                 self.field_from)) if self.field_from is not None else None
            self.logger.debug(
                'data.%s.%s.%s: \'%s\'' %
                (i, self.objecttype, self.field_from, str(_gazetteer_id)))
            if _gazetteer_id is None:
                _gazetteer_id = get_json_value(
                    data[i], '%s.%s.gazId' % (self.objecttype, self.field_to))
                self.logger.debug(
                    'data.%s.%s.%s.gazId: \'%s\'' %
                    (i, self.objecttype, self.field_to, str(_gazetteer_id)))
                if _gazetteer_id is None:
                    self.logger.debug(
                        'data.%s.%s.[%s / %s.gazId] not found or null -> skip'
                        % (i, self.objecttype, self.field_from, self.field_to))
                    continue

            _response = self.load_gazetteer(easydb_context, _gazetteer_id)
            if _response is None:
                self.logger.warn(
                    'did not get a response from server for gazetteer id \'%s\''
                    % _gazetteer_id)
                return data

            _objects = []
            if 'gazId' in _response:
                _objects = [{'id': 1, 'gazId': str(_response['gazId'])}]
            else:
                self.logger.warn(
                    'could not find \'gazId\' in response for query for gazetteer id %s'
                    % _gazetteer_id)
                return data

            if 'parents' in _response:
                for p in range(len(_response['parents'])):
                    if 'gazId' in _response['parents'][p]:
                        _objects.append({
                            'id':
                            p + 2,
                            'gazId':
                            str(_response['parents'][p]['gazId'])
                        })

            _objects_to_index = set()

            _formatted_data = self.format_custom_data(_objects)
            if len(_formatted_data) < 1:
                self.logger.warn(
                    'did not get any formatted data from node_runner')
                return data

            _parent_id = None
            if len(_formatted_data) > 1:
                self.logger.debug('gazetteer object has %d parents' %
                                  (len(_formatted_data) - 1))
                k = len(_formatted_data) - 1
                while k >= 1:
                    _object_id, _owner_id = self.exists_gazetteer_object(
                        _formatted_data[k])
                    if _owner_id is None:
                        _owner_id = 1  # assume root user
                    if _object_id is None:
                        # object does not exist yet, create new object
                        _object_id = self.create_gazetteer_object(
                            _formatted_data[k], _owner_id, _parent_id,
                            _pool_id)
                        self.logger.debug(
                            'inserted new object %s:%s (parent: %s)' %
                            (self.objecttype, _object_id, _parent_id))
                        _objects_to_index.add(_object_id)
                    _parent_id = _object_id
                    self.logger.debug('parent id: %s' % _parent_id)
                    k -= 1

            easydb_context.update_user_objects(self.objecttype,
                                               list(_objects_to_index))

            data[i][self.objecttype]['_id_parent'] = _parent_id

            data[i]['_mask'] = '_all_fields'
            data[i][self.objecttype][self.field_to] = _formatted_data[0]
            self.logger.debug(
                'data.%s.%s.%s updated with custom data from gazetteer repository'
                % (i, self.objecttype, self.field_to))

        return data
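
The helpers of this custom data type (load_gazetteer, format_custom_data, exists_gazetteer_object, create_gazetteer_object) are not part of this listing. As an illustration, a hedged sketch of the lookup against the gazetteer repository is given below; the endpoint URL and the use of requests are assumptions, only the expected response keys 'gazId' and 'parents' are taken from the code above.

# Sketch (assumption): fetch a gazetteer document as JSON. The URL is a placeholder;
# the update() method above only relies on the keys 'gazId' and 'parents' in the response.
import requests

def load_gazetteer_sketch(gazetteer_id, base_url="https://gazetteer.example.com/doc"):
    try:
        response = requests.get("%s/%s.json" % (base_url, gazetteer_id), timeout=30)
        if response.status_code != 200:
            return None
        return response.json()
    except requests.exceptions.RequestException:
        return None
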
Example #7
def check_expiration_date(easydb_context):

    logger = easydb_context.get_logger(
        'pf.plugin.base.example_plugin.check_expiration_date')

    # connect to the database
    connection = easydb_context.db_connect('check_expiration_date')

    # perform a check if the objecttype 'medium' exists

    sql = """
        SELECT EXISTS(
            SELECT * FROM information_schema.tables
            WHERE table_schema = 'public' AND table_name = 'medium'
        ) AS medium_exists;
    """

    # perform the request and save the result (reuse one cursor, a fresh cursor has nothing to fetch)
    cursor = connection.cursor()
    cursor.execute(sql)
    result = cursor.fetchone()

    if result['medium_exists'] != u't':
        logger.debug('objecttype \'medium\' does not exist, skip')
        return

    # load the configuration
    config = easydb_context.get_config(connection)

    # search all objects of the type 'medium', using a SQL query, where the expiration data is in less then a week
    while True:

        # create and format a date that is 7 days in the future
        days_in_future = 7
        date = datetime.now() + relativedelta(days=days_in_future)
        date_str = date.strftime('%Y-%m-%d')

        # build the Postgres statement
        sql = """
            SELECT m."id:pkey", m.titel, m.identifier, m.ablaufdatum, m.":owner:ez_user:id",
                u.login, u.name, u.firstname, u.displayname, u.frontend_language,
                e.address
            FROM medium m JOIN ez_user u ON m.":owner:ez_user:id" = u."ez_user:id"
            AND ablaufdatum <= '%s'
            JOIN "ez_user:email" e ON e."ez_user:id" = u."ez_user:id"
            AND e.is_primary AND e.send_email AND address IS NOT NULL;
        """

        # perform the request and save the result (reuse one cursor so the result can be fetched)
        cursor = connection.cursor()
        cursor.execute(sql % date_str)
        result = cursor.fetchall()

        mails_to_send = {}

        logger.debug('%s results found' % len(result))
        for row in result:
            try:
                # information about the object
                identifier = row['identifier']
                titel = row['titel']
                ablaufdatum = datetime.strptime(row['ablaufdatum'], '%Y-%m-%d')

                # mail address
                address = row['address'] if len(row['address']) else None

                if address is None:
                    continue

                # user information
                user_displayname = row['displayname']
                if user_displayname is None or len(user_displayname) < 1:
                    user_displayname = ''
                    user_displayname += row['firstname'] if row[
                        'firstname'] is not None else ''
                    user_displayname += ' ' if len(
                        user_displayname) > 0 else ''
                    user_displayname += row['name'] if row[
                        'name'] is not None else ''

                    if len(user_displayname) < 1:
                        user_displayname = row['login'] if row[
                            'login'] is not None else ''

                        if len(user_displayname) < 1:
                            user_displayname = address

                # set the locale according to the user language
                user_lang = row['frontend_language']

                # write the text for the mail in german or english
                # TODO get the l10n translations from the server
                mail_text = None

                if user_lang == 'de-DE':
                    locale.setlocale(locale.LC_ALL, 'de_DE.utf8')
                    mail_text = 'Medium %s (%s) laeuft am %s ab' % (
                        identifier, titel, '%s, den %s' %
                        (calendar.day_name[ablaufdatum.weekday()],
                         ablaufdatum.strftime('%d.%m.%Y')))
                else:
                    locale.setlocale(locale.LC_ALL, 'en_US.utf8')
                    mail_text = 'Medium %s (%s) expires %s' % (
                        identifier, titel, '%s, %s' %
                        (calendar.day_name[ablaufdatum.weekday()],
                         ablaufdatum.strftime('%Y-%m-%d')))

                logger.info(mail_text)

                if not address in mails_to_send:
                    mails_to_send[address] = {
                        'language': user_lang,
                        'name': user_displayname,
                        'mail_text': [],
                        'objects': []
                    }

                mails_to_send[address]['mail_text'].append(mail_text)

                # remember the object details for the event log below
                mails_to_send[address]['objects'].append({
                    'medium_id': identifier,
                    'date': ablaufdatum.strftime('%Y-%m-%d'),
                    'title': titel})

            except Exception as e:
                raise e

        for adr in mails_to_send:

            mail = None
            if mails_to_send[adr]['language'] == 'de-DE':
                mail = 'Hallo %s,\n\ndie folgenden Objekte laufen innerhalb der naechsten %d Tage ab:\n\n%s\n\nMit freundlichen Gruessen'
            else:
                mail = 'Hello %s,\n\nthe following objects expire during the next %d days:\n\n%s\n\nRegards'

            logger.debug('Mail to %s:\n%s' %
                         (adr, mail %
                          (mails_to_send[adr]['name'], days_in_future,
                           '\n - '.join(mails_to_send[adr]['mail_text']))))

            # TODO send the mail instead of logging the mail text

            # log a custom event for each expiring object to add the sending of the mail to the event log of the server
            db_name = get_json_value(easydb_context.get_instance(), 'db-name')
            db_conn = easydb_context.db_connect(db_name)
            for obj in mails_to_send[adr]['objects']:
                easydb_context.log_event(
                    db_conn, 'EASYDB_EXAMPLE_PLUGIN_EVENT', {
                        'medium_id': obj['medium_id'],
                        'date': obj['date'],
                        'title': obj['title'],
                        'owner_name': mails_to_send[adr]['name']
                    })
            db_conn.commit()

        # sleep for one hour
        sleep(60 * 60)
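
The date filter in the statement above is interpolated into the SQL string with the % operator. If the cursor returned by db_connect follows the Python DB-API, binding the value as a parameter is the safer pattern; a simplified sketch under that assumption (the user join is left out):

# Sketch (assumption: the cursor is DB-API compatible): bind the date value instead of
# interpolating it into the SQL string. The query is a reduced version of the one above.
def select_expiring_media(connection, date_str):
    sql = """
        SELECT m."id:pkey", m.titel, m.identifier, m.ablaufdatum
        FROM medium m
        WHERE m.ablaufdatum <= %s;
    """
    cursor = connection.cursor()
    cursor.execute(sql, (date_str,))
    return cursor.fetchall()
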
Example #8
def pre_update(easydb_context, easydb_info):
    # get a logger
    logger = easydb_context.get_logger(
        'pf.plugin.base.example_plugin.pre_update')
    logger.info('pre_update was called')

    # get the object data
    data = get_json_value(easydb_info, 'data')
    logger.debug('%d Objects' % len(data))

    # check the data, and if there is invalid data, throw an InvalidValueError
    for i in range(len(data)):

        # check if the objecttype is set
        if '_objecttype' not in data[i]:
            continue

        # check if the objecttype is correct
        if data[i]['_objecttype'] != 'medium':
            logger.debug('Ignoring object type %s' % data[i]['_objecttype'])
            continue

        # depending on the mask, check if mandatory fields are set and set the linked object medienart
        if data[i]['_mask'] == 'medium_cd':
            logger.debug('Checking mandatory fields for \'CD\'')
            spieldauer_min = get_json_value(data[i], 'medium.spieldauer_min')

            # check if the fields are valid
            if spieldauer_min is None or not isinstance(
                    spieldauer_min, int) or spieldauer_min <= 0:
                raise InvalidValueError('spieldauer_min', str(spieldauer_min),
                                        'integer > 0')

            # format the time to hh:mm:ss. the decimal number is defined as an integer, so divide the value by 100 to get seconds
            hours, remainder = divmod(int(float(spieldauer_min) / 100.0), 3600)
            minutes, seconds = divmod(remainder, 60)
            data[i]['medium']['spieldauer'] = '%02d:%02d:%02d' % (
                hours, minutes, seconds)

            # set the linked object medienart with the value 'CD'
            # perform an elasticsearch request to get the id of the medienart object
            search_result = search_for_medienart(easydb_context, 'CD')
            data[i] = link_medienart(easydb_context, logger, data[i],
                                     search_result, 'CD')

        elif data[i]['_mask'] == 'medium_buch':
            logger.debug('Checking mandatory fields for \'Buch\'')
            seitenzahl = get_json_value(data[i], 'medium.seitenzahl')

            # check if the fields are valid
            if seitenzahl is None or not isinstance(seitenzahl,
                                                    int) or seitenzahl <= 0:
                raise InvalidValueError('seitenzahl', str(seitenzahl),
                                        'integer > 0')

            # set the linked object medienart with the value 'Buch'
            # perform an elasticsearch request to get the id of the medienart object
            search_result = search_for_medienart(easydb_context, 'Buch')
            data[i] = link_medienart(easydb_context, logger, data[i],
                                     search_result, 'Buch')

        # to avoid confusion with masks and read/write settings in masks, always use the _all_fields mask
        data[i]['_mask'] = '_all_fields'

        # generate a unique id for this object, if there is none (when the object was just created)
        if get_json_value(data[i], 'medium.identifier') is None:
            new_id = str(generate_unique_id(easydb_context))
            logger.debug('Generating new ID for Object %d: %s' % (i, new_id))
            data[i]['medium']['identifier'] = new_id

    # always return if no exception was thrown, so the server and frontend are not blocked
    logger.debug(
        'pre_update function returns following data to the database: %s' %
        json.dumps(data, indent=4))
    return data
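
generate_unique_id is called here but not shown, and these examples do not reveal how the real helper builds its identifiers. A minimal stand-in based on uuid4 could look like this; treat it as a placeholder, not as the server's implementation.

# Sketch (assumption): a uuid4-based stand-in for the generate_unique_id helper.
import uuid

def generate_unique_id_sketch(easydb_context=None):
    # easydb_context is accepted only to mirror the call in pre_update; it is not used here
    return str(uuid.uuid4())
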
Example #9
def pre_update(easydb_context, easydb_info):
    inventarnummer = ''
    convertedFormula = ''
    # get a logger
    logger = easydb_context.get_logger('ulb.pre_update')
    logger.info("pre_update called via ulb plugin")

    # get the object data
    data = get_json_value(easydb_info, "data")
    logger.debug("%d Objects" % len(data))

    # check the data, and if there is invalid data, throw an InvalidValueError
    for i in range(len(data)):

        # check if the objecttype is set
        if "_objecttype" not in data[i]:
            continue

        # if the objecttype is not 'invrnr', request a new inventory number from the external service
        if data[i]["_objecttype"] != "invrnr":
            url = "http://example.com"  # TODO insert url of invnr service here
            jsonwebtoken = "token"  # TODO insert valid jwt here
            try:
                result = requests.post(
                    url=url,
                    headers={'Authorization': 'Bearer ' + jsonwebtoken},
                    json={
                        "institution": "ABC",
                        "prefix": "cfg"
                    })
                # TODO configure prefix
                json_data = result.json()
                if "invnr" in json_data:
                    inventarnummer = json_data["invnr"]
                else:
                    logger.debug(json.dumps(json_data))
            except requests.exceptions.ConnectionError as e:
                logger.debug("Could not reach the invnr service: %s" % e)
        if data[i]["_objecttype"] != "formula":
            url = "http://example.com"  # TODO insert url of converter service here
            result = requests.post(url=url,
                                   json={
                                       "formula": data[i]["ztest"]["formula"]
                                   })  # TODO configure formula sent
            json_data = result.json()
            if "convertedFormula" in json_data:
                convertedFormula = json_data["convertedFormula"]
            else:
                logger.debug(json.dumps(json_data))
        # to avoid confusion with masks and read/write settings in masks, always use the _all_fields mask
        data[i]["_mask"] = "_all_fields"

        # only write invnr if field is empty
        if get_json_value(data[i], "ztest.invnr") is None:
            try:
                data[i]["ztest"]["invnr"] = inventarnummer
            except:
                logger.debug("Problem generating invnr: " + inventarnummer +
                             " at object " +
                             str(get_json_value(data[i], "ztest._id")))
        if get_json_value(data[i], "ztest.formula") is None:
            try:
                data[i]["ztest"]["formula"] = convertedFormula
            except:
                logger.debug("Problem saving formula: " + convertedFormula +
                             " at object " +
                             str(get_json_value(data[i], "ztest._id")))
    # always return if no exception was thrown, so the server and frontend are not blocked
    print(json.dumps(data, indent=4))
    return data
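
Both external calls in this hook run synchronously inside the save operation. A hedged sketch of a slightly more defensive wrapper for the inventory number request is shown below; the url and jsonwebtoken placeholders from the example are kept, and the timeout and status check are additions, not part of the original plugin.

# Sketch: POST to the external invnr service with a timeout and status check.
# url, jsonwebtoken, institution and prefix remain placeholders as in the example above.
import json
import requests

def request_invnr(url, jsonwebtoken, institution, prefix, logger, timeout=10):
    try:
        result = requests.post(
            url=url,
            headers={'Authorization': 'Bearer ' + jsonwebtoken},
            json={"institution": institution, "prefix": prefix},
            timeout=timeout)
        result.raise_for_status()
        json_data = result.json()
        if "invnr" in json_data:
            return json_data["invnr"]
        logger.debug(json.dumps(json_data))
    except requests.exceptions.RequestException as e:
        logger.debug("invnr service not reachable: %s" % e)
    return None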