Example #1
0
    async def _retrieve_omf_types_already_created(self, configuration_key,
                                                  type_id):
        """ Fetch the asset codes of OMF types already defined/sent to the PICROMF.

         Args:
             configuration_key - part of the key to identify the type
             type_id           - part of the key to identify the type
         Returns:
            List of asset codes already defined into the PI Server
         Raises:
         """
        query = payload_builder.PayloadBuilder() \
            .WHERE(['configuration_key', '=', configuration_key]) \
            .AND_WHERE(['type_id', '=', type_id]) \
            .payload()

        storage_reply = await self._sending_process_instance._storage_async.query_tbl_with_payload(
            'omf_created_objects', query)
        self._logger.debug("{func} - omf_created_objects {item} ".format(
            func="_retrieve_omf_types_already_created",
            item=storage_reply))
        # Keep only the asset_code column of each returned row
        return [entry['asset_code'] for entry in storage_reply['rows']]
Example #2
0
    async def get_all_backups(
            self,
            limit: int,
            skip: int,
            status: [lib.BackupStatus, None],
            sort_order: lib.SortOrder = lib.SortOrder.DESC) -> list:
        """ Return the information of the stored backups, most recent first.

        Args:
            limit: int - limit the number of backups returned to the number given
            skip: int - number of backups to skip before returning results;
                  together with limit this supports a paged interface
            status: lib.BackupStatus - restrict the result to backups having this status,
                    None = retrieves information for all the backups
            sort_order: lib.SortOrder - Defines the order used to present information, DESC by default

        Returns:
            backups_information: all the information available related to the requested backups

        Raises:
        """
        # NOTE(review): limit, skip and sort_order are accepted but never folded
        # into the storage payload below - confirm whether paging/ordering is
        # applied by the caller or is missing here.
        builder = payload_builder.PayloadBuilder().SELECT("id", "status", "ts", "file_name", "type") \
            .ALIAS("return", ("ts", 'ts')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS"))
        if status:
            builder.WHERE(['status', '=', status])

        storage_reply = await self._storage.query_tbl_with_payload(
            self._backup_lib.STORAGE_TABLE_BACKUPS, builder.payload())
        return storage_reply['rows']
Example #3
0
 async def add_statistics(key, description):
     """ Insert a new statistics row and return its key (read back via get_rows). """
     insert_payload = payload_builder.PayloadBuilder() \
         .INSERT(key=key, description=description) \
         .payload()
     await self._storage_async.insert_into_tbl("statistics", insert_payload)
     inserted = await get_rows(key=key)
     return inserted[0]['key']
Example #4
0
 async def get_rows_from_stream_id(stream_id):
     """ Return the streams-table rows whose id matches the given stream id. """
     query = payload_builder.PayloadBuilder() \
         .SELECT("id", "description", "active") \
         .WHERE(['id', '=', stream_id]) \
         .payload()
     storage_reply = await self._storage_async.query_tbl_with_payload("streams", query)
     return storage_reply['rows']
Example #5
0
    def sl_get_backup_details(self, backup_id: int) -> dict:
        """ Return the details of a single backup.

        Args:
            backup_id: int - the id of the backup to return

        Returns:
            backup_information: all the information available related to the requested backup_id

        Raises:
            exceptions.DoesNotExist
            exceptions.NotUniqueBackup
        """
        query = payload_builder.PayloadBuilder().SELECT("id", "status", "ts", "file_name", "type")\
            .ALIAS("return", ("ts", 'ts')).FORMAT("return", ("ts", "YYYY-MM-DD HH24:MI:SS.MS"))\
            .WHERE(['id', '=', backup_id]).payload()

        storage_reply = asyncio.get_event_loop().run_until_complete(
            self._storage.query_tbl_with_payload(self.STORAGE_TABLE_BACKUPS,
                                                 query))

        # Exactly one row must match the id; anything else is an error.
        n_found = storage_reply['count']
        if n_found == 0:
            raise exceptions.DoesNotExist
        if n_found > 1:
            raise exceptions.NotUniqueBackup

        return storage_reply['rows'][0]
Example #6
0
    def sl_get_backup_details_from_file_name(self, _file_name):
        """ Retrieve backup information given a backup file name.

        Args:
            _file_name: file name to search in the Storage layer

        Returns:
            backup_information: Backup information related to the file name

        Raises:
            exceptions.DoesNotExist
            exceptions.NotUniqueBackup
        """
        query = payload_builder.PayloadBuilder() \
            .WHERE(['file_name', '=', _file_name]) \
            .payload()

        storage_reply = asyncio.get_event_loop().run_until_complete(
            self._storage.query_tbl_with_payload(self.STORAGE_TABLE_BACKUPS,
                                                 query))

        # Exactly one row must match the file name; anything else is an error.
        n_found = storage_reply['count']
        if n_found == 0:
            raise exceptions.DoesNotExist
        if n_found > 1:
            raise exceptions.NotUniqueBackup

        return storage_reply['rows'][0]
Example #7
0
 async def get_rows(key):
     """ Return at most one statistics row matching the given key. """
     query = payload_builder.PayloadBuilder() \
         .SELECT("key", "description") \
         .WHERE(['key', '=', key]) \
         .LIMIT(1) \
         .payload()
     storage_reply = await self._storage_async.query_tbl_with_payload("statistics", query)
     return storage_reply['rows']
Example #8
0
 async def add_table_statistics_history(self, pyz, file_spec):
     """ Dump the newest statistics history rows from the storage layer into the tar bundle. """
     # Interim file path used as the archive member name
     dump_path = "{}/statistics-history-{}".format(self._interim_file_path, file_spec)
     query = payload_builder.PayloadBuilder() \
         .LIMIT(1000) \
         .ORDER_BY(['history_ts', 'DESC']) \
         .payload()
     rows = await self._storage.query_tbl_with_payload("statistics_history", query)
     self.write_to_tar(pyz, dump_path, rows)
Example #9
0
 async def add_stream(config_stream_id, description):
     """ Ensure a streams-table row exists for this process and return its id and active flag. """
     if not config_stream_id:
         # If an user is upgrading Fledge, then it has got existing data in streams table but
         # no entry in configuration for streams_id for this schedule name. Hence it must
         # check if an entry is already there for this schedule name in streams table.
         rows = await get_rows_from_name(description=self._name)
         if not rows:
             insert_payload = payload_builder.PayloadBuilder() \
                 .INSERT(description=description) \
                 .payload()
             await self._storage_async.insert_into_tbl("streams", insert_payload)
             rows = await get_rows_from_name(description=self._name)
     else:
         insert_payload = payload_builder.PayloadBuilder() \
             .INSERT(id=config_stream_id,
                     description=description) \
             .payload()
         await self._storage_async.insert_into_tbl("streams", insert_payload)
         rows = await get_rows_from_stream_id(stream_id=config_stream_id)
     return rows[0]['id'], rows[0]['active']
Example #10
0
 async def _last_object_id_update(self, new_last_object_id):
     """ Persist the last processed object id for this stream.

     Raises: re-raises any storage failure after logging message e000020.
     """
     try:
         update_payload = payload_builder.PayloadBuilder() \
             .SET(last_object=new_last_object_id, ts='now()') \
             .WHERE(['id', '=', self._stream_id]) \
             .payload()
         await self._storage_async.update_tbl("streams", update_payload)
     except Exception as _ex:
         SendingProcess._logger.error(_MESSAGES_LIST["e000020"].format(_ex))
         raise
Example #11
0
 async def add_table_plugin_data(self, pyz, file_spec):
     """ Dump the plugin_data table contents from the storage layer into the tar bundle. """
     # Interim file path used as the archive member name
     dump_path = "{}/plugin-data-{}".format(self._interim_file_path, file_spec)
     query = payload_builder.PayloadBuilder() \
         .LIMIT(1000) \
         .ORDER_BY(['key', 'ASC']) \
         .payload()
     rows = await self._storage.query_tbl_with_payload(
         "plugin_data", query)
     self.write_to_tar(pyz, dump_path, rows)
Example #12
0
    async def _delete_backup_information(self, _id):
        """ Delete the backup row with the given id from the Storage layer.

        Args:
            _id: Backup id to delete
        Returns:
        Raises:
        """
        delete_payload = payload_builder.PayloadBuilder() \
            .WHERE(['id', '=', _id]) \
            .payload()
        await self._storage.delete_from_tbl(
            self._backup_lib.STORAGE_TABLE_BACKUPS, delete_payload)
Example #13
0
    async def deleted_omf_types_already_created(self, config_category_name,
                                                type_id):
        """ Delete the OMF types tracked as already created, forcing their recreation.
         Args:
            config_category_name: used to identify OMF objects already created
            type_id:              used to identify OMF objects already created
         Returns:
         Raises:
         """
        delete_payload = payload_builder.PayloadBuilder() \
            .WHERE(['configuration_key', '=', config_category_name]) \
            .AND_WHERE(['type_id', '=', type_id]) \
            .payload()

        await self._sending_process_instance._storage_async.delete_from_tbl(
            "omf_created_objects", delete_payload)
Example #14
0
 async def _load_data_into_memory_statistics(self, last_object_id):
     """ Extract statistics data from the DB Layer and convert it into the proper format.

     Args:
         last_object_id: id of the last row already sent; only rows with a
                         greater id are fetched (up to blockSize rows, id ASC)
     Returns:
         converted_data: statistics history rows transformed by
                         _transform_in_memory_data_statistics
     Raises:
         Exception: any failure is logged (message e000009) and re-raised
     """
     try:
         payload = payload_builder.PayloadBuilder() \
             .SELECT("id", "key", '{"column": "ts", "timezone": "UTC"}', "value", "history_ts") \
             .WHERE(['id', '>', last_object_id]) \
             .LIMIT(self._config['blockSize']) \
             .ORDER_BY(['id', 'ASC']) \
             .payload()
         statistics_history = await self._storage_async.query_tbl_with_payload('statistics_history', payload)
         # The former `raw_data = None` pre-initialization was dead code: every
         # path either assigns the query result or re-raises before it is read.
         converted_data = self._transform_in_memory_data_statistics(statistics_history['rows'])
     except Exception:
         SendingProcess._logger.error(_MESSAGES_LIST["e000009"])
         raise
     return converted_data
Example #15
0
 async def _flag_created_omf_type(self, configuration_key, type_id,
                                  asset_code):
     """ Record in the Storage layer that the type was successfully created into PICROMF.
      Args:
          configuration_key - part of the key to identify the type
          type_id           - part of the key to identify the type
          asset_code        - asset code defined into PICROMF
      Returns:
      Raises:
      """
     insert_payload = payload_builder.PayloadBuilder()\
         .INSERT(configuration_key=configuration_key,
                 asset_code=asset_code,
                 type_id=type_id)\
         .payload()
     await self._sending_process_instance._storage_async.insert_into_tbl(
         "omf_created_objects", insert_payload)
Example #16
0
    def sl_backup_status_update(self, _id, _status, _exit_code):
        """ Update the status of a backup row via the Storage layer.

        Args:
            _id: Backup's Id to update
            _status: status of the backup {BackupStatus.SUCCESSFUL|BackupStatus.RESTORED}
            _exit_code: exit status of the backup/restore execution
        Returns:
        Raises:
        """
        _logger.debug("{func} - id |{file}| ".format(
            func="sl_backup_status_update", file=_id))

        update_payload = payload_builder.PayloadBuilder() \
            .SET(status=_status,
                 ts="now()",
                 exit_code=_exit_code) \
            .WHERE(['id', '=', _id]) \
            .payload()

        asyncio.get_event_loop().run_until_complete(
            self._storage.update_tbl(self.STORAGE_TABLE_BACKUPS, update_payload))
Example #17
0
    def sl_backup_status_create(self, _file_name, _type, _status):
        """ Log the creation of a backup in the Storage layer.

        Args:
            _file_name: file_name used for the backup as a full path
            _type: backup type {BackupType.FULL|BackupType.INCREMENTAL}
            _status: backup status, usually BackupStatus.RUNNING
        Returns:
        Raises:
        """
        _logger.debug("{func} - file name |{file}| ".format(
            func="sl_backup_status_create", file=_file_name))

        insert_payload = payload_builder.PayloadBuilder() \
            .INSERT(file_name=_file_name,
                    ts="now()",
                    type=_type,
                    status=_status,
                    exit_code=0) \
            .payload()

        asyncio.get_event_loop().run_until_complete(
            self._storage.insert_into_tbl(self.STORAGE_TABLE_BACKUPS, insert_payload))