def __init__(self, name: str):
    """Set up pipe formats: input is fixed to plain UTF-8 text, output to
    protobuf log entries (``LOG_FORMAT``).

    Arguments:
        name: Configuration name.
    """
    super().__init__(name)
    #
    # Input side: plain text; same format used as both default and current value.
    text_format = 'text/plain;charset=utf-8'
    self.input_pipe_format.default = MIME(text_format)
    self.input_pipe_format.set_value(MIME(text_format))
    # Output side: parsed log entries as protobuf messages.
    self.output_pipe_format.default = LOG_FORMAT
    self.output_pipe_format.set_value(LOG_FORMAT)
def __init__(self, name: str):
    """Set up output pipe format (plain UTF-8 text) and declare the `template`
    and `func` transformation options.

    Arguments:
        name: Configuration name.
    """
    super().__init__(name)
    #
    # Output side: plain text; same format used as both default and current value.
    text_format = 'text/plain;charset=utf-8'
    self.output_pipe_format.default = MIME(text_format)
    self.output_pipe_format.set_value(MIME(text_format))
    #
    # Text formatting template (alternative to `func`).
    self.template: StrOption = StrOption('template', "Text formatting template")
    # User-supplied callable that renders data to text; the default value is the
    # expected callable signature (source text), not executable code.
    self.func: PyCallableOption = PyCallableOption(
        'func', "Function that returns text representation of data",
        'def f(data: Any, utils: TransformationUtilities) -> str:\n    ...\n')
def handle_accept_client(self, channel: Channel, session: _Session) -> None:
    """Handler executed when a client connects to the data pipe via OPEN message.

    Arguments:
        channel: Channel associated with data pipe.
        session: Session associated with client.

    The session attributes `data_pipe`, `pipe_socket`, `data_format` and `params`
    contain information sent by the client, and this handler validates the
    request. To reject the request it raises `StopError` with the `code`
    attribute containing the `ErrorCode` to be returned in the CLOSE message.
    """
    # Guard: the client must ask for our pipe and the socket we serve.
    if session.pipe != self.data_pipe:
        raise StopError(f"Unknown data pipe '{session.pipe}'",
                        code=ErrorCode.PIPE_ENDPOINT_UNAVAILABLE)
    if session.socket != self.pipe_socket:
        raise StopError(f"'{session.socket}' socket not available",
                        code=ErrorCode.PIPE_ENDPOINT_UNAVAILABLE)
    # We work with MIME formats, so convert the format specification to MIME.
    session.data_format: MIME = MIME(session.data_format)
    if session.data_format.mime_type != 'text/plain':
        raise StopError("Only 'text/plain' format supported",
                        code=ErrorCode.DATA_FORMAT_NOT_SUPPORTED)
    # Charset/error handling fall back to 'ascii'/'strict' when absent from
    # the MIME parameters.
    session.charset = session.data_format.params.get('charset', 'ascii')
    session.errors = session.data_format.params.get('errors', 'strict')
    # Client request is OK, so open the file we are configured to work with.
    self._open_file()
def __init__(self, name: str):
    """Declare file-writer options: filename, data format and open mode.

    Arguments:
        name: Configuration name.
    """
    super().__init__(name)
    # Adjust default batch_size to compensate default 64K messages in producers
    self.batch_size.default = 5
    #: File specification
    self.filename: StrOption = StrOption('filename', "File specification",
                                         required=True)
    #: File data format specification
    self.file_format: MIMEOption = MIMEOption(
        'file_format', "File data format specification", required=True,
        default=MIME('text/plain;charset=utf-8'))
    #: File I/O mode
    self.file_mode: EnumOption = EnumOption(
        'file_mode', FileOpenMode, "File I/O mode", required=False,
        default=FileOpenMode.WRITE)
def __init__(self, name: str):
    """Declare file-reader options: filename, data format and message size cap.

    Arguments:
        name: Configuration name.
    """
    super().__init__(name)
    # Adjust default batch_size to compensate default 64K messages
    self.batch_size.default = 5
    #: File specification
    self.filename: StrOption = StrOption('filename', "File specification",
                                         required=True)
    #: File data format specification
    self.file_format: MIMEOption = MIMEOption(
        'file_format', "File data format specification", required=True,
        default=MIME('text/plain;charset=utf-8'))
    #: Max. number of characters transmitted in one message
    self.max_chars: IntOption = IntOption(
        'max_chars', "Max. number of characters transmitted in one message",
        required=True, default=65535)
def aquire_resources(self) -> None:
    """Acquire resources required by component (open files, connect to other
    services etc.).

    Must raise an exception when resource acquisition fails.

    NOTE(review): the method name and the log message spelling ("aquire" /
    "Aquiring") follow the framework hook name and are kept unchanged.
    """
    get_logger(self).info("Aquiring resources...")
    # In BIND mode there is nothing to do here — the file is opened when a
    # client is accepted (see handle_accept_client).
    if self.pipe_mode != SocketMode.CONNECT:
        return
    # Connect to the data pipe.
    chn: Channel = self.mngr.channels[PIPE_CHN]
    session = chn.connect(self.pipe_address)
    # OPEN the data pipe connection, this also fills session attributes.
    cast(FBDPClient, chn.protocol).send_open(chn, session, self.data_pipe,
                                             self.pipe_socket, self.file_format)
    # We work with MIME formats, so convert the format specification to MIME.
    session.data_format = MIME(session.data_format)
    session.charset = session.data_format.params.get('charset', 'ascii')
    session.errors = session.data_format.params.get('errors', 'strict')
    self._open_file()
from __future__ import annotations import uuid from functools import partial from firebird.base.config import create_config from saturnin.base import VENDOR_UID, Error, MIME, MIME_TYPE_TEXT, MIME_TYPE_PROTO, \ pkg_name, AgentDescriptor, ServiceDescriptor from saturnin.lib.data.filter import DataFilterConfig # OID: iso.org.dod.internet.private.enterprise.firebird.butler.platform.saturnin.micro.firebird.log.parser SERVICE_OID: str = '1.3.6.1.4.1.53446.1.2.0.3.4.1.2' SERVICE_UID: uuid.UUID = uuid.uuid5(uuid.NAMESPACE_OID, SERVICE_OID) SERVICE_VERSION: str = '0.2.0' LOG_PROTO = 'saturnin.protobuf.fblog.LogEntry' LOG_FORMAT = MIME(f'{MIME_TYPE_PROTO};type={LOG_PROTO}') # Configuration class FbLogParserConfig(DataFilterConfig): """Firebird log parser microservice configuration. """ def __init__(self, name: str): super().__init__(name) # self.input_pipe_format.default = MIME('text/plain;charset=utf-8') self.input_pipe_format.set_value(MIME('text/plain;charset=utf-8')) self.output_pipe_format.default = LOG_FORMAT self.output_pipe_format.set_value(LOG_FORMAT)
from __future__ import annotations import uuid from functools import partial from firebird.base.config import create_config, ListOption from saturnin.base import VENDOR_UID, Error, MIME, MIME_TYPE_PROTO, \ pkg_name, AgentDescriptor, ServiceDescriptor from saturnin.lib.data.filter import DataFilterConfig # OID: iso.org.dod.internet.private.enterprise.firebird.butler.platform.saturnin.micro.proto.aggregator SERVICE_OID: str = '1.3.6.1.4.1.53446.1.2.0.3.3.3' SERVICE_UID: uuid.UUID = uuid.uuid5(uuid.NAMESPACE_OID, SERVICE_OID) SERVICE_VERSION: str = '0.2.0' AGGREGATE_PROTO = 'saturnin.protobuf.GenericDataRecord' AGGREGATE_FORMAT = MIME(f'{MIME_TYPE_PROTO};type={AGGREGATE_PROTO}') AGGREGATE_FUNCTIONS = ['count', 'min', 'max', 'sum', 'avg'] # Configuration class ProtoAggregatorConfig(DataFilterConfig): """Data aggregator microservice configuration. """ def __init__(self, name: str): super().__init__(name) # self.group_by: ListOption = \ ListOption('group_by', str, "Specification of fields that are 'group by' key", required=True) self.aggregate: ListOption = \
from __future__ import annotations import uuid from functools import partial from firebird.base.config import create_config from saturnin.base import VENDOR_UID, Error, MIME, MIME_TYPE_TEXT, MIME_TYPE_PROTO, \ pkg_name, AgentDescriptor, ServiceDescriptor from saturnin.lib.data.filter import DataFilterConfig # OID: iso.org.dod.internet.private.enterprise.firebird.butler.platform.saturnin.micro.firebird.trace.parser SERVICE_OID: str = '1.3.6.1.4.1.53446.1.2.0.3.4.2.2' SERVICE_UID: uuid.UUID = uuid.uuid5(uuid.NAMESPACE_OID, SERVICE_OID) SERVICE_VERSION: str = '0.1.0' TRACE_PROTO = 'saturnin.protobuf.fbtrace.TraceEntry' TRACE_FORMAT = MIME(f'{MIME_TYPE_PROTO};type={TRACE_PROTO}') # Configuration class FbTraceParserConfig(DataFilterConfig): """Firebird log parser microservice configuration. """ def __init__(self, name: str): super().__init__(name) # self.input_pipe_format.default = MIME('text/plain;charset=utf-8') self.input_pipe_format.set_value(MIME('text/plain;charset=utf-8')) self.output_pipe_format.default = TRACE_FORMAT self.output_pipe_format.set_value(TRACE_FORMAT)