def hello(self, data: bytes) -> Tuple[bytes, bytes]:
    """Add a client's data to global clients dictionary.

    Parameters
    ----------
    data : bytes
        Client's name.

    Returns
    -------
    bytes
        Result.
    bytes
        Response message.

    Raises
    ------
    exception.WazuhClusterError(3028)
        If a client with the same name is already registered.
    exception.WazuhClusterError(3029)
        If the client name matches this server's own node name.
    """
    client_name = data.decode()
    self.name = client_name
    if client_name in self.server.clients:
        self.name = ''
        # Report the decoded name: passing raw bytes would render as b'...' in the error message.
        raise exception.WazuhClusterError(3028, extra_message=client_name)
    elif client_name == self.server.configuration['node_name']:
        raise exception.WazuhClusterError(3029)
    else:
        self.server.clients[client_name] = self
        self.tag = f'{self.tag} {client_name}'
        # Update the context var so subsequent log records carry the client name.
        context_tag.set(self.tag)
        return b'ok', f'Client {client_name} added'.encode()
def process_request(self, command: bytes, data: bytes):
    """Define requests available in the local server.

    Parameters
    ----------
    command : bytes
        Received command from client.
    data : bytes
        Received payload from client.

    Returns
    -------
    bytes
        Result.
    bytes
        Response message.
    """
    # Tag log records with this local connection's name.
    context_tag.set("Local " + self.name)

    if command == b'dapi':
        # Queue the API request for the master's DAPI dispatcher.
        self.server.dapi.add_request(self.name.encode() + b' ' + data)
        return b'ok', b'Added request to API requests queue'

    if command == b'dapi_fwd':
        # Payload format: b'<worker_name> <request>'.
        target, request = data.split(b' ', 1)
        target = target.decode()
        if target not in self.server.node.clients:
            # The requested worker node is not connected to this master.
            raise WazuhClusterError(3022)
        asyncio.create_task(
            self.server.node.clients[target].send_request(b'dapi', self.name.encode() + b' ' + request))
        return b'ok', b'Request forwarded to worker node'

    return super().process_request(command, data)
async def start(self): """ Starts the server and the infinite asynchronous tasks """ # Get a reference to the event loop as we plan to use # low-level APIs. context_tag.set(self.tag) asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) self.loop.set_exception_handler(c_common.asyncio_exception_handler) if self.enable_ssl: ssl_context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH) ssl_context.load_cert_chain(certfile='{}/etc/sslmanager.cert'.format(common.ossec_path), keyfile='{}/etc/sslmanager.key'.format(common.ossec_path)) else: ssl_context = None try: server = await self.loop.create_server( protocol_factory=lambda: self.handler_class(server=self, loop=self.loop, logger=self.logger, fernet_key=self.configuration['key'], cluster_items=self.cluster_items), host=self.configuration['bind_addr'], port=self.configuration['port'], ssl=ssl_context) except OSError as e: self.logger.error("Could not start master: {}".format(e)) raise KeyboardInterrupt self.logger.info('Serving on {}'.format(server.sockets[0].getsockname())) self.tasks.append(server.serve_forever) async with server: # use asyncio.gather to run both tasks in parallel await asyncio.gather(*map(lambda x: x(), self.tasks))
def __init__(self, **kwargs):
    """Class constructor.

    Parameters
    ----------
    kwargs
        Arguments for the parent class constructor.
    """
    super().__init__(**kwargs, tag="Worker")

    # Flags preventing two synchronization processes of the same kind from overlapping.
    self.sync_integrity_free = True  # the worker isn't currently synchronizing integrity
    self.sync_extra_valid_free = True

    # Status reported by cluster_control -i and GET /cluster/healthcheck.
    no_date = "n/a"
    self.sync_integrity_status = {'date_start_master': no_date, 'date_end_master': no_date,
                                  'total_files': {'missing': 0, 'shared': 0, 'extra': 0, 'extra_valid': 0}}
    self.sync_agent_info_status = {'date_start_master': no_date, 'date_end_master': no_date,
                                   'total_agentinfo': 0}
    self.sync_extra_valid_status = {'date_start_master': no_date, 'date_end_master': no_date,
                                    'total_agentgroups': 0}

    # Filled in when the worker sends its hello request.
    self.version = ""
    self.cluster_name = ""
    self.node_type = ""

    # One dedicated logger per synchronization task.
    self.task_loggers = {}
    context_tag.set(self.tag)
def __init__(self, performance_test: int, concurrency_test: int, configuration: Dict, cluster_items: Dict,
             enable_ssl: bool, logger: logging.Logger = None, tag: str = "Abstract Server"):
    """
    Class constructor

    :param performance_test: Message length used by the performance test
    :param concurrency_test: Number of requests sent by the concurrency test
    :param configuration: Cluster configuration taken from ossec.conf
    :param cluster_items: Internal cluster configuration taken from cluster.json
    :param enable_ssl: Whether to enable asyncio's SSL support
    :param logger: Logger to use
    :param tag: Log tag
    """
    self.configuration = configuration
    self.cluster_items = cluster_items
    self.enable_ssl = enable_ssl
    self.performance, self.concurrency = performance_test, concurrency_test
    # Registered client handlers, indexed by client name.
    self.clients = {}
    self.tag = tag
    # Fall back to the shared 'wazuh' logger when none is supplied.
    self.logger = logger if logger else logging.getLogger('wazuh')
    # Propagate log tag/subtag to the logging filters through context variables.
    context_tag.set(self.tag)
    context_subtag.set("Main")
    self.handler_class = AbstractServerHandler
    self.tasks = [self.check_clients_keepalive]
    self.loop = asyncio.get_running_loop()
def process_request(self, command: bytes, data: bytes):
    """
    Defines requests available in the local server

    :param command: Received command
    :param data: Received payload
    :return: A response
    """
    # modify logger filter tag in LocalServerHandlerWorker entry point
    context_tag.set("Local " + self.name)
    self.logger.debug2("Command received: {}".format(command))

    # All three commands are forwarded to the master through this worker's client connection.
    if command in (b'dapi', b'sendsync', b'sendasync'):
        if self.server.node.client is None:
            # No active connection with the master node.
            raise WazuhClusterError(3023)
        # 'sendasync' is forwarded under the 'sendsync' command, without waiting for a response.
        forwarded = b'dapi' if command == b'dapi' else b'sendsync'
        asyncio.create_task(self.server.node.client.send_request(forwarded, self.name.encode() + b' ' + data))
        if command == b'dapi':
            return b'ok', b'Added request to API requests queue'
        if command == b'sendsync':
            return None, None
        return b'ok', b'Added request to sendsync requests queue'

    return super().process_request(command, data)
def process_request(self, command: bytes, data: bytes):
    """
    Defines requests available in the local server

    :param command: Received command
    :param data: Received payload
    :return: A response
    """
    # modify logger filter tag in LocalServerHandlerMaster entry point
    context_tag.set("Local " + self.name)

    if command == b'dapi':
        # Queue the API request for the master's DAPI dispatcher.
        self.server.dapi.add_request(self.name.encode() + b' ' + data)
        return b'ok', b'Added request to API requests queue'

    if command == b'dapi_forward':
        # Payload format: b'<worker_name> <request>'.
        destination, request = data.split(b' ', 1)
        destination = destination.decode()
        if destination not in self.server.node.clients:
            # The requested worker node is not connected to this master.
            raise WazuhClusterError(3022)
        asyncio.create_task(
            self.server.node.clients[destination].send_request(b'dapi', self.name.encode() + b' ' + request))
        return b'ok', b'Request forwarded to worker node'

    return super().process_request(command, data)
def __init__(self, server, loop: asyncio.AbstractEventLoop, fernet_key: str, cluster_items: Dict,
             logger: logging.Logger = None, tag: str = "Client"):
    """Class constructor.

    Parameters
    ----------
    server : AbstractServer object
        Abstract server object that created this handler.
    loop : asyncio.AbstractEventLoop
        Asyncio loop.
    fernet_key : str
        Key used to encrypt and decrypt messages.
    cluster_items : dict
        Cluster.json object containing cluster internal variables.
    logger : Logger object
        Logger object to use.
    tag : str
        Log tag.
    """
    # The parent handler receives a randomized suffix so concurrent handlers log distinctly.
    random_suffix = random.randint(0, 1000)
    super().__init__(fernet_key=fernet_key, logger=logger, tag=f"{tag} {random_suffix}",
                     cluster_items=cluster_items)
    self.server = server
    self.loop = loop
    # Timestamp of the last keepalive received from the peer.
    self.last_keepalive = time.time()
    # NOTE(review): this overwrites the randomized tag set by the parent constructor — confirm intended.
    self.tag = tag
    context_tag.set(self.tag)
    # Filled in once the connection and hello handshake complete.
    self.name = None
    self.ip = None
    self.transport = None
def __init__(self, server, loop: asyncio.AbstractEventLoop, fernet_key: str, cluster_items: Dict,
             logger: logging.Logger = None, tag: str = "Client"):
    """
    Class constructor

    :param server: Abstract server object that created this handler
    :param loop: Asyncio loop
    :param fernet_key: Key used to encrypt and decrypt messages
    :param logger: Logger object to use
    :param cluster_items: Cluster.json object containing cluster internal variables
    :param tag: Log tag
    """
    # Give the parent handler a randomized tag suffix so concurrent handlers log distinctly.
    super().__init__(fernet_key=fernet_key, logger=logger, tag=f"{tag} {random.randint(0, 1000)}",
                     cluster_items=cluster_items)
    self.server = server
    self.loop = loop
    # Timestamp of the last keepalive received from the peer.
    self.last_keepalive = time.time()
    # NOTE(review): this overwrites the randomized tag set by the parent constructor — confirm intended.
    self.tag = tag
    context_tag.set(self.tag)
    # Filled in once the connection and hello handshake complete.
    self.name = None
    self.ip = None
    self.transport = None
def connection_made(self, transport):
    """
    Defines the process of accepting a connection

    :param transport: socket to write data on
    """
    self.transport = transport
    # Identify this anonymous local client with a random 20-bit integer.
    self.name = str(random.SystemRandom().randint(0, 2 ** 20 - 1))
    self.server.clients[self.name] = self
    self.tag = "Local " + self.name
    # Update logger filter tags through the context variable.
    context_tag.set(self.tag)
    self.logger.debug('Connection received in local server.')
def __init__(self, configuration: Dict, cluster_items: Dict, enable_ssl: bool, performance_test: int,
             concurrency_test: int, file: str, string: int, logger: logging.Logger = None,
             tag: str = "Client Manager"):
    """Class constructor.

    Parameters
    ----------
    configuration : dict
        Client configuration.
    cluster_items : dict
        Cluster.json object containing cluster internal variables.
    enable_ssl : bool
        Whether to use SSL encryption or not.
    performance_test : int
        Value for the performance test function.
    concurrency_test : int
        Value for the concurrency test function.
    file : str
        File path for the send_file test function.
    string : int
        String size for the send_string test function.
    logger : Logger object
        Logger to use.
    tag : str
        Log tag.
    """
    self.configuration = configuration
    self.name = configuration['node_name']
    self.cluster_items = cluster_items
    self.ssl = enable_ssl
    self.performance_test, self.concurrency_test = performance_test, concurrency_test
    self.file = file
    self.string = string
    self.tag = tag
    # Fall back to the shared 'wazuh' logger when none is supplied.
    self.logger = logger if logger else logging.getLogger('wazuh')
    # Propagate log tag/subtag to the logging filters through context variables.
    context_tag.set(self.tag)
    context_subtag.set("Main")
    self.tasks = []
    self.handler_class = AbstractClient
    # Filled in once a connection with the server is established.
    self.client = None
    self.extra_args = {}
    self.loop = asyncio.get_running_loop()
def process_request(self, command: bytes, data: bytes):
    """Define available requests in the local server.

    Parameters
    ----------
    command : bytes
        Received command from client.
    data : bytes
        Received payload from client.

    Returns
    -------
    bytes
        Result.
    bytes
        Response message.
    """
    # Modify logger filter tag in LocalServerHandlerWorker entry point.
    context_tag.set("Local " + self.name)
    self.logger.debug2(f"Command received: {command}")

    # Commands forwarded to the master node: local command -> (forwarded command, response).
    # 'sendasync' is forwarded under the 'sendsync' command, without waiting for a response.
    forwarding_table = {
        b'dapi': (b'dapi', (b'ok', b'Added request to API requests queue')),
        b'sendsync': (b'sendsync', (None, None)),
        b'sendasync': (b'sendsync', (b'ok', b'Added request to sendsync requests queue')),
    }

    if command in forwarding_table:
        if self.server.node.client is None:
            # No active connection with the master node.
            raise WazuhClusterError(3023)
        forwarded_command, response = forwarding_table[command]
        asyncio.create_task(
            self.server.node.client.send_request(forwarded_command, self.name.encode() + b' ' + data))
        return response

    return super().process_request(command, data)
def __init__(self, **kwargs):
    """Class constructor.

    Parameters
    ----------
    kwargs
        Arguments for the parent class constructor.
    """
    super().__init__(**kwargs, tag="Worker")

    # Flags preventing two synchronization processes of the same kind from overlapping.
    self.sync_agent_info_free = True
    self.sync_integrity_free = True
    # Whether the current integrity sync process includes extra_valid files.
    self.extra_valid_requested = False

    # Status reported by cluster_control -i and GET /cluster/healthcheck.
    # Epoch placeholder used until real dates are recorded.
    epoch = datetime.fromtimestamp(0)
    self.integrity_check_status = {'date_start_master': epoch, 'date_end_master': epoch}
    self.integrity_sync_status = {'date_start_master': epoch, 'tmp_date_start_master': epoch,
                                  'date_end_master': epoch, 'total_extra_valid': 0,
                                  'total_files': {'missing': 0, 'shared': 0, 'extra': 0, 'extra_valid': 0}}
    self.sync_agent_info_status = {'date_start_master': epoch, 'date_end_master': epoch,
                                   'n_synced_chunks': 0}

    # Filled in when the worker sends its hello request.
    self.version = ""
    self.cluster_name = ""
    self.node_type = ""

    # One dedicated logger per synchronization task.
    self.task_loggers = {}
    context_tag.set(self.tag)
def hello(self, data: bytes) -> Tuple[bytes, bytes]:
    """
    Adds a client's data to global clients dictionary

    :param data: client's data -> name
    :return: successful result
    :raises WazuhClusterError(3028): if a client with the same name is already registered
    :raises WazuhClusterError(3029): if the client name matches this server's own node name
    """
    client_name = data.decode()
    self.name = client_name
    if client_name in self.server.clients:
        self.name = ''
        # Report the decoded name: passing raw bytes would render as b'...' in the error message.
        raise exception.WazuhClusterError(3028, extra_message=client_name)
    elif client_name == self.server.configuration['node_name']:
        raise exception.WazuhClusterError(3029)
    else:
        self.server.clients[client_name] = self
        self.tag = '{} {}'.format(self.tag, client_name)
        # Update the context var so subsequent log records carry the client name.
        context_tag.set(self.tag)
        return b'ok', 'Client {} added'.format(client_name).encode()
def __init__(self, configuration: Dict, cluster_items: Dict, enable_ssl: bool, performance_test: int,
             concurrency_test: int, file: str, string: int, logger: logging.Logger = None,
             tag: str = "Client Manager"):
    """
    Class constructor

    :param configuration: client configuration
    :param cluster_items: Cluster.json object containing cluster internal variables
    :param enable_ssl: Whether to use SSL encryption or not
    :param performance_test: Value for the performance test function
    :param concurrency_test: Value for the concurrency test function
    :param file: File path for the send file test function
    :param string: String size for the send string test function
    :param logger: Logger to use
    :param tag: Log tag
    """
    self.configuration = configuration
    self.name = configuration['node_name']
    self.cluster_items = cluster_items
    self.ssl = enable_ssl
    self.performance_test, self.concurrency_test = performance_test, concurrency_test
    self.file = file
    self.string = string
    self.tag = tag
    # Fall back to the shared 'wazuh' logger when none is supplied.
    self.logger = logger if logger else logging.getLogger('wazuh')
    # Propagate log tag/subtag to the logging filters through context variables.
    context_tag.set(self.tag)
    context_subtag.set("Main")
    self.tasks = []
    self.handler_class = AbstractClient
    # Filled in once a connection with the server is established.
    self.client = None
    self.extra_args = {}
    self.loop = asyncio.get_running_loop()