def upload_list(list_file, path):
    """
    Updates CDB lists

    :param list_file: Content of the list.
    :param path: Destination of the new list file, relative to the ossec path.
    :return: Confirmation message.
    """
    # path of temporary file; written first so the destination is only ever replaced atomically
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.txt'.format(common.ossec_path, time.time(),
                                                           random.randint(0, 1000))
    try:
        # create temporary file
        with open(tmp_file_path, 'w') as tmp_file:
            # write content in tmp_file_path, one list entry per line
            for element in list_file.splitlines():
                # skip empty lines
                if not element:
                    continue
                tmp_file.write(element.strip() + '\n')
        chmod(tmp_file_path, 0o640)
    except IOError:
        raise WazuhException(1005)
    except Exception:
        raise WazuhException(1000)

    # move temporary file to group folder
    try:
        new_conf_path = join(common.ossec_path, path)
        safe_move(tmp_file_path, new_conf_path, permissions=0o660)
    except Error:
        # don't leave the temporary file behind (same cleanup as upload_xml /
        # upload_group_configuration); best-effort, the move may already have consumed it
        try:
            remove(tmp_file_path)
        except OSError:
            pass
        raise WazuhException(1016)
    except Exception:
        try:
            remove(tmp_file_path)
        except OSError:
            pass
        raise WazuhException(1000)

    return 'File updated successfully'
def overwrite_or_create_files(filename, data):
    # Writes a file received from the master into its final location.
    # NOTE(review): references `self`, `zip_path` and `logger` that are not parameters —
    # this is a fragment of a method-scope closure; it cannot run standalone.
    # :param filename: filename to update, relative to the ossec path
    # :param data: file metadata dict ('merged', 'merge-type', 'cluster_item_key' keys are read)
    full_filename_path = common.ossec_path + filename
    if os.path.basename(filename) == 'client.keys':
        # special handling: detect agents removed from client.keys before overwriting it
        self._check_removed_agents("{}{}".format(zip_path, filename), logger)
    if data['merged']:  # worker nodes can only receive agent-groups files
        if data['merge-type'] == 'agent-info':
            logger.warning("Agent status received in a worker node")
            raise WazuhException(3011)
        # split the merged file into its individual agent-groups files and move each into place
        for name, content, _ in cluster.unmerge_agent_info('agent-groups', zip_path, filename):
            full_unmerged_name = os.path.join(common.ossec_path, name)
            # write to a .tmp sibling first so the real file is replaced atomically by safe_move
            tmp_unmerged_path = full_unmerged_name + '.tmp'
            with open(tmp_unmerged_path, 'wb') as f:
                f.write(content)
            # NOTE(review): `common.ossec_uid` / `common.ossec_gid` are passed uncalled here,
            # while sibling versions of this function call them (`common.ossec_uid()`) —
            # confirm which convention this codebase currently uses.
            safe_move(tmp_unmerged_path, full_unmerged_name,
                      permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                      ownership=(common.ossec_uid, common.ossec_gid))
    else:
        # plain (non-merged) file: ensure the destination directory exists, then move it in
        if not os.path.exists(os.path.dirname(full_filename_path)):
            utils.mkdir_with_mode(os.path.dirname(full_filename_path))
        safe_move("{}{}".format(zip_path, filename), full_filename_path,
                  permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                  ownership=(common.ossec_uid, common.ossec_gid))
def overwrite_or_create_files(filename: str, data: Dict):
    """
    Updates a file coming from the master

    :param filename: Filename to update
    :param data: File metadata such as modification time, whether it's a merged file or not, etc.
    :return: None
    """
    destination = common.ossec_path + filename

    # client.keys needs extra bookkeeping: detect agents that were removed from it
    if os.path.basename(filename) == 'client.keys':
        self._check_removed_agents("{}{}".format(zip_path, filename), logger)

    if not data['merged']:
        # plain file: make sure the parent directory exists, then move it into place
        parent_dir = os.path.dirname(destination)
        if not os.path.exists(parent_dir):
            utils.mkdir_with_mode(parent_dir)
        safe_move("{}{}".format(zip_path, filename), destination,
                  permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                  ownership=(common.ossec_uid(), common.ossec_gid()))
        return

    # worker nodes can only receive agent-groups files
    if data['merge-type'] == 'agent-info':
        logger.warning("Agent status received in a worker node")
        raise WazuhException(3011)

    # unpack each agent-groups entry from the merged file and install it atomically
    for entry_name, entry_content, _ in cluster.unmerge_agent_info('agent-groups', zip_path, filename):
        final_path = os.path.join(common.ossec_path, entry_name)
        staging_path = final_path + '.tmp'
        with open(staging_path, 'wb') as staging_file:
            staging_file.write(entry_content)
        safe_move(staging_path, final_path,
                  permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                  ownership=(common.ossec_uid(), common.ossec_gid()))
def upload_group_configuration(group_id, file_content):
    """
    Updates group configuration

    :param group_id: Group to update
    :param file_content: File content of the new configuration in a string.
    :return: Confirmation message.
    """
    # path of temporary files for parsing xml input
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.xml'.format(common.ossec_path, time.time(),
                                                           random.randint(0, 1000))
    # create temporary file for parsing xml input and validate XML format
    try:
        with open(tmp_file_path, 'w') as tmp_file:
            # beauty xml file
            xml = parseString('<root>' + file_content + '</root>')
            # remove first line (XML specification: <? xmlversion="1.0" ?>), <root> and </root> tags, and empty lines
            pretty_xml = '\n'.join(filter(lambda x: x.strip(),
                                          xml.toprettyxml(indent='  ').split('\n')[2:-2])) + '\n'
            # revert xml.dom entity escaping (minidom writes &amp;, &lt;, &quot;, &gt; for the raw characters)
            # (https://github.com/python/cpython/blob/8e0418688906206fe59bd26344320c0fc026849e/Lib/xml/dom/minidom.py#L305)
            pretty_xml = pretty_xml.replace("&amp;", "&").replace("&lt;", "<").replace("&quot;", "\"",)\
                .replace("&gt;", ">")
            tmp_file.write(pretty_xml)
    except Exception as e:
        raise WazuhException(1113, str(e))

    try:
        # check Wazuh xml format
        try:
            subprocess.check_output(['{}/bin/verify-agent-conf'.format(common.ossec_path), '-f', tmp_file_path],
                                    stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # extract error message from output.
            # Example of raw output
            # 2019/01/08 14:51:09 verify-agent-conf: ERROR: (1230): Invalid element in the configuration: 'agent_conf'.\n2019/01/08 14:51:09 verify-agent-conf: ERROR: (1207): Syscheck remote configuration in '/var/ossec/tmp/api_tmp_file_2019-01-08-01-1546959069.xml' is corrupted.\n\n
            # Example of desired output:
            # Invalid element in the configuration: 'agent_conf'. Syscheck remote configuration in '/var/ossec/tmp/api_tmp_file_2019-01-08-01-1546959069.xml' is corrupted.
            output_regex = re.findall(pattern=r"\d{4}\/\d{2}\/\d{2} \d{2}:\d{2}:\d{2} verify-agent-conf: ERROR: "
                                              r"\(\d+\): ([\w \/ \_ \- \. \n ' :]+)",
                                      string=e.output.decode())
            if output_regex:
                raise WazuhException(1114, ' '.join(output_regex))
            else:
                raise WazuhException(1115, e.output.decode())
        except Exception as e:
            raise WazuhException(1743, str(e))

        # move temporary file to group folder
        try:
            new_conf_path = "{}/{}/agent.conf".format(common.shared_path, group_id)
            safe_move(tmp_file_path, new_conf_path, permissions=0o660)
        except Exception:
            raise WazuhException(1016)

        return 'Agent configuration was updated successfully'
    except Exception as e:
        # remove created temporary file
        remove(tmp_file_path)
        raise e
def upload_xml(xml_file, path):
    """
    Updates XML files (rules and decoders)

    :param xml_file: content of the XML file
    :param path: Destination of the new XML file
    :return: Confirmation message
    """
    # path of temporary files for parsing xml input
    tmp_file_path = '{}/tmp/api_tmp_file_{}_{}.xml'.format(common.ossec_path, time.time(),
                                                           random.randint(0, 1000))
    # create temporary file for parsing xml input
    try:
        with open(tmp_file_path, 'w') as tmp_file:
            # beauty xml file
            xml = parseString('<root>' + xml_file + '</root>')
            # remove first line (XML specification: <? xmlversion="1.0" ?>), <root> and </root> tags, and empty lines
            indent = '  '  # indent parameter for toprettyxml function
            pretty_xml = '\n'.join(filter(lambda x: x.strip(),
                                          xml.toprettyxml(indent=indent).split('\n')[2:-2])) + '\n'
            # revert xml.dom entity escaping (minidom writes &amp;, &lt;, &quot;, &gt; for the raw characters)
            # (https://github.com/python/cpython/blob/8e0418688906206fe59bd26344320c0fc026849e/Lib/xml/dom/minidom.py#L305)
            pretty_xml = pretty_xml.replace("&amp;", "&").replace("&lt;", "<").replace("&quot;", "\"", ) \
                .replace("&gt;", ">").replace("&apos;", "'")
            # delete two first spaces of each line
            final_xml = re.sub(fr'^{indent}', '', pretty_xml, flags=re.MULTILINE)
            tmp_file.write(final_xml)
        chmod(tmp_file_path, 0o660)
    except IOError:
        raise WazuhException(1005)
    except ExpatError:
        raise WazuhException(1113)
    except Exception as e:
        raise WazuhException(1000, str(e))

    try:
        # check xml format
        try:
            load_wazuh_xml(tmp_file_path)
        except Exception as e:
            raise WazuhException(1113, str(e))

        # move temporary file to group folder
        try:
            new_conf_path = join(common.ossec_path, path)
            safe_move(tmp_file_path, new_conf_path, permissions=0o660)
        except Error:
            raise WazuhException(1016)
        except Exception:
            raise WazuhException(1000)

        return 'File updated successfully'
    except Exception as e:
        # remove created temporary file if an exception happens
        remove(tmp_file_path)
        raise e
def test_safe_move(mock_utime, mock_chmod, mock_chown, ownership, time, permissions):
    """Tests safe_move function works"""
    with TemporaryDirectory() as work_dir:
        # create a real source file and move it onto a target path inside the same directory
        src_file = NamedTemporaryFile(dir=work_dir, delete=False)
        dst_path = join(work_dir, 'target')
        safe_move(src_file.name, dst_path, ownership=ownership, time=time, permissions=permissions)

        # the target must exist and ownership is always applied
        assert exists(dst_path)
        mock_chown.assert_called_once_with(dst_path, *ownership)
        # time/permissions are only applied when given
        if time is not None:
            mock_utime.assert_called_once_with(dst_path, time)
        if permissions is not None:
            mock_chmod.assert_called_once_with(dst_path, permissions)
async def update_file(name: str, data: Dict):
    """
    Updates a file from the worker. It checks the modification date to decide whether to update it or not.
    If it's a merged file, it unmerges it.
    :param name: Filename to update
    :param data: File metadata
    :return: None
    """
    # NOTE(review): fragment of a method-scope closure — `self`, `agent_ids`, `agent_names`,
    # `n_errors`, `decompressed_files_path` and `logger` are free names from the enclosing scope.
    # Full path
    # NOTE(review): n_merged_files is assigned but never used in this body.
    full_path, error_updating_file, n_merged_files = common.ossec_path + name, False, 0
    # Cluster items information: write mode and permissions
    # Per-file lock so two coroutines never update the same destination concurrently.
    lock_full_path = "{}/queue/cluster/lockdir/{}.lock".format(common.ossec_path, os.path.basename(full_path))
    lock_file = open(lock_full_path, 'a+')
    try:
        fcntl.lockf(lock_file, fcntl.LOCK_EX)
        if os.path.basename(name) == 'client.keys':
            # client.keys must never be pushed by a worker
            self.logger.warning("Client.keys received in a master node")
            raise WazuhException(3007)
        if data['merged']:
            is_agent_info = data['merge_type'] == 'agent-info'
            if is_agent_info:
                self.sync_agent_info_status['total_agent_info'] = len(agent_ids)
            else:
                self.sync_extra_valid_status['total_extra_valid'] = len(agent_ids)
            # Unpack each file contained in the merged blob and install it individually.
            for file_path, file_data, file_time in cluster.unmerge_agent_info(data['merge_type'],
                                                                              decompressed_files_path,
                                                                              data['merge_name']):
                full_unmerged_name = os.path.join(common.ossec_path, file_path)
                # staging location; moved into place by safe_move below
                tmp_unmerged_path = os.path.join(common.ossec_path, 'queue/cluster', self.name,
                                                 os.path.basename(file_path))
                try:
                    if is_agent_info:
                        # agent-info filenames look like "<agent_name>-<something>"; fall back to
                        # the whole basename when the pattern doesn't match
                        agent_name_re = re.match(r'(^.+)-(.+)$', os.path.basename(file_path))
                        agent_name = agent_name_re.group(1) if agent_name_re else os.path.basename(file_path)
                        if agent_name not in agent_names:
                            # count as warning and skip files for agents this node doesn't know
                            n_errors['warnings'][data['cluster_item_key']] = 1 \
                                if n_errors['warnings'].get(data['cluster_item_key']) is None \
                                else n_errors['warnings'][data['cluster_item_key']] + 1
                            self.logger.debug2("Received status of an non-existent agent '{}'".format(agent_name))
                            continue
                    else:
                        # agent-groups files are named by agent id
                        agent_id = os.path.basename(file_path)
                        if agent_id not in agent_ids:
                            n_errors['warnings'][data['cluster_item_key']] = 1 \
                                if n_errors['warnings'].get(data['cluster_item_key']) is None \
                                else n_errors['warnings'][data['cluster_item_key']] + 1
                            self.logger.debug2("Received group of an non-existent agent '{}'".format(agent_id))
                            continue
                    # file_time may come with or without fractional seconds
                    try:
                        mtime = datetime.strptime(file_time, '%Y-%m-%d %H:%M:%S.%f')
                    except ValueError:
                        mtime = datetime.strptime(file_time, '%Y-%m-%d %H:%M:%S')
                    if os.path.isfile(full_unmerged_name):
                        local_mtime = datetime.utcfromtimestamp(int(os.stat(full_unmerged_name).st_mtime))
                        # check if the date is older than the manager's date
                        if local_mtime > mtime:
                            logger.debug2("Receiving an old file ({})".format(file_path))
                            continue
                    with open(tmp_unmerged_path, 'wb') as f:
                        f.write(file_data)
                    # preserve the sender's modification time on the installed file
                    mtime_epoch = timegm(mtime.timetuple())
                    utils.safe_move(tmp_unmerged_path, full_unmerged_name,
                                    ownership=(common.ossec_uid(), common.ossec_gid()),
                                    permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions'],
                                    time=(mtime_epoch, mtime_epoch)
                                    )
                except Exception as e:
                    # per-file failure: log, roll back the expected-total counter, count the error,
                    # and keep processing the remaining unmerged files
                    self.logger.error("Error updating agent group/status ({}): {}".format(tmp_unmerged_path, e))
                    if is_agent_info:
                        self.sync_agent_info_status['total_agent_info'] -= 1
                    else:
                        self.sync_extra_valid_status['total_extra_valid'] -= 1
                    n_errors['errors'][data['cluster_item_key']] = 1 \
                        if n_errors['errors'].get(data['cluster_item_key']) is None \
                        else n_errors['errors'][data['cluster_item_key']] + 1
                # yield control to the event loop between files
                await asyncio.sleep(0.0001)
        else:
            # plain (non-merged) file: move it straight from the decompressed payload
            zip_path = "{}{}".format(decompressed_files_path, name)
            utils.safe_move(zip_path, full_path,
                            ownership=(common.ossec_uid(), common.ossec_gid()),
                            permissions=self.cluster_items['files'][data['cluster_item_key']]['permissions']
                            )
    except WazuhException as e:
        # domain errors (e.g. client.keys in master) are only warnings
        logger.debug2("Warning updating file '{}': {}".format(name, e))
        error_tag = 'warnings'
        error_updating_file = True
    except Exception as e:
        logger.debug2("Error updating file '{}': {}".format(name, e))
        error_tag = 'errors'
        error_updating_file = True

    if error_updating_file:
        n_errors[error_tag][data['cluster_item_key']] = 1 if not n_errors[error_tag].get(
            data['cluster_item_key']) \
            else n_errors[error_tag][data['cluster_item_key']] + 1

    # NOTE(review): unlock/close are not in a `finally`; a BaseException (e.g. CancelledError)
    # would leave the lock file open — confirm whether that is acceptable here.
    fcntl.lockf(lock_file, fcntl.LOCK_UN)
    lock_file.close()