def delete_backups(system_id='local', backup_type='configuration', backup_list=None):
    """ Delete the given backup files from a system.

    Args:
        system_id (str): Id of the system whose backups are removed.
        backup_type (str): Backup type used to refresh the cached backup list.
        backup_list (list): Backup file names to delete (basenames or paths).

    Returns:
        (bool, str): (True, '') on success, (False, <error message>) otherwise.
    """
    # Bug fix: a mutable default ([]) is shared between calls; use None.
    if backup_list is None:
        backup_list = []

    success, system_ip = get_system_ip_from_system_id(system_id)
    if not success:
        api_log.error(str(system_ip))
        error_msg = "Error retrieving the system ip for the system id "
        error_msg = error_msg + "%s -> %s" % (system_id, str(system_ip))
        return False, error_msg

    success, files = get_files_in_path(system_ip=system_ip, path=BACKUP_PATH)
    if not success:
        return False, files

    # Report warnings for non-existing backup files
    existing_backup_list = []
    for backup_name in backup_list:
        # Strip any directory component so the caller cannot escape BACKUP_PATH.
        backup_name = os.path.basename(backup_name)
        success, backup_path = secure_path_join(BACKUP_PATH, backup_name)
        if not success:
            api_log.error("delete_backups: %s '%s'" % (backup_path, backup_name))
        elif backup_path not in files.keys():
            api_log.error("delete_backups: %s does not exist" % backup_path)
        else:
            existing_backup_list.append(backup_path)

    # Removing existing backups
    for backup_path in existing_backup_list:
        try:
            success, msg = remove_file(host_list=[system_ip], file_name=backup_path)
            if not success:
                api_log.error(str(msg))
                error_msg = "Error removing %s " % backup_path
                error_msg = error_msg + "from system %s" % system_ip
                return False, error_msg
        except Exception as e:
            api_log.error("delete_backups Error: %s" % str(e))
            # Bug fix: the original reassigned error_msg (": %s" % str(e))
            # instead of appending, dropping the file name; it also used the
            # stale `backup_name` from the previous loop instead of the
            # backup that actually failed.
            error_msg = "Error trying to delete the backup '%s'" % backup_path
            error_msg = error_msg + ": %s" % str(e)
            return False, error_msg

    # Flush the cached backup list so the next query reflects the deletion;
    # a cache-refresh failure is logged but does not fail the deletion.
    try:
        get_backup_list(system_id=system_id, backup_type=backup_type, no_cache=True)
    except Exception as e:
        error_msg = "Error when trying to flush the cache " \
                    "after deleting backups: %s" % str(e)
        api_log.error(error_msg)

    return success, ''
def delete_backups(system_id='local', backup_type='configuration', backup_list=None):
    """ Remove the requested backup files from a system and refresh the cache.

    Args:
        system_id (str): Target system id ('local' by default).
        backup_type (str): Backup type used when refreshing the cached list.
        backup_list (list): Names of the backup files to remove.

    Returns:
        (bool, str): Success flag plus an error description on failure.
    """
    if backup_list is None:
        backup_list = []

    success, system_ip = get_system_ip_from_system_id(system_id)
    if not success:
        api_log.error(str(system_ip))
        return False, "Error retrieving the system ip for the system id %s -> %s" % (system_id, str(system_ip))

    success, files = get_files_in_path(system_ip=system_ip, path=BACKUP_PATH)
    if not success:
        return False, files

    # Keep only the requested backups that actually exist on the system,
    # logging a warning for every one that does not.
    backup_name = ''
    to_delete = []
    for requested in backup_list:
        backup_name = os.path.basename(requested)
        success, candidate = secure_path_join(BACKUP_PATH, backup_name)
        if not success:
            api_log.error("delete_backups: %s '%s'" % (candidate, backup_name))
        elif candidate not in files.keys():
            api_log.error("delete_backups: %s does not exist" % candidate)
        else:
            to_delete.append(candidate)

    # Delete the surviving candidates one by one, stopping on the first error.
    for candidate in to_delete:
        try:
            success, msg = remove_file(host_list=[system_ip], file_name=candidate)
            if not success:
                api_log.error(str(msg))
                return False, "Error removing %s from system %s " % (candidate, system_ip)
        except Exception as e:
            api_log.error("delete_backups Error: %s" % str(e))
            return False, "Error trying to delete the backup '%s': %s" % (backup_name, str(e))

    # Refresh the cached backup list; failures here are only logged.
    try:
        get_backup_list(system_id=system_id, backup_type=backup_type, no_cache=True)
    except Exception as e:
        api_log.error("Error when trying to flush the cache after deleting backups: %s" % str(e))

    return success, ''
def ansible_install_plugin(system_ip, plugin_path, sql_path):
    """ Install a plugin on a sensor: copy the .cfg, apply its SQL, clean up.

    Args:
        system_ip (str): IP of the sensor where the plugin is installed.
        plugin_path (str): Local path of the plugin (.cfg) file.
        sql_path (str): Local path of the plugin SQL file.

    Returns:
        (bool, str): Success flag and a descriptive message.
    """
    # Bug fix: the original tested `or`, which only rejected the call when
    # ALL arguments were missing; every argument is required.
    if not (system_ip and plugin_path and sql_path):
        return False, "[ansible_install_plugin]: Missing arguments"

    # Copy plugin file to plugins dir
    remote_plugin_path = "/etc/ossim/agent/plugins/" + basename(plugin_path)
    # Bug fix: in the original, '%' bound only to the second string literal
    # ("group=alienvault mode=644"), which has no format specifiers, raising
    # TypeError at runtime. Format the complete argument string instead.
    cmd_args = "src=%s dest=%s force=yes owner=root group=alienvault mode=644" % \
               (plugin_path, remote_plugin_path)
    (success, msg) = copy_file([system_ip], cmd_args)
    if not success:
        error_msg = "[ansible_install_plugin] Failed to copy " + \
                    "plugin file: %s" % msg
        return False, error_msg

    # Copy SQL file to tmp dir
    remote_sql_path = "/tmp/tmp_" + basename(sql_path)
    cmd_args = "src=%s dest=%s force=yes " % (sql_path, remote_sql_path) + \
               "owner=root group=alienvault mode=644"
    (success, msg) = copy_file([system_ip], cmd_args)
    if not success:
        error_msg = "[ansible_install_plugin] Failed to copy " + \
                    "sql file: %s" % msg
        return False, error_msg

    # Apply SQL file against the remote ossim database
    cmd_args = "/usr/bin/ossim-db < %s" % remote_sql_path
    response = ansible.run_module(host_list=[system_ip],
                                  module="shell",
                                  use_sudo=True,
                                  args=cmd_args)
    (success, msg) = ansible_is_valid_response(system_ip, response)
    if not success:
        error_msg = "[ansible_install_plugin] Failed to apply " + \
                    "sql file: %s" % msg
        return False, error_msg

    # Delete the temporary SQL file
    (success, msg) = remove_file([system_ip], remote_sql_path)
    if not success:
        error_msg = "[ansible_install_plugin] Failed to delete " + \
                    "sql file: %s" % msg
        return False, error_msg

    return True, "[ansible_install_plugin] Plugin installed OK"
def remove_plugin_from_sensors(plugin_file):
    """
    Disable and remove custom plugin from all systems.

    Args:
        plugin_file: (str) Full path to plugin file.

    Returns:
        Result of the remove_file() call, or False when the list of
        connected sensors cannot be retrieved.
    """
    plugin_name = splitext(basename(plugin_file))[0]
    result, added_sensors = get_systems(system_type="Sensor",
                                        exclusive=True,
                                        convert_to_dict=True)
    # added_sensors maps sensor id -> sensor ip, e.g.
    # {'564d1731-5369-d912-e91b-61c1fff3cf6c': '192.168.87.197'}
    if not result:
        api_log.error(
            'Cannot get list of connected sensors: {}'.format(added_sensors))
        return False

    # Always act on the local machine as well.
    if isinstance(added_sensors, dict):
        added_sensors['local'] = '127.0.0.1'

    if added_sensors:
        # Bug fix: dict.iteritems() is Python-2-only; items() behaves the
        # same here and keeps the code Python-3 compatible.
        for sensor_id, sensor_ip in added_sensors.items():
            api_log.info(
                'Trying to disable global plugin "{}" plugin on - {}'.format(
                    plugin_name, sensor_ip))
            result, msg = disable_plugin_globally(plugin_name, sensor_ip)
            if not result:
                api_log.error(msg)

            api_log.info(
                'Trying to disable per-asset plugin "{}" plugin on - {}'.
                format(plugin_name, sensor_ip))
            result, msg = disable_plugin_per_assets(plugin_name, sensor_ip)
            if not result:
                api_log.error(msg)

    # Remove plugin file from disk on every sensor at once.
    api_log.info('Removing plugin file: {} on sensors {}'.format(
        plugin_file, added_sensors))
    result = remove_file(host_list=list(added_sensors.values()),
                         file_name=plugin_file)
    return result
def rotate_backups(system_id, backup_type="configuration", nbackups=10):
    """ Keep at most `nbackups` automatic backups on a system.

    Args:
        system_id (str): Id of the system whose backups are rotated.
        backup_type (str): Type of backup to rotate.
        nbackups (int): Number of automatic backups to keep.

    Returns:
        (bool, str): Success flag plus an informational/error message.
    """
    success, candidates = ret = get_backup_list(system_id=system_id,
                                                backup_type=backup_type,
                                                no_cache=True)
    if not success:
        return ret

    success, system_ip = ret = get_system_ip_from_system_id(system_id)
    if not success:
        return ret

    # Only dated, automatically-created backups take part in the rotation.
    candidates = [entry for entry in candidates
                  if entry['date'] is not None and entry['method'] == 'auto']
    if len(candidates) < nbackups:
        return True, 'No backups to remove'

    # Order by date and index each entry by its offset from the oldest one.
    ordered = sorted(candidates, key=lambda entry: entry['date'])
    oldest = ordered[0]['date']
    for entry in ordered:
        entry['index'] = entry['date'] - oldest

    # Seed with the oldest nbackups, then let optimize() decide, one backup
    # at a time, which entries survive.
    survivors = ordered[:nbackups]
    for entry in ordered[nbackups:]:
        survivors = optimize(survivors + [entry])

    # Everything not in the survivors list gets removed from disk.
    keep = [entry['file'] for entry in survivors]
    doomed = [os.path.join("/var/alienvault/backup/", entry['file'])
              for entry in ordered if entry['file'] not in keep]

    if len(doomed) == 0:
        return True, 'No backups to remove'

    success, _ = ret = remove_file([system_ip], " ".join(doomed))
    if not success:
        return ret

    return True, "Removed %d backups" % len(doomed)
def rotate_backups(system_id, backup_type="configuration", nbackups=10):
    """ Prune automatic backups so that at most `nbackups` of them remain.

    Args:
        system_id (str): System whose backups should be rotated.
        backup_type (str): Backup type passed through to get_backup_list().
        nbackups (int): Maximum number of automatic backups to keep.

    Returns:
        (bool, str): Success flag and a human-readable message.
    """
    ok, listing = ret = get_backup_list(system_id=system_id,
                                        backup_type=backup_type,
                                        no_cache=True)
    if not ok:
        return ret

    ok, system_ip = ret = get_system_ip_from_system_id(system_id)
    if not ok:
        return ret

    # Rotation only considers automatic backups that carry a date.
    auto_backups = [b for b in listing
                    if b['date'] is not None and b['method'] == 'auto']
    if len(auto_backups) < nbackups:
        return True, 'No backups to remove'

    auto_backups.sort(key=lambda b: b['date'])
    base_date = auto_backups[0]['date']
    for b in auto_backups:
        b['index'] = b['date'] - base_date

    # Feed the backups beyond the first nbackups into optimize() one by
    # one; it returns the subset worth keeping.
    kept = auto_backups[:nbackups]
    for extra in auto_backups[nbackups:]:
        kept = optimize(kept + [extra])

    kept_names = [b['file'] for b in kept]
    removable = []
    for b in auto_backups:
        if b['file'] not in kept_names:
            removable.append(os.path.join("/var/alienvault/backup/", b['file']))

    if not removable:
        return True, 'No backups to remove'

    ok, _ = ret = remove_file([system_ip], " ".join(removable))
    if not ok:
        return ret

    return True, "Removed %d backups" % len(removable)
def remove_plugin_from_sensors(plugin_file):
    """
    Disable and remove custom plugin from all systems.

    Args:
        plugin_file: (str) Full path to plugin file.

    Returns:
        Result of the remove_file() call, or False when the list of
        connected sensors cannot be retrieved.
    """
    plugin_name = splitext(basename(plugin_file))[0]
    result, added_sensors = get_systems(system_type="Sensor", exclusive=True, convert_to_dict=True)
    # added_sensors maps sensor id -> sensor ip, e.g.
    # {'564d1731-5369-d912-e91b-61c1fff3cf6c': '192.168.87.197'}
    if not result:
        api_log.error('Cannot get list of connected sensors: {}'.format(added_sensors))
        return False

    # Include the local machine in the sensor set.
    if isinstance(added_sensors, dict):
        added_sensors['local'] = '127.0.0.1'

    if added_sensors:
        # Bug fix: iteritems() only exists on Python 2 dicts; items() is
        # equivalent here and also works on Python 3.
        for sensor_id, sensor_ip in added_sensors.items():
            api_log.info('Trying to disable global plugin "{}" plugin on - {}'.format(plugin_name, sensor_ip))
            result, msg = disable_plugin_globally(plugin_name, sensor_ip)
            if not result:
                api_log.error(msg)

            api_log.info('Trying to disable per-asset plugin "{}" plugin on - {}'.format(plugin_name, sensor_ip))
            result, msg = disable_plugin_per_assets(plugin_name, sensor_ip)
            if not result:
                api_log.error(msg)

    # Remove plugin file from disk on all sensors in one call.
    api_log.info('Removing plugin file: {} on sensors {}'.format(plugin_file, added_sensors))
    result = remove_file(host_list=list(added_sensors.values()), file_name=plugin_file)
    return result
def sync_asec_plugins(plugin=None, enable=True):
    """ Send the ASEC generated plugins to the system sensors and enable them

    Args:
        plugin: plugin name
        enable: whether we should enable the plugin or not. Default = True

    Returns:
        success (bool): True on success
        msg (str): Success message/Error info
    """
    if not plugin:
        return False, "No plugin to sync"

    try:
        plugin_path = "/var/lib/asec/plugins/" + plugin + ".cfg"
        sql_path = plugin_path + ".sql"

        sensors = []
        (success, sensors) = get_systems(system_type='sensor')
        if not success:
            return False, "Unable to get sensors list: %s" % sensors

        # Bug in ansible copy module prevents us from copying the files from
        # /var/lib/asec/plugins as it has permissions 0 for "other"
        # Workaround: make a local copy using ansible command module
        plugin_tmp_path = "/tmp/" + plugin + ".cfg"
        sql_tmp_path = plugin_tmp_path + ".sql"

        success, local_ip = get_system_ip_from_local()
        if not success:
            # Bug fix: these error messages carried the copy-pasted
            # "[ansible_install_plugin]" prefix and a garbled
            # "Failed to make get local IP" text.
            error_msg = "[sync_asec_plugins] " + \
                        "Failed to get local IP: %s" % local_ip
            return False, error_msg

        (success, msg) = local_copy_file(local_ip, plugin_path, plugin_tmp_path)
        if not success:
            error_msg = "[sync_asec_plugins] " + \
                        "Failed to make temp copy of plugin file: %s" % msg
            return False, error_msg

        (success, msg) = local_copy_file(local_ip, sql_path, sql_tmp_path)
        if not success:
            error_msg = "[sync_asec_plugins] " + \
                        "Failed to make temp copy of sql file: %s" % msg
            return False, error_msg

        all_ok = True
        for (sensor_id, sensor_ip) in sensors:
            (success, msg) = ansible_install_plugin(sensor_ip,
                                                    plugin_tmp_path,
                                                    sql_tmp_path)
            if not success:
                # Bug fix: the original paired this error branch with
                # `if success and enable:`, so a successful install with
                # enable=False was wrongly reported as an install failure.
                error_msg = "[sync_asec_plugins] " + \
                            "Error installing plugin %s " % plugin + \
                            "in sensor %s: %s" % (sensor_ip, msg)
                api_log.error(error_msg)
                all_ok = False
            elif enable:
                # Get list of active plugins, add the new one and send the
                # updated list back to the sensor.
                (success, data) = get_sensor_detectors(sensor_ip)
                if success:
                    data['sensor_detectors'].append(plugin)
                    sensor_det = ','.join(data['sensor_detectors'])
                    (success, msg) = set_sensor_detectors(sensor_ip, sensor_det)

                if not success:
                    error_msg = "[sync_asec_plugins] " + \
                                "Error enabling plugin %s " % plugin + \
                                "for sensor %s: %s" % (sensor_ip, msg)
                    api_log.error(error_msg)
                    all_ok = False
                else:
                    # Launch the reconfig task; it runs asynchronously so the
                    # job handle is not needed here (unused `job` removed).
                    alienvault_reconfigure.delay(sensor_ip)

        # Delete temporal copies of the files
        remove_file([local_ip], plugin_tmp_path)
        remove_file([local_ip], sql_tmp_path)

        if not all_ok:
            error_msg = "Plugin %s installation failed " % plugin + \
                        "for some sensors"
            return False, error_msg

        info_msg = "Plugin %s installed. Enabled = %s" % (plugin, str(enable))
        return True, info_msg
    except Exception as e:
        api_log.error("[sync_asec_plugins] Exception caught: %s" % str(e))
        return False, "[sync_asec_plugins] Unknown error"
def apimethod_upload_plugin(plugin_file, vendor, model, version, product_type, overwrite=False):
    """Uploads and verifies a given plugin file.

    Args:
        plugin_file (str): Name of the uploaded plugin file.
        vendor (str): Vendor written into the plugin header.
        model (str): Model written into the plugin header.
        version (str): Version written into the plugin header.
        product_type: Product type stored with the plugin data.
        overwrite (bool): Whether an already installed plugin may be replaced.

    Returns:
        dict: The plugin check data, including "need_overwrite" and, on
            success, "synchronization_job" (id of the sync celery task).

    Raises:
        APIInvalidPlugin: When the plugin file fails the syntax check.
        APICannotSavePlugin: When the plugin cannot be stored or synchronized.
    """
    try:
        uploaded_path = os.path.join(TEMPORAL_FOLDER, plugin_file)
        installed_path = os.path.join(END_FOLDER, plugin_file)
        uploaded_sql_path = uploaded_path + '.sql'
        asec_path = os.path.join(TEMPORAL_FOLDER, plugin_file)

        # Parse the uploaded plugin and run the syntax checker; the returned
        # dict also carries the rule list used further below.
        plugin = PluginFile()
        plugin.read(uploaded_path, encoding='latin1')
        check_data = plugin.check()
        check_data["need_overwrite"] = False
        if check_data["error_count"] > 0:
            raise APIInvalidPlugin(plugin.get_latest_error_msg())

        # An already installed plugin is only replaced when explicitly asked.
        if os.path.exists(installed_path) and not overwrite:
            check_data["need_overwrite"] = True
            return check_data

        # Update the stored plugin data when it exists and we are
        # overwriting; otherwise insert a new record.
        already_known = get_plugin_data_for_plugin_id(plugin.plugin_id) and overwrite
        persist = update_plugin_data if already_known else insert_plugin_data

        # Load the plugin SQL into the DB.
        with open(uploaded_sql_path) as raw_sql:
            success, msg = save_plugin_from_raw_sql(raw_sql.read())
            if not success:
                raise APICannotSavePlugin(msg)

        # Save the plugin data record.
        success, msg = persist(plugin.plugin_id,
                               plugin_name=plugin_file,
                               vendor=vendor,
                               model=model,
                               version=version,
                               nsids=len(check_data["rules"]),
                               product_type=product_type)
        if not success:
            raise APICannotSavePlugin(msg)

        # Write the plugin with the vendor:model:version headers; roll back
        # the DB record if the file cannot be written.
        saved = plugin.save(destination=installed_path,
                            vendor=vendor,
                            model=model,
                            product_type=product_type,
                            version=version)
        if not saved:
            remove_plugin_data(plugin.plugin_id)
            raise APICannotSavePlugin(message=plugin.get_latest_error_msg() or "Cannot save plugin file.")

        # Copy plugin sql file to the custom plugins dir.
        copy(uploaded_sql_path, END_FOLDER)

        # Remove the uploaded files via ansible due to file permissions.
        remove_file(['127.0.0.1'], asec_path)
        remove_file(['127.0.0.1'], asec_path + '.sql')

        # Force a synchronization of the custom plugins.
        from celerymethods.tasks.monitor_tasks import monitor_sync_custom_plugins
        job = monitor_sync_custom_plugins.delay()
        if job.id is None:
            raise APICannotSavePlugin("Cannot synchronize the plugin.")
        check_data["synchronization_job"] = job.id
    except Exception as e:
        api_log.error("[apimethod_upload_plugin] {}".format(str(e)))
        # Wrap unexpected errors; API exceptions propagate unchanged.
        if not isinstance(e, APIException):
            raise APICannotSavePlugin()
        raise

    # Check data plus the id of the job that is synchronizing the plugins.
    return check_data
def apimethod_upload_plugin(plugin_file, vendor, model, version, product_type, overwrite=False):
    """Uploads and verifies a given plugin file.

    Args:
        plugin_file (str): Name of the plugin file to install.
        vendor (str): Vendor for the plugin header.
        model (str): Model for the plugin header.
        version (str): Version for the plugin header.
        product_type: Product type stored with the plugin data.
        overwrite (bool): Replace an existing plugin of the same name.

    Returns:
        dict: Check results, the "need_overwrite" flag and, on success, the
            "synchronization_job" id.

    Raises:
        APIInvalidPlugin: If the plugin fails validation.
        APICannotSavePlugin: If storing or synchronizing the plugin fails.
    """
    try:
        tmp_plugin = os.path.join(TEMPORAL_FOLDER, plugin_file)
        dest_plugin = os.path.join(END_FOLDER, plugin_file)
        tmp_sql = tmp_plugin + '.sql'
        asec_plugin = os.path.join(TEMPORAL_FOLDER, plugin_file)

        # 1 - Validate the uploaded file with the plugin checker.
        plugin_obj = PluginFile()
        plugin_obj.read(tmp_plugin, encoding='latin1')
        data = plugin_obj.check()
        data["need_overwrite"] = False
        if data["error_count"] > 0:
            raise APIInvalidPlugin(plugin_obj.get_latest_error_msg())

        # Bail out (without error) when the plugin exists and overwriting
        # was not requested: the caller must confirm first.
        if os.path.exists(dest_plugin) and not overwrite:
            data["need_overwrite"] = True
            return data

        # Pick insert vs. update depending on whether the plugin id is
        # already registered and overwrite was requested.
        if get_plugin_data_for_plugin_id(plugin_obj.plugin_id) and overwrite:
            save_plugin_data_func = update_plugin_data
        else:
            save_plugin_data_func = insert_plugin_data

        # Apply the plugin SQL to the database.
        with open(tmp_sql) as sql_fd:
            ok, msg = save_plugin_from_raw_sql(sql_fd.read())
        if not ok:
            raise APICannotSavePlugin(msg)

        # Persist the plugin metadata.
        ok, msg = save_plugin_data_func(plugin_obj.plugin_id,
                                        plugin_name=plugin_file,
                                        vendor=vendor,
                                        model=model,
                                        version=version,
                                        nsids=len(data["rules"]),
                                        product_type=product_type)
        if not ok:
            raise APICannotSavePlugin(msg)

        # 2 - Save plugin with the appropriate headers (vendor:model:version);
        # undo the metadata record when the file write fails.
        if not plugin_obj.save(destination=dest_plugin,
                               vendor=vendor,
                               model=model,
                               product_type=product_type,
                               version=version):
            remove_plugin_data(plugin_obj.plugin_id)
            raise APICannotSavePlugin(message=plugin_obj.get_latest_error_msg() or "Cannot save plugin file.")

        # Copy plugin sql file to plugins custom dir.
        copy(tmp_sql, END_FOLDER)

        # Remove via ansible due to file permissions.
        remove_file(['127.0.0.1'], asec_plugin)
        remove_file(['127.0.0.1'], asec_plugin + '.sql')

        # 3 - Synchronize plugins across the deployment.
        from celerymethods.tasks.monitor_tasks import monitor_sync_custom_plugins
        sync_job = monitor_sync_custom_plugins.delay()
        if sync_job.id is None:
            raise APICannotSavePlugin("Cannot synchronize the plugin.")
        data["synchronization_job"] = sync_job.id
    except Exception as e:
        api_log.error("[apimethod_upload_plugin] {}".format(str(e)))
        if not isinstance(e, APIException):
            raise APICannotSavePlugin()
        raise

    # Check data plus the id of the job synchronizing the plugins.
    return data