def clear(agent_id=None, all_agents=False):
    """
    Clears the rootcheck database.

    :param agent_id: For an agent.
    :param all_agents: For all agents.
    :return: Message.
    """
    # Clear DB
    conn = Connection(common.database_path)
    regex = re.compile(r'^\d{,3}-\S+$')
    db_agents_list = []

    if not int(all_agents):
        raw_str = r'^' + "{}".format(int(agent_id)).zfill(3) + r'-\S+$'
        regex = re.compile(raw_str)

    for db_agent in conn.getDbsName():
        if regex.search(db_agent) is not None:
            db_agents_list.append(db_agent)

    if len(db_agents_list) <= 0:
        raise OssecAPIException(1600)

    for db_agent in db_agents_list:
        conn.connect(db_agent)
        if conn.getDb() is not None:
            doc = conn.getDb()['pm_event']
            if doc is not None:
                doc.drop()
                conn.vacuum()
            doc = conn.getDb()['pmCounterInfo']
            if doc is not None:
                doc.drop()
                conn.vacuum()

    # Clear OSSEC info
    if int(all_agents):
        rootcheck_files = glob('{0}/queue/rootcheck/*'.format(common.ossec_path))
    else:
        if agent_id == "000":
            rootcheck_files = ['{0}/queue/rootcheck/rootcheck'.format(common.ossec_path)]
        else:
            agent_info = Agent(agent_id).get_basic_information()
            rootcheck_files = glob('{0}/queue/rootcheck/({1}) {2}->rootcheck'.format(
                common.ossec_path, agent_info['name'], agent_info['ip']))

    for rootcheck_file in rootcheck_files:
        if path.exists(rootcheck_file):
            remove(rootcheck_file)

    return "Rootcheck database deleted"
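# Illustrative usage of clear() (a hedged sketch, not part of the module itself; the
# import name `rootcheck` and the agent ID are assumptions):
#
#     import rootcheck
#
#     rootcheck.clear(agent_id='001')    # clear the rootcheck DB of a single agent
#     rootcheck.clear(all_agents=True)   # clear the rootcheck DB of every agent
#
# Both calls return "Rootcheck database deleted" on success and raise
# OssecAPIException(1600) when no matching agent database exists.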
def last_scan(agent_id):
    """
    Gets the last scan of the agent.

    :param agent_id: Agent ID.
    :return: Dictionary: end, start.
    """
    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if conn.getDb() is None:
        raise OssecAPIException(1600)

    data = {}

    # Last 'Ending syscheck scan.' event
    lastSyscheckEndTime = None
    lastSyscheckEndTimeObj = list(conn.getDb()['pm_event'].find(
        {"log": 'Ending syscheck scan.'}).sort([('date_last', -1)]).limit(1))
    if lastSyscheckEndTimeObj:
        lastSyscheckEndTime = lastSyscheckEndTimeObj[0].get('date_last')

    if lastSyscheckEndTime is not None:
        data['end'] = str(lastSyscheckEndTime + timedelta(seconds=timeoffset))
    else:
        data['end'] = str(lastSyscheckEndTime)

    # Last 'Starting syscheck scan.' event
    lastSyscheckStartTime = None
    lastSyscheckStartTimeObj = list(conn.getDb()['pm_event'].find(
        {"log": 'Starting syscheck scan.'}).sort([('date_last', -1)]).limit(1))
    if lastSyscheckStartTimeObj:
        lastSyscheckStartTime = lastSyscheckStartTimeObj[0].get('date_last')

    if lastSyscheckStartTime is not None:
        data['start'] = str(lastSyscheckStartTime + timedelta(seconds=timeoffset))
    else:
        data['start'] = str(lastSyscheckStartTime)

    return data
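# Illustrative usage of last_scan() (a sketch; assumes agent 001 exists and has
# completed at least one scan, so the 'Starting'/'Ending' marker events are present):
#
#     times = last_scan('001')
#     # e.g. {'start': '2018-05-14 03:10:22', 'end': '2018-05-14 03:12:41'}
#     print(times['start'], times['end'])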
def get_pci(agent_id=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Get all the PCI requirements used in the rootchecks of the agent.

    :param agent_id: Agent ID.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    fields = {}
    request = {"$and": [{'pci_dss': {'$ne': None}}]}

    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if conn.getDb() is None:
        raise OssecAPIException(1600)

    # Search
    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit()
                                            else search['value']), re.IGNORECASE)
        search_con = {"$or": [{'pci_dss': regex}]}
        if bool(search['negation']):
            if search_con["$or"]:
                request["$and"].append({"$not": search_con})
        else:
            if search_con["$or"]:
                request["$and"].append(search_con)

    # Total items
    # conn.execute(query.format('COUNT(DISTINCT pci_dss)'), request)
    # data = {'totalItems': conn.fetch()[0]}

    # Sorting
    sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(fields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(map(lambda x: str(x),
                                            set(sort['fields']) - allowed_sort_fields))
                raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                    allowed_sort_fields, uncorrect_fields))
            for i in sort['fields']:
                str_order = 1 if sort['order'] == 'asc' else -1
                sort_con.append((fields[i], str_order))
        else:
            sort_con.append(('pci_dss', 1 if sort['order'] == 'asc' else -1))
    else:
        sort_con.append(('pci_dss', 1))

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
    elif limit == 0:
        raise OssecAPIException(1406)

    if not request["$and"]:
        request = {}

    db_data = conn.getDb()['pm_event'].find(request).sort(sort_con).skip(offset).limit(limit).distinct('pci_dss')

    data = {'items': []}
    for pmEvent in db_data:
        data['items'].append(pmEvent)

    return data
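# Illustrative usage of get_pci() (a sketch; the sort and search dictionaries follow
# the formats documented in the docstring, and the agent ID is hypothetical):
#
#     pci = get_pci(agent_id='001', offset=0, limit=20,
#                   sort={'fields': [], 'order': 'asc'},
#                   search={'value': '10.6.1', 'negation': 0})
#     # pci['items'] -> e.g. ['10.6.1', '11.4']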
def print_db(agent_id=None, status='all', pci=None, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Returns a list of events from the database.

    :param agent_id: Agent ID.
    :param status: Filters by status: outstanding, solved, all.
    :param pci: Filters by PCI DSS requirement.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if conn.getDb() is None:
        raise OssecAPIException(1600)

    request = {"$and": []}

    # Last 'Ending rootcheck scan.' event
    lastRootcheckEndTime = None
    lastRootcheckEndTimeObj = list(conn.getDb()['pm_event'].find(
        {"log": 'Ending rootcheck scan.'}).sort([('date_last', -1)]).limit(1))
    if lastRootcheckEndTimeObj:
        lastRootcheckEndTime = lastRootcheckEndTimeObj[0].get('date_last', datetime.now())

    fields = {'status': 'status', 'event': 'log', 'oldDay': 'date_first', 'readDay': 'date_last'}

    # Exclude the scan start/end marker events
    request['$and'].append({
        'log': {
            '$nin': [
                'Starting rootcheck scan.', 'Ending rootcheck scan.',
                'Starting syscheck scan.', 'Ending syscheck scan.'
            ]
        }
    })

    if status == 'outstanding':
        if lastRootcheckEndTime is not None:
            request['$and'].append({
                'date_last': {'$gt': lastRootcheckEndTime - timedelta(seconds=86400)}
            })
    elif status == 'solved':
        if lastRootcheckEndTime is not None:
            request['$and'].append({
                'date_last': {'$lte': lastRootcheckEndTime - timedelta(seconds=86400)}
            })

    if pci:
        request["$and"].append({"pci_dss": pci})

    # Search
    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit()
                                            else search['value']), re.IGNORECASE)
        search_con = {"$or": []}
        for x in fields.values():
            search_con["$or"].append({x: regex})
        if bool(search['negation']):
            if search_con["$or"]:
                request["$and"].append({"$not": search_con})
        else:
            if search_con["$or"]:
                request["$and"].append(search_con)

    # Sorting
    sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(fields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(map(lambda x: str(x),
                                            set(sort['fields']) - allowed_sort_fields))
                raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                    allowed_sort_fields, uncorrect_fields))
            for i in sort['fields']:
                str_order = 1 if sort['order'] == 'asc' else -1
                sort_con.append((fields[i], str_order))
        else:
            sort_con.append((fields["readDay"], 1 if sort['order'] == 'asc' else -1))
    else:
        sort_con.append((fields["readDay"], -1))

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
    elif limit == 0:
        raise OssecAPIException(1406)

    select = ["status", "date_first", "date_last", "log", "pci_dss"]
    select_fields = {}
    for x in set(select):
        select_fields[x] = 1

    if not request["$and"]:
        request = {}

    data = {}
    db_data = conn.getDb()['pm_event'].find(request, select_fields)
    data['totalItems'] = db_data.count()
    db_data = db_data.sort(sort_con).skip(offset).limit(limit)

    # Format the returned events
    data['items'] = []
    for pmEvent in db_data:
        pmEvent.pop('_id')
        if pmEvent.get("date_last") is not None and lastRootcheckEndTime is not None:
            if pmEvent['date_last'] > lastRootcheckEndTime:
                pmEvent['status'] = 'outstanding'
            else:
                pmEvent['status'] = 'solved'
        if pmEvent.get("date_first") is not None:
            pmEvent['date_first'] = str(pmEvent.get("date_first") + timedelta(seconds=timeoffset))
        else:
            pmEvent['date_first'] = str(pmEvent.get("date_first"))
        if pmEvent.get("date_last") is not None:
            pmEvent['date_last'] = str(pmEvent.get("date_last") + timedelta(seconds=timeoffset))
        else:
            pmEvent['date_last'] = str(pmEvent.get("date_last"))
        data['items'].append(pmEvent)

    return data
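# Illustrative usage of print_db() (a sketch; 'readDay' is one of the allowed sort
# fields defined inside the function, and the agent ID is hypothetical):
#
#     outstanding = print_db(agent_id='001', status='outstanding', limit=10,
#                            sort={'fields': ['readDay'], 'order': 'desc'})
#     for ev in outstanding['items']:
#         print(ev['date_last'], ev['status'], ev['log'])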
def files(agent_id=None, event=None, filename=None, filetype='file', md5=None, sha1=None, hash=None,
          summary=False, offset=0, limit=common.database_limit, sort=None, search=None):
    """
    Return a list of files from the database that match the filters.

    :param agent_id: Agent ID.
    :param event: Filters by event: added, readded, modified, deleted.
    :param filename: Filters by filename.
    :param filetype: Filters by filetype: file or registry.
    :param md5: Filters by md5 hash.
    :param sha1: Filters by sha1 hash.
    :param hash: Filters by md5 or sha1 hash.
    :param summary: Returns a summary grouping by filename.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if conn.getDb() is None:
        raise OssecAPIException(1600)

    agent_info = Agent(agent_id).get_basic_information()
    if 'os' in agent_info:
        windows_agent = 'windows' in agent_info['os']['name'].lower()
    else:
        windows_agent = False
    # if 'os' in agent_info and 'platform' in agent_info['os']:
    #     if agent_info['os']['platform'].lower() == 'windows':
    #         windows_agent = True
    #     else:
    #         windows_agent = False
    # else:
    #     # We do not know if it is a windows or linux agent.
    #     # It is set to windows agent in order to avoid wrong data (uid, gid, ...)
    #     windows_agent = True

    eventRequest = {"$and": []}
    fileRequest = {"$and": []}
    eventFields = {'scanDate': 'date', 'modificationDate': 'mtime', 'size': 'size',
                   'user': 'uname', 'group': 'gname'}
    fileFields = {'file': 'path', 'filetype': 'type'}

    # Query
    # query = "SELECT {0} FROM fim_event, fim_file WHERE fim_event.id_file = fim_file.id AND fim_file.type = :filetype"
    # fileRequest['$and'].append({'type': filetype})
    # if event:
    #     # query += ' AND fim_event.type = :event'
    #     # request['event'] = event
    #     eventRequest['$and'].append({'event': event})
    # if filename:
    #     # query += ' AND path = :filename'
    #     # request['filename'] = filename
    #     fileRequest['$and'].append({'path': filename})
    # if md5:
    #     # query += ' AND md5 = :md5'
    #     # request['md5'] = md5
    #     eventRequest['$and'].append({'md5': md5})
    # if sha1:
    #     # query += ' AND sha1 = :sha1'
    #     # request['sha1'] = sha1
    #     eventRequest['$and'].append({'sha1': sha1})
    # if hash:
    #     # query += ' AND (md5 = :hash OR sha1 = :hash)'
    #     # request['hash'] = hash
    #     eventRequest['$and'].append({'$or': [{'md5': hash}, {'sha1': hash}]})
    # if search:
    #     query += " AND NOT" if bool(search['negation']) else ' AND'
    #     query += " (" + " OR ".join(x + ' LIKE :search' for x in ('path', "date", 'size', 'md5', 'sha1', 'uname', 'gname', 'inode', 'perm')) + " )"
    #     request['search'] = '%{0}%'.format(search['value'])

    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit()
                                            else search['value']), re.IGNORECASE)
        event_search_con = {"$or": []}
        file_search_con = {"$or": []}
        for x in ['path', "date", 'size', 'md5', 'sha1', 'uname', 'gname', 'perm']:
            if x == 'path':
                file_search_con["$or"].append({x: regex})
            else:
                event_search_con["$or"].append({x: regex})
        if bool(search['negation']):
            if event_search_con["$or"]:
                eventRequest["$and"].append({"$not": event_search_con})
            if file_search_con["$or"]:
                fileRequest["$and"].append({"$not": file_search_con})
        else:
            if event_search_con["$or"]:
                eventRequest["$and"].append(event_search_con)
            if file_search_con["$or"]:
                fileRequest["$and"].append(file_search_con)

    # Total items
    db_data = None
    events = []
    if summary:
        db_data = conn.getDb()['fim_file'].aggregate([
            {
                '$lookup': {
                    'from': 'fim_event',
                    'localField': '_id',
                    'foreignField': 'file_id',
                    'as': 'fim_events'
                }
            },
        ], cursor={})
        for sysFile in db_data:
            item = sysFile
            # Keep only the most recent event of each file
            for fEvent in item['fim_events']:
                if not item.get('fim_event'):
                    item['fim_event'] = fEvent
                elif fEvent['date'] > item['fim_event']['date']:
                    item['fim_event'] = fEvent
            item.pop('fim_events')
            if not item.get('fim_event'):
                continue

            # Apply the filters in Python, as the aggregation returns every file
            if item['type'] != filetype:
                continue
            if event and item['fim_event']['type'] != event:
                continue
            if filename and item['path'] != filename:
                continue
            if md5 and item['fim_event']['md5'] != md5:
                continue
            if sha1 and item['fim_event']['sha1'] != sha1:
                continue
            if hash and (item['fim_event']['sha1'] != hash) and (item['fim_event']['md5'] != hash):
                continue
            if search:
                search_value = str(search['value'])
                if all(search_value not in str(field) for field in (
                        item['path'], item['fim_event']['date'], item['fim_event']['size'],
                        item['fim_event']['md5'], item['fim_event']['sha1'],
                        item['fim_event']['uname'], item['fim_event']['gname'],
                        item['fim_event']['perm'])):
                    continue

            item['sha1'] = item['fim_event']['sha1']
            item['uid'] = item['fim_event']['uid']
            item['date'] = item['fim_event']['date']
            item['gid'] = item['fim_event']['gid']
            # item['mtime'] = item['fim_event']['mtime']
            item['perm'] = item['fim_event']['perm']
            item['md5'] = item['fim_event']['md5']
            item['fim_event.type'] = item['fim_event']['type']
            item.pop('fim_event')
            item.pop('type')
            events.append(item)

        # rFileRequest = fileRequest.copy()
        # if not rFileRequest['$and']:
        #     rFileRequest = {}
        # db_data = conn.getDb()['fim_file'].find_one(rFileRequest)
        # # list_db_data = list(db_data)
        # for eFile in db_data:
        #     rEventRequest = eventRequest.copy()
        #     rEventRequest['$and'].append({'file_id': eFile.get('_id')})
        #     event_data = conn.getDb()['fim_event'].find(rEventRequest).sort(('date', -1)).limit(1)
        #     if event_data.count() == 1:
        #         item = list(event_data)[0]
        #         item['type'] = eFile.get('type')
        #         item['type'] = eFile.get('path')
        #         print(item)
        #         events.append(item)
        # query += ' group by path'
        # conn.execute("SELECT COUNT(*) FROM ({0}) AS TEMP".format(query.format("max(date)")), request)
    else:
        db_data = conn.getDb()['fim_event'].aggregate([
            {
                '$lookup': {
                    'from': 'fim_file',
                    'localField': 'file_id',
                    'foreignField': '_id',
                    'as': 'fim_file'
                }
            },
        ], cursor={})
        for sysEvent in db_data:
            if sysEvent['fim_file'][0]['type'] != filetype:
                continue
            if event and sysEvent['type'] != event:
                continue
            if filename and sysEvent['fim_file'][0]['path'] != filename:
                continue
            if md5 and sysEvent['md5'] != md5:
                continue
            if sha1 and sysEvent['sha1'] != sha1:
                continue
            if hash and (sysEvent['sha1'] != hash) and (sysEvent['md5'] != hash):
                continue
            if search:
                search_value = str(search['value'])
                if all(search_value not in str(field) for field in (
                        sysEvent['fim_file'][0]['path'], sysEvent['date'], sysEvent['size'],
                        sysEvent['md5'], sysEvent['sha1'], sysEvent['uname'],
                        sysEvent['gname'], sysEvent['perm'])):
                    continue

            item = sysEvent
            item['fim_event.type'] = item['type']
            item['path'] = item['fim_file'][0]['path']
            # print(item)
            item.pop('_id')
            item.pop('fim_file')
            events.append(item)

    data = {'totalItems': len(events)}

    # Sorting
    event_sort_con = []
    file_sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(eventFields.keys()) | set(fileFields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(map(lambda x: str(x),
                                            set(sort['fields']) - allowed_sort_fields))
                raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                    allowed_sort_fields, uncorrect_fields))
            for i in sort['fields']:
                # str_order = 1 if sort['order'] == 'asc' else -1
                sort_order = False if sort['order'] == 'asc' else True
                # Map the API field name to the database field name and cast to str
                # so that missing or mixed values remain comparable
                sort_field = eventFields[i] if i in eventFields else fileFields[i]
                events.sort(key=lambda e: str(e.get(sort_field, '')), reverse=sort_order)
                # if i in eventFields.keys():
                #     event_sort_con.append((eventFields[i], str_order))
                #     events.sort(key=lambda e: e[i], reverse=sort_order)
                # elif i in fileFields.keys():
                #     file_sort_con.append((fileFields[i], str_order))
        else:
            # event_sort_con.append((eventFields["date"], 1 if sort['order'] == 'asc' else -1))
            sort_order = False if sort['order'] == 'asc' else True
            events.sort(key=lambda e: e['date'], reverse=sort_order)
    else:
        # event_sort_con.append((eventFields["date"], -1))
        events.sort(key=lambda e: e['date'], reverse=True)

    # if sort:
    #     if sort['fields']:
    #         allowed_sort_fields = fields.keys()
    #         # Check if every element in sort['fields'] is in allowed_sort_fields
    #         if not set(sort['fields']).issubset(allowed_sort_fields):
    #             uncorrect_fields = list(map(lambda x: str(x), set(sort['fields']) - set(allowed_sort_fields)))
    #             raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(allowed_sort_fields, uncorrect_fields))
    #         query += ' ORDER BY ' + ','.join(['{0} {1}'.format(fields[i], sort['order']) for i in sort['fields']])
    #     else:
    #         query += ' ORDER BY date {0}'.format(sort['order'])
    # else:
    #     query += ' ORDER BY date DESC'

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
        # query += ' LIMIT :offset,:limit'
        # request['offset'] = offset
        # request['limit'] = limit
        if offset >= 0:
            events = events[int(offset):(int(offset) + int(limit))]
    elif limit == 0:
        raise OssecAPIException(1406)

    # if summary:
    #     select = ["max(date)", "mtime", "fim_event.type", "path"]
    # else:
    #     select = ["date", "mtime", "fim_event.type", "path", "size", "perm", "uid", "gid", "md5", "sha1"]

    data['items'] = []
    for fEvent in events:
        data_tuple = {}
        if fEvent.get('date') is not None:
            data_tuple['scanDate'] = str(fEvent.get('date') + timedelta(seconds=timeoffset))
        else:
            data_tuple['scanDate'] = str(fEvent.get('date'))
        # if fEvent.get('mtime') is not None:
        #     data_tuple['modificationDate'] = str(fEvent.get('mtime') + timedelta(seconds=timeoffset))  # modificationDate
        # else:
        #     data_tuple['modificationDate'] = data_tuple['scanDate']  # scanDate
        if fEvent.get('fim_event.type') is not None:
            data_tuple['event'] = fEvent.get('fim_event.type')
        if fEvent.get('path') is not None:
            data_tuple['file'] = fEvent.get('path')

        if not summary:
            try:
                permissions = filemode(int(fEvent.get('perm'), 8))
            except TypeError:
                permissions = None
            if fEvent.get('size') is not None:
                data_tuple['size'] = fEvent.get('size')
            if fEvent.get('md5') is not None:
                data_tuple['md5'] = fEvent.get('md5')
            if fEvent.get('sha1') is not None:
                data_tuple['sha1'] = fEvent.get('sha1')
            if not windows_agent:
                if fEvent.get('uid') is not None:
                    data_tuple['uid'] = fEvent.get('uid')
                if fEvent.get('gid') is not None:
                    data_tuple['gid'] = fEvent.get('gid')
                if fEvent.get('perm') is not None:
                    data_tuple['octalMode'] = fEvent.get('perm')
                    if permissions:
                        data_tuple['permissions'] = permissions

        data['items'].append(data_tuple)

    return data
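# Illustrative usage of files() (a sketch; the filters follow the docstring above,
# 'scanDate' is one of the allowed sort fields, and the agent ID is hypothetical):
#
#     changed = files(agent_id='001', event='modified', filetype='file', limit=50,
#                     sort={'fields': ['scanDate'], 'order': 'desc'})
#     for f in changed['items']:
#         print(f['scanDate'], f.get('event'), f.get('file'), f.get('md5'))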