def checkthreshold(bt_addr, volt):
    # Read the configured voltage threshold ("soglia", Italian for "threshold")
    # from config.json and alert if the measured voltage has dropped below it.
    soglia = 0
    with open('config.json') as json_data_file:
        jsondata = json.load(json_data_file)
        soglia = jsondata["voltthreshold"]
    if soglia > 0 and volt < soglia:
        alert.send(bt_addr, volt)
def run():
    logger = logging.getLogger('SweetSecurityServerLogger')
    logger.info('Checking Files Against FileCheck.io')
    fileData = getLogData()
    apiKey = getKey()
    if apiKey is None:
        logger.info('FileCheckIO Key Not Configured')
        return None
    else:
        logger.info('Checking Files Against FileCheck.IO')
        logger.info("Parsing %d files" % len(fileData))
        for file in fileData:
            if '_grokparsefailure' not in file['_source']['tags']:
                # Size and version are optional
                fileVersion = ''
                fileSize = ''
                # One or more of the following are required
                fileName = ''
                md5 = file['_source']['md5']
                if file['_source']['sha1'] != '-':
                    sha1 = file['_source']['sha1']
                else:
                    sha1 = ''
                sha256 = ''
                sha512 = ''
                if file['_source']['filename'] != '-':
                    fileName = file['_source']['filename']
                if 'filecheckscore' not in file['_source']:
                    fileStatus = check(apiKey, fileName, fileSize, fileVersion, md5, sha1, sha256, sha512)
                    fileCheckJson = json.loads(fileStatus.read())
                    try:
                        if fileCheckJson['status'] == 400:
                            logger.info("Exceeded FileCheck.io api requests")
                            return None
                    except KeyError:
                        # No 'status' field means the lookup succeeded
                        pass
                    filecheckScore = fileCheckJson['validation']
                    body = {'doc': {'filecheckscore': filecheckScore}}
                    es.update(esService, body, file['_index'], 'logs', file['_id'])
                    if filecheckScore not in [0, 404]:
                        logger.info("ALERT: FileCheck.io found a malicious file!")
                        message = ('FileCheck.io Found a Malicious File\n'
                                   'File Name: %s\nFile MD5: %s\nFile SHA1: %s\n'
                                   'FileCheckIO Reputation Score: %s' % (fileName, md5, sha1, filecheckScore))
                        alert.send('FileCheckIO', message, file['_id'], file['_index'])
                        print("Sending Message")
def main(kml_filename: str) -> None:
    """Main entry point to the program"""
    config.validate()
    area_checks: List[area.AreaCheck] = area.get_area_checks(kml_filename)
    adverts: List[scraper.Advert] = scraper.get_adverts()
    matched_adverts: List[scraper.Advert] = []
    for advert in adverts:
        for check in area_checks:
            if check(advert.coordinates):
                matched_adverts.append(advert)
    if matched_adverts:
        alert.send(matched_adverts)
    if adverts:
        cache.add_ids([a.advert_id for a in adverts])
        cache.add_info([a.info_hash for a in adverts])
        cache.add_info([a.img_hash for a in adverts])
def dnsSearch(ip, mac):
    numFound = 0
    dnsData = getLogs(ip, '/opt/nsm/bro/logs/current/dns.log')
    knownQueries = []
    knownDnsQuery = {"query": {"match_phrase": {"mac": {"query": mac}}}}
    knownDnsData = es.search(esService, knownDnsQuery, 'tardis', 'known_dnsqueries')
    for query in knownDnsData['hits']['hits']:
        if query['_source']['query'] not in knownQueries:
            knownQueries.append(query['_source']['query'])
    for log in dnsData['hits']['hits']:
        if log['_source']['query'] not in knownQueries:
            numFound += 1
            knownQueries.append(log['_source']['query'])
            # Use a separate variable for the new document so we don't shadow dnsData
            newQuery = {'mac': mac, 'query': log['_source']['query']}
            es.write(esService, newQuery, 'tardis', 'known_dnsqueries')
            alertMessage = 'A new DNS query was added to the baseline: %s' % log['_source']['query']
            alert.send('Baseliner', alertMessage, log['_id'], log['_index'])
    return numFound
def connSearch(ip, mac):
    numFound = 0
    connData = getLogs(ip, '/opt/nsm/bro/logs/current/conn.log')
    knownHosts = []
    knownHostQuery = {"query": {"match_phrase": {"mac": {"query": mac}}}}
    knownHostData = es.search(esService, knownHostQuery, 'tardis', 'known_hosts')
    for device in knownHostData['hits']['hits']:
        if device['_source']['ip'] not in knownHosts:
            knownHosts.append(device['_source']['ip'])
    for log in connData['hits']['hits']:
        if log['_source']['resp_h'] not in knownHosts:
            numFound += 1
            knownHosts.append(log['_source']['resp_h'])
            hostData = {'mac': mac, 'ip': log['_source']['resp_h']}
            es.write(esService, hostData, 'tardis', 'known_hosts')
            alertMessage = 'A new IP was added to the baseline: %s' % log['_source']['resp_h']
            alert.send('Baseliner', alertMessage, log['_id'], log['_index'])
    return numFound
def httpSearch(ip, mac):
    numFound = 0
    httpData = getLogs(ip, '/opt/nsm/bro/logs/current/http.log')
    knownWebsites = []
    knownHostQuery = {"query": {"match_phrase": {"mac": {"query": mac}}}}
    knownHostData = es.search(esService, knownHostQuery, 'tardis', 'known_websites')
    for url in knownHostData['hits']['hits']:
        if url['_source']['server_name'] not in knownWebsites:
            knownWebsites.append(url['_source']['server_name'])
    for log in httpData['hits']['hits']:
        if log['_source']['server_name'] not in knownWebsites:
            numFound += 1
            knownWebsites.append(log['_source']['server_name'])
            hostData = {'mac': mac, 'server_name': log['_source']['server_name']}
            es.write(esService, hostData, 'tardis', 'known_websites')
            alertMessage = 'A new website was added to the baseline: %s' % log['_source']['server_name']
            alert.send('Baseliner', alertMessage, log['_id'], log['_index'])
    return numFound
def deleteOldLogs():
    logger = logging.getLogger('SweetSecurityServerLogger')
    logger.info('Checking local disk space')
    diskUsage = checkDisk()
    # Warn the user if disk usage is 85% or higher
    if diskUsage > 84:
        message = 'Server disk usage is at %d%%' % diskUsage
        response = alert.send('Disk Check', message, None, None)
    logger.info('Cleaning up logs')
    ssConfig = getSSConfig()
    defaultLogRetention = ssConfig['defaultLogRetention']
    if defaultLogRetention == 0:
        logger.info('System configured to never delete logs')
        return 'Logs configured to never delete'
    else:
        logger.info('System is configured to delete logs older than %d days' % defaultLogRetention)
        matchAll = {"query": {"match_all": {}}}
        logsDeleted = 0
        today = datetime.datetime.now()
        indices = []
        for index in esService.indices.get('logstash-*'):
            indices.append(index)
        logger.info("There are %d days worth of logs" % len(indices))
        indices = sorted(indices)
        for index in indices:
            indexData = es.search(esService, matchAll, index, 'logs')
            logCount = indexData['hits']['total']
            # Index names end in a date, e.g. logstash-2018.01.31
            indexDate = datetime.datetime.strptime(index[-10:], "%Y.%m.%d")
            indexDaysOld = (today - indexDate).days
            logger.info("%s is %d days old and has %d logs" % (index, indexDaysOld, logCount))
            if indexDaysOld > defaultLogRetention:
                logger.info("Deleting index %s" % index)
                #esService.indices.delete(index=index)
                logsDeleted += logCount
        logger.info("Deleted %d logs" % logsDeleted)
        return "Deleted %d logs" % logsDeleted
# This is a simple script to be run twice a month
# with the purpose of keeping Twilio from revoking
# frosti's number (Twilio will cancel a number if
# it goes more than a month without being used).
#
# It sends a simple message to users, ostensibly as a test.
import alert

alert.send("This is a test of the FROSTI Freezer Monitoring Alert System. "
           "There is no emergency at this time.", "all")
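# For context, a minimal sketch of what an alert.send helper like the one
# imported above might look like, assuming the official `twilio` package is
# used to deliver SMS. The environment variable names and the `group`
# handling are illustrative assumptions; the project's real alert module
# may be implemented differently.
import os
from twilio.rest import Client


def send(message, group="all"):
    """Send `message` by SMS to every configured recipient (hypothetical sketch)."""
    client = Client(os.environ["TWILIO_ACCOUNT_SID"], os.environ["TWILIO_AUTH_TOKEN"])
    # ALERT_RECIPIENTS is an assumed comma-separated list of phone numbers.
    recipients = [n.strip() for n in os.environ.get("ALERT_RECIPIENTS", "").split(",") if n.strip()]
    for number in recipients:
        client.messages.create(
            to=number,
            from_=os.environ["TWILIO_FROM_NUMBER"],
            body=message,
        )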