def new_task(request, session_key, params):
    """Render the new-task page for an authenticated admin user.

    :param request: Django HttpRequest.
    :param session_key: opaque key validated via Session.verify_session_key.
    :param params: values exposed to the template as ``params``.
    :returns: rendered new-task page, or an error HttpResponse for
        invalid sessions / non-admin users.
    """
    access, session = Session.verify_session_key(session_key)
    if access and session.user.is_admin:
        context = {"params": params}
        # BUG FIX: the original built `context` but never passed it to
        # render(), so the template received no params.
        return render(request, 'dashboard/new_task.html', context)
    # BUG FIX: the original fell off the end and returned None (invalid
    # for a Django view); answer the same way dashboard() does.
    return HttpResponse("Accessing incorrect page.")
def generateRuleFiles(self):
    """Write every local ruleset out to its own rule-file.

    Iterates through all the local rulesets and prints the rules they
    contain to corresponding ``<ruleset>.rules`` files under
    ``self.configlocation``.  A ``sid-msg.map`` file is generated at the
    same time, containing every rule's SID/msg pair and its references.
    Each produced filename is appended to ``self.configfiles``.
    """
    logger = logging.getLogger(__name__)
    logger.info("Starting to generate rule-files")
    s = Session.session()
    sidmsg = ConfigFile(os.path.join(self.configlocation, "sid-msg.map"))

    # Index DetectionFilters by the SID of their rule, so a filter can be
    # spliced into the raw rule text while writing.
    detectionFilters = {}
    for df in s.query(DetectionFilter).all():
        detectionFilters[df.rule.SID] = df

    # For every ruleset:
    for ruleset in s.query(RuleSet).all():
        # Create a configfile
        rulefile = ConfigFile(
            os.path.join(self.configlocation, "%s.rules" % ruleset.name))
        # For every rule in the ruleset:
        for rule in s.query(Rule).filter(
                Rule.ruleset_id == ruleset.id).all():
            # Print the rule to the configfile
            if (rule.SID in detectionFilters):
                df = detectionFilters[rule.SID]
                # track == 1 means track by source address; any other
                # value tracks by destination.
                if (df.track == 1):
                    track = "by_src"
                else:
                    track = "by_dst"
                rawFilter = "detection-filter: track %s, count %d, seconds %d; " % (
                    track, df.count, df.seconds)
                # Insert the detection-filter text just before the
                # "sid:...)" tail of the raw rule string.
                raw = re.sub(r'(.*)(sid.*\))', r'\1' + rawFilter + r'\2',
                             rule.raw)
                rulefile.addLine(raw)
            else:
                rulefile.addLine(rule.raw)

            # Start generating this rule's sid-msg.map line: "SID || msg".
            sidmsgstring = "%s || %s" % (rule.SID, rule.msg)
            # Append every reference this rule has as "|| type,ref".
            for ref in rule.references:
                sidmsgstring += " || %s,%s" % (ref.referenceType.name,
                                               ref.reference)
            # Finally add the line to sid-msg.map
            sidmsg.addLine(sidmsgstring)

        # Close the rulefile, and add the name to the configfiles list.
        rulefile.close()
        self.configfiles.append("%s.rules" % ruleset.name)

    # When all rules are added, close sid-msg.map, and add the file to
    # the configfiles list.
    sidmsg.close()
    self.configfiles.append("sid-msg.map")
    s.close()
def screenshotcheker(message):
    """Record a default score of 100 for the sender and acknowledge them.

    :param message: incoming bot message; ``message.chat.id`` is stored as
        the row id and ``message.from_user.username`` as the username.
    """
    postgres = Session()
    score = 100
    username = message.from_user.username
    chat_id = message.chat.id
    t = table('scores', column("id"), column("username"), column("score"))
    stmt = t.insert().values(id=chat_id, username=f'{username}', score=score)
    # BUG FIX: the original only *built* the insert statement and dropped
    # it on the floor — nothing was ever written to the database.
    postgres.execute(stmt)
    # NOTE(review): assumes Session() is a SQLAlchemy session that needs an
    # explicit commit — confirm against the session factory's settings.
    postgres.commit()
    bot.send_message(message.chat.id, "Thank you for using our bot")
def dashboard(request, session_key):
    """Serve the dashboard page matching the caller's role, or an error
    response when the session key does not verify."""
    access, session = Session.verify_session_key(session_key)
    if not access:
        return HttpResponse("Accessing incorrect page.")
    template = (
        'dashboard/admin_dash.html'
        if session.user.is_admin
        else 'dashboard/user_dash.html'
    )
    return render(request, template)
def get(self, request, *args, **kwargs):
    """Return (as JSON) the id of the active session for this taxi/phone
    pair together with the current app configuration, creating a new
    Session row when no sufficiently recent one exists.

    URL kwargs used: ``taxi``, ``driver`` and ``phone`` tokens.
    """
    taxi_token = self.kwargs['taxi']
    driver_token = self.kwargs['driver']
    phone_mac_addr = self.kwargs['phone']
    session_id = 0
    obj = {}  # FIX: always defined, even when the lookup below succeeds
    try:
        # A session counts as active when it ended within the configured
        # timeout window.
        timeout = datetime.datetime.now() - datetime.timedelta(
            seconds=AppConfig.objects.first().session_timeout)
        session_id = Session.objects.filter(
            taxi__token=taxi_token
        ).filter(
            phone__mac=phone_mac_addr
        ).filter(
            end_time__gte=timeout
        ).latest('end_time').pk
    # FIX: narrowed from a bare `except:` (which also swallowed
    # KeyboardInterrupt/SystemExit).  Any failure here — no matching
    # session, or no AppConfig row yet — falls through to creating a
    # fresh session, matching the original intent.
    except Exception:
        driver = Driver.objects.filter(token=driver_token)
        driver_id = driver.first().pk if driver.exists() else None
        taxi = Taxi.objects.filter(token=taxi_token)
        taxi_id = taxi.first().pk if taxi.exists() else None
        # FIX: removed Python-2 `print` debug statements.
        session = Session(
            driver_id=driver_id,
            taxi_id=taxi_id,
            phone_id=Phone.objects.filter(mac=phone_mac_addr).first().pk,
            start_time=datetime.datetime.now(),
            end_time=datetime.datetime.now()
        )
        session.save()
        session_id = session.pk
    if session_id > 0:
        config_data = serializers.serialize('python', [AppConfig.objects.last()])
        obj = {
            'session_id': session_id,
            'config': config_data[0]['fields']
        }
    return http.HttpResponse(json.dumps(obj, default=self.date_handler))
def generateRuleFiles(self):
    """Write every local ruleset out to its own rule-file.

    Iterates through all the local rulesets and prints the rules they
    contain to corresponding ``<ruleset>.rules`` files under
    ``self.configlocation``.  A ``sid-msg.map`` file is generated at the
    same time, containing every rule's SID/msg pair and its references.
    Each produced filename is appended to ``self.configfiles``.
    """
    logger = logging.getLogger(__name__)
    logger.info("Starting to generate rule-files")
    s = Session.session()
    sidmsg = ConfigFile(os.path.join(self.configlocation, "sid-msg.map"))

    # Index DetectionFilters by the SID of their rule, so a filter can be
    # spliced into the raw rule text while writing.
    detectionFilters = {}
    for df in s.query(DetectionFilter).all():
        detectionFilters[df.rule.SID] = df

    # For every ruleset:
    for ruleset in s.query(RuleSet).all():
        # Create a configfile
        rulefile = ConfigFile(os.path.join(self.configlocation, "%s.rules" % ruleset.name))
        # For every rule in the ruleset:
        for rule in s.query(Rule).filter(Rule.ruleset_id == ruleset.id).all():
            # Print the rule to the configfile
            if(rule.SID in detectionFilters):
                df = detectionFilters[rule.SID]
                # track == 1 means track by source address; any other
                # value tracks by destination.
                if(df.track == 1):
                    track = "by_src"
                else:
                    track = "by_dst"
                rawFilter = "detection-filter: track %s, count %d, seconds %d; " % (track, df.count, df.seconds)
                # Insert the detection-filter text just before the
                # "sid:...)" tail of the raw rule string.
                raw = re.sub(r'(.*)(sid.*\))', r'\1' + rawFilter + r'\2', rule.raw)
                rulefile.addLine(raw)
            else:
                rulefile.addLine(rule.raw)

            # Start generating this rule's sid-msg.map line: "SID || msg".
            sidmsgstring = "%s || %s" % (rule.SID, rule.msg)
            # Append every reference this rule has as "|| type,ref".
            for ref in rule.references:
                sidmsgstring += " || %s,%s" % (ref.referenceType.name, ref.reference)
            # Finally add the line to sid-msg.map
            sidmsg.addLine(sidmsgstring)

        # Close the rulefile, and add the name to the configfiles list.
        rulefile.close()
        self.configfiles.append("%s.rules" % ruleset.name)

    # When all rules are added, close sid-msg.map, and add the file to
    # the configfiles list.
    sidmsg.close()
    self.configfiles.append("sid-msg.map")
    s.close()
def synchronizeClasses(self):
    """Fetch every RuleClass from the central server and mirror it into
    the local cache: new classes are added, changed descriptions and
    priorities are refreshed, and classes the server no longer knows
    about are removed."""
    logger = logging.getLogger(__name__)
    logger.info("Starting RuleClass synchronization")
    session = Session.session()

    # Ask the server for its rule-classes.
    try:
        response = self.server.getRuleClasses(self.token)
    except socket.error as e:
        logger.error("Could not connect to %s!" % serveraddress)
        logger.error(str(e))
        raise SnowmanServer.ConnectionError("Error in the connection to %s!" % serveraddress)

    if not response['status']:
        logger.error("Could not retrieve RuleClasses from the server: %s", response['message'])
        raise SnowmanServer.ConnectionError("Could not retrieve RuleClasses from the server.")
    serverRuleClasses = response['classes']

    # Cache lookup table: classtype -> RuleClass row.
    localRuleClasses = {rc.classtype: rc for rc in session.query(RuleClass).all()}

    # Import classes we lack, refresh the ones we already have.
    added = 0
    for name in serverRuleClasses:
        if name in localRuleClasses:
            cached = localRuleClasses[name]
            if cached.description != serverRuleClasses[name]['description']:
                cached.description = serverRuleClasses[name]['description']
            if cached.priority != serverRuleClasses[name]['priority']:
                cached.priority = serverRuleClasses[name]['priority']
        else:
            localRuleClasses[name] = RuleClass(
                classtype=name,
                description=serverRuleClasses[name]['description'],
                priority=serverRuleClasses[name]['priority'])
            session.add(localRuleClasses[name])
            logger.debug("Added a new ruleClass to the local cache:" + str(localRuleClasses[name]))
            added += 1
    session.commit()
    logger.info("Imported %d new RuleClasses from the srm-server" % added)

    # Drop cached classes that no longer exist on the server.
    removed = 0
    for name in localRuleClasses:
        if name not in serverRuleClasses:
            logger.debug("Deleted a rule-class from the local cache: " + str(localRuleClasses[name]))
            session.delete(localRuleClasses[name])
            removed += 1
    session.commit()
    logger.info("Removed %d RuleClasses from the local cache." % removed)
    session.close()
    logger.info("Synchronization of the ruleclasses is finished.")
def synchronizeGenerators(self):
    """Fetch every generator object from the server and mirror it into
    the local cache, removing cached generators the server no longer
    advertises."""
    logger = logging.getLogger(__name__)
    logger.info("Starting Generator synchronization")
    session = Session.session()

    # request the generators
    try:
        response = self.server.getGenerators(self.token)
    except socket.error as e:
        logger.error("Could not connect to %s!" % serveraddress)
        logger.error(str(e))
        raise SnowmanServer.ConnectionError("Error in the connection to %s!" % serveraddress)

    # Bail out unless the server answered positively.
    if not response['status']:
        logger.error("Could not retrieve Generator from the server: %s", response['message'])
        raise SnowmanServer.ConnectionError("Could not retrieve Generator from the server.")
    generators = response['generators']

    # Local cache keyed as "<gid>-<alertId>" to line up with the server's keys.
    localGenerators = {
        "%s-%s" % (g.gid, g.alertId): g
        for g in session.query(Generator).all()
    }

    # Store generators not cached yet.
    added = 0
    for key in generators:
        if key not in localGenerators:
            localGenerators[key] = Generator(
                gid=generators[key]['GID'],
                alertId=generators[key]['alertID'],
                message=generators[key]['message'])
            session.add(localGenerators[key])
            logger.debug("Added a new generator to the local cache:" + str(localGenerators[key]))
            added += 1
    session.commit()
    logger.info("Imported %d new Generators from the srm-server" % added)

    # Remove obsolete generators.
    removed = 0
    for key in localGenerators:
        if key not in generators:
            logger.debug("Deleted a generator from the local cache: " + str(localGenerators[key]))
            session.delete(localGenerators[key])
            removed += 1
    session.commit()
    logger.info("Removed %d Generators from the local cache." % removed)
    session.close()
    logger.info("Synchronization of the generators is finished.")
def synchronizeRuleReferenceTypes(self):
    """Fetch all rule-reference-types from the central server and mirror
    them into the local cache, refreshing changed URL-prefixes and
    removing types the server no longer lists."""
    logger = logging.getLogger(__name__)
    logger.info("Starting RuleReferenceType synchronization")
    session = Session.session()

    try:
        response = self.server.getReferenceTypes(self.token)
    except socket.error as e:
        logger.error("Could not connect to %s!" % serveraddress)
        logger.error(str(e))
        raise SnowmanServer.ConnectionError("Error in the connection to %s!" % serveraddress)

    if not response['status']:
        logger.error("Could not retrieve RuleReferenceTypes from the server: %s", response['message'])
        raise SnowmanServer.ConnectionError("Could not retrieve RuleReferenceType from the server.")
    referenceTypes = response['referenceTypes']

    # Local cache indexed by type name.
    localReferenceTypes = {str(rt.name): rt for rt in session.query(RuleReferenceType).all()}

    added = 0
    for name in referenceTypes:
        if name in localReferenceTypes:
            # Refresh the URL-prefix when it changed on the server.
            if localReferenceTypes[name].prefix != referenceTypes[name]['urlPrefix']:
                localReferenceTypes[name].prefix = referenceTypes[name]['urlPrefix']
        else:
            localReferenceTypes[name] = RuleReferenceType(
                name=referenceTypes[name]['name'],
                prefix=referenceTypes[name]['urlPrefix'])
            session.add(localReferenceTypes[name])
            logger.debug("Added a new RuleReferenceType to the local cache:" + str(localReferenceTypes[name]))
            added += 1
    session.commit()
    logger.info("Imported %d new RuleReferenceTypes from the srm-server" % added)

    removed = 0
    for name in localReferenceTypes:
        if name not in referenceTypes:
            logger.debug("Deleted a RuleReferenceType from the local cache: " + str(localReferenceTypes[name]))
            session.delete(localReferenceTypes[name])
            removed += 1
    session.commit()
    logger.info("Removed %d RuleReferenceTypes from the local cache." % removed)
    session.close()
    logger.info("Synchronization of the RuleReferenceTypes is finished.")
def get_user_data(self, session_key):
    """Resolve a session key to a UserData bundle.

    :param session_key: opaque key looked up via Session.get_session.
    :returns: UserData(session, user, groups), or None when the key does
        not resolve to a session or the user has joined no groups.
    """
    session = Session.get_session(session_key)
    # Idiom fix: `is None`, not `!= None` comparison.
    if session is None:
        return None
    user = session.user
    # Idiom fix: comprehension instead of the manual append loop.
    groups = [jg.group for jg in JoinedGroup.get_groups(user)]
    # Idiom fix: truthiness test instead of `groups == []`.
    if not groups:
        return None
    return UserData(session, user, groups)
def synchronizeRuleSets(self):
    """Fetch every RuleSet (metadata only — not the rules they contain)
    from the server and mirror it into the local cache."""
    logger = logging.getLogger(__name__)
    logger.info("Starting to synchronize RuleSets")
    session = Session.session()

    try:
        response = self.server.getRuleSets(self.token)
    except socket.error as e:
        logger.error("Could not connect to %s!" % serveraddress)
        logger.error(str(e))
        raise SnowmanServer.ConnectionError("Error in the connection to %s!" % serveraddress)

    if not response['status']:
        logger.error("Could not retrieve RuleSets from the server: %s", response['message'])
        raise SnowmanServer.ConnectionError("Could not retrieve RuleSets from the server.")
    serverSets = response['rulesets']

    # Local cache indexed by ruleset name.
    localSets = {rs.name: rs for rs in session.query(RuleSet).all()}

    # Add the new RuleSets
    added = 0
    for name in serverSets:
        if name not in localSets:
            newSet = RuleSet(name=serverSets[name]['name'],
                             description=serverSets[name]['description'])
            session.add(newSet)
            logger.debug("Added a new RuleSet to the local cache: " + str(newSet))
            added += 1
    session.commit()
    logger.info("Imported %d new RuleSets from the srm-server" % added)

    # Delete the rulesets that is not on the server anymore.
    removed = 0
    for name in localSets:
        if name not in serverSets:
            session.delete(localSets[name])
            logger.debug("Deleted a RuleSet from the local cache: " + str(localSets[name]))
            removed += 1
    session.commit()
    logger.info("Removed %d RuleSets from the local cache" % removed)
    session.close()
    logger.info("RuleSet synchronization is finished.")
def login(self, username, password):
    """Authenticate a user and, on success, open a session for them.

    Sends a JSON payload over this connection: a 'verified' response
    carrying the new session key, or a 'denied' response with an error
    message.

    :param username: login name to verify.
    :param password: plaintext password handed to User.verify.
    """
    access, user = User.verify(username, password)
    if access:
        session = Session.generate_session(user)
        self.send(
            json.dumps({
                'response': 'verified',
                'code': response_codes.correct,
                'session_key': session.session_key
            }))
    else:
        self.send(
            json.dumps({
                'response': 'denied',
                'code': response_codes.error,
                # BUG FIX: corrected the typo "passoword" in this
                # user-facing message.
                'message': 'Invalid username or password'
            }))
def synchronizeFilters(self):
    """Collect all filters (EventFilter, DetectionFilter and Suppress)
    from the snowman-server and update the local cache to match.

    Filters carry no rev-number, so each one is compared field-by-field
    instead of using the rev-based optimization the Rule-sync has.
    """
    logger = logging.getLogger(__name__)
    logger.info("Starting to synchronize Filters")
    s = Session.session()

    # Connect to the snowman-server, and fetch the filters for this sensor.
    try:
        response = self.server.getFilters(self.token)
    except socket.error as e:
        logger.error("Could not connect to %s!" % serveraddress)
        logger.error(str(e))
        raise SnowmanServer.ConnectionError("Error in the connection to %s!" % serveraddress)

    if response['status']:
        dFilters = response['detectionFilters']
        eFilters = response['eventFilters']
        suppresses = response['suppresses']
    else:
        logger.error("Could not retrieve Filters from the server: %s", response['message'])
        raise SnowmanServer.ConnectionError("Could not retrieve Filters from the server.")

    # All locally cached rules, keyed by SID, so new filters can be
    # attached to the rule they belong to.
    rules = {}
    for r in s.query(Rule).all():
        rules[str(r.SID)] = r

    # Currently cached filters, keyed by the SID of their rule.
    localDFilters = {}
    for df in s.query(DetectionFilter).all():
        localDFilters[str(df.rule.SID)] = df
    localEFilters = {}
    for ef in s.query(EventFilter).all():
        localEFilters[str(ef.rule.SID)] = ef
    localSuppress = {}
    for su in s.query(Suppress).all():
        localSuppress[str(su.rule.SID)] = su

    # Add the new DetectionFilters, and update changed.
    new = 0
    update = 0
    for df in dFilters:
        if df not in localDFilters:
            f = DetectionFilter(track=dFilters[df]['track'],
                                count=dFilters[df]['count'],
                                seconds=dFilters[df]['seconds'])
            # NOTE(review): raises KeyError if the rule is not in the local
            # cache — assumes rules are always synchronized first; confirm.
            f.rule = rules[df]
            s.add(f)
            logger.debug("Added a new DetectionFilter to the local cache: " + str(df))
            new += 1
        else:
            changed = False
            if localDFilters[df].track != dFilters[df]['track']:
                changed = True
                localDFilters[df].track = dFilters[df]['track']
            if localDFilters[df].count != dFilters[df]['count']:
                changed = True
                localDFilters[df].count = dFilters[df]['count']
            if localDFilters[df].seconds != dFilters[df]['seconds']:
                changed = True
                localDFilters[df].seconds = dFilters[df]['seconds']
            if changed:
                update += 1
    s.commit()
    logger.info("Imported %d new DetectionFilters from the snowman-server" % new)
    logger.info("Updated %d DetectionFilters" % update)

    # Add the new EventFilters, and update changed.
    new = 0
    update = 0
    for ef in eFilters:
        if ef not in localEFilters:
            f = EventFilter(track=eFilters[ef]['track'],
                            count=eFilters[ef]['count'],
                            seconds=eFilters[ef]['seconds'],
                            ttype=eFilters[ef]['type'])
            f.rule = rules[ef]
            s.add(f)
            logger.debug("Added a new EventFilter to the local cache: " + str(ef))
            new += 1
        else:
            changed = False
            if localEFilters[ef].track != eFilters[ef]['track']:
                changed = True
                localEFilters[ef].track = eFilters[ef]['track']
            if localEFilters[ef].count != eFilters[ef]['count']:
                changed = True
                localEFilters[ef].count = eFilters[ef]['count']
            if localEFilters[ef].seconds != eFilters[ef]['seconds']:
                changed = True
                localEFilters[ef].seconds = eFilters[ef]['seconds']
            if localEFilters[ef].filtertype != eFilters[ef]['type']:
                changed = True
                localEFilters[ef].filtertype = eFilters[ef]['type']
            if changed:
                update += 1
    s.commit()
    # BUG FIX: these two log lines were copy-pasted from the block above
    # and wrongly said "DetectionFilters".
    logger.info("Imported %d new EventFilters from the snowman-server" % new)
    logger.info("Updated %d EventFilters" % update)

    # Add new suppresses, and update the changed ones.
    new = 0
    update = 0
    for suppress in suppresses:
        if suppress not in localSuppress:
            sup = Suppress(track=suppresses[suppress]['track'])
            sup.rule = rules[suppress]
            s.add(sup)
            s.commit()
            for address in suppresses[suppress]['addresses']:
                a = SuppressAddress(address=address)
                a.suppress = sup
                s.add(a)
            s.commit()
            new += 1
        else:
            changed = False
            if localSuppress[suppress].track != suppresses[suppress]['track']:
                changed = True
                localSuppress[suppress].track = suppresses[suppress]['track']
            # Reconcile suppress-addresses: drop the ones the server no
            # longer lists, add the ones we are missing.
            addresses = s.query(SuppressAddress).filter(
                SuppressAddress.suppress_id == localSuppress[suppress].id).all()
            current = []
            for a in addresses:
                if a.address not in suppresses[suppress]['addresses']:
                    changed = True
                    s.delete(a)
                else:
                    current.append(a.address)
            for address in suppresses[suppress]['addresses']:
                if address not in current:
                    changed = True
                    sa = SuppressAddress(address=address)
                    sa.suppress = localSuppress[suppress]
                    s.add(sa)
            if changed:
                update += 1
    s.commit()
    logger.info("Imported %d new Suppresses from the snowman-server" % new)
    logger.info("Updated %d Suppresses" % update)

    # Delete the filters that are not on the server anymore.
    i = 0
    for f in localDFilters:
        if f not in dFilters:
            s.delete(localDFilters[f])
            # BUG FIX: was `localDFilter[f]` (undefined name) -> NameError.
            logger.debug("Deleted a DetectionFilter from the local cache: " + str(localDFilters[f]))
            i += 1
    s.commit()
    logger.info("Removed %d DetectionFilters from the local cache" % i)

    i = 0
    for f in localEFilters:
        if f not in eFilters:
            s.delete(localEFilters[f])
            # BUG FIX: was `localEFilter[f]` (undefined name) -> NameError.
            logger.debug("Deleted a EventFilter from the local cache: " + str(localEFilters[f]))
            i += 1
    s.commit()
    logger.info("Removed %d EventFilters from the local cache" % i)

    i = 0
    for su in localSuppress:
        if su not in suppresses:
            # BUG FIX: was `s.delete(localSuppress)` — tried to delete the
            # whole dict instead of the single cached Suppress row.
            s.delete(localSuppress[su])
            logger.debug("Deleted a Suppress fom the local cache: " + str(localSuppress[su]))
            i += 1
    s.commit()
    logger.info("Removed %d suppresses from the local cache" % i)
    s.close()
    logger.info("Filter-synchronization is finished.")
def synchronizeRules(self):
    """Collects lists of rules this sensor should have, compares it with
    what currently lies in the local cache, and request missing/changed
    rules from the central server."""
    logger = logging.getLogger(__name__)
    # Upper bound on how many SIDs to request from the server at a time.
    maxRuleRequests = int(Config.get("sync", "maxRulesInRequest"))
    s = Session.session()
    logger.info("Starting to synchronize the Rules")
    logger.debug("Collecting the SID/rev pairs from the server")

    # Collect sid/rev pairs from the central server.
    response = self.server.getRuleRevisions(self.token)
    if(response['status'] == False):
        logger.error("Could not get rulerevisions from the server: %s" % response['message'])
        raise SnowmanServer.ConnectionError("Could not retrieve RuleRevisions from the server.")
    rulerevisions = response['revisions']

    # Collect the rules in the local cache.  Rules that are already at the
    # server's revision are popped from `rulerevisions` (nothing to fetch);
    # everything else is deleted locally so a fresh copy can be inserted
    # below — or so it simply stays gone if the server dropped it.
    localRules = s.query(Rule).all()
    for r in localRules:
        # If the current rule is in the rulerevisions-list
        if str(r.SID) in rulerevisions and int(r.rev) == int(rulerevisions[str(r.SID)]):
            rulerevisions.pop(str(r.SID))
            logger.debug("Rule %d is already up to date" % r.SID)
        else:
            logger.debug("Rule %d is deleted, as it is going to be updated or removed." % r.SID)
            s.delete(r)
    s.commit()
    logger.debug("Starting to download %d rules from the server" % len(rulerevisions))

    # Grab Ruleclasses, rulesets and rulereferencetypes from the local cache, to
    # have them at hand when it is needed to reference them.
    ruleClasses = {}
    for rc in s.query(RuleClass).all():
        ruleClasses[rc.classtype] = rc
    ruleSets = {}
    for rs in s.query(RuleSet).all():
        ruleSets[rs.name] = rs
    reftype = {}
    for ref in s.query(RuleReferenceType).all():
        reftype[ref.name] = ref

    # Start collecting the full rules from the central server:
    # `rulerevisions` becomes the work-list of SIDs still to download.
    rulerevisions = list(rulerevisions)
    while len(rulerevisions) > 0:
        # Request at most maxRuleRequests rules per round-trip.
        request = rulerevisions[:maxRuleRequests]
        logger.debug("Requesting %d out of %d rules" % (len(request), len(rulerevisions)))

        # Request a chunk of the rules
        response = self.server.getRules(self.token, request)
        if(response['status'] == False):
            logger.error("Could not get rulers from the server: %s" % response['message'])
            raise SnowmanServer.ConnectionError("Could not retrieve Rule from the server.")
        else:
            rules = response['rules']
            # Insert the received rules, and all their references.
            for r in rules:
                rule = Rule(sid=rules[r]['SID'], rev=rules[r]['rev'], msg=rules[r]['msg'], raw=rules[r]['raw'])
                rule.ruleset = ruleSets[rules[r]['ruleset']]
                rule.ruleclass = ruleClasses[rules[r]['ruleclass']]
                s.add(rule)
                logger.debug("Got a new rule from the server: " + str(rule))
                # Each reference arrives as a (type-name, value) pair.
                for ref in rules[r]['references']:
                    rref = RuleReference(reference=ref[1])
                    rref.referenceType = reftype[ref[0]]
                    rref.rule = rule
                    s.add(rref)
                # This SID is done — drop it from the work-list.
                rulerevisions.remove(r)
            s.commit()
    logger.info("Finished synchronizing the rules from the server")
    s.close()
def groups(request, session_key):
    """Render the groups page for a caller with a valid session.

    :param request: Django HttpRequest.
    :param session_key: opaque key validated via Session.verify_session_key.
    :returns: rendered groups page, or an error HttpResponse when the
        session key does not verify.
    """
    access, session = Session.verify_session_key(session_key)
    if access:
        return render(request, 'dashboard/groups.html')
    # BUG FIX: the original fell off the end and returned None for an
    # invalid session (not a valid Django view response); answer the
    # same way dashboard() does.
    return HttpResponse("Accessing incorrect page.")
def filters(request, session_key, params):
    """Render the filters page, exposing `params` to the template under
    the key "params"; returns None when the session key does not verify
    (matching the original behavior)."""
    access, session = Session.verify_session_key(session_key)
    if not access:
        return None
    return render(request, 'dashboard/filters.html', {"params": params})