def initialize (): global olivePress, floorMap, inputPanel, names #remove input panel and store player names try: for p in range(condition%2*2+1): if names[p].getMessage() != '': gPlayerData[p+1]['name'] = names[p].getMessage() inputPanel.remove() except: pass #add the factory olivePress = Factory.Factory(LANG) #load the state machine files loadMachFSM_Xl() loadFactFSM_Xl() #add machinery and other stuff to factory olivePress.AddMachinery(MACHINERY[trial]) olivePress.AddAllTools() olivePress.AddOtherStuff() #split the views according to players splitViews() #define the key events for deactivating collision (if players get stuck) collisionTempDeactivate() #set the time passed before game starts LogParser.sgetGameLoadingTime(True) AddProximitySensors() #initiate the factory states (actions, message) = SyncFactoryStates ('FACTORY/START') gPlayers[1]['player'].BroadcastActionsMessages(actions, message)
def main(argv):
    """CLI entry point (Python 2): sort and parse web-server log files,
    collect visit statistics for the current year, and print a report.

    argv -- full argument vector; argv[1:] are the input log files.
    Exits with status 1 when no input files are given; die(5, ...) when the
    report directory cannot be created.
    """
    # Set dynamic defaults: statistics year defaults to the current UTC year.
    Config.stat_year = time.gmtime(time.time())[0]
    # Process command line arguments.
    files = Config.parse_args(argv[1:])
    if not files:
        print >>sys.stderr, '%s: %s' % (sys.argv[0], 'missing input files')
        print >>sys.stderr, Config.usage_help()
        sys.exit(1)
    # Read, parse and sort input.
    import LogParser
    print >>sys.stderr, '%s: sorting %u files...' % (sys.argv[0], len(files))
    sort_pool = LogParser.log_file_sort_pool(files)
    print >>sys.stderr, '%s: parsing %u sorted files...' % (sys.argv[0],
                                                            len(sort_pool))
    lparser = LogParser.log_file_parse_pool(sort_pool)
    # Collect statistics via pluggable gauges.
    stats = Statistics.Statistics(int(Config.stat_year))
    import TopVisits, DailyVisits, GeoHour
    stats.gauges += [
        TopVisits.TopVisits(stats),
        DailyVisits.DailyVisits(stats),
        GeoHour.GeoHour(stats)
    ]
    stats.walk_hits(lparser)
    print >>sys.stderr, '%s: generating report...' % sys.argv[0]
    stats.done()
    # Generate report.
    print "Hits:\t%s" % stats.hits
    print "Visits:\t%s" % stats.visits
    destdir = './logreport'
    # Ensure the destination directory exists and is searchable (os.X_OK).
    if not os.path.isdir(destdir) or not os.access(destdir, os.X_OK):
        try:
            os.mkdir(destdir)
        except OSError, ex:
            die(5, "failed to create or access directory %s: %s" %
                (destdir, ex.strerror))
def __init__(self, data_path, footage_path, orientation='portrait',
             resolution='1080', start_second=0, start_date=None,
             end_second=None, unit='mm', out_path='onewheel.MP4'):
    """Configure the overlay renderer (Python 2: print statements).

    data_path    -- path of the ride-log file fed to LogParser.parse
    footage_path -- path of the source video footage
    orientation  -- 'portrait' or 'landscape'; passed to compute_resolutions
    resolution   -- vertical resolution label, e.g. '1080'
    start_second / end_second -- trim window within the footage
    start_date   -- millisecond-time string parsed via LogParser
    unit         -- measurement unit; second char 'm' selects metric icons
                    (assumes e.g. 'mm' metric vs 'mi' imperial — TODO confirm)
    out_path     -- output video file name
    """
    self.footage_path = footage_path
    self.orientation = orientation
    self.resolutions = compute_resolutions(orientation, resolution)
    self.start_second = start_second
    self.end_second = end_second
    self.icon_manager = IconManager(
        resolution=res_2_tuple(self.resolutions['icon']),
        unit='metric' if unit[1] == 'm' else 'imperial')
    self.data = LogParser.parse(data_path, unit)
    self.start_date = LogParser.parse_millisecond_time(start_date)
    self.out_path = out_path
    print 'Footage is coming from', self.footage_path
    print 'Footage orientation is', self.orientation
    print 'Resolutions are:'
    print self.resolutions
def SaveLoggedData():
    """Persist the game log for the main trial and flag success in gLogSaved.

    No-op unless both module-level studyMode and trial are truthy
    (log data is only kept for the main trial).
    """
    global gLogSaved
    if studyMode and trial:  # log data only for main trial
        try:
            LogParser.storeLogData(FSM, gPlayers, condition, group)
            print "LOG DATA SAVED!"
            gLogSaved = True
        except:
            # NOTE(review): bare except — the message assumes the only
            # failure mode is a missing FSM, but any error lands here.
            print "Finite State Machine not loaded!"
def ReadFile():
    """Flask-style handler: parse a raw uploaded log file into structured rows.

    Reads ``request.data`` as ``"<name>,<format>,<text...>"``.  For the
    'Others' format each line is matched locally against the configured regex
    and the result is also dumped to ``<name>_structured.csv``; every other
    format is delegated to ``LogParser``.  Stores the rows in the module-level
    ``resList`` and returns a JSON payload of (records, columns, file name).
    """
    global resList
    text = request.data
    ff = text.decode('utf-8').split(",")
    # First chunk carries the file name (first 4 chars skipped — presumably a
    # payload prefix, TODO confirm); second chunk is the format label.
    file_name = ''.join(ff[0][4:]).replace("\"", '').strip()
    file_format = ff[1].replace("\\n", '').replace("\"", '').strip()
    # Remaining chunks are the log text; normalise escaped CR/LF and quotes.
    file_text = ''.join(ff[2:]).replace("\\r", "")
    #print("File_text ---- :",file_text)
    file_text = str(
        file_text.replace("\\\\n", "/n").replace("\\n", "\n").replace("\"", '').strip())
    file_text1 = file_text.strip().split("\n")
    # Drop the trailing line (assumed empty/truncated — TODO confirm).
    del file_text1[len(file_text1) - 1]
    #print("File_text11 ---- :",file_text1)
    log_format = titles[file_format]['log_format']
    print(log_format)
    if (file_format == 'Others'):
        rex = titles[file_format]['regex'][0]
        mainList = []
        for line in file_text1:
            l = re.findall(rex, line)
            size = len(log_format)
            size1 = len(l)
            if (size1 < size):
                # Too few captured fields: skip the line.
                pass
            else:
                # Fold overflow captures into the final column, then pair
                # column names with values (zip_longest pads with None).
                l[size - 1] = ' '.join(l[size - 1:])
                del l[size:]
                d = OrderedDict()
                for k, v in it.zip_longest(log_format, l):
                    d[k] = v
                mainList.append(d)
        resList = mainList
        df = pd.DataFrame(mainList, columns=log_format)
        df.to_csv(file_name + "_structured.csv", sep=',', encoding='utf-8',
                  index=False)
        return jsonify(mainList, log_format, file_name)
    else:
        print("----")
        parser = LogParser(log_format, file_format)
        resList = parser.parse(file_text1)
        #rex = titles[file_format]['regex'][0]
        col = list(resList.columns.values)
        return jsonify(resList.to_dict('records'), col, file_name)
def test_polished_user_data(request, user_name=None):
    """Render a summary page of parsed stats for one user's raw ski logs.

    user_name -- the user_id to look up; when absent an error response is
    returned.  Each raw log is parsed into a dict of headline stats.
    """
    if user_name:
        all_users = models.PisteUser.all()
        all_users.filter('user_id = ', user_name)
        logs_user = all_users.get()
        if logs_user.rawlog_set.count() > 0:
            polished = []
            for log_entry in logs_user.rawlog_set:
                tree = LogParser.get_parse_tree(log_entry.log_data)
                log_dict = {
                    "highest": LogParser.get_highest_altitude(tree),
                    "location": LogParser.get_location(tree),
                    "jumps": LogParser.get_jumps_count(tree),
                    "avg_speed": LogParser.get_average_speed(tree),
                    "start_time": LogParser.return_time(
                        LogParser.get_first_time_entry(tree)),
                    "max_speed": LogParser.get_max_speed(tree)
                }
                polished.append(log_dict)
            return render_to_response('polished_user.html',
                                      {'polished_logs': polished})
        # NOTE(review): a user with no raw logs falls through returning None
        # (no HttpResponse) — confirm whether that is intended.
    else:
        return HttpResponse("No name given")  # TODO sensible error message
def __init__(self):
    """Train a tiny OR-gate network, inspect the Apache error log, and
    restart Apache when the network output flags trouble.  (Python 2.)
    """
    self.NeuralNetwork = Neural_Network()
    # Training data: logical-OR truth table.
    X = np.array(([0, 0], [1, 0], [0, 1], [1, 1]), dtype=float)
    y = np.array(([0], [1], [1], [1]), dtype=float)
    y = y / 1  # NOTE(review): no-op division, presumably leftover scaling
    self.TrainNetwork = trainer(self.NeuralNetwork)
    self.TrainNetwork.train(X, y)
    # Initialize and train neural network (above); now the helper objects.
    self.AIMLPs = AIMLParser()
    self.logObj = LogParser()
    # Initialize helper objects
    self.logFile = open("/var/log/apache2/error.log", 'r')  # log file open
    self.readTime = open("time.txt", 'r')  # timestamp of the last check
    self.lastStat = self.readTime.readline()
    self.updatStat = self.lastStat
    self.Xin = self.parseFile(
        self.logFile)  # parse log file from current time
    # NOTE(review): the parsed input is immediately overwritten below,
    # forcing the restart branch — looks like debug leftover; confirm.
    self.Xin = [1.0, 1.0]
    self.ResOut = self.NeuralNetwork.forward(self.Xin)
    if 1.0 == round(self.ResOut[0]):
        self.command = self.getCommand('Apache2')
        self.doOperation()
        print "Restart successful!"
    self.logFile.close()
def updt_log_dict(logObjPile, fPile):
    """Extend *logObjPile* in place with a parsed log object per path.

    New entries are keyed by consecutive integers starting at the current
    size of the dict; the (mutated) dict is also returned.
    """
    next_key = len(logObjPile)
    for offset, path in enumerate(fPile):
        logObjPile[next_key + offset] = lp.log_parser(path)
    return logObjPile
def parse_log_files(filePile):
    """Parse each path in *filePile* and return a dict of log objects.

    Keys are 'logfile0', 'logfile1', ... in input order.
    """
    return {
        "logfile{}".format(idx): lp.log_parser(path)
        for idx, path in enumerate(filePile)
    }
def tqdm_parse_log_files(fPile):
    """Parse each path in *fPile* with a tqdm progress bar.

    Returns a dict mapping the integer input index to the parsed log object.
    """
    return {
        idx: lp.log_parser(path)
        for idx, path in enumerate(tqdm(fPile))
    }
def init():
    """Wire up the Squid access-log handler against the MySAR database and
    start it on the Twisted reactor.  Pure side effects, no return value.
    """
    # NOTE: DSN credentials are a redacted placeholder ('*****:*****').
    squidDB = Database.MySAR('mysql://*****:*****@localhost/mysar')
    squidDB.startup()
    # Ping the DB every 120 s so the MySQL connection is not dropped.
    reactor.callLater(120, DBKeepAlive, squidDB)
    squidhandler = Squid.Parser('/var/log/squid/access.log', squidDB)
    #eximhandler = LogParser.logFileHandlers('/var/log/exim4/mainlog')
    sdHandler = LogParser.deferHandler(squidhandler)
    #edHandler = LogParser.deferHandler(eximhandler)
    # Run the handler loop in a reactor worker thread.
    reactor.callInThread(sdHandler.loop)
def ReadFile():
    """Flask-style handler: parse a raw uploaded log file into structured rows.

    Reads ``request.data`` as ``"<name>,<format>,<text...>"``, splits the log
    text into lines, matches each line against the format's configured regex,
    and pairs the captures with the format's column names.  Stores the rows in
    the module-level ``resList`` and returns a JSON payload of
    (records, columns).

    BUG FIX: the original referenced the builtin ``format`` instead of the
    local ``log_format`` (crashing ``len(format)``, ``zip_longest`` and
    ``jsonify``), and called ``LogParser(...).parse(log_file)`` with an
    undefined ``log_file`` name; the dead parser call is removed.
    """
    global resList
    text = request.data
    ff = text.decode('utf-8').split(",")
    # First chunk carries the file name (first 4 chars skipped — payload
    # prefix), second chunk the format label.
    file_name = ''.join(ff[0][4:]).replace("\"", '').strip()
    file_format = ff[1].replace("\\n", '').replace("\"", '').strip()
    # Remaining chunks are the log text; normalise escaped CR/LF and quotes.
    file_text = ''.join(ff[2:]).replace("\\r", "")
    file_text = str(file_text.replace("\\n", "\n").replace("\"", '').strip())
    file_text1 = file_text.strip().split("\n")
    del file_text1[-1]  # drop the trailing (empty) line
    log_format = titles[file_format]['log_format']
    rex = titles[file_format]['regex'][0]
    mainList = []
    for line in file_text1:
        fields = re.findall(rex, line)
        size = len(log_format)
        # Skip lines with too few captures; otherwise fold any overflow
        # captures into the final column and pair names with values.
        if len(fields) >= size:
            fields[size - 1] = ' '.join(fields[size - 1:])
            del fields[size:]
            d = OrderedDict()
            for k, v in it.zip_longest(log_format, fields):
                d[k] = v
            mainList.append(d)
    resList = mainList
    #df = pd.DataFrame(mainList, columns=log_format)
    #df.to_csv(file_name + "_structured.csv", sep=',', encoding='utf-8', index=False)
    return jsonify(mainList, log_format)
def test_polished_user_data(request, user_name=None):
    """Render a summary page of parsed stats for one user's raw ski logs.

    user_name -- the user_id to look up; when absent an error response is
    returned.  Each raw log is parsed into a dict of headline stats.
    """
    if user_name:
        all_users = models.PisteUser.all()
        all_users.filter('user_id = ', user_name)
        logs_user = all_users.get()
        if logs_user.rawlog_set.count() > 0:
            polished = []
            for log_entry in logs_user.rawlog_set:
                tree = LogParser.get_parse_tree(log_entry.log_data)
                log_dict = {"highest": LogParser.get_highest_altitude(tree),
                            "location": LogParser.get_location(tree),
                            "jumps": LogParser.get_jumps_count(tree),
                            "avg_speed": LogParser.get_average_speed(tree),
                            "start_time": LogParser.return_time(
                                LogParser.get_first_time_entry(tree)),
                            "max_speed": LogParser.get_max_speed(tree)}
                polished.append(log_dict)
            return render_to_response('polished_user.html',
                                      {'polished_logs': polished})
        # NOTE(review): a user with no raw logs falls through returning None
        # (no HttpResponse) — confirm whether that is intended.
    else:
        return HttpResponse("No name given")  # TODO sensible error message
# Interactive console menu dispatching to the security-tooling modules.
# Loops until the user picks Exit (6) or interrupts with Ctrl-C.
import sys
import Scraper
import LogParser
import DirMonitoring
import PortScanner
import AttackDetect

try:
    while True:
        choice = input(" \n \t \t \t What do you want to do !! \n 1 - LogParser \n 2 - DirrectoryMonitoring\n 3 - PortScanner\n 4 - AttackDetection\n 5 - Scraper\n 6 - Exit\n EnterYourChoice >> ")
        if choice == '1':
            LogParser.main()
            # AttackDetect.main()
        elif choice == '2':
            DirMonitoring.main()
        elif choice == '3':
            PortScanner.main()
        elif choice == '4':
            AttackDetect.main()
        elif choice == '5':
            Scraper.main()
        elif choice == '6':
            break
        else:
            print("Unknown Choice")
    print('\n<> BYe <>')
except KeyboardInterrupt:
    # Ctrl-C exits cleanly with a goodbye message.
    print(" \n <> BYe <>")
    sys.exit()
def receive_log(request):
    """POST endpoint: ingest a raw ski-run log upload.

    Parses the posted log data, creates or updates the PisteUser, stores the
    raw log (RawLog) and attaches it to a new or existing Run identified by
    the posted GUID.  Responds with '{"result": "1"}' on success and
    '{"result": "0"}' for non-POST requests.
    """
    if request.method == 'POST':
        try:
            user_name = request.POST['user_id']
            guid = request.POST['run_guid']
            guid = guid[1:len(guid)-1]  # remove '{}' characters
            # parameter binding for user input to be safe
            user_exists = models.PisteUser.gql("WHERE user_id= :1", user_name)
            user_ref = None
            # TODO move the below to LogParser - return dict with all the information
            from datetime import datetime
            tree = LogParser.get_parse_tree(request.POST['log_data'])
            last_time_in_log = datetime.fromtimestamp(
                LogParser.get_last_time_entry(tree))
            first_time_in_log = datetime.fromtimestamp(
                LogParser.get_first_time_entry(tree))
            location = LogParser.get_location(tree)
            jumps = LogParser.get_jumps_count(tree)
            # NOTE(review): lowest_altitude comes from get_lowest_speed —
            # looks like a copy-paste slip (expected get_lowest_altitude);
            # confirm against LogParser.
            lowest_altitude = LogParser.get_lowest_speed(tree)
            highest_altitude = LogParser.get_highest_altitude(tree)
            avg_speed = LogParser.get_average_speed(tree)
            max_speed = LogParser.get_max_speed(tree)
            lowest_speed = LogParser.get_lowest_speed(tree)
            # TODO get the following dynamically from the client:
            board = "Burton Aftermath"  # yes, my 2012 season board
            stance = "goofy"
            degree_front = 15
            degree_back = -15
            music_listened = "AC/DC"
            # fetch matching resort:
            resort = None
            results = models.Resort.query(location["lat"], location["lon"],
                                          1, (2, 0))
            if len(results) > 0:
                for res in results:
                    resort = res[1]  # keeps the last match
            first_time = False
            if not user_exists.count() > 0:
                # First upload for this user: create the datastore entity.
                first_time = True
                new_user = models.PisteUser()
                new_user.user_id = request.POST['user_id']
                new_user.last_time_from_log = last_time_in_log
                new_user.put()
                user_ref = new_user
            else:
                for user in user_exists:
                    user_ref = user
                    user.last_time_from_log = last_time_in_log
                    user.put()
            raw_deposit = models.RawLog(user_id=user_ref)
            raw_deposit.guid = guid
            raw_deposit.log_data = request.POST['log_data']
            raw_deposit.first_time_from_log = first_time_in_log
            raw_deposit.last_time_from_log = last_time_in_log
            raw_deposit.jumps = jumps
            raw_deposit.lowest_altitude = lowest_altitude
            raw_deposit.highest_altitude = highest_altitude
            raw_deposit.avg_speed = avg_speed
            raw_deposit.max_speed = max_speed
            raw_deposit.lowest_speed = lowest_speed
        except:
            #TODO move to "global" to catch all app exceptions and tracebacks
            # NOTE(review): bare except — after a failure the code below still
            # runs and will NameError on first_time/raw_deposit; confirm.
            import sys, traceback
            exception_traceback = ''.join(
                traceback.format_exception(*sys.exc_info()))
            logging.error("START TRACEBACK----------")
            logging.error(exception_traceback)
            logging.error("END TRACEBACK----------")
        # deposit Run, start with first time user logging and create a new Run
        if first_time:
            new_run = models.Run(user_id=user_ref)
            new_run.start_time = first_time_in_log
            new_run.last_time_pointer = last_time_in_log
            new_run.start_location_lat = location["lat"]
            new_run.start_location_lon = location["lon"]
            new_run.end_location_lat = location["lat"]
            new_run.end_location_lon = location["lon"]
            new_run.guid = guid
            new_run.board = board
            new_run.stance = stance
            new_run.degree_front = degree_front
            new_run.degree_back = degree_back
            new_run.music_listened = music_listened
            new_run.resort_id = resort
            new_run.put()
            # deposit raw log
            raw_deposit.run_id = new_run
            raw_deposit.put()
            return HttpResponse('{"result": "1"}')
        # check whether a Run with this guid already exists for the user
        run_query_results = models.Run.gql("WHERE guid= :1 AND user_id= :2",
                                           guid, user_ref)
        # no results found, create a new Run
        if run_query_results.count() == 0:
            new_run = models.Run(user_id=user_ref)
            new_run.start_time = first_time_in_log
            new_run.last_time_pointer = last_time_in_log
            new_run.start_location_lat = location["lat"]
            new_run.start_location_lon = location["lon"]
            new_run.end_location_lat = location["lat"]
            new_run.end_location_lon = location["lon"]
            new_run.guid = guid
            new_run.board = board
            new_run.stance = stance
            new_run.degree_front = degree_front
            new_run.degree_back = degree_back
            new_run.music_listened = music_listened
            new_run.resort_id = resort
            new_run.put()
            # deposit raw log
            raw_deposit.run_id = new_run
            raw_deposit.put()
        else:
            # TODO perhaps check the weird results also? say minus...
            # just change the pointer to the last time entry
            for run in run_query_results:
                run.last_time_pointer = last_time_in_log
                run.end_location_lat = location["lat"]
                run.end_location_lon = location["lon"]
                run.put()
                # deposit raw log
                raw_deposit.run_id = run
                raw_deposit.put()
        return HttpResponse('{"result": "1"}')
    return HttpResponse('{"result": "0"}')
def get_ipscore(log_file_name):
    """Score each connection record in *log_file_name* and persist results.

    For every well-formed log line the function computes partial scores
    (static, source, familiarity, traffic, variation) via ScoreCalculator,
    combines them into an overall score P, decides allow/deny against the
    threshold L/100, writes the decision into the ``runtime_tmp`` table and
    feeds the record back into the training tables.  Lines in the first
    10-minute interval ('0000_0010') are stored with 'booting' defaults
    instead of being scored.
    """
    # Database connection (HOST/USER/PASSWORD/DB_NAME come from module scope).
    database = MySQLdb.connect(HOST, USER, PASSWORD, DB_NAME)
    cursor = database.cursor()
    # Get system constants from DB.
    N, Ni, Nu, Ns, t, L, h = ScoreCalculator.get_system_constants(cursor)
    # Open test log file and read line by line.
    with open(log_file_name, "r") as log_file:
        for line in log_file:
            # Define all score values.
            K = 0  # static score
            S = 0  # source score
            F = 0  # familiarity score
            T = 0  # traffic score
            V = 0  # variation score
            P = 0  # overall score
            tns = 0  # tns value (unused below — TODO confirm)
            tnd = 0  # tnd value (fed to the variation score)
            n_days = 0  # number of days value (unused below — TODO confirm)
            out_log_id = -1  # log id
            ##
            ## 1 ## Read current log and get attributes
            ##
            # Check line attributes and size; skip malformed lines.
            attributes = line.split(',')
            size = len(attributes)
            if size == LogParser.LINE_SIZE and attributes[8].strip() \
                    and '=,' not in line and '=""' not in line:
                # Get attributes of the current log.
                date_time, date, time = LogParser.get_datetime(attributes)
                source_ip = LogParser.get_source(attributes)
                dest_ip, dest_port = LogParser.get_destination(attributes)
                action, service = LogParser.get_action_service(attributes)
                # Parse time of the current log.
                current_time = str(date_time.time()).split(':')
                hour = current_time[0].strip()
                minute = current_time[1].strip()
                second = current_time[2].strip()
                time_interval = hour + minute[0] + '0'
                # Build the 10-minute interval label of the current log,
                # e.g. '1250_1300' (wrapping the hour, capping at 23:59).
                if int(minute[0]) == 5:
                    if int(hour) == 23:
                        time_interval += '_' + hour + str(59)
                    elif int(hour) < 9:
                        time_interval += '_0' + str(int(hour) + 1) + '00'
                    elif int(hour) >= 9:
                        time_interval += '_' + str(int(hour) + 1) + '00'
                else:
                    time_interval += '_' + hour + str(int(minute[0]) + 1) + '0'
                #print date_time, source_ip, dest_ip, dest_port, action, service
                ##
                ## 2 ## Check time of current log and decide to calculate score
                ## If the log is in the first 10 minutes, insert it into
                ## runtime_tmp table and training data
                ## Else, calculate a score for current log and do other insertions
                ##
                # Define default run time table values.
                dec_action = 'booting'
                score = -1
                debug = 'booting'
                # Check time of current log to know whether it is in the first 10 min.
                if time_interval != '0000_0010':
                    # Calculate static score.
                    K = ScoreCalculator.calculate_static_score(cursor, K,
                        source_ip, dest_ip, dest_port, service)
                    # Calculate source score.
                    S = ScoreCalculator.calculate_source_score(cursor, S,
                        service, dest_port, (str(date) + ' ' + str(time)),
                        Ni, Nu, L)
                    # Calculate familiarity score.
                    out_log_id, F = ScoreCalculator.calculate_familiarity_score(
                        cursor, service, F, out_log_id, date, source_ip,
                        dest_port, h, L)
                    # Calculate traffic score.
                    T = ScoreCalculator.calculate_traffic_score(cursor,
                        database, T, service, dest_port,
                        (str(date) + ' ' + str(time)), out_log_id, t)
                    # Calculate variation score.
                    V = ScoreCalculator.calculate_variation_score(cursor, V,
                        tnd, source_ip, dest_port,
                        (str(date) + ' ' + str(time)), Ns, L)
                    # Calculate overall score and set decision, score, debug.
                    P = K + (S+F+T+V)/4.0
                    if P > L/100.0:
                        dec_action = 'allow'
                    else:
                        dec_action = 'deny'
                    score = P
                    debug = str(K) + ';' + str(S) + ';' + str(F) + ';' + str(T) + ';' + str(V)
                ##
                ## 3 ## Insert current log into runtime_tmp table
                ##
                # SECURITY(review): SQL built by string concatenation from
                # log-derived values — injection-prone; should use
                # cursor.execute with parameter binding.  Flagged, not changed.
                insert_tmp_query = insert_query = 'INSERT INTO runtime_tmp (datetime, srcip, dstip, port, service, action, score, debug) VALUES("' + (str(date) + ' ' + str(time)) + '","' + source_ip + '","' + dest_ip + '",' + str(dest_port) + ',"' + service + '","' + dec_action + '",' + str(score) + ',"' + debug + '")'
                cursor.execute(insert_tmp_query)
                ##
                ## 4 ## Insert current log into training tables in DB
                ##
                # Insert current traffic into database.
                log_id = LogParser.insert_traffic_into_db(cursor, source_ip,
                    dest_ip, dest_port, service)
                # Increase count of the current log.
                LogParser.update_log_count(cursor, log_id, date, time_interval)
    # Close database connection.
    database.close()
def receive_log(request):
    """POST endpoint: ingest a raw ski-run log upload.

    Parses the posted log data, creates or updates the PisteUser, stores the
    raw log (RawLog) and attaches it to a new or existing Run identified by
    the posted GUID.  Responds with '{"result": "1"}' on success and
    '{"result": "0"}' for non-POST requests.
    """
    if request.method == 'POST':
        try:
            user_name = request.POST['user_id']
            guid = request.POST['run_guid']
            guid = guid[1:len(guid) - 1]  # remove '{}' characters
            # parameter binding for user input to be safe
            user_exists = models.PisteUser.gql("WHERE user_id= :1", user_name)
            user_ref = None
            # TODO move the below to LogParser - return dict with all the information
            from datetime import datetime
            tree = LogParser.get_parse_tree(request.POST['log_data'])
            last_time_in_log = datetime.fromtimestamp(
                LogParser.get_last_time_entry(tree))
            first_time_in_log = datetime.fromtimestamp(
                LogParser.get_first_time_entry(tree))
            location = LogParser.get_location(tree)
            jumps = LogParser.get_jumps_count(tree)
            # NOTE(review): lowest_altitude comes from get_lowest_speed —
            # looks like a copy-paste slip (expected get_lowest_altitude);
            # confirm against LogParser.
            lowest_altitude = LogParser.get_lowest_speed(tree)
            highest_altitude = LogParser.get_highest_altitude(tree)
            avg_speed = LogParser.get_average_speed(tree)
            max_speed = LogParser.get_max_speed(tree)
            lowest_speed = LogParser.get_lowest_speed(tree)
            # TODO get the following dynamically from the client:
            board = "Burton Aftermath"  # yes, my 2012 season board
            stance = "goofy"
            degree_front = 15
            degree_back = -15
            music_listened = "AC/DC"
            # fetch matching resort:
            resort = None
            results = models.Resort.query(location["lat"], location["lon"], 1,
                                          (2, 0))
            if len(results) > 0:
                for res in results:
                    resort = res[1]  # keeps the last match
            first_time = False
            if not user_exists.count() > 0:
                # First upload for this user: create the datastore entity.
                first_time = True
                new_user = models.PisteUser()
                new_user.user_id = request.POST['user_id']
                new_user.last_time_from_log = last_time_in_log
                new_user.put()
                user_ref = new_user
            else:
                for user in user_exists:
                    user_ref = user
                    user.last_time_from_log = last_time_in_log
                    user.put()
            raw_deposit = models.RawLog(user_id=user_ref)
            raw_deposit.guid = guid
            raw_deposit.log_data = request.POST['log_data']
            raw_deposit.first_time_from_log = first_time_in_log
            raw_deposit.last_time_from_log = last_time_in_log
            raw_deposit.jumps = jumps
            raw_deposit.lowest_altitude = lowest_altitude
            raw_deposit.highest_altitude = highest_altitude
            raw_deposit.avg_speed = avg_speed
            raw_deposit.max_speed = max_speed
            raw_deposit.lowest_speed = lowest_speed
        except:
            #TODO move to "global" to catch all app exceptions and tracebacks
            # NOTE(review): bare except — after a failure the code below still
            # runs and will NameError on first_time/raw_deposit; confirm.
            import sys, traceback
            exception_traceback = ''.join(
                traceback.format_exception(*sys.exc_info()))
            logging.error("START TRACEBACK----------")
            logging.error(exception_traceback)
            logging.error("END TRACEBACK----------")
        # deposit Run, start with first time user logging and create a new Run
        if first_time:
            new_run = models.Run(user_id=user_ref)
            new_run.start_time = first_time_in_log
            new_run.last_time_pointer = last_time_in_log
            new_run.start_location_lat = location["lat"]
            new_run.start_location_lon = location["lon"]
            new_run.end_location_lat = location["lat"]
            new_run.end_location_lon = location["lon"]
            new_run.guid = guid
            new_run.board = board
            new_run.stance = stance
            new_run.degree_front = degree_front
            new_run.degree_back = degree_back
            new_run.music_listened = music_listened
            new_run.resort_id = resort
            new_run.put()
            # deposit raw log
            raw_deposit.run_id = new_run
            raw_deposit.put()
            return HttpResponse('{"result": "1"}')
        # check whether a Run with this guid already exists for the user
        run_query_results = models.Run.gql("WHERE guid= :1 AND user_id= :2",
                                           guid, user_ref)
        # no results found, create a new Run
        if run_query_results.count() == 0:
            new_run = models.Run(user_id=user_ref)
            new_run.start_time = first_time_in_log
            new_run.last_time_pointer = last_time_in_log
            new_run.start_location_lat = location["lat"]
            new_run.start_location_lon = location["lon"]
            new_run.end_location_lat = location["lat"]
            new_run.end_location_lon = location["lon"]
            new_run.guid = guid
            new_run.board = board
            new_run.stance = stance
            new_run.degree_front = degree_front
            new_run.degree_back = degree_back
            new_run.music_listened = music_listened
            new_run.resort_id = resort
            new_run.put()
            # deposit raw log
            raw_deposit.run_id = new_run
            raw_deposit.put()
        else:
            # TODO perhaps check the weird results also? say minus...
            # just change the pointer to the last time entry
            for run in run_query_results:
                run.last_time_pointer = last_time_in_log
                run.end_location_lat = location["lat"]
                run.end_location_lon = location["lon"]
                run.put()
                # deposit raw log
                raw_deposit.run_id = run
                raw_deposit.put()
        return HttpResponse('{"result": "1"}')
    return HttpResponse('{"result": "0"}')
#!/usr/bin/env python from LogParser import * import sys import matplotlib.pyplot as plt log_file = str(raw_input("Introduce log file: ")) parser = LogParser(log_file) print(parser.listTags()) while True: tag = str(raw_input("Introduce tag or list of tags separated by commas: ")) if ',' in tag: for x in tag.split(','): vector = parser.vectorFloat(x) if len(vector) > 1: plt.plot(vector) plt.ylabel("time") else: print("Tag not found") plt.legend(tag.split(',')) plt.show() else: vector = parser.vectorFloat(tag) if len(vector) > 1: plt.plot(vector) plt.ylabel(tag) plt.show()
# Minimal smoke-test driver: parse a hard-coded sample W3C Extended log file
# with LogParser.
from LogParser import *
import os

#file_name = input("Enter file Name : ")
file_name = "w3c.txt"  # hard-coded sample input
file_format = "W3C Extended"
l = LogParser(file_name, file_format)
#l.display()