def run_all():
    # Run the full pipeline: parse the logs, then analyze them, timing each stage.
    parse_start = time.time()
    data = logparse.parse()
    parse_stop = time.time()

    analysis_start = time.time()
    api_analyze.analyze(data)
    analysis_stop = time.time()

    print("=============================")
    print("Parse time: %.2f seconds" % (parse_stop - parse_start))
    print("Analysis time: %.2f seconds" % (analysis_stop - analysis_start))
def chat_view(request, match_id):
    match = Matches.objects.filter(match_id=match_id).values()
    if match.exists():
        chat = Chat.objects.filter(match_id=match_id).values('json')
        if chat.exists():
            # Chat log already parsed: render it straight from the stored JSON.
            chat = chat[0]
            match = match[0]
            match['date'] = datetime.strptime(str(match['date']), '%Y-%m-%d %H:%M:%S') - timedelta(hours=1)
            chat['json'] = loads(chat['json'])
            players = PlayerMatches.objects.filter(match=match_id).order_by('position')
            return render_to_response('chat.html', {'chat': chat['json'], 'match': match, 'players': players})
        else:
            # No chat log yet: fetch and parse the replay, then retry the view.
            if download(match_id, match[0]['replay_url']):
                if parse(match_id):
                    return chat_view(request, match_id)
                else:
                    return error(request, "The chat logs had trouble somewhere.<br> We all have bad days sometimes. Sorry")
            else:
                return error(request, "Downloading the chat log failed.<br> It could be too old (28 days) or too new. Try again soon.")
    else:
        return redirect('/match/' + match_id + '/')
def build_view(request, match_id):
    match = Matches.objects.filter(match_id=match_id).values()
    if match.exists():
        builds = Builds.objects.filter(match_id=match_id).values()
        if builds.exists():
            # Builds already parsed: decode each stored JSON blob and render.
            match = match[0]
            match['date'] = datetime.datetime.strptime(str(match['date']), '%Y-%m-%d %H:%M:%S') - datetime.timedelta(hours=1)
            players = PlayerMatches.objects.filter(match=match_id).order_by('position')
            for build in builds:
                build['json'] = json.loads(build['json'])
            return render_to_response('build.html', {'builds': builds, 'match': match, 'players': players})
        else:
            # No builds yet: download and parse the replay, then retry the view.
            if logparse.download(match_id, match[0]['replay_url']):
                if logparse.parse(match_id):
                    return build_view(request, match_id)
                else:
                    return error(request, "The chat logs had trouble somewhere. We all have bad days sometimes.")
            else:
                return error(request, "Match replay failed to download. It could be too old (28 days), too new, or S2 hates you")
    else:
        return redirect('/match/' + match_id + '/')
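# Hypothetical URLconf sketch (not from the project): one way chat_view and
# build_view could be routed so that match_id is captured from the URL,
# assuming a Django 1.x-era app layout and numeric match ids. The module path,
# URL shapes, and names below are assumptions, not the real configuration.
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^match/(?P<match_id>\d+)/chat/$', views.chat_view, name='chat'),
    url(r'^match/(?P<match_id>\d+)/build/$', views.build_view, name='build'),
]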
    btype = gettype(b)
    if atype == btype:
        return True

    def squatdeadtype(t):
        return t == LiftType.squat or t == LiftType.deadlift

    def benchpresstype(t):
        return t == LiftType.bench or t == LiftType.press

    if squatdeadtype(atype) and squatdeadtype(btype):
        return True
    return benchpresstype(atype) and benchpresstype(btype)


# If run as a script, make sure that the entirety of exlog is handled.
if __name__ == '__main__':
    import sys
    import logparse

    if len(sys.argv) != 2:
        print(' Usage: %s logfile' % sys.argv[0], file=sys.stderr)
        sys.exit(1)

    sessions = logparse.parse(sys.argv[1])
    for session in sessions:
        for lift in session.lifts:
            if gettype(lift.name) is None:
                print("Missing: " + lift.name)
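# Minimal sketch (an assumption, not the project's actual module) of the
# LiftType enum and gettype() lookup that the type checks above rely on:
# gettype() maps a lift name from the exlog to a LiftType and returns None for
# names it does not recognise, which the __main__ block above reports as
# "Missing". The name table below is purely illustrative.
from enum import Enum


class LiftType(Enum):
    squat = 1
    deadlift = 2
    bench = 3
    press = 4


_NAME_TO_TYPE = {
    'low bar squat': LiftType.squat,
    'deadlift': LiftType.deadlift,
    'bench press': LiftType.bench,
    'overhead press': LiftType.press,
}


def gettype(name):
    # Unrecognised lift names fall through to None.
    return _NAME_TO_TYPE.get(name.lower())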
def run_test():
    # not really sure what was going on here...
    data = logparse.parse()  # api_parse.minimal_parse()
    with open('paths.txt', 'w') as f:
        for d in data:
            f.write("%s %s\n" % (d[0], d[1]))