def show_overlay(file):
    """Show the overlay for a replay, analysing it first if needed.

    Args:
        file: Path to the replay file; also the key into `AllReplays`.

    Side effects:
        Sends overlay data via `sendEvent`, caches the result in
        `AllReplays[file]['replay_dict']`, appends the result to the
        analysis log file, and feeds it into `CAnalysis` when available.
    """
    # Replay was analysed before → reuse the cached result.
    if 'replay_dict' in AllReplays[file]:
        if AllReplays[file]['replay_dict'] is not None:
            sendEvent(AllReplays[file]['replay_dict'])
        else:
            logger.info(f"This replay couldn't be analysed {file}")
        return

    # replay_dict is missing → analyse the replay now.
    try:
        replay_dict = analyse_replay(file, PLAYER_HANDLES)
        if len(replay_dict) > 1:
            sendEvent(replay_dict)
            with lock:
                AllReplays[file]['replay_dict'] = replay_dict
            # Fixed: the handle was previously bound to `file`, shadowing the
            # parameter — the except-branch below then logged a closed file
            # object and `AllReplays[file]` raised KeyError.
            with open(analysis_log_file, 'ab') as log_file:
                log_file.write((str(replay_dict) + '\n').encode('utf-8'))
            if CAnalysis is not None:
                CAnalysis.add_parsed_replay(replay_dict)
        else:
            # No output from analysis → remember the failure.
            with lock:
                AllReplays[file]['replay_dict'] = None
    except Exception:
        logger.error(f'Failed to analyse replay: {file}\n{traceback.format_exc()}')
        with lock:
            AllReplays[file]['replay_dict'] = None
def show_overlay(file):
    """Display overlay data for a replay, analysing it on demand.

    Updates the global ReplayPosition to point at `file`, serves cached
    data from CAnalysis when available, otherwise runs the analysis.
    Returns 'Error' if the analysis raised an exception.
    """
    global ReplayPosition

    # Keep ReplayPosition in sync with the requested replay.
    if file in AllReplays:
        with lock:
            ReplayPosition = list(AllReplays).index(file)

    # Fast path: the replay may already be analysed inside CAnalysis.
    replay_hash = get_hash(file)
    if CAnalysis is not None:
        cached = CAnalysis.get_data_for_overlay(replay_hash)
        if cached is not None:
            sendEvent(cached)
            return

    # Slow path: analyse the replay now.
    try:
        parsed = analyse_replay(file, PLAYER_HANDLES)
        if len(parsed) > 1:
            sendEvent(parsed)
            if CAnalysis is not None:
                CAnalysis.add_parsed_replay(parsed)
        else:
            logger.error('No output from replay analysis')
    except Exception:
        logger.error(
            f'Failed to analyse replay: {file}\n{traceback.format_exc()}')
        return 'Error'
def move_in_AllReplays(delta):
    """Move across AllReplays by `delta` positions and show that replay.

    Recursively skips over replays that could not be analysed, continuing
    in the same direction. Updates the global ReplayPosition and sends the
    parsed data to the overlay.

    Args:
        delta: Offset (positive or negative) from the current position.
    """
    global ReplayPosition
    logger.info(
        f'Attempt to move to {ReplayPosition + delta}/{len(AllReplays)-1}')

    # Refuse to move outside the list of replays.
    newPosition = ReplayPosition + delta
    if newPosition < 0 or newPosition >= len(AllReplays):
        logger.info(f'We have gone too far. Staying at {ReplayPosition}')
        return

    with lock:
        ReplayPosition = newPosition

    # Get replay_dict of given replay
    key = list(AllReplays.keys())[ReplayPosition]
    if 'replay_dict' in AllReplays[key]:
        if AllReplays[key]['replay_dict'] is not None:
            sendEvent(AllReplays[key]['replay_dict'])
        else:
            # Known-bad replay → keep moving in the same direction.
            logger.info(f"This replay couldn't be analysed {key}")
            move_in_AllReplays(delta)
        return

    # replay_dict is missing → analyse the replay now.
    try:
        replay_dict = analyse_replay(key, PLAYER_HANDLES)
        if len(replay_dict) > 1:
            sendEvent(replay_dict)
            with lock:
                AllReplays[key]['replay_dict'] = replay_dict
            # `log_file` avoids shadowing the builtin `file`-style name.
            with open(analysis_log_file, 'ab') as log_file:
                log_file.write((str(replay_dict) + '\n').encode('utf-8'))
            if CAnalysis is not None:
                CAnalysis.add_parsed_replay(replay_dict)
        else:
            # No output from analysis → mark as failed and keep moving.
            with lock:
                AllReplays[key]['replay_dict'] = None
            move_in_AllReplays(delta)
    except Exception:
        logger.error(
            f'Failed to analyse replay: {key}\n{traceback.format_exc()}')
        with lock:
            AllReplays[key]['replay_dict'] = None
        move_in_AllReplays(delta)
def check_replays():
    """Watch the account folder for new replays and process them.

    Scans SETTINGS['account_folder'] for .SC2Replay files, registering each
    in AllReplays. Files modified within the last minute are analysed, the
    session stats are updated, the result is sent to the overlay and
    appended to the analysis log file.

    Returns:
        The parsed replay dict of the first newly processed replay, or
        None when APP_CLOSING is set. NOTE(review): the `return` sits in a
        `finally` clause, so the watcher loop ends after the first new
        replay and any in-flight exception is swallowed — preserved from
        the original ("return just parser"); confirm this is intended.
    """
    global AllReplays
    global session_games
    global ReplayPosition

    while True:
        logger.debug('Checking for replays....')
        # Check for new replays
        current_time = time.time()
        for root, directories, files in os.walk(SETTINGS['account_folder']):
            for file in files:
                file_path = os.path.join(root, file)
                # Windows long-path prefix for paths over 255 characters.
                if len(file_path) > 255:
                    file_path = '\\\?\\' + file_path
                    file_path = os.path.normpath(file_path)
                if file.endswith('.SC2Replay') and file_path not in AllReplays:
                    with lock:
                        AllReplays[file_path] = {'created': os.path.getmtime(file_path)}
                    # Only analyse replays created within the last minute.
                    if current_time - os.path.getmtime(file_path) < 60:
                        logger.info(f'New replay: {file_path}')
                        replay_dict = dict()
                        try:
                            replay_dict = analyse_replay(file_path, PLAYER_HANDLES)
                            # First check if any commander found
                            if not replay_dict.get('mainCommander') and not replay_dict.get('allyCommander'):
                                logger.info('No commanders found, wont show replay')
                            # Then check if we have good
                            elif len(replay_dict) > 1:
                                logger.debug('Replay analysis result looks good, appending...')
                                with lock:
                                    session_games[replay_dict['result']] += 1
                                # Build the payload for the overlay.
                                out = replay_dict.copy()
                                out['newReplay'] = True
                                if SETTINGS.get('show_session', False):
                                    out.update(session_games)
                                if SETTINGS.get('show_random_on_overlay', False) and len(RNG_COMMANDER) > 0:
                                    out.update(RNG_COMMANDER)
                                sendEvent(out)
                                # `log_file` avoids shadowing the loop variable `file`.
                                with open(analysis_log_file, 'ab') as log_file:
                                    log_file.write((str(replay_dict) + '\n').encode('utf-8'))
                            # No output
                            else:
                                logger.error(f'ERROR: No output from replay analysis ({file})')
                            with lock:
                                AllReplays[file_path]['replay_dict'] = replay_dict
                                ReplayPosition = len(AllReplays) - 1
                        except Exception:
                            logger.error(traceback.format_exc())
                        finally:
                            if len(replay_dict) > 1:
                                upload_to_aom(file_path, replay_dict)
                            # return just parser
                            return replay_dict
        # Wait while checking if the thread should end early
        for i in range(6):
            time.sleep(0.5)
            if APP_CLOSING:
                return None
def run_full_analysis(self, progress_callback):
    """Run full analysis on all replays in `self.ReplayDataAll`.

    Args:
        progress_callback: Signal-like object; progress strings are emitted
            through its `.emit()` method.

    Returns:
        True when the analysis finished (or was already complete), False
        when interrupted because `self.closing` was set during shutdown.
    """
    self.closing = False

    # Get current status: count replays already fully parsed.
    fully_parsed = 0
    for r in self.ReplayDataAll:
        if r.full_analysis:
            fully_parsed += 1

    if fully_parsed == len(self.ReplayDataAll):
        self.full_analysis_finished = True
        progress_callback.emit(f'Full analysis completed! {len(self.ReplayDataAll)}/{len(self.ReplayDataAll)} | 100%')
        return True

    progress_callback.emit(f'Running... {fully_parsed}/{len(self.ReplayDataAll)} ({100*fully_parsed/len(self.ReplayDataAll):.0f}%)')
    fully_parsed_at_start = fully_parsed

    # Start
    logger.info('Starting full analysis!')
    start = time.time()
    idx = 0
    eta = '?'

    for i, r in enumerate(self.ReplayDataAll):
        # Save cache every now and then
        if idx >= 20:
            idx = 0
            self.save_cache()

        # Interrupt the analysis if the app is closing
        if self.closing:
            self.save_cache()
            return False

        # Analyze those that are not fully parsed yet
        if not r.full_analysis:
            try:
                if not os.path.isfile(r.file):
                    continue
                full_data = analyse_replay(r.file)
                full_data['full_analysis'] = True
                # Only the flag present → the analysis produced no data.
                if len(full_data) < 2:
                    continue

                # Update data
                idx += 1
                fully_parsed += 1

                # Calculate ETA from throughput since the start of this run.
                if (fully_parsed - fully_parsed_at_start) > 10 and (fully_parsed - fully_parsed_at_start) % 3 == 0:
                    eta = (len(self.ReplayDataAll) - fully_parsed) / ((fully_parsed - fully_parsed_at_start) / (time.time() - start))
                    eta = time.strftime("%H:%M:%S", time.gmtime(eta))

                # Update widget
                with lock:
                    try:
                        formatted = self.format_data(full_data)
                        if self.replay_entry_valid(formatted):
                            self.ReplayDataAll[i] = formatted
                    except Exception:
                        logger.error(traceback.format_exc())

                progress_callback.emit(
                    f'Estimated remaining time: {eta}\nRunning... {fully_parsed}/{len(self.ReplayDataAll)} ({100*fully_parsed/len(self.ReplayDataAll):.0f}%)'
                )
            except Exception:
                logger.error(traceback.format_exc())

    if idx > 0:
        self.save_cache()

    # Fixed: the completion message was split across a literal newline,
    # which is a SyntaxError inside a single-quoted f-string. Message now
    # matches the early-return variant above.
    progress_callback.emit(f'Full analysis completed! {len(self.ReplayDataAll)}/{len(self.ReplayDataAll)} | 100%')
    logger.info(f'Full analysis completed in {time.time()-start:.0f} seconds!')
    self.full_analysis_finished = True
    return True
def run_full_analysis(self):
    """Run full analysis on all replays, showing progress in the label widget.

    Iterates `self.ReplayDataAll` (a list of dicts), fully analysing every
    replay that has neither a 'full_analysis' nor a 'comp' key, merging the
    formatted result back into the entry.

    Returns:
        True when the analysis finished, False when interrupted because
        `self.closing` was set during shutdown.
    """
    self.closing = False

    # Get current status & updated
    fully_parsed = 0
    for r in self.ReplayDataAll:
        if 'full_analysis' in r or 'comp' in r:
            fully_parsed += 1

    self.full_analysis_label.setText(
        f'Running... {fully_parsed}/{len(self.ReplayDataAll)} ({100*fully_parsed/len(self.ReplayDataAll):.0f}%)'
    )
    fully_parsed_at_start = fully_parsed

    # Start
    logger.info('Starting full analysis!')
    start = time.time()
    idx = 0
    eta = '?'

    for r in self.ReplayDataAll:
        # Save cache every now and then
        if idx >= 20:
            idx = 0
            self.save_cache()

        # Interrupt the analysis if the app is closing
        if self.closing:
            self.save_cache()
            return False

        # Analyze those that are not fully parsed yet
        if 'full_analysis' not in r and 'comp' not in r:
            if not os.path.isfile(r['file']):
                continue
            full_data = analyse_replay(r['file'])
            full_data['full_analysis'] = True
            # Fixed: the original tested `len(full_data) == 0` AFTER adding
            # the 'full_analysis' key, so the failure branch was unreachable.
            # Only the flag present (len < 2) means the analysis produced no
            # data → mark the replay as failed and skip it (mirrors the
            # sibling run_full_analysis variant).
            if len(full_data) < 2:
                with lock:
                    r['full_analysis'] = False
                continue

            # Update data
            idx += 1
            fully_parsed += 1

            # Calculate eta
            if (fully_parsed - fully_parsed_at_start) > 15 and (
                    fully_parsed - fully_parsed_at_start) % 3 == 0:
                eta = (len(self.ReplayDataAll) - fully_parsed) / (
                    (fully_parsed - fully_parsed_at_start) / (time.time() - start))
                eta = time.strftime("%H:%M:%S", time.gmtime(eta))

            # Update widget
            with lock:
                r.update(self.format_data(full_data))
            self.full_analysis_label.setText(
                f'Estimated remaining time: {eta}\nRunning... {fully_parsed}/{len(self.ReplayDataAll)} ({100*fully_parsed/len(self.ReplayDataAll):.0f}%)'
            )

    if idx > 0:
        self.save_cache()

    self.full_analysis_label.setText(
        f'Full analysis completed! {fully_parsed}/{len(self.ReplayDataAll)} | {100*fully_parsed/len(self.ReplayDataAll):.0f}%'
    )
    logger.info(
        f'Full analysis completed in {time.time()-start:.0f} seconds!')
    self.full_analysis_finished = True
    return True