def getData(l):
    """Build the spoken-announcement payload for Polish river alert states.

    Reads the cached prospect_mp.json (regenerating it in memory when the
    cache is absent), appends speech tokens for every river/gauge whose
    alarm ('alarmowy') or warning ('ostrzegawczy') level is exceeded, then
    removes the cache file so the next call fetches fresh data.
    """
    data = {"data":"", "needCTCSS":False, "allOK":True, "source":"rwd_prospect"}
    if not os.path.exists('prospect_mp.json'):
        stany = generuj_json(nie_zapisuj=True)
    else:
        stany = json.loads(unicode(open('prospect_mp.json','r').read(),'utf-8'))
    if stany['ostrzegawczy']!={} or stany['alarmowy']!={}:
        data['data'] += 'lokalny_komunikat_hydrologiczny '
    if stany['alarmowy']:
        # Some gauges exceeded alarm levels -- enable CTCSS so the
        # announcement opens the receivers.
        data['needCTCSS']=True
        data['data']+=' przekroczenia_stanow_alarmowych '
        for rzeka in sorted(stany['alarmowy'].keys()):
            data['data']+='rzeka %s wodowskaz %s '%(bezpiecznaNazwa(rzeka), \
                " wodowskaz ".join(bezpiecznaNazwa(r) for r in sorted(stany['alarmowy'][rzeka])),)
    if stany['ostrzegawczy']:
        data['data']+='_ przekroczenia_stanow_ostrzegawczych '
        for rzeka in sorted(stany['ostrzegawczy'].keys()):
            data['data']+='rzeka %s wodowskaz %s '%(bezpiecznaNazwa(rzeka), \
                " wodowskaz ".join(bezpiecznaNazwa(r) for r in sorted(stany['ostrzegawczy'][rzeka])),)
    # Drop the cache so stale data is never announced twice.
    if os.path.exists('prospect_mp.json'):
        os.remove('prospect_mp.json')
    debug.log("PODEST_MP", "finished...")
    return data
def _runtest(self, target, nclients, nconns, rate):
    """Run one httperf load-test round against *target*.

    Spawns one HTTPerfClient per requested client on hosts obtained from
    siteconfig, then multiplexes their output with select until every
    client reaches EOF or the overall deadline expires.  Clients are
    always cleaned up, even on error.

    Raises:
        TimeoutError: if any httperf is still busy at HTTPERF_TIMEOUT.
    """
    self.nruns += 1
    nrun = self.nruns
    httperfs = []
    try:
        for nclient in range(nclients):
            user, host = siteconfig.site.get_load_generator()
            # Log-file names encode run and client numbers as two digits each.
            assert(nrun < 100 and nclient < 100)
            filename = 'httperf_run%02d_%02d.txt' % (nrun, nclient)
            logfile = open(os.path.join(self.testdir, filename), 'w')
            debug.verbose('spawning httperf on %s' % host)
            hp = HTTPerfClient(logfile, user, host, target, nconns, rate)
            httperfs.append(hp)

        # loop collecting output from all of them
        busy_httperfs = list(httperfs) # copy list
        timeout = datetime.datetime.now() + HTTPERF_TIMEOUT
        while busy_httperfs:
            (ready, _, _) = select_timeout(timeout, busy_httperfs)
            if not ready:
                raise TimeoutError('waiting for httperfs')
            for hp in ready:
                try:
                    hp.read()
                except EOFError:
                    # This client has finished producing output.
                    busy_httperfs.remove(hp)
    finally:
        debug.log('cleaning up httperf test...')
        for hp in httperfs:
            hp.cleanup()
def build_batting_average_events(self, batting_metrics):
    """Create summarizing Events for teams whose batting-average metric
    exceeds the interestingness threshold.

    Args:
        batting_metrics: per-team metric, indexed [away, home].

    Returns:
        A list of Event objects (possibly empty).
    """
    event_type = SUMMARIZING
    team_names = (self.gameData['away_team_name'], self.gameData['home_team_name'])
    team_types = ["away", "home"]
    events = []
    for i, team_type in enumerate(team_types):
        if abs(batting_metrics[i]) <= TEAM_AVERAGE_INTERESTINGNESS_THRESHOLD:
            continue
        weight = TEAM_AVERAGE_DIFFERENCE_POINTS * batting_metrics[i]
        if weight > 0:
            blurb = random.choice(['were strong at the plate',
                                   'were cleaning up at the plate',
                                   'were characterized by strong at bats',
                                   'shined offensively'])
            #blurb = "were strong at the plate"
            short_blurb = "played well"
        else:
            blurb = random.choice(['had an off night on offense',
                                   'couldn\'t get in a groove at the plate',
                                   'struggled at the plate',
                                   'had a tough time at bat'])
            #blurb = "had an off night for offense"
            short_blurb = "played poorly"
        events.append(Event(blurb, weight, team_names[i], event_type, None,
                            short_blurb, self.winning_team == team_type))
        # BUG FIX: these logs used to run after the loop, raising NameError
        # when no team crossed the threshold (weight/blurb unbound) and at
        # best describing only the last team. Log per appended event instead.
        log("weight: %d" % weight)
        log("blurb: " + team_names[i] + " " + blurb)
    return events
def build_lead_change_events(self, lead_metrics):
    """Create a summarizing Event describing how the lead changed hands.

    Two narratives are possible: the winner took the lead exactly once and
    never lost it, or the lead changed hands many times and was finally
    held late in the game.  Returns a list with at most one Event.
    """
    event_type = SUMMARIZING
    team_names = { "away" : self.gameData['away_team_name'],
                   "home" : self.gameData['home_team_name'] }
    team_types = ["away", "home"]
    events = []
    weight = 0
    blurb = ''
    if lead_metrics['change_count'] == 1:
        # One lead change: the winner led wire-to-wire after taking it.
        weight = LEAD_CHANGE_TAKE_AND_HOLD_WEIGHT
        inning = int(lead_metrics['first_change'])
        k = inning%10
        # English ordinal-suffix trick: slices "st"/"nd"/"rd"/"th" out of
        # "tsnrhtdd"; relies on Python 2 integer division for inning/10.
        ordinal_val = "%d%s"%(inning,"tsnrhtdd"[(inning/10%10!=1)*(k<4)*k::4])
        classic_blurb = "took the lead in the %s and never gave it up" % (ordinal_val)
        blurb = random.choice([classic_blurb, 'were never behind',
                               'stayed on top the whole game',
                               'kept the lead out of reach'])
        short_blurb = "led from %s" % (ordinal_val)
        events.append(Event(blurb, weight, team_names[self.winning_team],
                            event_type, None, short_blurb, True))
    elif lead_metrics['change_count'] > LEAD_CHANGE_THRESHOLD:
        # Many lead changes: weight scales with how late the final change came.
        weight = LEAD_CHANGE_MAX_WEIGHT * float(lead_metrics['last_change']) / float(self.gameData['status']['inning'])
        final_inning = int(lead_metrics['last_change'])
        k = final_inning%10
        ordinal_val = "%d%s"%(final_inning,"tsnrhtdd"[(final_inning/10%10!=1)*(k<4)*k::4])
        classic_blurb = "battled for the lead and finally held it in the %s inning" % (ordinal_val)
        blurb = random.choice([classic_blurb,'fought a tough battle, but came out on top',
                               'won in a close one', 'really had to battle for the W'])
        short_blurb = "led from %s" % (ordinal_val)
        events.append(Event(blurb, weight, team_names[self.winning_team],
                            event_type, None, short_blurb, True))
    log("weight: %d" % weight)
    log("blurb: " + team_names[self.winning_team] + " " + blurb)
    return events
def backupMetaData(repo,timeCachePath=".git_cache_time",permCachePath=".git_cache_meta"):
    """Write per-file timestamp and permission caches for *repo*.

    One line per tracked, existing file:
      - timeCachePath: "<path>,<atime>,<mtime>"
      - permCachePath: whatever backupPerms() formats for the file mode.

    The caller's working directory is always restored.
    """
    # Removed an unused local `import git` (never referenced).
    basepath = repo.root
    cwd = os.getcwd()
    os.chdir(basepath)
    try:
        debug.log("Backing up metadata for repo " + repo.name)
        # `with` guarantees the cache files are flushed and closed even if
        # stat/backupTime raises mid-iteration (previously they leaked).
        with open(os.path.join(basepath, timeCachePath), "w") as timecache, \
             open(os.path.join(basepath, permCachePath), "w") as permcache:
            for line in repo.getFiles():
                filepath = os.path.join(basepath, line)
                if os.path.exists(filepath):
                    times = backupTime(filepath)
                    mode = os.stat(filepath)[ST_MODE]
                    timecache.write(line + "," + times[0] + "," + times[1] + "\n")
                    permcache.write(backupPerms(line, mode))
        debug.log("Metadata backed up")
    finally:
        # Always restore the caller's working directory, even on error.
        os.chdir(cwd)
def fetch_preferred_team_overview(self):
    """Return the mlbgame overview for the preferred team's current game.

    Implicitly returns None on the preferred team's off-day.
    """
    if not self.is_offday_for_preferred_team():
        urllib.urlcleanup()  # drop urllib's cached data before refetching
        game = self.games[self.game_index_for_preferred_team()]
        game_overview = mlbgame.overview(game.game_id)
        debug.log("Preferred Team's Game Status: {}, {} {}".format(game_overview.status, game_overview.inning_state, game_overview.inning))
        return game_overview
def send_udp(udp_sock, ip, port, built_msg):
    """Send *built_msg* to (ip, port) over *udp_sock*, skipping invalid ports.

    Send failures are deliberately ignored; socket management handles them.
    """
    if port <= 0:
        debug.log("port is less than zero when trying to send_udp message", debug.P_WARNING)
        return
    try:
        udp_sock.sendto(built_msg, (ip, port))
    except socket.error:
        # Intentionally swallowed: socket_manage will handle the error.
        pass
def display_result(self, y, result, is_current = False, is_marked = False):
    """Draw one candidate line at screen row *y*, highlighting query matches.

    *result* is a (line, find_info, abs_idx) tuple; find_info pairs each
    sub-query with its (offset, length) matches within the line, or is
    None when there is nothing to highlight.
    """
    line, find_info, abs_idx = result
    # Pick the base style by selection/mark state.
    if is_current:
        line_style = self.CANDIDATES_LINE_SELECTED
    elif is_marked:
        line_style = self.CANDIDATES_LINE_MARKED
    else:
        line_style = self.CANDIDATES_LINE_BASIC
    keyword_style = self.CANDIDATES_LINE_QUERY + line_style
    self.display_line(y, 0, line, style = line_style)
    if find_info is None:
        return
    for (subq, match_info) in find_info:
        for x_offset, subq_len in match_info:
            try:
                # Translate the character offset into display columns
                # (wide characters may occupy more than one cell).
                x_offset_real = display.display_len(line, beg = 0, end = x_offset)
                self.display.add_string(line[x_offset:x_offset + subq_len],
                                        pos_y = y,
                                        pos_x = x_offset_real,
                                        style = keyword_style)
            except curses.error as e:
                # curses can raise when drawing at screen edges; log and continue.
                debug.log("addnstr", str(e) + " ({0})".format(y))
def reindexObjectSecurity(self, skip_self=False):
    """update security information in all registered catalogs.

    Walks every catalog registered for this meta_type and recatalogs the
    security indexes of this object and everything beneath its path.
    With skip_self set, the object itself is left untouched.
    """
    at = getToolByName(self, TOOL_NAME, None)
    if at is None:
        return
    catalogs = [c for c in at.getCatalogsByType(self.meta_type)
                if c is not None]
    path = '/'.join(self.getPhysicalPath())
    for catalog in catalogs:
        for brain in catalog.unrestrictedSearchResults(path=path):
            brain_path = brain.getPath()
            if brain_path == path and skip_self:
                continue
            # Get the object
            if hasattr(aq_base(brain), '_unrestrictedGetObject'):
                ob = brain._unrestrictedGetObject()
            else:
                # BBB: Zope 2.7
                ob = self.unrestrictedTraverse(brain_path, None)
            if ob is None:
                # BBB: Ignore old references to deleted objects.
                # Can happen only in Zope 2.7, or when using
                # catalog-getObject-raises off in Zope 2.8
                log("reindexObjectSecurity: Cannot get %s from catalog" %
                    brain_path, level=WARNING)
                continue
            # Recatalog with the same catalog uid.
            catalog.reindexObject(ob, idxs=self._cmf_security_indexes,
                                  update_metadata=0, uid=brain_path)
def write(self, sock):
    '''Write operation on the socket.

    Writing is driven by a per-socket operation Queue: pop the next
    pending message and send it.  When the queue is empty the socket is
    removed from the write set and its queue is discarded.

    Parameter:
    sock -- socket instance to write to
    '''
    try:
        msg = self.message_queue[sock].get_nowait()
    except Queue.Empty:
        # Nothing left to send: stop watching this socket for writes.
        self.outputs.remove(sock)
        if sock in self.message_queue:
            try:
                del self.message_queue[sock]
            except KeyError:
                debug.wtf(self, 'GTFO Python !!!')
    except KeyError:
        # The queue vanished (socket already disconnected elsewhere).
        if sock in self.outputs:
            self.outputs.remove(sock)
    else:
        try:
            sock.send(msg)
        except socket.error:
            debug.error(self, 'Error for sending to ' + str(sock))
            self.disconnect(sock)
        else:
            debug.log(self, 'Sending to ' + str(sock), 3)
            debug.log(self, 'To ' + str(sock) + '`' + msg + '`', 5)
def parse_line((n, line)):
    """Parse assembler source line *n* (Python 2 tuple parameter).

    Strips '--' comments, dispatches compiler directives (leading '.'),
    and expands backtick-quoted base64 raw data into 16-character binary
    strings.  Implicitly returns None for ordinary instruction lines.
    """
    line = line.strip()
    if len(line) == 0 or line[0:2] == '--':
        return ''
    # Drop any trailing comment, then tokenize.
    line = line.split('--', 1)[0].strip().split()
    instr = line[0]
    if instr[0] == '.':
        ret = exec_compiler_instr(n, instr[1:], line[1:])
        return [''] if ret == None else ret
    if instr[0] == '`':
        instr = instr.strip('`')
        debug.log(" # %d: found raw data '%s...'" % (n, instr[0:10]))
        raw_data = map(util.chr2binstr, b64decode(instr))
        data = []
        line = ""
        for byte in raw_data:
            # Flush a full 16-bit chunk before appending more digits.
            if len(line) == 16:
                data.append(line)
                line = ""
            try:
                # Already a string of binary digits?
                int('0b' + byte, 2)
                line += byte
            except ValueError, e:
                # Otherwise treat it as hex and expand to binary digits.
                line += bin(int('0x' + byte, 16))[2:]
        debug.log(" # %d: expanded data takes up %d bytes" % (n, len(data)/2))
        return data
def pobierzOstrzezenia(domena,stacja):
    """Fetch the awareness state for one gauge station.

    Returns 'ostrzegawczy' (warning) or 'alarmowy' (alarm), or None when
    no threshold is exceeded, the page cannot be parsed, or the download
    fails.
    """
    global przekroczenie,debug
    domena,stacja = (domena.lower(), stacja.upper())
    # test URLs -- do not use in production! do not sow confusion!
    #url = "http://www.biala.prospect.pl/wizualizacja/punkt_pomiarowy.php?"+\
    #    "prze=TUBI&rok=2010&miesiac=06&dzien=04&godzina=19&minuta=-3"
    #url = "http://www.biala.prospect.pl/wizualizacja/punkt_pomiarowy.php?"+\
    #    "prze=TUBI&rok=2010&miesiac=06&dzien=03&godzina=23&minuta=27"
    try:
        url = "http://www.%s.prospect.pl/wizualizacja/punkt_pomiarowy.php?prze=%s"%(domena,stacja)
        plik = downloadFile(url)
        wynik = _przekroczenie.findall(plik)
        if wynik[0]==('Delta', ''):
            # No threshold exceeded at this station.
            return None
        elif wynik[0][1] in ('ostrzegawczy','alarmowy'):
            return wynik[0][1]
        else:
            # Log message: "Regex did not return the expected data".
            debug.log('PROSPECT-MP', u'Regex nie zwrócił oczekiwanych danych',\
                      buglevel=5)
            return None
    except:
        debug.log('PROSPECT-MP', u'Regex nie zwrócił oczekiwanych danych',\
                  buglevel=5)
        pass
    return None
def refresh_games(self):
    """Refresh the day's game list, retrying transient network failures.

    Advances the current date if needed, optionally filters games to the
    preferred teams, and re-anchors the rotation index when the day rolls
    over.  Gives up after 5 failed attempts.
    """
    debug.log("Updating games for {}/{}/{}".format(self.month, self.day, self.year))
    urllib.urlcleanup()
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            current_day = self.day
            self.set_current_date()
            all_games = mlbgame.day(self.year, self.month, self.day)
            if self.config.rotation_only_preferred:
                self.games = self.__filter_list_of_games(all_games, self.config.preferred_teams)
            else:
                self.games = all_games
            if current_day != self.day:
                # The date rolled over: jump back to the preferred team's game.
                self.current_game_index = self.game_index_for_preferred_team()
            self.games_refresh_time = time.time()
            break
        except URLError, e:
            debug.error("URLError: {}".format(e.reason))
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
        except ValueError:
            debug.error("ValueError: Failed to refresh list of games")
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
def onPlayBackStarted(self):
    """Playback-start monitor: report a streaming start to the API after
    the same track has played for 10 seconds.

    Returns False on every path.
    """
    # workaroung bug, we are sometimes called multiple times.
    if self.trackId:
        if self.getProperty(keyTrackId) != self.trackId:
            self.trackId = None
        else:
            warn(self, "Already monitoring song id: %s" % (self.trackId))
            return False
    nid = self.getProperty(keyTrackId)
    if not nid:
        warn(self, "No track id set by the player...")
        return False
    self.trackId = nid
    log(self, "play back started from monitor !!!!!!" + nid )
    # Poll once per second; abort if playback stops or the track changes.
    elapsed = 0
    while elapsed <= 10:
        if not self.isPlayingAudio():
            self.trackId = None
            return False
        if self.getProperty(keyTrackId) != self.trackId:
            self.trackId = None
            return False
        elapsed+=1
        xbmc.sleep(1000)
    api.track_resportStreamingStart(nid)
    self.trackId = None
    return False
def add_event(self, event):
    """Weight an event (shame adjustment + keyword relevance) and enqueue it.

    Losing-team negative events are softened/sharpened by SHAME_WEIGHT;
    sentiment keywords found in the description add RELEVANCE_WEIGHT-scaled
    relevance.  The priority queue orders by (1.0 - real_weight).
    """
    description = event.description
    weight = event.weight
    team_won = event.team_won
    # `is None` instead of `== None` (idiomatic identity test).
    if team_won is None or weight >= 0:
        real_weight = abs(weight)
    else:
        if team_won:
            adjusted_shame_weight = -SHAME_WEIGHT
        else:
            adjusted_shame_weight = SHAME_WEIGHT
        real_weight = abs(min(weight + adjusted_shame_weight, 0.0)) # apply the shameeeeee
    ## TODO: APPLY RELEVANCE + SENTIMENT ANALYSIS
    relevance = 0.0
    if team_won is not None:
        if team_won:
            sent_db = self.win_team_sentiment
        else:
            sent_db = self.lose_team_sentiment
        # Hoisted out of the loop: lowercase the description once, not per keyword.
        description_lower = description.lower()
        for keyword in sent_db:
            if keyword in description_lower:
                relevance = relevance + sent_db[keyword]["relevance"]
                log("keyword weight %s %f" % (keyword, relevance), "+ ")
        real_weight = real_weight + (relevance * RELEVANCE_WEIGHT)
    log('pri %f, PUT "%s"' % (real_weight, description))
    self.pq.put_nowait((1.0 - real_weight, event))
def ignoreExpression(self, expression):
    """Interactively ignore (and optionally delete) files matching a glob
    *expression* under the repository root.

    A literal file path is taken as-is; anything else is treated as an
    fnmatch pattern against every file under self.root.
    """
    from ui import prompt_user
    import fnmatch
    matches = list()
    if not os.path.isfile(expression):
        for root, dirs, files in os.walk(self.root):
            for filename in files:
                # print filename
                if fnmatch.fnmatch(filename, expression):
                    # print ('match'+filename)
                    matches.append(os.path.join(root, filename))
    else:
        matches.append(expression)
    # probably need to doctor the syntax here
    debug.log("The following files match the expression:")
    debug.log(matches)
    ignore = prompt_user("Would you like to ignore all of these files?")
    if ignore:
        self._writeGitIgnore(expression)
        # NOTE(review): no file arguments are appended to this command --
        # presumably the matched paths were meant to be passed; confirm.
        cmd = "git update-index --assume-unchanged"
        self._native_exec(cmd)
        delete = prompt_user("Would you like to delete all of these files?\n" + str(matches))
    else:
        delete = False
    if delete:
        self._removeFiles(matches)
def _activate(self):
    """Bind this texture on the GPU, applying parameterization if pending."""
    log("GPU: Activate texture")
    gl.glBindTexture(self.target, self._handle)
    if not self._need_parameterization:
        return
    self._parameterize()
def safeSyncBranch(self, **kwargs):
    """Commit local changes and synchronize with every registered remote.

    Keyword Args:
        cmsg: commit message (defaults to "Incremental Commit").

    Backs up file metadata first when self.cachemeta is set, commits all
    changes, then pull-rebases from each remote and pushes back to the
    writeable ones.
    """
    # dict.get replaces the Python-2-only has_key() check and collapses
    # the four-line if/else into one expression.
    cmsg = kwargs.get("cmsg", "Incremental Commit")
    if self.cachemeta:
        debug.log("Caching metadata")
        self.repo.backupMetadata()
    self.repo.gitAddAll()
    self.repo.gitCommitAll(cmsg)
    for remote in self.repo.gkremotes:
        toSync = self.repo.gkremotes[remote]
        toSync.pullRebase(self)
        if toSync.isWriteable():
            toSync.push(self)
        else:
            debug.warn("Remote :" + toSync.name + " is not writeable - cannot sync!")
def read(self, sock):
    """Read operation on a socket.

    When *sock* is the listener, accept a new client (and parse any data
    that arrived with the accept); otherwise receive and parse data from
    an existing client, disconnecting it on error.
    """
    if sock is self.listener:
        try:
            client, data = self.listener.accept(1024)
        except socket.error:
            debug.error(self, 'Error for accepting client')
        else:
            if not client in self.inputs:
                self.inputs += [client]
                debug.log(self, 'Client connected : ' + str(client), 1)
            if data:
                debug.log(self, 'Getting data from ' + str(client), 2)
                debug.log(self, 'From ' + str(client) + '`' + data + '`', 4)
                self.maj_time(client)
                self.parse(client, data)
    else:
        try:
            msg = sock.recv(1024)
        except socket.error:
            debug.error(self, 'Error for reading on ' + str(sock))
            self.disconnect(sock)
        else:
            debug.log(self, 'Getting data from ' + str(sock), 2)
            debug.log(self, 'From ' + str(sock) + '`' + msg + '`', 4)
            self.maj_time(sock)
            self.parse(sock, msg)
def disconnect(self, sock):
    '''Close a socket's connection along with every socket linked to it.

    Also removes the socket from the input/output sets and drops its
    pending message queue.

    Parameter:
    sock -- socket to close
    '''
    if not sock.closed():
        debug.log(self, 'Disconnection of ' + str(sock), 1)
        sock.close()
    if sock in self.link:
        # Recursively close the linked peer, then forget the link.
        self.disconnect(self.link[sock])
        try:
            del self.link[sock]
        except KeyError:
            debug.wtf(self, 'Python fait toujours la meme merde.')
    if sock in self.inputs:
        self.inputs.remove(sock)
    if sock in self.outputs:
        self.outputs.remove(sock)
    if sock in self.message_queue:
        try:
            del self.message_queue[sock]
        except KeyError:
            debug.wtf(self, 'Go home Python ! You\'re drunk')
def handle_join(new_tcp_sock, new_udp_sock, add_to_list = True):
    """Register a newly connected client, set up its account, and send the
    connection-established acknowledgement over TCP."""
    global clients
    global num_clients
    global client_id_inc
    c = Client();
    c.tcp_sock = new_tcp_sock;
    c.udp_sock = new_udp_sock;
    c.id = client_id_inc;
    c.ip = new_tcp_sock.getpeername()[0];
    c.c_tcp_port = new_tcp_sock.getpeername()[1];
    c.s_tcp_port = new_tcp_sock.getsockname()[1];
    c.s_udp_port = new_udp_sock.getsockname()[1];
    c.callbacks = [];
    if (add_to_list):
        clients.append(c);
    # NOTE(review): c.c_udp_port is logged below but never assigned here --
    # confirm Client supplies a default, otherwise this raises AttributeError.
    debug.log("accepted client (client-id: %d, ip: %s, c_tcp_port: %d, c_udp_port: %d, s_tcp_port: %d, s_udp_port: %d)"
              % (c.id, c.ip, c.c_tcp_port, c.c_udp_port, c.s_tcp_port, c.s_udp_port), debug.P_INFO);
    num_clients += 1;
    client_id_inc += 1;
    accounts.init_client_account(c);
    msg.send(c.tcp_sock, c, msg.build(_MID.SEND_SERVER_CONNECTION_ESTABLISHED_SUCCESSFULLY));
def main(dirs):
    """Reprocess stored benchmark results in each directory of *dirs*.

    Each directory's description.txt names the test that produced the
    results; the matching test is re-instantiated and its results are run
    through harness.process_results again.  Unknown or unparseable
    directories are skipped with an error.
    """
    for dirname in dirs:
        debug.log('reprocessing %s' % dirname)
        debug.verbose('parse %s/description.txt for test' % dirname)
        testname = test = None
        f = open(os.path.join(dirname, 'description.txt'), 'r')
        for line in f:
            m = re.match(r'test:\s+(.*)', line)
            if m:
                testname = m.group(1)
                break
        f.close()
        if not testname:
            debug.error('unable to parse description for %s, skipped' % dirname)
            continue
        debug.verbose('locate test "%s"' % testname)
        for t in tests.all_tests:
            if t.name.lower() == testname.lower():
                test = t(None) # XXX: dummy options
        if not test:
            debug.error('unknown test "%s" in %s, skipped' % (testname, dirname))
            continue
        debug.verbose('reprocess results')
        harness.process_results(test, dirname)
def generate_json(regions=None, dont_save=False):
    """Generates povodi_cz.json file and returns its contents (dictionary).

    This file contains all river stations and the way it's constructed is:
    ::
        rv[region][river][station]=awareness_level
    """
    rv = {}
    if regions is None:
        regions=['poh1','poh2','poh3',
                 'pla1','pla2','pla3','pla4','pla5',
                 'pod1','pod2',
                 'pmo1','pmo2','pmo3',
                 'pvl1','pvl2','pvl3',
                ]
    for region in regions:
        # BUG FIX: the try/except had been replaced by an `if 1==1:` debug
        # scaffold, leaving the error-logging branch unreachable.  Restore
        # the per-region error handling so one failed download does not
        # abort the whole scrape.
        try:
            rv[region]=get_region(region)
        except Exception:
            debug.log('POVODI_CZ',\
                "Couldn't download data for region %s"%region,buglevel=5)
    if not dont_save:
        # Close the output file deterministically instead of relying on GC.
        with open('povodi_cz.json','w') as f:
            json.dump(rv, f)
    return rv
def read_weights(filename):
    """
    Args:
        filename: file containing weights
    Returns:
        initial_weights_hid: weights for hidden layer read from file
        initial_weights_out: weights for output layer read from file
    Notes:
        Checks for mismatch between number of hidden layer nodes as
        specified in constants.py and will give errors where appropriate
        The first line of file will be weights separated by commas with no
        white space for the hidden layer. The second will be 17 values for
        the output layer.
        On any validation/IO failure, control is handed back to main()
        (re-entering the menu) rather than raising to the caller.
    """
    try:
        with open(filename) as f:
            # Line 1: hidden-layer weights, comma-separated.
            initial_weights_hid = [float(item) for item in f.readline().strip('\n').split(',')]
            debug.log('in read_weights: intial_weights_hid = {}\n'.format(initial_weights_hid))
            # Line 2: output-layer weights, comma-separated.
            initial_weights_out = [float(item) for item in f.readline().strip('\n').split(',')]
            debug.log('in read_weights: intial_weights_out = {}\n'.format(initial_weights_out))
        # if there is a mismatch in hidden weights
        if len(initial_weights_hid) != const.NUM_HIDDEN_NODES:
            print('Number of hidden weights does not match number of hidden nodes in const.py. Returning to menu.')
            main()
        # if there is a mismatch in the output weights
        if len(initial_weights_out) != len(const.CATEGORIES):
            print('Number of output weights does not match number of categories in constants.py. Returning to Menu.')
            main()
        return initial_weights_hid, initial_weights_out
    except FileNotFoundError:
        os.system('cls' if os.name == 'nt' else 'clear')
        print('File not found. Returning to menu.\n')
        main()
def generuj_json(nie_zapisuj=False):
    """Generate the prospect_mp.json file and return its contents.

    The result maps awareness level ('ostrzegawczy'/'alarmowy') -> river
    -> list of gauges whose threshold is exceeded.  With nie_zapisuj set,
    the result is returned without being written to disk.
    """
    stany = {'ostrzegawczy':{}, 'alarmowy':{}}
    for w in config.wodowskazy:
        try:
            domena, rzeka, wodowskaz, stacja = w
            debug.log('PROSPECT-MP', ', '.join((domena,stacja,)))
            stan = pobierzOstrzezenia(domena,stacja)
            # Debug trick to check that all voice samples exist: switch all
            # rivers to the warning state -- do not forget to disable!
            #stan='alarmowy'
            # End of trick
            if stan in ('ostrzegawczy','alarmowy'):
                # setdefault replaces the Python-2-only has_key() check.
                stany[stan].setdefault(rzeka, []).append(wodowskaz)
        except Exception:
            # BUG FIX: a stray `raise` made the log call unreachable and
            # aborted the whole scrape on the first bad entry; log and
            # continue with the remaining gauges instead.
            debug.log('PROSPECT-MP', u'Pobieranie danych zakończyło się '+\
                      u'błędem', buglevel=5)
    if not nie_zapisuj:
        with open('prospect_mp.json','w') as f:
            json.dump(stany, f)
    return stany
def _native_exec(self, cmd):
    """Run *cmd* through the command runner and log its output.

    Accepts either a ready-made argument list or a space-separated
    command string.
    """
    # isinstance over type()==list; str.split already returns a list, so
    # the old element-by-element copy was redundant.
    if not isinstance(cmd, list):
        cmd = cmd.split(" ")
    debug.log(self.cmdrunner.execute(cmd))
def setup_gitconfig(repo):
    """Prompt the user to confirm or correct their guessed full name."""
    debug.log("Setting up git configuration")
    guessedRight=prompt_user("Is your (full) name \""+guessUsername()+"\"?")
    if(not guessedRight):
        username=prompt_user("Please enter your name",False)
    else:
        username=guessUsername()
    # NOTE(review): `username` is collected but never written to any git
    # configuration here -- confirm whether this function is incomplete.
def p_buffer_snum(b):
    '''buffer : buffer snum'''
    # Grammar action: append a signed sample to an existing buffer.
    # Samples must lie in the normalized range [-1, 1].
    if b[2]<-1 or b[2]>1:
        errorTipos('Valores de buffer deben estar en [-1,1]')
    b[0] = oper(lambda x, y: x + y, b[1], array([b[2]]))
    log('p_buffer_snum: %s + %s = %s' % (b[1], b[2], b[0]))
def p_buffer_num(b):
    '''buffer : num '''
    # Grammar action: start a buffer from a single sample.
    # Samples must lie in the normalized range [-1, 1].
    if b[1]<-1 or b[1]>1:
        errorTipos('Valores de buffer deben estar en [-1,1]')
    b[0] = array([b[1]], dtype = float)
    log('p_buffer_num: %s' % b[1])
def send(sock, client_obj, built_msg):
    """Dispatch *built_msg* over TCP or UDP according to the socket type,
    warning when the message is shorter than a full header."""
    if len(built_msg) < MSG_HEADER_SIZE:
        debug.log("built message does not contain a full header", debug.P_WARNING)
    if sock.type == socket.SOCK_STREAM:
        send_tcp(sock, built_msg)
    else:
        send_udp(sock, client_obj.ip, client_obj.c_udp_port, built_msg)
def processIdle(disable_interscript_idle=False):
    """Run the scheduler's idle step.

    When *disable_interscript_idle* is set, also (once per session) turn
    off inter-script idle dispatch after notifying the other scripts that
    idle handling is available here.
    """
    global _interscript_idle_disabled
    _scheduler.Idle()
    if disable_interscript_idle and not _interscript_idle_disabled:
        log('midi', 'Disabling interscript idle.')
        _interscript_idle_disabled = True
        arturia_midi.dispatch_message_to_other_scripts(
            arturia_midi.INTER_SCRIPT_STATUS_BYTE,
            arturia_midi.INTER_SCRIPT_DATA1_UPDATE_STATE,
            arturia_midi.INTER_SCRIPT_DATA2_STATE_IDLE_AVAILABLE)
def needy_modules_critical(needy_modules, time_started, mod_duration):
    """Return True when the needy modules must be solved right away.

    Compares the time left before a needy module explodes against the
    estimated time needed to solve all of them.
    """
    if not needy_modules:
        return False
    needy_duration = 40  # Duration before a needy module explodes.
    time_to_solve = 4    # Approx. time required to solve a needy module.
    time_spent = get_time_spent(time_started) + mod_duration
    timeleft = needy_duration - time_spent
    threshold = needy_modules * time_to_solve
    if timeleft >= threshold:
        flavor_str = f"in: {(timeleft - threshold):.1f}s"
    else:
        flavor_str = "now!"
    log(f"Need to solve needy modules {flavor_str}", config.LOG_DEBUG)
    return timeleft < threshold
def __init__(self, overview):
    """Build the scoreboard model from a game *overview* record."""
    self.away_team = Team(overview.away_name_abbrev, overview.away_team_runs)
    self.home_team = Team(overview.home_name_abbrev, overview.home_team_runs)
    self.inning = Inning(overview)
    self.bases = Bases(overview)
    self.pitches = Pitches(overview)
    self.outs = Outs(overview)
    self.game_status = overview.status
    debug.log(self)
def solve_password(image, char_model, mod_pos):
    """Attempt to solve the 'Password' module; press submit on success."""
    mod_x, mod_y = mod_pos

    def click_func(x, y):
        # Translate 'image-local' clicks from the password solver into
        # 'screen-local' coordinates.
        return win_util.click(mod_x + x, mod_y + y)

    if password_solver.solve(image, char_model, screenshot_module, click_func):
        # Submit button location, relative to the module origin.
        win_util.click(mod_x + 154, mod_y + 254)
    else:
        log(f"WARNING: Could not solve 'Password'.", config.LOG_WARNING)
def execute_method_parameter(self):
    """Execute node method (nm=foo) if present and delete nm key from
    parameter

    Returns True when the named method exists and reports success,
    False otherwise.
    """
    if 'nm' in self.parameters:
        methodName = self.parameters['nm']
        # Consume the key so the method is not executed twice.
        del self.parameters['nm']
        log(self, "Executing method on node: " + repr(methodName))
        if getattr(self.root, methodName)():
            return True
    return False
def OnLongPressDrumPad(note):
    """Toggle loop recording on a long drum-pad press.

    Stops recording when one is in progress; otherwise starts a new
    recording bound to *note* and begins blinking that pad's light.
    """
    global _pad_recording_led, _drop_note
    if _recorder.IsRecording():
        log('midi', 'Stop Recording. Long press detected for %s' % str(note))
        _recorder.StopRecording()
    else:
        log('midi', 'Start Recording. Long press detected for %s' % str(note))
        _drop_note = note
        _recorder.StartRecording(note)
        # Reset the LED state so BlinkLight starts from 'off'.
        _pad_recording_led = False
        BlinkLight(note)
def print_wires(wires, destinations):
    """Log every wire/destination pair for the Wire Sequence module."""
    log("Wires:", LOG_DEBUG, module="Wire Sequence")
    colors = ("Red", "Blue", "Black")
    letters = ("A", "B", "C")
    for wire, dest in zip(wires, destinations):
        if wire == -1:
            desc = "Empty -> Empty"
        else:
            desc = f"{colors[wire]:5s} -> {letters[dest]}"
        log(desc, LOG_DEBUG, module="Wire Sequence")
def get_selected_results_with_index(self):
    """Return the marked results, or the single result under the cursor
    when nothing is marked.

    Each entry is a (line, index, abs_idx) tuple; an empty list is
    returned when there are no results at all.
    """
    results = self.get_marked_results_with_index()
    if not results:
        try:
            index = self.index
            result = self.results[ index]    # EAFP (results may be a zero-length list)
            results.append((result[0], index, result[2]))
        except Exception as e:
            debug.log("get_selected_results_with_index", e)
    return results
def parse_files(filenames):
    """Parse every trace file and map filename -> parsed trace.

    Removed a dead `global options` declaration (options was never read
    or assigned here) and replaced the index loop with direct iteration.
    """
    data = {}
    for filename in filenames:
        data[filename] = parse_file(filename)
        log("")  # blank line separates per-file parser output
    return data
def handle_sock_err(sockerr, client_obj):
    """Drop the client at c_index and process its departure after a
    socket error.

    A connection reset is treated as a forced quit by the host; any other
    error is logged and passed through as the leave reason.
    """
    global c_index
    if not sockerr:
        return
    del client.clients[c_index]
    if sockerr.errno == socket.errno.ECONNRESET:
        client.handle_leave(client_obj, "HOST_FORCE_QUIT", False)
    else:
        # Removed an unused local `errno = sockerr.errno` that also
        # shadowed the stdlib module name.
        debug.log(
            "socket error occurred (or not handled for). err: %s" % sockerr.strerror,
            debug.P_ERROR)
        client.handle_leave(client_obj, sockerr.strerror, False)
def OnNavigationRightLongPress(self, event):
    """Long press on nav-right: toggle the mixer panel and mirror its
    visibility on the button light and the display."""
    debug.log('OnNavigationRightLongPress', 'Dispatched', event=event)
    if self._button_hold_action_committed:
        # A hold action already fired for this press; ignore this one.
        return
    # Toggle visibility of mixer panel
    is_visible = self._toggle_visibility(midi.widMixer)
    visible_str = 'VISIBLE' if is_visible else 'HIDDEN'
    self._controller.lights().SetLights({
        ArturiaLights.ID_NAVIGATION_RIGHT: ArturiaLights.AsOnOffByte(is_visible)
    })
    self._display_hint(line1='Mixer Panel', line2=visible_str)
def OnGlobalIn(self, event):
    """Handle the punch-in button: trigger FL Studio punch-in and light
    the button LED."""
    if arturia_leds.ESSENTIAL_KEYBOARD:
        if self._punched:
            # Dispatch to punchOut for essential keyboards since essential only has one punch button.
            self.OnGlobalOut(event)
            return
    self._punched = True
    debug.log('OnGlobalIn', 'Dispatched', event=event)
    transport.globalTransport(midi.FPT_PunchIn, midi.FPT_PunchIn, event.pmeFlags)
    self._controller.lights().SetLights(
        {ArturiaLights.ID_GLOBAL_IN: ArturiaLights.LED_ON})
def OnNavigationKnobTurned(self, event):
    """Route a navigation-knob turn to the mode-appropriate action:
    playlist-track change (save mode), macro action (any button/locked
    mode), or plain navigation value update."""
    delta = self._get_knob_delta(event)
    debug.log('OnNavigationKnob', 'Delta = %d' % delta, event=event)
    if self._button_mode == arturia_macros.SAVE_BUTTON:
        self._change_playlist_track(delta)
    elif self._button_mode or self._locked_mode:
        self._macros.on_macro_actions(
            self._button_mode | self._locked_mode,
            arturia_macros.NAV_WHEEL, delta)
        # Mark the hold as consumed so release handlers don't re-fire.
        self._button_hold_action_committed = True
    else:
        self._navigation.UpdateValue(delta)
def run(self, build, machine, testdir):
    """Benchmark driver: boot the phase-scale benchmark once per
    (delay, core-count) combination and yield every collected output line."""
    ncores = machine.get_ncores()
    for delay in [0, 1, 5, 10, 25, 50, 100, 250, 500]:
        # Core counts from 2 up to the machine's full core count.
        for i in range(2, ncores + 1):
            debug.log('running %s on %d/%d cores, delay %d' %
                      (self.name, i, ncores, delay))
            modules = self.get_modules(build, machine)
            modules.add_module("phases_scale_bench", [i, delay, PHASESCALE_TIMEOUT])
            self.boot(machine, modules)
            for line in self.collect_data(machine):
                yield line
def set_value(self, id, column_name, value):
    """Upsert *value* into voice.<column_name> for row *id* and commit.

    NOTE(review): column_name is interpolated into the SQL text --
    identifiers cannot be bound parameters, so it must only ever come
    from trusted code, never from user input.  Values are bound safely.
    """
    debug.log("set {0} id:{1} value:{2}".format(column_name, id, value))
    # Invalidate any cached XML for this row before changing it.
    self.delete_xml(id)
    c = self.connection.cursor()
    if self.has_value(id):
        c.execute('update voice set {0}=? where id=?'.format(column_name),
                  (value, id))
    else:
        c.execute(
            'insert into voice (id, {0}) values (?, ?)'.format(
                column_name), (id, value))
    self.connection.commit()
def execute_action(self):
    """Run every selected action, passing it the accumulated argument
    list; individual action failures are logged, not propagated."""
    selected_actions = self.model_action.get_selected_results_with_index()
    if selected_actions and self.args_for_action:
        for name, _, act_idx in selected_actions:
            try:
                action = self.actions[act_idx]
                if action:
                    action.act([arg for arg, _, _ in self.args_for_action], self)
            except Exception as e:
                debug.log("execute_action", e)
def determine_alignment(img, bbox):
    """Determine serial-number alignment by probing for red pixels just
    outside the character bounding box.

    Returns 1 for left alignment, -1 for right alignment, 0 when neither
    probe finds red.
    """
    min_y, max_y, min_x, max_x = bbox
    # Probe one twelfth of the image width away from the box edges.
    offset_x = img.shape[1] // 12
    if scan_for_red(img, min_x - offset_x):
        log("Serial number is left aligned.", config.LOG_DEBUG, "Serial Number")
        return 1 # Left alignment.
    elif scan_for_red(img, max_x + offset_x):
        log("Serial number is right aligned.", config.LOG_DEBUG, "Serial Number")
        return -1
    return 0
def get_serial_number(img, model):
    """OCR the bomb's serial number from *img* using the character model.

    Returns the serial string, or None when the character alignment
    cannot be determined.
    """
    masks, alignment = get_characters(img)
    if not alignment:
        log("ERROR: Could not determine alignment of serial number",
            LOG_WARNING, "Serial Number")
        return None
    # Reshape each character mask to the classifier's input dimensions.
    masks = np.array([
        dataset_util.reshape(mask, config.CHAR_INPUT_DIM[1:]) for mask in masks
    ])
    prediction = classifier.predict(model, masks)
    best_pred = classifier_util.get_best_prediction(prediction)
    return create_serial_string(best_pred, alignment) # Return actual string.
def __init__(self, matrix, canvas, games, config):
    """Initializes a GameRender
    """
    self.matrix = matrix
    self.canvas = canvas
    self.games = games
    self.config = config
    # Scrolling text starts fully off-screen at the right edge.
    self.current_scrolling_text_pos = self.canvas.width
    self.creation_time = time.time()
    self.scroll_finished = False
    self.data_needs_refresh = True
    debug.log(self)
def size2chs(n, getgeometry=0):
    """Convert a byte size *n* into CHS addressing.

    Tries increasing heads-per-cylinder values until the cylinder count
    fits below the 1024 CHS limit.  With getgeometry set, returns a
    (full cylinders, HPC, 63-sectors-per-track) geometry for a partition
    that fits in the given space instead of the raw (c, h, s) tuple.
    """
    lba = n / 512  # 512-byte sectors (relies on Python 2 integer division)
    for hpc in (16, 32, 64, 128, 255):
        c, h, s = lba2chs(lba, hpc)
        if c < 1024:
            break
    if DEBUG & 1:
        log("size2chs: calculated Heads Per Cylinder: %d", hpc)
    if not getgeometry:
        return c, h, s
    else:
        # partition that fits in the given space
        # full number of cylinders, HPC and SPT to use
        return c - 1, hpc, 63
def receive(sock, crlf):
    """Read from *sock* one byte at a time until the terminator *crlf*
    appears; returns the data with the trailing two bytes stripped."""
    data = ''
    while not data.endswith(crlf):
        read = sock.recv(1)
        if len(read) < 1:
            # Peer closed the connection before sending the terminator.
            # NOTE(review): the loop keeps running after close(sock) --
            # this looks like it should return or raise here; confirm
            # what close() actually does.
            debug.log('nothing to read: closing socket...')
            close(sock)
        data = data + read
    data = data[:-2]
    debug.log('recieved ' + str(sock.getpeername()) + ': ' + str(data))
    return data
def seek(self, offset, whence=0):
    """Seek within the partition, clamping the position to [0, size] and
    translating it to an absolute disk offset via the partition base.

    whence follows the file protocol: 0 = absolute, 1 = relative to the
    current position, 2 = relative to the end (only when size is known).
    """
    if DEBUG & 1:
        log("partion.seek(%016Xh, %d)", offset, whence)
    if whence == 1:
        self.pos += offset
    elif whence == 2:
        if self.size:
            self.pos = self.size + offset
    else:
        self.pos = offset
    # Clamp inside the partition bounds.
    if self.pos < 0:
        self.pos = 0
    if self.pos > self.size:
        self.pos = self.size
    self.disk.seek(self.pos + self.offset)
def _make_build_dir(self, build_dir=None):
    """Create (or reuse) the build directory and remember it on
    self.build_dir; defaults to <buildbase>/<lowercased build name>."""
    if build_dir is None:
        build_dir = os.path.join(self.options.buildbase, self.name.lower())
    self.build_dir = build_dir
    debug.verbose('creating build directory %s' % build_dir)
    try:
        os.makedirs(build_dir)
    except OSError, e:
        if e.errno == errno.EEXIST:
            # An existing directory is fine: incremental rebuild.
            debug.log("reusing existing build in directory %s" % build_dir)
        else:
            raise
def rand_truncate(files_set):
    """Truncate a random selection (up to half) of *files_set* to random
    sizes, keeping recorded SHA1 digests consistent; returns the count.

    NOTE(review): assumes len(files_set) >= 2 -- with a single element
    randint(1, 0) raises ValueError (Python 2 integer division).
    """
    n = randint(1, len(files_set) / 2)
    for i in range(n):
        f = choice(files_set)
        # New size: somewhere between 1/6 and 1/2 of the current size.
        j = randint(f.fp.File.filesize / 6, f.fp.File.filesize / 2)
        if DEBUG & 1:
            log("truncating %s from %d to %d", f.name, f.fp.File.filesize, j)
        f.fp.ftruncate(j, 1)
        if hasattr(f, 'sha1'):
            # Keep the stored checksum consistent with the truncated content.
            f.sha1 = hashlib.sha1(RandFile.Buffer[:j]).hexdigest()
    return n
def getData(l):
    """Build the spoken-announcement payload for Czech (CHMI) river
    awareness levels.

    Loads povodi_cz.json (downloading it first when absent), collects the
    configured stations whose awareness level is above zero, appends
    speech tokens grouped by level, river, and station, then removes the
    cache file so the next call fetches fresh data.
    """
    data = {
        "data": "",
        "needCTCSS": False,
        "allOK": True,
        "source": ""
    }  # given by welcome message
    regions = get_config_regions()
    if not os.path.exists('povodi_cz.json'):
        regions = generate_json(regions=regions.keys(), dont_save=False)
    else:
        regions = json.loads(
            unicode(open('povodi_cz.json', 'r').read(), 'utf-8'))
    awarenesses = {}
    for region in regions.keys():
        for river in regions[region]:
            for station in regions[region][river].keys():
                station_name, level = regions[region][river][station]
                if [region, station] in config.stations and level > 0:
                    # setdefault replaces the Python-2-only has_key checks.
                    rivers = awarenesses.setdefault(str(level), {})
                    rivers.setdefault(safe_name(river), []).append(
                        safe_name(station_name))
    awalvls = [
        '', 'stopien_czuwania', 'stopien_gotowosci', 'stopien_zagrozenia',
        'stopien_ekstremalnych_powodzi'
    ]
    if awarenesses != {}:
        data[
            'data'] += 'komunikat_hydrologiczny_czeskiego_instytutu_hydrometeorologicznego'
        # Highest awareness level first.
        for level in sorted(awarenesses.keys())[::-1]:
            # BUG FIX: `level` is a string dictionary key; the old
            # "level > 1" compared str to int (always True on Python 2),
            # so CTCSS was enabled even for the lowest awareness level.
            if int(level) > 1:
                data['needCTCSS'] = True
            data['data'] += ' ' + awalvls[int(level)]
            for river in sorted(awarenesses[level].keys()):
                data['data'] += ' ' + 'rzeka' + ' ' + river
                for station in sorted(awarenesses[level][river]):
                    data['data'] += ' ' + 'wodowskaz' + ' ' + station
    # Drop the cache so stale data is never announced twice.
    if os.path.exists('povodi_cz.json'):
        os.remove('povodi_cz.json')
    debug.log("POVODI_CZ", "finished...")
    return data
def solve_needy_discharge(image, mod_pos):
    """Solve the Needy Discharge module by holding its lever for the
    computed drain duration; no-op while the module is inactive."""
    if not needy_features.is_active(image):
        log("Needy Discharge is not active.", config.LOG_DEBUG, "Needy Discharge")
        return
    mod_x, mod_y = mod_pos
    time_to_drain = needy_discharge_solver.solve(image)
    # Lever location, relative to the module origin.
    x_top, y_top = mod_x + 230, mod_y + 92
    # Press and hold for the drain duration, then release.
    win_util.mouse_move(x_top, y_top)
    win_util.mouse_down(x_top, y_top)
    sleep(time_to_drain)
    win_util.mouse_up(x_top, y_top)
def _activate(self):
    """Make this program the current GL program and activate each of its
    active uniforms and attributes."""
    log("GPU: Activating program")
    gl.glUseProgram(self.handle)
    for uniform in self._uniforms.values():
        if not uniform.active:
            continue
        uniform.activate()
    for attribute in self._attributes.values():
        if not attribute.active:
            continue
        attribute.activate()
def __setitem__ (self, index, value):
    "Sets the value stored in a given block index"
    if index < 0:
        # Support negative indexing from the end of the table.
        index += self.size
    self.decoded[index] = value
    dsp = index*4  # each BAT entry is a 4-byte big-endian integer
    pos = self.offset+dsp
    if DEBUG&4:
        log("%s: set BAT[0x%X]=0x%X @0x%X", self.stream.name, index, value, pos)
    # Write through to the backing stream, preserving the stream position.
    opos = self.stream.tell()
    self.stream.seek(pos)
    value = struct.pack(">I", value)
    self.stream.write(value)
    self.stream.seek(opos) # rewinds
def input_new_project(app_data):
    """Open a dialog and create project with default companies and trades.

    Args:
        app_data (api.AppData): Application data containing the project data
    """
    project_args = dlg.open_project_dialog(app_data=app_data)
    if not project_args:
        return
    app_data.new_project(project_args)
    # logging
    debug.log(
        f"New project created: {app_data.project.identifier}, {str(app_data.project.uid)}"
    )
def get_details_async(model, holder):
    """Classify the bomb-timer and module-count characters and append the
    formatted results to *holder* (duration string first, then count)."""
    duration_masks = get_duration_characters()
    if len(duration_masks) != 3:
        # BUG FIX: the old message computed len(get_duration_characters) --
        # len() of the function object itself -- which raises TypeError.
        # Report the length of the extracted mask list instead.
        log(f"WARNING: Bomb duration string length != 3 (len={len(duration_masks)}).",
            config.LOG_WARNING)
    module_masks = get_module_characters()
    masks = duration_masks + module_masks
    # Reshape every character mask to the classifier's input dimensions.
    masks = np.array([dataset_util.reshape(mask, config.CHAR_INPUT_DIM[1:])
                      for mask in masks])
    prediction = classifier.predict(model, masks)
    best_pred = classifier_util.get_best_prediction(prediction)
    labels = [classifier.LABELS[p] for p in best_pred]
    holder.append(format_time(labels[:3]))   # first 3 chars: timer digits
    holder.append(fix_number(labels[3:5]))   # next 2 chars: module count