def refresh_games(self):
    # Refresh today's list of MLB games, retrying up to 5 times on
    # network or parse failures.
    debug.log("Updating games for {}/{}/{}".format(self.month, self.day, self.year))
    # Clear urllib's cached downloads so we don't reuse stale schedule data.
    urllib.urlcleanup()
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            current_day = self.day
            self.set_current_date()
            all_games = mlbgame.day(self.year, self.month, self.day)
            if self.config.rotation_only_preferred:
                self.games = self.__filter_list_of_games(all_games, self.config.preferred_teams)
            else:
                self.games = all_games
            # If the date rolled over while refreshing, jump back to a
            # preferred team's game.
            if current_day != self.day:
                self.current_game_index = self.game_index_for_preferred_team()
            self.games_refresh_time = time.time()
            break
        except URLError, e:
            debug.error("URLError: {}".format(e.reason))
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
        except ValueError:
            debug.error("ValueError: Failed to refresh list of games")
            attempts_remaining -= 1
            time.sleep(NETWORK_RETRY_SLEEP_TIME)
def LaunchLandShip(fgname,faction,typ,numlaunched=1):
    # Record that `numlaunched` ships of type `typ` from flightgroup
    # `fgname` of `faction` launched, decrementing the FG's active count
    # in the save-game string list (clamped to [0, number-to-begin]).
    key = MakeFGKey (fgname,faction)
    ships=ReadStringList (ccp,key)
    debug.debug('LaunchLandShip: fg:%s fac:%s typ:%s, num:%s', fgname, faction, typ, numlaunched)
    # Per-ship records start at ShipListOffset() and are PerShipDataSize()
    # entries wide: [type, number-to-begin, number-active, ...].
    for num in xrange (ShipListOffset(),len(ships),PerShipDataSize()):
        if (typ == ships[num]):
            try:
                ntobegin=int(ships[num+1])
                nactive=int(ships[num+2])
                debug.debug("attempting launch for ship %s, begin %s, act %s)", typ, ntobegin, nactive)
                nactive-=numlaunched
                # Happens regularly -Patrick
                # In the first system, nactive seems to always be 0 for all ships.
                # In other systems, this isn't always true.
                # This doesn't really seem to matter, though.
                # Klauss: turned debug.error into debug.debug if it happens so often
                # to clean up non-debug logs
                if (nactive<0):
                    nactive=0
                    debug.debug('error more ships launched than in FG %s', fgname)
                if (nactive>ntobegin):
                    nactive=ntobegin
                    debug.debug('error ships %s landed that never launched', typ)
                # Persist the clamped active count back into the save record.
                Director.putSaveString(ccp,key,num+2,str(nactive))
            except:
                debug.error('error in FG data (str->int)')
def write(self, sock):
    '''Write operation on the socket.

    Writes are driven by a per-socket message queue: one queued message is
    sent per call, and the socket is dropped from the output list once its
    queue is empty or missing.

    Parameter:
    sock -- socket instance to write to
    '''
    try:
        msg = self.message_queue[sock].get_nowait()
    except Queue.Empty:
        # Nothing left to send: stop watching this socket for writes and
        # discard its now-empty queue.
        self.outputs.remove(sock)
        if sock in self.message_queue:
            try:
                del self.message_queue[sock]
            except KeyError:
                # The `in` check just succeeded, so this "cannot" happen.
                debug.wtf(self, 'GTFO Python !!!')
    except KeyError:
        # No queue was ever registered for this socket.
        if sock in self.outputs:
            self.outputs.remove(sock)
    else:
        try:
            sock.send(msg)
        except socket.error:
            debug.error(self, 'Error for sending to ' + str(sock))
            self.disconnect(sock)
        else:
            debug.log(self, 'Sending to ' + str(sock), 3)
            debug.log(self, 'To ' + str(sock) + '`' + msg + '`', 5)
def displayText(room,textlist,enqueue=False):
    # Show campaign text (a single string or a list of strings) in a Base
    # room, either immediately or via the enqueue variant.
    #debug.warn("Displaying campaign text "+str(textlist))
    if VS.isserver():
        return
    if room==-1:
        debug.error("Room is -1!!!")
        # NOTE(review): collapsed source makes the indentation ambiguous;
        # the import/GetCurRoom fallback is placed inside this branch since
        # it recovers from the invalid room id — confirm against original.
        import Base
        room=Base.GetCurRoom()
    # assumes Base is also importable at this point for valid rooms —
    # TODO confirm a module-level import exists.
    func=Base.MessageToRoom
    if enqueue:
        func=Base.EnqueueMessageToRoom
    if type(textlist) is str:
        if textlist!='':
            debug.debug('*** Base.message('+textlist+')')
            func(room,textline(textlist))
    else:
        if textlist and len(textlist):
            debug.debug('*** Base.message('+str(textlist[0])+')')
            #Base.MessageToRoom(room,str(textlist[0]))
            stri=''
            if enqueue:
                # Enqueued messages are delivered one line at a time.
                for x in textlist:
                    func(room,textline(x))
            else:
                # Immediate messages are joined into one newline-separated blob.
                for x in textlist:
                    stri+=textline(x)+"\n"
                func(room,stri);
def read(self, sock):
    # Read handler: accept a new client on the listener socket, or receive
    # and parse data from an already-connected client socket.
    if sock is self.listener:
        try:
            # Project-specific listener API: accept(1024) returns both the
            # client socket and an initial chunk of data.
            client, data = self.listener.accept(1024)
        except socket.error:
            debug.error(self, 'Error for accepting client')
        else:
            if not client in self.inputs:
                self.inputs += [client]
                debug.log(self, 'Client connected : ' + str(client), 1)
            if data:
                debug.log(self, 'Getting data from ' + str(client), 2)
                debug.log(self, 'From ' + str(client) + '`' + data + '`', 4)
                # Refresh the client's last-activity timestamp before parsing.
                self.maj_time(client)
                self.parse(client, data)
    else:
        try:
            msg = sock.recv(1024)
        except socket.error:
            debug.error(self, 'Error for reading on ' + str(sock))
            self.disconnect(sock)
        else:
            debug.log(self, 'Getting data from ' + str(sock), 2)
            debug.log(self, 'From ' + str(sock) + '`' + msg + '`', 4)
            self.maj_time(sock)
            self.parse(sock, msg)
def getBestMatch(self, stories, varlist):
    """From the provided list of stories, return the item whose \"scale\"
    most closely matches that of the given event (minimise variance).

    stories -- sequence of (scale, keyword, ...) records
    varlist -- event data indexed by the NewsManager *_INDEX constants
    Returns False when no story matches the event's keyword.
    """
    # Keep only the stories whose keyword matches the event's keyword.
    kw_stories = [story for story in stories
                  if story[1] == varlist[NewsManager.EVENT_KEYWORD_INDEX]]
    if not kw_stories:
        debug.error("ERROR: NO KEYWORD STORIES AVAILABLE FOR "+str(varlist))
        debug.error(varlist)
        return False
    if len(kw_stories) == 1:
        return kw_stories[0]
    # Collect all stories whose scale is nearest the event's scale.
    # Scales are compared in integer thousandths, as in the original.
    event_scale = int(1000*varlist[NewsManager.EVENT_SCALE_INDEX])
    scale_stories = [kw_stories[0]]
    diff = abs(int(1000*kw_stories[0][0]) - event_scale)
    for story in kw_stories[1:]:
        # BUG FIX: the old loop compared scale_stories[0] against the event
        # (i.e. against itself) instead of the current `story`, and never
        # updated `diff`, so a closer story could never win.
        story_diff = abs(int(1000*story[0]) - event_scale)
        if story_diff < diff:
            scale_stories = [story]
            diff = story_diff
        elif story_diff == diff:
            scale_stories.append(story)
    # Deterministically pick among equally-close candidates.
    return scale_stories[varlist[NewsManager.RANDOM_INTEGER_INDEX] % len(scale_stories)]
def GetShipsInFG(fgname,faction):
    # Return a list of (shiptype, count) pairs describing a random
    # launchable subset of the ships in flightgroup `fgname` of `faction`.
    ships = ReadStringList (ccp,MakeFGKey(fgname,faction))
    if (len(ships)<=ShipListOffset()):
        return []
    try:
        # Total number of ships recorded for the flightgroup.
        count=int(ships[0])
    except:
        debug.error('bad flightgroup record %s', ships)
    launchnum = DefaultNumShips()
    if (launchnum>count):
        launchnum=count
    # Number of per-ship records in the save string.
    nent = (len(ships) - ShipListOffset()) / PerShipDataSize()
    retn = [0] * nent
    # Make roughly launchnum*(1.0 .. 2.0) random picks, each weighted by
    # the per-type active count stored at record offset +2.
    for i in xrange(_prob_round(launchnum*(0.7+vsrandom.random()+0.3))):
        which = vsrandom.randrange(count)
        for j in xrange(nent):
            pos = j*PerShipDataSize()+ShipListOffset()
            which -= int(ships[pos+2])
            if which <= 0:
                retn[j] += 1
                break
    # Emit only the ship types that were actually picked.
    ret = []
    for i in xrange(nent):
        if retn[i]:
            pos = i*PerShipDataSize()+ShipListOffset()
            ret.append((ships[pos],retn[i]))
    return ret
def setDocked(self,isdocked):
    """Record the docked state on the current player's campaign entry.

    Logs an error and does nothing when the current player index is
    invalid.
    """
    current = VS.getCurrentPlayer()
    if not self.checkPlayer(current):
        debug.error("campaign.setDocked(%s) called for invalid player %d"%(str(isdocked),current))
        return
    self.players[current].docked = isdocked
def run_test(build, machine, test, path):
    """Run `test` on `machine`, streaming its output to RAW_FILE_NAME under
    `path` while echoing timestamped, printable-only lines to the debug log.

    Re-raises KeyboardInterrupt after logging; always closes the raw file
    and runs test cleanup.
    """
    # Open files for raw output from the victim and log data from the test
    raw_file_name = os.path.join(path, RAW_FILE_NAME)
    debug.verbose('open %s for raw output' % raw_file_name)
    raw_file = open(raw_file_name, 'w')

    # run the test, dumping the output to the raw file as we go
    try:
        debug.verbose('harness: setup test')
        test.setup(build, machine, path)
        debug.verbose('harness: run test')
        starttime = datetime.datetime.now()
        for out in test.run(build, machine, path):
            # timedelta for the time this line was emitted from the start of the run
            timestamp = datetime.datetime.now() - starttime
            # format as string, discarding sub-second precision
            timestr = str(timestamp).split('.', 1)[0]
            # BUG FIX: filter() returns a lazy iterator on Python 3, so the
            # old code would log '<filter object ...>' instead of the text;
            # a join over a genexp yields the filtered string on both 2 and 3.
            filtered_out = ''.join(c for c in out.rstrip() if c in string.printable)
            # debug filtered output along with timestamp
            debug.debug('[%s] %s' % (timestr, filtered_out))
            # log full raw line (without timestamp) to output file
            raw_file.write(out)
        debug.verbose('harness: output complete')
    except KeyboardInterrupt:
        # let the user know that we are on our way out
        debug.error('Interrupted! Performing cleanup...')
        raise
    finally:
        raw_file.close()
        debug.verbose('harness: cleanup test')
        test.cleanup(machine)
def main(dirs):
    """Reprocess the stored results in each directory of `dirs`.

    Each directory's description.txt names the test that produced it; the
    matching test class is re-instantiated and its results re-parsed via
    harness.process_results. Unparseable or unknown tests are skipped.
    """
    for dirname in dirs:
        debug.log('reprocessing %s' % dirname)

        debug.verbose('parse %s/description.txt for test' % dirname)
        testname = test = None
        # BUG FIX: use a context manager so the file is closed even when
        # iteration raises (the old explicit close leaked on error).
        with open(os.path.join(dirname, 'description.txt'), 'r') as f:
            for line in f:
                m = re.match(r'test:\s+(.*)', line)
                if m:
                    testname = m.group(1)
                    break
        if not testname:
            debug.error('unable to parse description for %s, skipped' % dirname)
            continue

        debug.verbose('locate test "%s"' % testname)
        # Deliberately no break: as before, the last matching test wins.
        for t in tests.all_tests:
            if t.name.lower() == testname.lower():
                test = t(None)  # XXX: dummy options
        if not test:
            debug.error('unknown test "%s" in %s, skipped' % (testname, dirname))
            continue

        debug.verbose('reprocess results')
        harness.process_results(test, dirname)
def CountSystems(sys):
    # Tally one system for the faction that owns `sys` in the global
    # `systemcount` map.
    fac =VS.GetGalaxyFaction(sys)
    if fac in systemcount:
        systemcount[fac]+=1
    else:
        systemcount[fac]=1
        # NOTE(review): the collapsed source makes the original indentation
        # ambiguous; this error is placed in the else-branch because its
        # message ("not in list") describes a faction seen for the first
        # time — confirm against the original layout.
        debug.error("FATAL ERROR "+fac+" not in list;")
def getPOV(self, varlist):
    """Returns the corresponding POV_* class variable for the reaction of
    the dockedat faction to the status of the event."""
    # Relationship of the station's faction to each side of the event.
    relatdef = VS.GetRelation(self.dockedat_faction,varlist[self.DEFENDER_INDEX])
    relatagg = VS.GetRelation(self.dockedat_faction,varlist[self.AGGRESSOR_INDEX])
    success = varlist[NewsManager.AGGRESSOR_SUCCESS_INDEX]
    # Both sides strongly disliked or both strongly liked: no rooting interest.
    if (relatdef <= -self.POV_CUTOFF and relatagg <= -self.POV_CUTOFF) or (relatdef >= self.POV_CUTOFF and relatagg >= self.POV_CUTOFF):
        return self.POV_NEUTRAL
    elif relatdef > relatagg:
        # We prefer the defender, so an aggressor win is bad news.
        if success == self.SUCCESS_WIN:
            return self.POV_BAD
        elif success == self.SUCCESS_LOSS:
            return self.POV_GOOD
        elif success == self.SUCCESS_DRAW:
            return self.POV_GOOD
    elif relatdef < relatagg:
        # We prefer the aggressor, so an aggressor win is good news.
        if success == self.SUCCESS_WIN:
            return self.POV_GOOD
        elif success == self.SUCCESS_LOSS:
            return self.POV_BAD
        elif success == self.SUCCESS_DRAW:
            return self.POV_BAD
    else:
        # relatdef == relatagg yet the neutral test above failed; flagged
        # here as a suspected -0 artifact from VS.GetRelation.
        debug.error("ERROR: VS is returning -0 for relationship relatagg number")
        return self.POV_NEUTRAL
def isDocked(self):
    """Return the current player's docked flag, or False when the player
    index is invalid."""
    plr=VS.getCurrentPlayer()
    if not self.checkPlayer(plr):
        # BUG FIX: the old message referenced an undefined `isdocked`
        # variable and fed a 2-tuple into a single-%d format, so the error
        # path itself raised NameError/TypeError instead of logging.
        debug.error("campaign.isDocked() called for invalid player %d"%plr)
        return False
    player = self.players[plr]
    return player.docked
def gotoChoice(self,room,num):
    """Advance the campaign to choice `num` in `room`, first verifying
    that this node really is the campaign's current node."""
    actual = self.campaign.checkCurrentNode()
    if actual != self:
        # Refuse to act on a stale node and report both for diagnosis.
        debug.error("node.gotoChoice called, but this is not the current node.")
        debug.debug("This node "+str(self)+"; text: "+str(self.text))
        debug.debug("Real current node "+str(actual)+"; text: "+str(actual.text))
        return ["failure","At an incorrect node"]
    return self.campaign.setCurrentNode(room,num)
def getNode(self, nam=str()):
    """Returns the required Node, if it exists."""
    # An empty name selects the conversation's root node.
    name = str(nam) or self.ROOT_KEY
    try:
        return self.nodes[name]
    except KeyError:
        debug.error("Error: Node with name \'%s\' does not exist."%name)
def __safeInvoke(self, function, *args, **kwargs):
    """Call `function`, converting any exception into an _Exception result
    (with the traceback attached) instead of propagating it."""
    try:
        return function(*args, **kwargs)
    except Exception as e:
        # BUG FIX: sys.exc_traceback was removed in Python 3 (deprecated
        # long before); sys.exc_info()[2] is the portable way to fetch the
        # active traceback.
        trace = sys.exc_info()[2]
        e.stack_trace = trace
        exc = _Exception(e, trace)
        debug.error(function, e, throw=False)
        return exc
def connect(self):
    """Open a socket to the configured IRC server, register, then start
    listening; on connection failure log the error and emit a disconnect
    event instead of raising."""
    try:
        self.create_socket()
        server_address = (config.connection.server, config.connection.port)
        self.sock.connect(server_address)
        self.register()
    except socket.error as ex:
        debug.error('Failed to connect to IRC server.', ex)
        Events.disconnect()
    else:
        self.listen()
def _save_cache_data(self, data):
    """Pickle `data` into this object's cache file and fsync it to disk.

    Returns None; bails out early (after logging) when no cache path is
    configured.
    """
    cache = self.get_cache_path()
    if not cache:
        error(self, "Cache is not set")
        # BUG FIX: the old code fell through and called open(None, 'wb'),
        # raising TypeError immediately after logging the error.
        return None
    s = None
    with open(cache, 'wb') as f:
        # pickle.dump returns None; `s` is kept for interface compatibility.
        s = pickle.dump(data, f, protocol = pickle.HIGHEST_PROTOCOL)
        f.flush()
        # Force the bytes to disk before the handle is closed.
        os.fsync(f)
    return s
def set_cache(self):
    # Bind this object to the favorites cache identified by `id`.
    # NOTE(review): `id` is hard-coded to "1348794", which is always
    # truthy, so both `if not id:` fallbacks below (the 'nid' parameter
    # lookup and the error return) are dead code — this looks like
    # leftover debugging; confirm before relying on the parameter path.
    id = "1348794"
    from cache.favorites import Cache_favorites
    if not id:
        try:
            id = self.get_parameter('nid')
        except:
            pass
    if not id:
        error(self, "Cannot set cache without id")
        return False
    self.cache = Cache_favorites(id)
    return True
def addNode(self, node, name=str()):
    """Add a Node to the conversation. Only one RootNode can exist in a
    conversation at any time."""
    # A root node always replaces whatever is stored under ROOT_KEY.
    if isinstance(node, RootNode):
        self.nodes[self.ROOT_KEY] = node
        return
    # Anything that isn't at least a Node is rejected outright.
    if not isinstance(node, Node):
        debug.error("Error: Node is not of a valid type.")
        return
    # Ordinary nodes require an explicit, non-empty name.
    if name==str():
        debug.error("Error: Node must be added with a name argument.")
        return
    self.nodes[name] = node
def significantUnits():
    """Collect every unit the significant-unit iterator yields, logging an
    error for any unit that claims not to be significant."""
    found=[]
    unit_iter= VS.getUnitList()
    unit_iter.advanceNSignificant(0)
    while unit_iter.notDone():
        un = unit_iter.current()
        debug.debug('Found sig unit: '+un.getName()+' ('+un.getFullname()+')')
        if not un.isSignificant():
            # Iterator and unit disagree — log it but keep the unit anyway.
            debug.error('Unit '+un.getName()+' ('+un.getFullname()+') is not significant!')
        found.append(unit_iter.current())
        unit_iter.advanceSignificant()
    return found
def lookupInfo(self, var, tag):
    """Returns the information corresponding to the given var and tag pair.

    Falls back to faction-keyed dynamic data when the direct lookup fails,
    and to an ERROR_var_tag marker entry when that fails too.
    """
    try:
        return self.vars[var][tag]
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any lookup failure falls through as before.
        try:
            results = self.dynamic_data.getFactionData(self.vars[var]['faction'], tag)
        except Exception:
            st = 'ERROR_%s_%s'%(var,tag)#this is in case there is a typo like 'docketat'
            debug.error(st)
            results=['ERROR_%s_%s'%(var,tag)]
        # Deterministic pick among the available results.
        return results[self.rand_int % len(results)]
def __get_config(self, base_filename):
    """Load `<base_filename>.json` merged over its `.example` reference
    config; exit with an error when the reference file is missing or
    unparseable."""
    filename = "{}.json".format(base_filename)
    reference_filename = "{}.example".format(filename)
    reference_config = self.read_json(reference_filename)
    # BUG FIX: the old code tested `reference_filename` (a non-empty string,
    # always truthy), so a missing/invalid reference file was never caught.
    # Test the parsed config instead, matching __get_colors.
    if not reference_config:
        debug.error("Invalid {} reference config file. Make sure {} exists.".format(base_filename, base_filename))
        sys.exit(1)
    custom_config = self.read_json(filename)
    if custom_config:
        # Custom settings override reference defaults key-by-key.
        new_config = deep_update(reference_config, custom_config)
        return new_config
    return reference_config
def CountFactionShipsInSystem(faction,system):
    # Sum the active-ship counts (field at offset +2 of each per-ship
    # record) over every flightgroup of `faction` present in `system`.
    count=0
    st=''
    for fgs in FGsInSystem (faction,system):
        st+=fgs+' '
        ships=ReadStringList (ccp,MakeFGKey (fgs,faction))
        for num in xrange(ShipListOffset()+2,len(ships),PerShipDataSize()):
            try:
                count+= int(ships[num])
            except ValueError:
                # Narrowed from a bare `except:`: int() on a malformed
                # record raises ValueError; anything else should surface.
                debug.error('number ships '+ships[num] + ' not read')
    debug.debug('OFFICIALCOUNT %s is %s', st, count)
    return count
def SimulatedDukeItOut (fgname,faction,enfgname,enfaction):
    # Simulate one exchange of fire between two flightgroups: pick one
    # random landed ship per side and apply randomized damage to both.
    ally=fg_util.LandedShipsInFG(fgname,faction)
    enemy=fg_util.LandedShipsInFG(enfgname,enfaction)
    if (len(enemy) and len(ally)):
        allyvictim = vsrandom.randrange(0,len(ally))
        allystats = faction_ships.GetStats(ally[allyvictim][0])
        envictim = vsrandom.randrange(0,len(enemy))
        enstats = faction_ships.GetStats(enemy[envictim][0])
        # endam is rolled BEFORE either ApplyDamage call so both sides hit
        # with pre-battle strength — the ordering here is deliberate.
        endam = HowMuchDamage(enemy,vsrandom.uniform(0,allystats[1]))
        if (enfgname==fgname and enfaction==faction):
            # A flightgroup should never be fighting itself.
            debug.error("FAULT FAULT FAULT")
        ApplyDamage(enfgname,enfaction,enemy,envictim,enstats,HowMuchDamage(ally,vsrandom.uniform(0,enstats[1])),fgname,faction)
        ApplyDamage(fgname,faction,ally,allyvictim,allystats,endam,enfgname,enfaction)
def getFactionData(self, faction, variable):
    """Return the variable information stored for this faction.

    Unknown factions fall back to the 'unknown' entry; variables missing
    from the 'alltags' registry return a marker string.
    """
    if variable in self.faction_dict["alltags"]:
        try:
            return self.faction_dict[faction][variable]
        except KeyError:
            # Narrowed from a bare `except:`: only a missing faction (or a
            # faction lacking this variable) should trigger the fallback.
            # raise ValueError("Invalid Faction Specified")
            debug.error("ERROR: FACTION LOOKUP ERROR faction %s variable %s" % (faction, variable))
            return self.faction_dict['unknown'][variable]
    else:
        debug.error("ERROR: VARIABLE LOOKUP ERROR faction %s variable %s" % (faction, variable))
        return "VARIABLE LOOKUP ERROR"
def __get_colors(self, base_filename):
    """Load a led color file, overlaying any custom colors onto the
    bundled reference palette; exits when the reference file is missing."""
    filename = "ledcolors/{}.json".format(base_filename)
    reference_filename = "{}.example".format(filename)
    reference_colors = self.read_json(reference_filename)
    if not reference_colors:
        debug.error("Invalid {} reference color file. Make sure {} exists in ledcolors/".format(base_filename, base_filename))
        sys.exit(1)
    custom_colors = self.read_json(filename)
    if not custom_colors:
        # No customization present: serve the reference palette as-is.
        return reference_colors
    debug.info("Custom '{}.json' colors found. Merging with default reference colors.".format(base_filename))
    return deep_update(reference_colors, custom_colors)
def listen(self):
    """Receive IRC traffic forever, dispatching each complete line to the
    event handler; leaves the loop (then disconnects) on any unexpected
    error, while undecodable traffic is skipped silently."""
    while True:
        try:
            data = self.sock.recv(1024).decode('utf-8')
            # Drop the empty fragments produced by trailing '\r\n'.
            for line in filter(None, data.split('\r\n')):
                debug.irc(line)
                if len(line.split()) >= 2:
                    Events.handle(line)
        except (UnicodeDecodeError,UnicodeEncodeError):
            pass
        except Exception as ex:
            debug.error('Unexpected error occured.', ex)
            break
    Events.disconnect()
def GetNewFGName(faction):
    """Draw an unused flightgroup name for `faction`, refilling the name
    pool from the tweaked originals when it runs dry; unknown factions get
    a numbered Alpha_* fallback."""
    factionnr=faction_ships.factionToInt(faction)
    global numericalfaction
    if(factionnr>=len(fgnames)):
        debug.error("Faction "+faction+" unable to create fgname")
        numericalfaction+=1
        return "Alpha_"+str(numericalfaction)
    if (not len(fgnames[factionnr])):
        # Pool exhausted: rebuild it from the original names.
        fgnames[factionnr]=fg_util.TweakFGNames(origfgnames[factionnr])
        fg_util.origfgoffset+=1
    # pop() both returns and removes a random name from the pool.
    pick = vsrandom.randrange(0,len(fgnames[factionnr]))
    return fgnames[factionnr].pop(pick)
def mouseMoveEvent(self, graph, event):
    """While an edge is being drawn, drag its endpoint node with the
    cursor; otherwise defer to the base-class handler."""
    if not self.edge:
        # No pending edge: let the superclass react, then report handled.
        super().mouseMoveEvent(graph, event)
        return True
    if not self.node:
        debug.error("No node !!!", Exception(), "modes.connect")
        return False
    # Move the floating endpoint to the cursor and re-route the edge.
    self.node.setPos(self.graphics.mapToScene(event.pos()))
    self.edge.adjust()
    debug.info("Moving edge {0}", (self.edge,), "modes.connect")
    return True
def add_center_via(self, layers, offset, size=None, mirror="R0", rotate=0):
    """
    Add a three layer via structure by the center coordinate accounting
    for mirroring and rotation.

    size defaults to [1, 1] contacts; rotate must be one of 0/90/180/270.
    Returns the created via module.
    """
    import contact
    # BUG FIX: `size=[1, 1]` was a mutable default argument; use a None
    # sentinel so callers can never share (and accidentally mutate) one
    # list object across calls.
    if size is None:
        size = [1, 1]
    via = contact.contact(layer_stack=layers, dimensions=size)
    debug.check(mirror == "R0", "Use rotate to rotate vias instead of mirror.")
    height = via.height
    width = via.width
    # Shift the placement point from the via's corner to its center for
    # each supported rotation.
    if rotate == 0:
        corrected_offset = offset + vector(-0.5 * width, -0.5 * height)
    elif rotate == 90:
        corrected_offset = offset + vector(0.5 * height, -0.5 * width)
    elif rotate == 180:
        corrected_offset = offset + vector(-0.5 * width, 0.5 * height)
    elif rotate == 270:
        corrected_offset = offset + vector(-0.5 * height, 0.5 * width)
    else:
        debug.error("Invalid rotation argument.", -1)
    self.add_mod(via)
    self.add_inst(name=via.name,
                  mod=via,
                  offset=corrected_offset,
                  mirror=mirror,
                  rotate=rotate)
    # We don't model the logical connectivity of wires/paths
    self.connect_inst([])
    return via
def loop_search(self):
    # Background loop: search recent tweets for a random boost keyword and
    # favorite/follow authors we don't already follow.
    while True:
        try:
            query = random.choice(config.boost_keywords)
            for item in api.search(q='#' + query, count=50, lang='en', result_type='recent'):
                if not item.user.following and not item.favorited:
                    try:
                        api.create_favorite(item.id)
                        api.create_friendship(item.user.screen_name)
                        self.favorites += 1
                        self.follows += 1
                        debug.alert('Followed a booster twitter!')
                    except tweepy.TweepError as ex:
                        debug.error( 'Unknown error occured in the search loop!', ex)
                    # NOTE(review): collapsed source makes the placement of
                    # this throttle ambiguous (inside vs. after the inner
                    # try); placed after each attempted action — confirm.
                    time.sleep(30)
        except tweepy.TweepError as ex:
            debug.error('Error occured in the search loop!', ex)
        finally:
            # Wait out the 15-minute API rate-limit window each pass.
            time.sleep(60 * 15)
def parse_output(filename, key):
    """Parses a hspice output.lis file for a key value.

    Returns the matched value string, or "Failed" when the key is absent.
    """
    if OPTS.spice_version == "xa":
        # customsim has a different output file name
        full_filename = "{0}xa.meas".format(OPTS.openram_temp)
    else:
        # ngspice/hspice using a .lis file
        full_filename = "{0}{1}.lis".format(OPTS.openram_temp, filename)
    # BUG FIX: use a context manager so the handle is always closed (the
    # old code opened the file and never closed it).
    try:
        with open(full_filename, "r") as f:
            contents = f.read()
    except IOError:
        debug.error(
            "Unable to open spice output file: {0}".format(full_filename), 1)
    # val = re.search(r"{0}\s*=\s*(-?\d+.?\d*\S*)\s+.*".format(key), contents)
    val = re.search(
        r"{0}\s*=\s*(-?\d+.?\d*[e]?[-+]?[0-9]*\S*)\s+.*".format(key), contents)
    if val is not None:
        debug.info(3, "Key = " + key + " Val = " + val.group(1))
        return val.group(1)
    else:
        return "Failed"
def loop_follow(self):
    # Background loop: follow back everyone who follows us but whom we
    # don't yet follow, optionally DM-ing each one a greeting.
    while True:
        try:
            followers = api.followers_ids(me.screen_name)
            friends = api.friends_ids(me.screen_name)
            non_friends = [ friend for friend in followers if friend not in friends ]
            debug.action('Following back {0} supporters...'.format( len(non_friends)))
            for follower in non_friends:
                api.create_friendship(follower)
                self.follows += 1
                debug.alert('Followed back a follower!')
                # NOTE(review): the cap check runs before the optional DM,
                # so the user that hits the cap is followed but never
                # messaged — confirm that's intended.
                if self.follows >= self.max_follows:
                    break
                if self.send_message:
                    api.send_direct_message(screen_name=follower, text=self.message)
                # Throttle between actions to stay under rate limits.
                time.sleep(30)
        except tweepy.TweepError as ex:
            debug.error('Error occured in the follow loop!', ex)
        finally:
            # Wait out the 15-minute API rate-limit window each pass.
            time.sleep(60 * 15)
def setup_paths():
    """ Set up the non-tech related paths. """
    debug.info(2,"Setting up paths...")

    global OPTS

    # BUG FIX: os.environ.get returns None for an unset variable and
    # abspath(None) raises TypeError; the old bare `except:` hid the real
    # cause (and would also swallow KeyboardInterrupt). Check explicitly.
    openram_home = os.environ.get("OPENRAM_HOME")
    if openram_home is None:
        debug.error("$OPENRAM_HOME is not properly defined.",1)
    OPENRAM_HOME = os.path.abspath(openram_home)
    debug.check(os.path.isdir(OPENRAM_HOME),"$OPENRAM_HOME does not exist: {0}".format(OPENRAM_HOME))

    # Add all of the subdirs to the python path
    # These subdirs are modules and don't need to be added: characterizer, verify
    subdirlist = [ item for item in os.listdir(OPENRAM_HOME) if os.path.isdir(os.path.join(OPENRAM_HOME, item)) ]
    for subdir in subdirlist:
        full_path = "{0}/{1}".format(OPENRAM_HOME,subdir)
        debug.check(os.path.isdir(full_path), "$OPENRAM_HOME/{0} does not exist: {1}".format(subdir,full_path))
        sys.path.append("{0}".format(full_path))

    # Normalize the temp dir so later "{0}file" formatting works.
    if not OPTS.openram_temp.endswith('/'):
        OPTS.openram_temp += "/"
    debug.info(1, "Temporary files saved in " + OPTS.openram_temp)
def refresh_games(self):
    """
    Refresh the current list of games of the day.

    self.games : List of all the games happening today
    self.pref_games : List of games which the preferred teams are ordered by priority.

    If the user wants to rotate only his preferred games between the periods and
    during the day, save those only. Lastly, if it is not an off day for the pref
    teams, reorder the list in order of preferred teams and load the first game
    as the main event.
    """
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            self.games = nhl_api.day(self.year, self.month, self.day)
            self.pref_games = filter_list_of_games(self.games, self.pref_teams)
            if self.config.preferred_teams_only and self.pref_teams:
                self.games = self.pref_games

            if not self.is_pref_team_offday():
                self.pref_games = prioritize_pref_games(
                    self.pref_games, self.pref_teams)
                self.check_all_pref_games_final()
                self.current_game_id = self.pref_games[
                    self.current_game_index].game_id

                # Remove the current game id (Main event) form the list of games.
                if self.config.live_mode:
                    game_list = []
                    for game in self.games:
                        if game.game_id != self.current_game_id:
                            game_list.append(game)
                    self.games = game_list

            self.network_issues = False
            break
        except ValueError as error_message:
            self.network_issues = True
            debug.error(
                "Failed to refresh the list of games. {} attempt remaining."
                .format(attempts_remaining))
            debug.error(error_message)
            attempts_remaining -= 1
            sleep(NETWORK_RETRY_SLEEP_TIME)
        except IndexError as error_message:
            debug.error(error_message)
            debug.info(
                "All preferred games are Final, showing the top preferred game"
            )
            self.current_game_index = 0
            self.all_pref_games_final = True
            # NOTE(review): recursive retry — if pref_games stays empty the
            # IndexError recurs and this recursion is unbounded; confirm a
            # depth guard isn't needed.
            self.refresh_games()
def sp_write_file(self, sp, usedMODS):
    """ Recursive spice subcircuit write;
    Writes the spice subcircuit from the library or the dynamically
    generated one.

    sp -- open file-like object to write to
    usedMODS -- accumulator of already-emitted modules (dedup across the
        recursion)
    """
    if not self.spice:
        # recursively write the modules
        for i in self.mods:
            if self.contains(i, usedMODS):
                continue
            usedMODS.append(i)
            i.sp_write_file(sp, usedMODS)

        # Nothing to emit for wrapper modules with no instances or pins.
        if len(self.insts) == 0:
            return
        if self.pins == []:
            return

        # write out the first spice line (the subcircuit)
        sp.write("\n.SUBCKT {0} {1}\n".format(self.name,
                                              " ".join(self.pins)))
        # every instance must have a set of connections, even if it is empty.
        if len(self.insts) != len(self.conns):
            debug.error(
                "{0} : Not all instance pins ({1}) are connected ({2}).".
                format(self.name, len(self.insts), len(self.conns)))
            debug.error("Instances: \n" + str(self.insts))
            debug.error("-----")
            debug.error("Connections: \n" + str(self.conns), 1)

        for i in range(len(self.insts)):
            # we don't need to output connections of empty instances.
            # these are wires and paths
            if self.conns[i] == []:
                continue
            sp.write("X{0} {1} {2}\n".format(self.insts[i].name,
                                             " ".join(self.conns[i]),
                                             self.insts[i].mod.name))
        sp.write(".ENDS {0}\n".format(self.name))
    else:
        # write the subcircuit itself
        # Including the file path makes the unit test fail for other users.
        #if os.path.isfile(self.sp_file):
        #    sp.write("\n* {0}\n".format(self.sp_file))
        sp.write("\n".join(self.spice))
        sp.write("\n")
def import_tech():
    # Import the technology setup module named by OPTS.tech_name after
    # adding $OPENRAM_TECH/setup_scripts to sys.path.
    global OPTS

    debug.info(2,"Importing technology: " + OPTS.tech_name)

    # Fall back to the tech name from the loaded config file.
    if OPTS.tech_name == "":
        OPTS.tech_name = OPTS.config.tech_name
    # environment variable should point to the technology dir
    OPTS.openram_tech = os.path.abspath(os.environ.get("OPENRAM_TECH")) + "/" + OPTS.tech_name
    if not OPTS.openram_tech.endswith('/'):
        OPTS.openram_tech += "/"
    debug.info(1, "Technology path is " + OPTS.openram_tech)

    try:
        filename = "setup_openram_{0}".format(OPTS.tech_name)
        # we assume that the setup scripts (and tech dirs) are located at the
        # same level as the compiler itself, probably not a good idea though.
        path = "{0}/setup_scripts".format(os.environ.get("OPENRAM_TECH"))
        sys.path.append(os.path.abspath(path))
        __import__(filename)
    except ImportError:
        debug.error("Nonexistent technology_setup_file: {0}.py".format(filename))
        sys.exit(1)
def check(self, mode):
    # check profile config
    self.aProfile = []
    # stop if selected (mode) profile are disabled
    if mode != '0' and 'false' in sProfile[int(mode)]:
        debug.notify(ADDON_LANG(32103) + ' (' + sName[int(mode)] + ')')
        debug.notice(
            '[CHECK]: This profile is disabled in addon settings - ' +
            str(mode))
        return False
    # check if profile have settings file
    for key in sProfile:
        if 'true' in sProfile[key]:
            if not xbmcvfs.exists(ADDON_PATH_DATA + 'profile' + str(key) + '.json'):
                debug.notify(
                    ADDON_LANG(32101) + ' ' + str(key) + ' (' + sName[key] + ')')
                debug.error('[PROFILE FILE]: not exist for profile - ' +
                            str(key))
                return False
            self.aProfile.append(str(key))
    # NOTE(review): falls through returning None (falsy) on success, so a
    # caller truth-testing the result treats success like failure — confirm
    # whether a trailing `return True` is missing.
def create_test_cycles(self): """Returns a list of key time-points [ns] of the waveform (each rising edge) of the cycles to do a timing evaluation. The last time is the end of the simulation and does not need a rising edge.""" #Using this requires setting at least one port to target for simulation. if len(self.targ_write_ports) == 0 and len(self.targ_read_ports) == 0: debug.error("No port selected for characterization.", 1) self.set_stimulus_variables() #Get any available read/write port in case only a single write or read ports is being characterized. cur_read_port = self.get_available_port(get_read_port=True) cur_write_port = self.get_available_port(get_read_port=False) debug.check(cur_read_port != None, "Characterizer requires at least 1 read port") debug.check(cur_write_port != None, "Characterizer requires at least 1 write port") #Create test cycles for specified target ports. write_pos = 0 read_pos = 0 while True: #Exit when all ports have been characterized if write_pos >= len(self.targ_write_ports) and read_pos >= len( self.targ_read_ports): break #Select new write and/or read ports for the next cycle. Use previous port if none remaining. if write_pos < len(self.targ_write_ports): cur_write_port = self.targ_write_ports[write_pos] write_pos += 1 if read_pos < len(self.targ_read_ports): cur_read_port = self.targ_read_ports[read_pos] read_pos += 1 #Add test cycle of read/write port pair. One port could have been used already, but the other has not. self.gen_test_cycles_one_port(cur_read_port, cur_write_port)
def refresh_playoff(self):
    """
    Refresh the playoff bracket state from the NHL API.

    Currently the series ticker request all the games of a series everytime
    its asked to load on screen. This create a lot of delay between showing
    each series.

    TODO: Add a refresh function to the Series object instead and trigger a
    refresh only at specific time in the renderer. (End of a game, new day)
    """
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            # Get the playoff data from the nhl api
            self.playoffs = nhl_api.playoff(self.status.season_id)
            # Check if there is any rounds available and grab the most
            # recent one available.
            if self.playoffs.rounds:
                self.current_round = self.playoffs.rounds[str(
                    self.playoffs.default_round)]
                self.current_round_name = self.current_round.names.name
                if self.current_round_name == "Stanley Cup Qualifier":
                    self.current_round_name = "Qualifier"

            # Round 4 is the Stanley Cup final.
            if self.playoffs.default_round == 4:
                self.stanleycup_round = True

            debug.info("defaultround number is : {}".format(
                self.playoffs.default_round))

            try:
                # Grab the series of the current round of playoff.
                self.series = self.current_round.series
                # Check if prefered team are part of the current round of playoff
                self.pref_series = prioritize_pref_series(
                    filter_list_of_series(self.series, self.pref_teams),
                    self.pref_teams)
                # If the user as set to show his favorite teams in the seriesticker
                if self.config.seriesticker_preferred_teams_only and self.pref_series:
                    self.series = self.pref_series
            except AttributeError:
                debug.error(
                    "The {} Season playoff has not started yet or is unavailable"
                    .format(self.playoffs.season))
                self.isPlayoff = False
                break

            self.isPlayoff = True
            break
        except ValueError as error_message:
            self.network_issues = True
            debug.error(
                "Failed to refresh the list of Series. {} attempt remaining."
                .format(attempts_remaining))
            debug.error(error_message)
            attempts_remaining -= 1
            sleep(NETWORK_RETRY_SLEEP_TIME)
def check_golden_data(self, data, golden_data, error_tolerance=1e-2):
    """
    This function goes through two dictionaries, key by key and compares
    each item. It uses relative comparisons for the items and returns false
    if there is a mismatch.
    """
    # Check each result
    data_matches = True
    for k in data.keys():
        if type(data[k]) == list:
            for i in range(len(data[k])):
                if not self.isclose(k, data[k][i], golden_data[k][i], error_tolerance):
                    data_matches = False
        else:
            # BUG FIX: the old code called self.isclose here but discarded
            # the result, so a mismatch on any non-list value never failed
            # the check.
            if not self.isclose(k, data[k], golden_data[k], error_tolerance):
                data_matches = False
    if not data_matches:
        import pprint
        data_string = pprint.pformat(data)
        debug.error(
            "Results exceeded {:.1f}% tolerance compared to golden results:\n"
            .format(error_tolerance * 100) + data_string)
    return data_matches
def run_drc(cell_name, gds_name, extract=True, final_verification=False):
    """Run DRC check on a cell which is implemented in gds_name.

    Returns the number of DRC errors found by Magic (0 on a clean run).
    """
    global num_drc_runs
    num_drc_runs += 1

    # Copy file to local dir if it isn't already
    if os.path.dirname(gds_name) != OPTS.openram_temp.rstrip('/'):
        shutil.copy(gds_name, OPTS.openram_temp)

    # Copy .magicrc file into temp dir
    magic_file = OPTS.openram_tech + "mag_lib/.magicrc"
    if os.path.exists(magic_file):
        shutil.copy(magic_file, OPTS.openram_temp)
    else:
        debug.warning("Could not locate .magicrc file: {}".format(magic_file))

    write_magic_script(cell_name, extract, final_verification)

    (outfile, errfile, resultsfile) = run_script(cell_name, "drc")

    # Check the result for these lines in the summary:
    # Total DRC errors found: 0
    # The count is shown in this format:
    # Cell replica_cell_6t has 3 error tiles.
    # Cell tri_gate_array has 8 error tiles.
    # etc.
    try:
        f = open(outfile, "r")
    except FileNotFoundError:
        debug.error(
            "Unable to load DRC results file from {}. Is magic set up?".format(
                outfile), 1)

    results = f.readlines()
    f.close()
    errors = 1
    # those lines should be the last 3
    for line in results:
        if "Total DRC errors found:" in line:
            errors = int(re.split(": ", line)[1])
            break
    else:
        # for/else: only runs when the summary line was never found.
        debug.error("Unable to find the total error line in Magic output.", 1)

    # always display this summary
    if errors > 0:
        for line in results:
            if "error tiles" in line:
                debug.info(1, line.rstrip("\n"))
        debug.error("DRC Errors {0}\t{1}".format(cell_name, errors))
    else:
        debug.info(1, "DRC Errors {0}\t{1}".format(cell_name, errors))

    return errors
def check_arguments(self):
    """Checks if arguments given for write_stimulus() meets requirements"""
    # The probe address must be a binary string of exactly addr_size bits.
    try:
        int(self.probe_address, 2)
    except ValueError:
        debug.error("Probe Address is not of binary form: {0}".format(self.probe_address),1)

    if len(self.probe_address) != self.addr_size:
        debug.error("Probe Address's number of bits does not correspond to given SRAM",1)

    # The probe data selects a bit, so it must be an int in [0, word_size].
    valid_data = isinstance(self.probe_data, int) and 0 <= self.probe_data <= self.word_size
    if not valid_data:
        debug.error("Given probe_data is not an integer to specify a data bit",1)
def installer(self):
    # Download and unpack the latest plugin release next to the current
    # install, then disable the old copy by renaming it. Returns True on
    # success, False (with a user-visible error) on any failure.

    # need to add some defensive code around this
    tag_name = self.latest.get("tag_name")
    debug("Installing {}".format(tag_name))

    new_plugin_dir = os.path.join(os.path.dirname(Release.plugin_dir),
                                  "EDMC-Canonn-{}".format(tag_name))

    debug("Checking for pre-existence")
    if os.path.isdir(new_plugin_dir):
        error("Download already exists: {}".format(new_plugin_dir))
        plug.show_error("Canonn upgrade failed")
        return False

    try:
        debug("Downloading new version")
        download = requests.get(
            "https://github.com/canonn-science/EDMC-Canonn/archive/{}.zip".
            format(tag_name),
            stream=True)
        # Python 2 style in-memory zip handling (StringIO).
        z = zipfile.ZipFile(StringIO.StringIO(download.content))
        z.extractall(os.path.dirname(Release.plugin_dir))
    except:
        error("Download failed: {}".format(new_plugin_dir))
        plug.show_error("Canonn upgrade failed")
        return False

    #If we got this far then we have a new plugin so any failures and we will need to delete it
    debug("disable the current plugin")
    try:
        os.rename(Release.plugin_dir,
                  "{}.disabled".format(Release.plugin_dir))
        debug("Renamed {} to {}".format(
            Release.plugin_dir, "{}.disabled".format(Release.plugin_dir)))
    except:
        # Roll back: remove the freshly extracted copy before bailing.
        error("Upgrade failed reverting: {}".format(new_plugin_dir))
        plug.show_error("Canonn upgrade failed")
        shutil.rmtree(new_plugin_dir)
        return False

    if self.rmbackup.get() == 1:
        # Remember the disabled copy so it can be cleaned up later.
        config.set('Canonn:RemoveBackup',
                   "{}.disabled".format(Release.plugin_dir))

    debug("Upgrade complete")
    Release.plugin_dir = new_plugin_dir
    self.installed = True
    return True
def report_status():
    """
    Check for valid arguments and report the info about the SRAM being generated
    """
    # Check if all arguments are integers for bits, size, banks
    if type(OPTS.word_size) != int:
        debug.error("{0} is not an integer in config file.".format(OPTS.word_size))
    if type(OPTS.num_words) != int:
        # Bug fix: this message previously formatted OPTS.sram_size, so a
        # non-integer num_words reported the wrong value.
        debug.error("{0} is not an integer in config file.".format(OPTS.num_words))
    if type(OPTS.num_banks) != int:
        debug.error("{0} is not an integer in config file.".format(OPTS.num_banks))

    if not OPTS.tech_name:
        debug.error("Tech name must be specified in config file.")

    print("Output files are " + OPTS.output_name + ".(sp|gds|v|lib|lef)")
    print("Technology: {0}".format(OPTS.tech_name))
    print("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size,
                                                          OPTS.num_words,
                                                          OPTS.num_banks))
    if not OPTS.check_lvsdrc:
        print("DRC/LVS/PEX checking is disabled.")
def run(self):
    """Fetch the command whitelist and hand the result to the callback.

    On any HTTP failure the error is logged and an empty list is passed
    to the callback instead.
    """
    debug("getting whiteList")
    url = "https://us-central1-canonn-api-236217.cloudfunctions.net/whitelist"
    response = requests.get(url)
    if response.status_code == requests.codes.ok:
        payload = response.json()
    else:
        # Log the failure and fall back to an empty whitelist.
        error("whiteListGetter {} ".format(url))
        error(response.status_code)
        error(response.json())
        payload = []
    self.callback(payload)
def run(self):
    """Submit a kill event to the Canonn submitKills cloud function.

    Skipped entirely while playing a beta build; HTTP failures are only
    logged.
    """
    # don't bother sending beta
    if self.is_beta != 'N':
        return
    debug("sending gSubmitKill")
    url = (
        "https://us-central1-canonn-api-236217.cloudfunctions.net/submitKills"
        "?cmdrName={}&systemName={}&isBeta={}&reward={}&victimFaction={}"
    ).format(self.cmdr, self.system, self.is_beta, self.reward, self.victimFaction)
    response = requests.get(url)
    if response.status_code != requests.codes.ok:
        error("gSubmitKills {} ".format(url))
        error(response.status_code)
        error(response.json())
def __game_index_for(self, team_name):
    """Find the index in self.games of the game to show for team_name.

    Prefers a game that is currently live; otherwise falls back to the
    team's first game of the day. Retries up to 5 times on network
    failures, toggling self.network_issues accordingly.

    NOTE(review): team_index is computed but never returned — this looks
    like a missing ``return team_index`` at the end; confirm against the
    callers.
    """
    team_index = 0
    # Indices of all games today in which team_name plays (home or away).
    team_idxs = [i for i, game in enumerate(self.games) if team_name in [game.away_team, game.home_team]]
    if len(team_idxs) > 0:
        attempts_remaining = 5
        while attempts_remaining > 0:
            try:
                # First live game if any, else the team's first game.
                team_index = next((i for i in team_idxs if Status.is_live(mlbgame.overview(self.games[i].game_id))), team_idxs[0])
                self.network_issues = False
                break
            except URLError, e:
                self.network_issues = True
                debug.error("Networking Error while refreshing live game status of {}. {} retries remaining.".format(team_name,attempts_remaining))
                debug.error("URLError: {}".format(e.reason))
                attempts_remaining -= 1
                time.sleep(NETWORK_RETRY_SLEEP_TIME)
            except ValueError:
                self.network_issues = True
                debug.error("Value Error while refreshing live game status of {}. {} retries remaining.".format(team_name,attempts_remaining))
                # NOTE(review): logs current_game(), not the game whose
                # overview failed (self.games[i]) — may report the wrong
                # game id; confirm intent.
                debug.error("ValueError: Failed to refresh overview for {}".format(self.current_game().game_id))
                attempts_remaining -= 1
                time.sleep(NETWORK_RETRY_SLEEP_TIME)
def RemoveShipFromFG(fgname, faction, type, numkill=1, landed=0):
    """Remove up to numkill ships of the given type from a flightgroup.

    Scans the flightgroup's save-string record for a matching ship type,
    decrements its counts (erasing the whole record when the last ship of
    that type is removed), updates the FG total, and deletes the FG when
    it reaches zero ships. Returns the number of ships removed, or 0 when
    the type is not found / only landed ships remain when a launched one
    was requested.
    """
    key = MakeFGKey(fgname, faction)
    leg = Director.getSaveStringLength(ccp, key)
    debug.debug("Scanning %d units...", leg)
    # Ship records start after the header; each record spans
    # PerShipDataSize() save-string slots: [type, count, landed-count, ...].
    for i in range(ShipListOffset() + 1, leg, PerShipDataSize()):
        if (Director.getSaveString(ccp, key, i - 1) == str(type)):
            debug.debug("Removing unit %s", type)
            numships = 0
            numlandedships = 0
            try:
                numships = int(Director.getSaveString(ccp, key, i))
                numlandedships = int(Director.getSaveString(ccp, key, i + 1))
            except:
                debug.error("unable to get savestring %s from FG %s %s %s", i, fgname, faction, type)
            if (numships > numkill):
                # More ships than we are removing: just decrement the count.
                numships -= numkill
                if (numships < numlandedships):
                    if (landed == 0):
                        # Caller wanted a launched ship but only landed ones
                        # remain — fail.
                        debug.debug('trying to remove launched ship %s but all are landed', type)
                        landed = 1
                        return 0 #failur
                Director.putSaveString(ccp, key, i, str(numships))
                if (landed and numlandedships > 0):
                    Director.putSaveString(ccp, key, i + 1, str(numlandedships - numkill))
            else:
                # Removing the last ship(s) of this type: erase the whole
                # record. Erasing at i-1 repeatedly works because each erase
                # shifts the remaining slots down.
                numkill = numships
                numships = 0
                for j in range(i - 1, i + PerShipDataSize() - 1):
                    Director.eraseSaveString(ccp, key, i - 1)
            if (numships >= 0):
                try:
                    # Slot 0 holds the FG-wide total ship count.
                    totalnumships = int(Director.getSaveString(ccp, key, 0))
                    totalnumships -= numkill
                    if (totalnumships >= 0):
                        Director.putSaveString(ccp, key, 0, str(totalnumships))
                        if (totalnumships == 0):
                            debug.debug("Removing %s FG %r", faction, fgname)
                            DeleteFG(fgname, faction)
                    else:
                        debug.error('error...removing too many ships')
                except:
                    debug.error('error, flight record %r corrupt', fgname)
                return numkill
    debug.debug('cannot find ship to delete in %s fg %r', faction, fgname)
    return 0
def refresh_games(self): debug.log("Updating games for {}/{}/{}".format(self.month, self.day, self.year)) urllib.urlcleanup() attempts_remaining = 5 while attempts_remaining > 0: try: current_day = self.day self.set_current_date() all_games = mlbgame.day(self.year, self.month, self.day) if self.config.rotation_only_preferred: self.games = self.__filter_list_of_games( all_games, self.config.preferred_teams) else: self.games = all_games if current_day != self.day: self.current_game_index = self.game_index_for_preferred_team( ) self.games_refresh_time = time.time() self.network_issues = False break except URLError, e: self.network_issues = True debug.error( "Networking error while refreshing the master list of games. {} retries remaining." .format(attempts_remaining)) debug.error("URLError: {}".format(e.reason)) attempts_remaining -= 1 time.sleep(NETWORK_RETRY_SLEEP_TIME) except ValueError: self.network_issues = True debug.error( "Value Error while refreshing master list of games. {} retries remaining." .format(attempts_remaining)) debug.error("ValueError: Failed to refresh list of games") attempts_remaining -= 1 time.sleep(NETWORK_RETRY_SLEEP_TIME)
def deleteUserData():
    """Delete this host's and this user's rows from the tray-server users table.

    Best-effort: each step logs via debug.error and carries on. Returns 0
    when the hostname cannot be determined, otherwise None.
    """
    dbcon = dbTrayServer.dbTray()
    try:
        hostname = socket.gethostname()
    except:
        debug.error(sys.exc_info())
        return (0)
    try:
        # NOTE(review): SQL is built by string concatenation. Hostname is
        # local data, but parameterized queries would be safer if the
        # dbTray execute API supports them — confirm.
        dbcon.execute("delete from users where host=\"" + hostname + "\"")
    except:
        debug.error(sys.exc_info())
    try:
        # NOTE(review): `username` is not defined in this function —
        # presumably a module-level global; verify it is set before this
        # is called, otherwise this branch always hits the except.
        dbcon.execute("delete from users where user=\"" + username + "\"")
    except:
        debug.error(sys.exc_info())
def refresh_games(self):
    """Refresh the master list of NFL games, retrying on errors.

    Applies the preferred-team filter (all preferred teams, or just the
    first when rotation is disabled). Sets self.network_issues while
    retries are failing, and advances to the next game if all retries
    are exhausted while rotation is enabled.
    """
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            all_games = nflparser.get_all_games()
            if self.config.rotation_only_preferred:
                self.games = self.__filter_list_of_games(
                    all_games, self.config.preferred_teams)
            # if rotation is disabled, only look at the first team in the list of preferred teams
            elif not self.config.rotation_enabled:
                self.games = self.__filter_list_of_games(
                    all_games, [self.config.preferred_teams[0]])
            else:
                self.games = all_games
            self.games_refresh_time = t.time()
            self.network_issues = False
            break
        except ValueError:
            # Bug fix: this handler must come before the generic one —
            # ValueError is a subclass of Exception, so with the original
            # ordering this branch was unreachable dead code.
            self.network_issues = True
            debug.error(
                "Value Error while refreshing master list of games. {} retries remaining."
                .format(attempts_remaining))
            debug.error("ValueError: Failed to refresh list of games")
            attempts_remaining -= 1
            t.sleep(NETWORK_RETRY_SLEEP_TIME)
        except Exception as e:
            self.network_issues = True
            debug.error(
                "Networking error while refreshing the master list of games. {} retries remaining."
                .format(attempts_remaining))
            debug.error("Exception: {}".format(e))
            attempts_remaining -= 1
            t.sleep(NETWORK_RETRY_SLEEP_TIME)

    # If we run out of retries, just move on to the next game
    if attempts_remaining <= 0 and self.config.rotation_enabled:
        self.advance_to_next_game()
def release_pull(self):
    """Fetch the latest release metadata from GitHub into self.latest.

    On HTTP failure, self.latest is left as an empty dict and the error
    is logged.
    """
    self.latest = {}
    r = requests.get(
        "https://api.github.com/repos/canonn-science/EDMC-Canonn/releases/latest"
    )
    if not r.status_code == requests.codes.ok:
        error("Error fetching release from github")
        error(r.status_code)
        error(r.json())
    else:
        # Fix: parse the body only after confirming success. The original
        # called r.json() unconditionally before the status check, so a
        # non-JSON error response raised before the failure was logged
        # (and the error path re-parsed the body a second time).
        self.latest = r.json()
        debug("latest release downloaded")
def run(self):
    """Submit a hyperdiction event to the Canonn submitHD cloud function.

    Builds the query string from commander, system, coordinates and
    timestamp, then fires a GET; failures are only logged.
    """
    debug("sending gSubmitCodex")
    url = "https://us-central1-canonn-api-236217.cloudfunctions.net/submitHD?cmdrName={}".format(
        self.cmdr)
    url = url + "&systemName={}".format(self.system)
    url = url + "&x={}".format(self.x)
    url = url + "&y={}".format(self.y)
    url = url + "&z={}".format(self.z)
    # NOTE(review): this reuses the "&z=" key for eddatetime, clobbering
    # the z coordinate in the query string — almost certainly meant to be
    # a distinct parameter name; confirm the expected name with the
    # submitHD API before changing it.
    url = url + "&z={}".format(self.eddatetime)
    r = requests.get(url)
    if not r.status_code == requests.codes.ok:
        error("gSubmitHD {} ".format(url))
        error(r.status_code)
        error(r.json())
def refresh_overview(self): urllib.urlcleanup() attempts_remaining = 5 while attempts_remaining > 0: try: self.overview = mlbgame.overview(self.current_game().game_id) self.__update_layout_state() self.needs_refresh = False self.print_overview_debug() self.network_issues = False break except URLError, e: self.network_issues = True debug.error("Networking Error while refreshing the current overview. {} retries remaining.".format(attempts_remaining)) debug.error("URLError: {}".format(e.reason)) attempts_remaining -= 1 time.sleep(NETWORK_RETRY_SLEEP_TIME) except ValueError: self.network_issues = True debug.error("Value Error while refreshing current overview. {} retries remaining.".format(attempts_remaining)) debug.error("ValueError: Failed to refresh overview for {}".format(self.current_game().game_id)) attempts_remaining -= 1 time.sleep(NETWORK_RETRY_SLEEP_TIME)
def __get_config(self, base_filename, error=None):
    """Load and return the parsed JSON config named base_filename.json.

    For the main "config" file, additionally validates it against
    config/config.schema.json. Exits the process when the file is
    missing/invalid or fails schema validation.
    """
    # Look and return config.json file
    filename = "{}.json".format(base_filename)
    (reference_config, error) = self.read_json(filename)
    if not reference_config:
        if (error):
            debug.error(error)
        else:
            debug.error(
                "Invalid {} config file. Make sure {} exists in config/".
                format(base_filename, base_filename))
        sys.exit(1)

    if base_filename == "config":
        # Validate against the config.json
        # Fix: these informational messages were logged via debug.error,
        # polluting the error log; use debug.info for non-error status.
        debug.info("INFO: Validating config.json.....")
        conffile = "config/config.json"
        schemafile = "config/config.schema.json"
        confpath = get_file(conffile)
        schemapath = get_file(schemafile)
        (valid, msg) = validateConf(confpath, schemapath)
        if valid:
            debug.info("INFO: config.json passes validation")
        else:
            debug.warning(
                "WARN: config.json fails validation: error: [{0}]".format(
                    msg))
            debug.warning(
                "WARN: Rerun the nhl_setup app to create a valid config.json"
            )
            sys.exit(1)
    return reference_config
def __init__(self, name, inverting=False, fanout=0, size_list=None, height=None, add_wells=True):
    """Create a parameterized driver (buffer/inverter chain).

    Sizing comes from exactly one of `fanout` or `size_list`; an explicit
    size_list cannot be combined with an inverting chain.
    """
    debug.info(1, "creating pdriver {}".format(name))

    self.stage_effort = 3
    self.height = height
    self.inverting = inverting
    self.size_list = size_list
    self.fanout = fanout

    # Argument sanity checks, evaluated in order; each failure is fatal.
    invalid_combos = (
        (not self.size_list and self.fanout == 0,
         "Either fanout or size list must be specified."),
        (bool(self.size_list) and self.fanout != 0,
         "Cannot specify both size_list and fanout."),
        (bool(self.size_list) and self.inverting,
         "Cannot specify both size_list and inverting."),
    )
    for failed, message in invalid_combos:
        if failed:
            debug.error(message, -1)

    # Creates the netlist and layout
    pgate.pgate.__init__(self, name, height, add_wells)
def __init__(self, name, neg_polarity=False, fanout=0, size_list=None, height=None):
    """Create a parameterized driver (buffer/inverter chain).

    Sizing comes from exactly one of `fanout` or `size_list`; an explicit
    size_list cannot be combined with negative polarity.
    """
    debug.info(1, "creating pdriver {}".format(name))

    self.fanout = fanout
    self.size_list = size_list
    self.neg_polarity = neg_polarity
    self.height = height
    self.stage_effort = 3

    # Exactly one sizing source must be provided.
    if not self.size_list and self.fanout == 0:
        debug.error("Either fanout or size list must be specified.", -1)
    # ...and they are mutually exclusive.
    if self.size_list and self.fanout != 0:
        debug.error("Cannot specify both size_list and fanout.", -1)
    # An explicit size list fixes the stage count, so polarity cannot be forced.
    if self.size_list and self.neg_polarity:
        debug.error("Cannot specify both size_list and neg_polarity.", -1)

    # Creates the netlist and layout
    pgate.pgate.__init__(self, name, height)
def refresh_playoff(self):
    """Refresh the NHL playoff bracket and the series to display.

    Grabs the current playoff round from the NHL API, then its series,
    prioritizing the preferred teams' series when configured. Retries up
    to 5 times on ValueError, setting self.network_issues while failing.
    """
    attempts_remaining = 5
    while attempts_remaining > 0:
        try:
            # Get the playoffs data from the nhl api
            self.playoffs = nhl_api.playoff(self.status.season_id)
            # Check if there are any rounds available and grab the most recent one available.
            if self.playoffs.rounds:
                self.current_round = self.playoffs.rounds[str(
                    self.playoffs.default_round)]
                self.current_round_name = self.current_round.names.name
                if self.current_round_name == "Stanley Cup Qualifier":
                    self.current_round_name = "Qualifier"

                debug.info("defaultround number is : {}".format(
                    self.playoffs.default_round))
                try:
                    # Grab the series of the current round of playoff.
                    self.series = self.current_round.series
                    # Check if preferred teams are part of the current round of playoff
                    self.pref_series = prioritize_pref_series(
                        filter_list_of_series(self.series, self.pref_teams),
                        self.pref_teams)
                    # If the user has set to show his favorite teams in the seriesticker
                    if self.config.seriesticker_preferred_teams_only and self.pref_series:
                        self.series = self.pref_series
                except AttributeError:
                    # Fix: previous message read "has to started yet or
                    # unavailable" — corrected wording.
                    debug.error(
                        "The {} Season playoff hasn't started yet or is unavailable"
                        .format(self.playoffs.season))
            break
        except ValueError as error_message:
            self.network_issues = True
            debug.error(
                "Failed to refresh the list of Series. {} attempt remaining."
                .format(attempts_remaining))
            debug.error(error_message)
            attempts_remaining -= 1
            sleep(NETWORK_RETRY_SLEEP_TIME)