def main(argv):
    """Create or update the CloudFormation resource schema (Python 2 script).

    Builds the schema from scraped resource-type documentation, or — with
    --update — refreshes an existing schema file in place.

    @param argv: full argument vector (argv[0] is the program name).
    @return process exit code (0 on success, 2 on usage error).
    """
    parser = argparse.ArgumentParser(description='Create or update cfn resource schema')
    parser.add_argument('--update', action='store_true')
    parser.add_argument('--type', metavar='TYPE',
                        help='Restrict parsing resource type properties only to'
                        ' type TYPE. Example: --type AWS::ApiGateway::RestApi')
    parser.add_argument('dest', nargs='?',
                        help='Write resulting schema into FILE'
                        ' instead of just printing it')
    args = parser.parse_args(argv[1:])
    # Route every requests.get through a disk-backed HTTP cache so repeated
    # runs do not re-fetch the documentation pages.
    sess = CacheControl(requests.Session(), cache=FileCache('.web_cache'))
    requests.get = sess.get
    stage1 = 'resource-stage1.json'
    if args.update:
        if not args.dest:
            print >> sys.stderr, ('Error: if --update is given, `dest` must be'
                                  ' specified too')
            return 2
        # Refresh an existing schema: re-seed its resource_template from stage1.
        stage1_schema = tools.load(stage1)
        resource_schema = tools.load(args.dest)
        resource_schema['definitions']['resource_template'] = \
            stage1_schema['definitions']['resource_template']
    else:
        resource_schema = tools.load(stage1)
    resource_type_names = tools.get_all_resource_type_names()
    tools.update_all_resource_patterns_by_name(
        resource_schema,
        resource_type_names
    )
    # --type narrows processing to one resource type, but only AFTER the
    # pattern update above, which intentionally still sees all names.
    if args.type:
        resource_type_names = [args.type]
    for resource_type_name in resource_type_names:
        print >> sys.stderr, resource_type_name  # progress to stderr, schema to stdout
        resource_properties.set_resource_properties(resource_schema, resource_type_name)
    # The stage-1 template has served its purpose; drop it from the output.
    del resource_schema['definitions']['resource_template']
    all_properties = resource_properties.all_res_properties()
    resource_schema['definitions']['property_types'] = all_properties
    for rpt_name, rpt_schema in all_properties.items():
        print >> sys.stderr, rpt_name
        resource_properties.set_resource_property_type_properties(
            resource_schema, rpt_name
        )
    tweak_resource_schema.apply_all_tweaks(resource_schema)
    if args.dest:
        tools.write(resource_schema, args.dest)
    else:
        print tools.print_(resource_schema)
    return 0
def run(self, combined=False, save=False, kpkname='kpk', path4kpks='./',
        path2nofz=None, **kwargs):
    """Compute the power spectrum P(k) for the catalogues in self.gal/self.ran.

    @param combined:  True -> concatenate all sub-catalogues and use one n(z);
                      False -> one P(k) per sub-catalogue with its own n(z).
    @param save:      persist self.output under path4kpks when True.
    @param kpkname:   base name for the saved file.
    @param path4kpks: output directory.
    @param path2nofz: .npy file produced by the n(z) step (dict with 'znz').
    @param kwargs:    forwarded verbatim to self.helper.
    """
    self.log += '# going to make P(k) while combined:{}\n'.format(combined)
    # .item() unwraps the dict stored inside the 0-d object array that
    # np.save produced (needs allow_pickle=True on newer NumPy — TODO confirm).
    nofzb = np.load(path2nofz).item()
    if combined:
        gal = np.concatenate(self.gal)
        ran = np.concatenate(self.ran)
        # Spline n(z) over the combined redshift grid.
        nofz = iusp(nofzb['znz'][0], nofzb['znz'][1])
        kpk = self.helper(gal, ran, nofz, **kwargs)
    else:
        kpk = []
        # One spline per sub-catalogue: znz holds per-catalogue (z, nz) lists.
        for i, (gal_i, ran_i) in enumerate(zip(self.gal, self.ran)):
            nofz = iusp(nofzb['znz'][0][i], nofzb['znz'][1][i])
            kpk_i = self.helper(gal_i, ran_i, nofz, **kwargs)
            kpk.append(kpk_i)
    self.output = {}
    self.output['combined'] = combined
    self.output['kpk'] = kpk
    self.output['kwargs'] = kwargs
    if save:
        # e.g. 'kpk_combinedT' / 'kpk_combinedF'
        ouname = kpkname + '_combined' + str(combined)[0]
        self.log += '# {} is written under {}\n'.format(ouname, path4kpks)
        self.output['log'] = self.log
        write(path4kpks, ouname, self.output, fmt='npy')
def spawnTeam(self, ai, size=TEAM_SIZE):
    """Spawn a team of `size` persons for `ai` at a random map position.

    Picks a random leader tile; if the leader spawns, places the remaining
    members on the nearest free tiles by growing search radius. If the
    leader could not spawn, recurses to retry at a new random position.
    """
    (teamX, teamY) = (random.choice(range(len(self.tiles[0]))),
                      random.choice(range(len(self.tiles))))
    leader = self.spawnPerson(ai, teamX, teamY)
    if leader:
        write("Leader "+str((teamX, teamY))+": "+leader.name)
        dist = 1
        visited = [(teamX, teamY)]  # tiles already considered for members
        for i in range(1, size):
            personSpawned = False
            # Grow the search radius from the last successful distance outward;
            # 100 is an upper bound on the search radius.
            for d in range(dist, 100):
                #write(d)
                for x in range(teamX-d, teamX+d+1):
                    relX = x - teamX
                    for y in range(teamY-d, teamY+d+1):
                        relY = y - teamY
                        if (x, y) in visited:
                            continue
                        # Only consider tiles within Euclidean distance d.
                        if math.sqrt(relX*relX + relY*relY) <= d:
                            visited.append((x, y))
                            tile = self.getTile(x, y)
                            #write(math.sqrt(relX*relX + relY*relY))
                            if tile and not tile.isWall:
                                member = self.spawnPerson(ai, x, y)
                                if member:
                                    personSpawned = True
                                    write("- member "+str((x, y))+": "+member.name)
                                    # Next member starts searching at this radius.
                                    dist = d
                                    break
                    if personSpawned:
                        break
                if personSpawned:
                    break
    else:
        # Leader tile was unusable — retry the whole team elsewhere.
        self.spawnTeam(ai, size)
def doPlanScouting():
    """Plan a route toward the nearest unexplored tile on the chart border."""
    border = m.chart.tilesBorder()
    frontier = [pos for pos, tile in border.items() if tile.unknown]
    # Debug dump: each frontier position with its unknown flag.
    write([str((pos, bool(m.chart.getTile(*pos).unknown))) for pos in frontier])

    def originDistance(pos):
        return sqrt(pos[0]**2 + pos[1]**2)

    nearest = min(frontier, key=originDistance)
    write(nearest)
    m.plan = planMoveTo(*nearest)
    return do(FOLLOW_THE_PLAN)
def get_delta_rbs(cats=(1, 2), **kwargs):
    """Measure the density-contrast vs. systematics relation for eBOSS galaxies.

    FIX: the default for `cats` was a mutable list (`[1, 2]`) — replaced with
    an equivalent tuple; it is only iterated, so callers are unaffected.

    @param cats:   catalogue indices to read and concatenate.
    @param kwargs: must provide 'galcolnames', 'tsrmin', 'ssrmin', 'zlim'.
    Writes the jackknife result to disk as 'delta_rbs' (npy).
    """
    # Selection-function map and imaging-systematics map on HEALPix nside=256.
    ranmap = ft.read(
        '/Users/mthecosmologist/analyses/eboss/ebossY1v5_10_7/sector_tsr_ssr_mask_p8_p7.fits'
    )
    irs = ft.read(
        '/Volumes/Mehdi_Passport/NERSC_archive/other_files/dr3.1/heal_rbs_256.fits'
    )
    gal = []
    for i in cats:
        gal.append(
            ft.read(g(str(i)), lower=True, columns=kwargs['galcolnames']))
    gal = np.concatenate(gal)
    # Quality cuts: spectroscopic completeness, redshift window, reliability.
    galmask = (gal['sector_tsr'] > kwargs['tsrmin']) & (gal['sector_ssr'] > kwargs['ssrmin'])
    galmask &= (gal['z'] > kwargs['zlim'][0]) & (gal['z'] < kwargs['zlim'][1])
    galmask &= (gal['z_reliable']) & (~gal['isdupl'])
    gal = gal[galmask]
    # Unit weights for every selected galaxy.
    galm = cat(gal['ra'], gal['dec'], gal['z'], np.ones(gal['ra'].size))
    irs256 = systematic(irs, ranmap)
    ebossrbs = ngalsys(galm, 256)
    ebossrbs.prepare_inputs(irs256, selection_function=ranmap)
    ebossrbs.digitize_ngalsys(np.logspace(1.8, 3, 16))
    ebossrbs.processjack()
    write('/Users/mthecosmologist/analyses/eboss/ebossY1v5_10_7/',
          'delta_rbs', ebossrbs.output, fmt='npy')
def run(self, combined=False, save=False, nofzname='nofz', path4nofz='./', **kwargs):
    """Estimate n(z) for the catalogues in self.gal/self.ran.

    combined=True concatenates all sub-catalogues first; otherwise one
    histogram per sub-catalogue. Results land in self.output['znz'] and are
    optionally written to disk as npy.
    """
    self.log += '# going to make n(z) while combined:{}\n'.format(combined)
    if combined:
        all_gal = np.concatenate(self.gal)
        all_ran = np.concatenate(self.ran)
        z, nz = self.helper(all_gal, all_ran, **kwargs)
        # Drop the final bin edge so z and nz have matching lengths.
        z = z[:-1]
    else:
        z, nz = [], []
        for cat_gal, cat_ran in zip(self.gal, self.ran):
            edges, counts = self.helper(cat_gal, cat_ran, **kwargs)
            z.append(edges[:-1])
            nz.append(counts)
    self.output = {}
    self.output['znz'] = (z, nz)
    self.output['combined'] = combined
    self.output['kwargs'] = kwargs
    if save:
        # e.g. 'nofz_combinedT' / 'nofz_combinedF'
        ouname = nofzname + '_combined' + str(combined)[0]
        self.log += '# {} is written under {}\n'.format(ouname, path4nofz)
        self.output['log'] = self.log
        write(path4nofz, ouname, self.output, fmt='npy')
def execute():
    """Carry out every person's chosen intention for this tick, then bury the dead."""
    global level, persons
    # perform maps each intention constant to the function that executes it.
    perform = performAction(level)
    # Clear tile occupancy/broadcasts before re-placing everyone below.
    for person in persons:
        person.onTile.person = None
        person.onTile.broadcast = None
    # Execute intentions in the fixed priority order defined by ACTION_ORDER.
    for intentions in ACTION_ORDER:
        for person in persons:
            if person.intention in intentions:
                #write(person.name + " does " + INTENTION_DESCRIPTION[person.intention] + ".")
                person.lastAction = person.intention
                perform[person.intention](person)
                person.onTile.person = person
    deaths = []
    for person in persons:
        #write("%s is%s alive" % (person.name, " not"[4*person.isAlive:]))
        if not person.isAlive:
            write(person.name + " has died.")
            deaths.append(person)
            person.onTile.person = None
            # The body (1) plus anything being carried becomes tile resources.
            person.onTile.resources += 1 + person.isCarrying
    # Remove the dead outside the iteration above to avoid mutating while iterating.
    for person in deaths:
        persons.remove(person)
def main(argv):
    """Refresh the Parameter property definitions inside schema.json in place."""
    # Route all requests.get calls through a disk-backed HTTP cache.
    cached_session = CacheControl(requests.Session(), cache=FileCache('.web_cache'))
    requests.get = cached_session.get
    schema = tools.load('schema.json')
    parameter_props = parse_parameters()
    schema['definitions']['Parameter']['properties'] = parameter_props
    tools.write(schema, 'schema.json')
def plan():
    """Encode each person's surroundings as a hex string, feed it to their AI,
    and record the intention/broadcast/memory the AI returns."""
    global level, persons

    def tileData(personX, personY, relTileX, relTileY): #returns data about a tile as an integer
        tile = level.getTile(personX + relTileX, personY + relTileY)
        if not tile:
            # Off-map positions are presented as walls.
            tile = Tile(True)
        tileBytes = [
            ((relTileX % 16) << 4) + (relTileY % 16),
            (tile.isWall << 5) + (tile.hasPerson() << 4) + tile.resources
        ]
        return tileBytes

    def broadcastData(personX, personY, relTileX, relTileY): #returns data about a broadcast as an int
        tile = level.getTile(personX + relTileX, personY + relTileY)
        if not tile or not tile.hasBroadcast():
            return False
        #write(level.getTile(personX, personY).person.name + " should hear some at " + str((personX + relTileX, personY + relTileY)) + " shout '" + tile.broadcast + "'")
        bcBytes = [((relTileX % 16) << 4) | (relTileY % 16)] #0-3 x-pos, 4-7 y-pos
        for c in tile.broadcast:
            bcBytes.append(ord(c) % 256)
        # 0x80 terminates the broadcast record.
        return bcBytes + [0x80]

    for person in persons:
        inputData = "0x"
        # NOTE(review): `<<` binds looser than `+`, so this computes
        # lastAction << (1 + isCarrying), not (lastAction << 1) + isCarrying
        # as the comment suggests — the AI-side parser may compensate; confirm.
        inputData += padHex(person.lastAction << 1 + person.isCarrying)[2:4] #0-6 lastAction, 7 isCarrying
        #write(inputData)
        inputData += "88" #indicates end of personData
        broadcasts = [] #list of positions from which this person can hear broadcasts
        # Shuffle scan order so no AI can rely on tile ordering.
        xs = list(range(-7, 8))
        random.shuffle(xs)
        ys = list(range(-7, 8))
        random.shuffle(ys)
        for x in xs:
            for y in ys:
                if math.sqrt(x*x + y*y) <= VIEW_DISTANCE:
                    nextTileData = tileData(person.x, person.y, x, y)
                    inputData += listHex(nextTileData)[2:]
                    broadcasts.append((x, y)) #broadcasts from (x, y) can be heard by this person
        #write(inputData)
        inputData += "88" #indicates end of tiles
        random.shuffle(broadcasts)
        for pos in broadcasts:
            nextBroadcastData = broadcastData(person.x, person.y, pos[0], pos[1])
            if nextBroadcastData:
                inputData += listHex(nextBroadcastData)[2:]
        #write(inputData)
        inputData += "88" #indicates end of broadcasts
        write(person.name + ":", indent=2)
        setWriteIndent(3)
        person.broadcast = None
        (person.intention, variable, person.memory) = person.ai.think(inputData, person.memory)
        if person.intention is BROADCAST:
            person.broadcast = str(variable)
    # Restore the outer indent once every person has planned.
    setWriteIndent(1)
    write()
def run(ticks=10):
    """Drive the simulation for `ticks` steps, pausing for enter between steps.

    A falsy `ticks` (0/None) runs until the user types anything before enter.
    """
    initialize()
    setWriteIndent(1)
    elapsed = 0
    while not ticks or elapsed < ticks:
        prompt = "\nPress enter for tick " + str(elapsed + 1) + "\n"
        # Any non-empty input aborts the run.
        if input(prompt):
            break
        write("\nTICK " + str(elapsed + 1) + ":\n\n", indent=0)
        tick()
        elapsed += 1
    write()
def _main():
    """Open one browser tab per URL, then poll each tab forever and append the
    scraped results to a timestamped xlsx file."""
    # Extract bare hostnames from the URLs.
    # NOTE(review): `names` is never used below — possibly leftover; confirm.
    names = list(
        map(lambda x: re.findall(r"https:\/\/(?:www\.)?(.*?)\/", x)[0], URLS))
    ctime = datetime.now().strftime("%Y-%m-%d_%H_%M")
    NAME = "Tennis-" + ctime + ".xlsx"
    FULL_PATH = os.path.join(PATH, "data", NAME)
    log.info("Chrome browser opening")
    # driver.get( URLS[0] )
    for url in URLS:
        driver.execute_script("window.open('{}');".format(url))
        time.sleep(2.5)
    # Close the original blank tab.
    driver.switch_to.window(driver.window_handles[0])
    driver.close()
    # Dismiss any popups in every tab with ESC.
    for e, tab in enumerate(driver.window_handles):
        driver.switch_to.window(tab)
        webdriver.ActionChains(driver).send_keys(Keys.ESCAPE).perform()
        time.sleep(4)
        log.info("%d to go", len(driver.window_handles) - e)
    log.info("Start!!!")
    time.sleep(10)
    # import pdb;pdb.set_trace()
    Urls = []
    init = 1
    while True:
        temp = []
        for tab in driver.window_handles:
            driver.switch_to.window(tab)
            html = driver.page_source
            domain = driver.execute_script("return window.location.hostname")
            # Dispatch to the site-specific parser whose key matches the domain.
            for key, func in func_dict.items():
                if key in domain:
                    result = func(html)
                    temp.append(result)
                    log.info("Domain: %s Result: %s", domain, result)
                    if init:
                        Urls.append(domain)
        if init:
            # First pass: write the header row of domains once.
            write(FULL_PATH, Urls)
            init = 0
        write(FULL_PATH, temp)
        log.info("====== Sleep: %s ======", TIMEOUT)
        time.sleep(TIMEOUT)
def add(self):
    """Handle a Zotero-connector save request: convert the posted items to
    BibTeX, append them to the output file, and echo the JSON back."""
    logger.info("Adding paper from zotero connector")
    rawinput = self.read_input()
    data = json.loads(rawinput.decode('utf8'))
    bib_items = [zotero_data_to_bibtex(item) for item in data['items']]
    tools.write(bib_items, os.path.expanduser(out_file), "braces", False)
    self.send_response(201) # Created
    self.set_zotero_headers()
    # return the JSON data back
    self.wfile.write(rawinput)
def main(argv): sess = CacheControl(requests.Session(), cache=FileCache('.web_cache')) requests.get = sess.get resource_schema = tools.load(sys.argv[1]) apply_all_tweaks(resource_schema) if len(argv) == 3 and argv[2].endswith('json'): tools.write(resource_schema, argv[1]) else: print tools.print_(resource_schema) return 0
def main():
    """Interactively build the douyutv streamlink plugin from a template.

    FIX: the original did `input = raw_input` inside try/except NameError —
    assigning to `input` made it a *local* name for the whole function, so on
    Python 3 the except branch (`pass`) left it unbound and the later call
    raised UnboundLocalError. Bind a differently-named alias instead.
    """
    # Python 2: raw_input exists; Python 3: fall back to the builtin input.
    try:
        read_line = raw_input
    except NameError:
        read_line = input
    print('Block request http://www.douyu.com/lapi/live/getPlay/(roomid)')
    print('Enter getPlay request query:')
    req = read_line()
    if req:
        # A captured getPlay query was pasted: resolve the rtmp URL and fill
        # it into the manual template.
        rtmp_url = getRtmpUrl(req)
        print('### rtmp_url: ', rtmp_url)
        tmpl = tools.read(conf.manual_tmpl_path).replace('<rtmp_url>', rtmp_url)
    else:
        tmpl = tools.read(conf.now_tmpl_path)
    tools.write(conf.douyutv_plug_path, tmpl)
    print('\n\n copy douyutv.py plug ok')
    print('streamlink http://www.douytv.com/cold medium -o ' if False else 'streamlink http://www.douyutv.com/cold medium -o ')
def resolve():
    """Veto illegal intentions (downgrading them to WAIT) and resolve movement
    conflicts in ACTION_ORDER priority."""
    global level, persons
    plannedMovement = {}
    # NOTE(review): isLegal is built while plannedMovement is still empty;
    # this only works if actionIsLegal keeps a live reference to the dict
    # (it is populated below, before the checks run) — confirm.
    isLegal = actionIsLegal(level, plannedMovement)
    for person in persons:
        # Keyed by current position; the list collects movement claims.
        plannedMovement[person.x, person.y] = (person, [])
        write(str(person.x).zfill(2) + ", " + str(person.y).zfill(2)+ ": " + person.name + " intends to " + INTENTION_DESCRIPTION[person.intention] + ".", indent=2)
    for intentions in ACTION_ORDER:
        for person in persons:
            # Anything illegal collapses to WAIT.
            if person.intention in intentions and not isLegal[person.intention](person):
                person.intention = WAIT
        if intentions is MOVE:
            # Movement conflicts (two persons into one tile) resolved here.
            plannedMovement = resolveMoves(plannedMovement)
def run(self, combined=False, save=False, nofzname='nofz', path4nofz='./', **kwargs):
    """Estimate n(z) for the catalogues in self.gal/self.ran.

    FIXES:
    - `self.output['kwargs'] = **kwargs` was a SyntaxError; store the dict.
    - `path4nofz` and `nofzname` were undefined free names in the save branch;
      they are now keyword parameters with defaults, matching the sibling
      run() variant of this method elsewhere in the project.

    @param combined: concatenate all sub-catalogues before estimating.
    @param save:     write self.output to disk as npy when True.
    """
    self.log += '# going to make n(z) while combined:{}\n'.format(combined)
    if combined:
        gal = np.concatenate(self.gal)
        ran = np.concatenate(self.ran)
        z, nz = self.helper(gal, ran, **kwargs)
    else:
        z = []
        nz = []
        for gal_i, ran_i in zip(self.gal, self.ran):
            z_i, nz_i = self.helper(gal_i, ran_i, **kwargs)
            z.append(z_i)
            nz.append(nz_i)
    self.output = {}
    self.output['znz'] = (z, nz)
    self.output['combined'] = combined
    self.output['kwargs'] = kwargs
    if save:
        write(path4nofz, nofzname + '_combined' + str(combined)[0],
              self.output, fmt='npy')
def log(self, out, model_file=None):
    '''
    ClinerModel::log()

    Log training information of model.

    @param out.         Either a filename or file channel to output the log string.
    @param model_file.  A path to optionally identify where the model was saved.

    @return None
    '''
    # Reuse the cached log string when one exists; otherwise build it now.
    log_text = self._log if self._log else self.__log_str(model_file)
    line = '%s\n' % log_text
    # `out` may already be an open channel (same type as sys.stdout);
    # only open it ourselves when it is a filename.
    if isinstance(out, type(sys.stdout)):
        write(out, line)
    else:
        with open(out, 'a') as f:
            write(f, line)
def tilesBorder(self):
    """Return {position: tile} for every neighbour of a known tile
    (includes the known tiles themselves when they neighbour each other)."""
    known = set(self.tiles.keys())
    # Debug dump of the known tiles.
    for pos in known:
        write((pos, bool(self.getTile(*pos))))
    # Union of every known tile's neighbourhood.
    border = set()
    for pos in known:
        border |= set(Tile.neighbours(*pos).keys())
        #print(set(Tile.neighbours(*pos).keys()))
    write()
    # Debug dump of the border.
    for pos in border:
        write((pos, bool(self.getTile(*pos))))
    return {pos: self.getTile(*pos) for pos in border}
def downloadPDF(pdfURL, pdfPath, logPath='', warningPath=''):
    """Download a paper's PDF.

    :param pdfURL: URL of the PDF to download
    :param pdfPath: path where the PDF will be stored
    :param logPath: path of the log file
    :param warningPath: path of the warning-log file
    :return: True when the PDF was stored successfully, False otherwise
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.96 Safari/537.36',
    }
    # No URL to fetch — record a warning and bail out.
    # (idiom fix: `not pdfURL` replaces `pdfURL == '' or pdfURL == None`)
    if not pdfURL:
        warningInfo = 'Failed to download the paper, for pdfURL is none'
        tools.warning(warningInfo, warningPath)
        return False
    # Fetch the PDF; requestsGet signals failure with an empty string.
    pdf = tools.requestsGet(pdfURL, headers=headers, logPath=logPath, warningPath=warningPath)
    if pdf == '':
        warningInfo = 'Failed to download the {0} from the page {1}'.format(
            pdfPath, pdfURL)
        tools.warning(warningInfo, warningPath)
        return False
    logInfo = 'Successfully download {0}'.format(pdfPath)
    tools.log(logInfo, logPath)
    # tools.write returns True on success, False on failure.
    return tools.write(pdf.content, pdfPath, mode='wb', logPath=logPath, warningPath=warningPath)
def save(self):
    """Persist the article as '<title>.txt'; skipped when no title was parsed."""
    if not self.title:
        return
    content = '<title>' + self.title + '</title>\n\n' + self.description + self.text
    tools.write(self.title + '.txt', mode='w', buffer=content)
def getPapers(folderPath, MyWebdriver, isDownload, minTime=20, maxTime=50):
    """Process every paper listed in folderPath/paperInfo.xml: scrape missing
    metadata, optionally download PDFs, and persist progress back to the XML.

    @param folderPath:  per-year folder holding paperInfo.xml and logs.
    @param MyWebdriver: browser driver handed to the downloader.
    @param isDownload:  also download PDFs when True.
    @param minTime/maxTime: bounds of the random politeness sleep (seconds).
    """
    infoPath = os.path.join(folderPath, 'paperInfo.xml')
    # print(infoPath)
    logPath = os.path.join(folderPath, 'log.txt')
    warningPath = os.path.join(folderPath, 'warning.txt')
    with open(infoPath, 'r') as f:
        paperInfo = f.read()
        f.close()  # redundant inside `with`, kept as-is
    xml = etree.fromstring(paperInfo)
    hits = xml.xpath('//hits')[0]
    # Publication info for this year's papers.
    publisher = xml.xpath('/Year/attribute::publisher')[0]
    print('{0}出版信息: {1}'.format(folderPath, publisher))
    # Total number of papers for this year.
    paperNum = int(hits.get('total'))
    print('{0}论文总数:{1}'.format(folderPath, paperNum))
    # Number of papers already processed this year.
    paperCompleted = int(hits.get('completed'))
    print('{0}已处理论文数:{1}'.format(folderPath, paperCompleted))
    # Everything already done — nothing to do.
    if (paperCompleted >= paperNum):
        print('{0} has completed!!!'.format(folderPath))
        return
    # Each <hit>/<info> element describes one paper.
    hitTags = xml.xpath('//hit')
    current = 0
    for hitTag in hitTags:
        infoTag = hitTag.find('./info')
        # Paper title.
        title = infoTag.find('./title').text
        # Only entries with a <pages> tag are actual papers.
        if (infoTag.find('./pages')) is None:
            continue
        else:
            current += 1
        hasSolved = hitTag.get('hasSolved')
        # Paper not processed yet.
        if (hasSolved == None or hasSolved == 'False'):
            # URLs of the pages indexing this paper.
            urls = []
            for eeTag in infoTag.findall('./ee'):
                urls.append(eeTag.text)
            # No index pages: no network work, so only a token sleep.
            if len(urls) == 0:
                waitingTime = 1
            else:
                waitingTime = random.randint(minTime, maxTime)
            print('\n{0}/{1} <{2}>'.format(current, paperNum, title))
            logInfo = warningInfo = '<{0}>\n'.format(title)
            tools.log(logInfo, logPath, hasTime=False, isPrint=False)
            tools.warning(warningInfo, warningPath, hasTime=False, isPrint=False)
            # Scrape the paper's metadata and merge it into the XML.
            infos = downloader.getPaperInfo(urls, title, MyWebdriver, logPath=logPath, warningPath=warningPath)
            tools.updateInfo(infoTag, infos, logPath=logPath, warningPath=warningPath)
            # print(etree.tostring(info))
        # Paper was already processed earlier.
        else:
            print('{0}/{1} <{2}> has solved already'.format(current, paperNum, title))
            continue
        # Download the PDF when requested and not yet fetched.
        hasDownloadPDF = hitTag.get('hasDownloadPDF')
        # print(hasDownloadPDF)
        if isDownload and ((hasDownloadPDF == 'False') or (hasDownloadPDF == None)):
            # Target path of the PDF file.
            pdfPath = os.path.join(folderPath, tools.toFilename(title))
            # print(pdfPath)
            # Skip the download when the file already exists on disk.
            if not os.path.exists(pdfPath):
                pdfURLTag = infoTag.find('./pdfURL')
                pdfURL = pdfURLTag.text if pdfURLTag is not None else ''
                if downloader.downloadPDF(pdfURL, pdfPath, logPath=logPath, warningPath=warningPath):
                    hitTag.set('hasDownloadPDF', 'True')
            else:
                hitTag.set('hasDownloadPDF', 'True')
                print('The paper has download already')
        # Update the completed counter and mark the paper as solved.
        paperCompleted += 1
        hits.set('completed', str(paperCompleted))
        hitTag.set('hasSolved', 'True')
        # If persisting the XML fails, roll back paperCompleted and hasSolved.
        if not tools.write(etree.tostring(xml), infoPath, mode='wb', logPath=logPath, warningPath=warningPath):
            paperCompleted -= 1
            hits.set('completed', str(paperCompleted))
            hitTag.set('hasSolved', 'False')
        print('Sleeping {0}S...'.format(waitingTime))
        tools.log('\n', logPath=logPath, hasTime=False, isPrint=False)
        tools.warning('\n', warningPath=warningPath, hasTime=False, isPrint=False)
        time.sleep(waitingTime)
    # NOTE(review): this chunk starts mid-expression — the opening of the
    # `data` dict literal (presumably `data = {` with a "themes": set() entry)
    # lies outside the visible chunk; the brace below closes that literal.
    "dates": list(tools.date_range()),
    # min starts at +inf / max at 0 so the first row always updates both.
    "duration": {
        "min": float("inf"),
        "max": 0,
    },
    "views": {
        "min": float("inf"),
        "max": 0,
    },
    "comments": {
        "min": float("inf"),
        "max": 0,
    },
    "languages": {
        "min": float("inf"),
        "max": 0,
    },
}
# Single pass over the raw rows: collect themes and track min/max per metric.
for row in tools.read_raw():
    for theme in tools.tags_to_themes(tools.get_tags(row)):
        data["themes"].add(theme)
    for key in ("duration", "views", "comments", "languages"):
        # Metric accessors are looked up by name: tools.get_<key>(row).
        data[key]["min"] = min(data[key]["min"], getattr(tools, "get_" + key)(row))
        data[key]["max"] = max(data[key]["max"], getattr(tools, "get_" + key)(row))
# Sets are not JSON-serializable; convert before writing.
data["themes"] = list(data["themes"])
tools.write(OUTPUT_PATH, data)
def broadcast(person):
    """Announce the person's message and leave it on their current tile."""
    message = person.broadcast
    write("%s says: %s" % (person.name, message))
    person.onTile.broadcast = message
def think(inputData, memory):
    """AI entry point: decode the hex-encoded perception string, update the
    persistent Memory, run the role/state machine, and return
    (intention, "name|message", memory)."""
    global m
    # Load memory
    m = Memory.load(memory)
    # Interpret input
    #print(m.me.name + ": " + inputData)
    (meBytes, tileBytes, bcBytes) = [Input.hexToBytes(hexData) for hexData in Input.split(inputData)][:3]
    (lastAction, isCarrying) = Input.parseMe(meBytes)
    tiles = Input.parseTiles(tileBytes)
    broadcasts = Input.parseBC(bcBytes)
    # Dead-reckon our own position from the last confirmed move.
    m.x += (lastAction is RIGHT) - (lastAction is LEFT)
    m.y += (lastAction is DOWN) - (lastAction is UP)
    m.chart.update(tiles, m.x, m.y)

    # Figure out what to do. do() optionally switches state, then dispatches
    # to the handler for the current state; handlers chain by returning
    # do(NEW_STATE), so one tick can traverse several states.
    def do(setState = None):
        if setState is not None:
            m.state = setState

        def lookupBCs(broadcasts, topic=""):
            # Map sender-name -> payload for every broadcast tagged with `topic`.
            return {bc[0]: bc[1] for bc in [bc2.split("|%s" % topic) for bc2 in broadcasts.values() if bc2.find("|%s" % topic) >= 0]}

        def formatBC(message, topic=""):
            return "%s%s" % (topic, message)

        def doIntroductions():
            # Pick a random rank and shout it; highest rank becomes chief.
            m.state = DISTRIBUTE_ROLES
            rank = random.randint(0, 999)
            m.stateData["rank"] = rank
            return BROADCAST, formatBC(str(rank), "RANK")
        doIntroductions.val = INTRODUCTIONS

        def doDistributeRoles():
            ranks = list(map(int, lookupBCs(broadcasts, "RANK").values()))
            # Not the highest rank: wait to be assigned a role.
            if(max(ranks) != m.stateData["rank"]):
                return do(RECEIVE_ROLE)
            # Tie on the top rank: redo introductions.
            if(ranks.count(max(ranks)) > 1):
                return do(INTRODUCTIONS)
            # We are chief: split the rest into scouts and gatherers.
            names = list(lookupBCs(broadcasts).keys())
            for name in names:
                member = Person(name)
                if(names.index(name) < len(names)/2):
                    member.role = SCOUT
                else:
                    member.role = GATHERER
                m.team[name] = member
            m.me.role = CHIEF
            m.team[m.me.name] = m.me
            broadcast = "&".join(["%s=%d" % (p.name, p.role) for p in m.team.values() if p is not m.me])
            m.state = CREATE_PLAN
            return BROADCAST, formatBC(broadcast, "ROLES")
        doDistributeRoles.val = DISTRIBUTE_ROLES

        def doReceiveRole():
            membersList = list(lookupBCs(broadcasts, "ROLES").values())
            # Exactly one ROLES broadcast expected (from the chief).
            if len(membersList) == 1:
                members = membersList[0].split('&')
                for member in members:
                    (name, roleStr) = member.split('=')
                    m.team[name] = Person(name, int(roleStr))
                if(m.me.name in m.team):
                    m.me = m.team[m.me.name]
                    return do(CREATE_PLAN)
            # Keep shouting until the role assignment arrives.
            return BROADCAST, "Hey!"
        doReceiveRole.val = RECEIVE_ROLE

        def doMoveAround():
            return random.choice(MOVE), ""
        doMoveAround.val = MOVE_AROUND

        def doCreatePlan():
            # Branch into the role-specific planning state.
            return do({
                NONE: DO_NOTHING,
                CHIEF: PLAN_CHIEFING,
                GATHERER: PLAN_GATHERING,
                SCOUT: PLAN_SCOUTING
            }[m.me.role])
        doCreatePlan.val = CREATE_PLAN

        def doFollowPlan():
            if len(m.plan) == 0:
                return do(MOVE_AROUND)
            return m.plan.pop(), ""
        doFollowPlan.val = FOLLOW_THE_PLAN

        def doPlanChiefing():
            m.plan = [PICKUP, LEFT, PUTDOWN, RIGHT]*10 + [WAIT]
            return do(FOLLOW_THE_PLAN)
        doPlanChiefing.val = PLAN_CHIEFING

        def doPlanGathering():
            m.plan = [PICKUP, DOWN, PUTDOWN, UP]*8 + [WAIT]*3
            return do(FOLLOW_THE_PLAN)
        doPlanGathering.val = PLAN_GATHERING

        def doPlanScouting():
            # Head for the nearest unexplored border tile of the chart.
            chartTiles = [pos for (pos, tile) in m.chart.tilesBorder().items() if tile.unknown]
            write(list(map(str,[(pos, bool(m.chart.getTile(*pos).unknown)) for pos in chartTiles])))
            targetTile = min(chartTiles, key = lambda pos: sqrt(pos[0]**2 + pos[1]**2))
            write(targetTile)
            m.plan = planMoveTo(*targetTile)
            return do(FOLLOW_THE_PLAN)
        doPlanScouting.val = PLAN_SCOUTING

        def planMoveTo(x, y):
            # A* search over the charted tiles from (m.x, m.y) to (x, y);
            # returns the list of direction intentions, or [] when unreachable.
            start = (m.x, m.y)
            goal = (x, y)
            closedSet = []
            openSet = [start]
            cameFrom = {}
            def heuristicCostEstimate(node):
                return sqrt((goal[0]-node[0])**2 + (goal[1]-node[1])**2)
            def reconstructPath(node):
                if node in cameFrom:
                    return reconstructPath(cameFrom[node][0]) + [cameFrom[node][1]]
                else:
                    return []
            def neighbourNodes(node):
                # NOTE(review): availableNodes is computed and then discarded —
                # the unconditional `return {}` makes every search fail
                # immediately (planMoveTo always returns []); almost certainly
                # leftover debug. Also `.wall` here vs `.isWall` elsewhere, and
                # iterating the dict below unpacks keys, not (key, value) pairs
                # — confirm intent before fixing.
                availableNodes = {n: d for (n, d) in Tile.neighbours(*node).items() \
                    if (n in m.chart.tiles and \
                    not m.chart.tiles[n].wall and \
                    not m.chart.tiles[n].person) or \
                    n == goal}
                return {}
            gScore = {start: 0}
            fScore = {start: heuristicCostEstimate(start)}
            while len(openSet) > 0:
                current = min(openSet, key = lambda node: fScore[node])
                if current == goal:
                    return reconstructPath(goal)
                openSet.remove(current)
                closedSet.append(current)
                for (neighbour, direction) in neighbourNodes(current):
                    if neighbour in closedSet:
                        continue
                    tentativeGScore = gScore[current] + 1
                    if neighbour not in openSet or tentativeGScore < gScore[neighbour]:
                        cameFrom[neighbour] = (current, direction)
                        gScore[neighbour] = tentativeGScore
                        fScore[neighbour] = gScore[neighbour] + heuristicCostEstimate(neighbour)
                        if neighbour not in openSet:
                            openSet.append(neighbour)
            return []

        # State -> handler dispatch table, keyed by each handler's .val tag.
        doState = {func.val: func for func in [
            doIntroductions,
            doDistributeRoles,
            doReceiveRole,
            doMoveAround,
            doCreatePlan,
            doFollowPlan,
            doPlanChiefing,
            doPlanGathering,
            doPlanScouting
        ]}
        if m.state in doState:
            return doState[m.state]()
        else:
            return WAIT, ""

    (intention, broadcast) = do()
    # Debug summary of this tick's decision.
    write("NAME: " + m.me.name)
    write("ROLE: " + ROLE_NAMES[m.me.role])
    write("STATE: " + str(m.state))
    write("PLAN: " + str(list(map(lambda x: INTENTION_DESCRIPTION[x], m.plan))))
    write(m.chart)
    # Broadcasts are always prefixed with the sender's name.
    return intention, "%s|%s" % (m.me.name, broadcast), m
def __log_str(self, model_file=None):
    '''
    ClinerModel::__log_str()

    Build a string of information about training for the model's log file.

    @param model_file. A path to optionally identify where the model was saved.

    @return  A string of the model's training information
    '''
    # Logging a model only makes sense after it has been trained.
    assert self._is_trained, 'ClinerModel not trained'
    with io.StringIO() as f:
        # Section separator.
        write(f, u'\n')
        write(f, '-' * 40)
        write(f, u'\n\n')
        if model_file:
            write(f, 'model    : %s\n' % os.path.abspath(model_file))
            write(f, u'\n')
        if self._use_lstm:
            write(f, u'modeltype: LSTM\n')
        else:
            write(f, u'modeltype: CRF\n')
        if 'hyperparams' in self._score:
            for name, value in self._score['hyperparams'].items():
                write(f, u'\t%-10s: %s\n' % (name, value))
        write(f, u'\n')
        print_str(f, 'features', self._features)
        write(f, u'\n')
        write(f, u'\n')
        write(f, 'training began: %s\n' % self._time_train_begin)
        write(f, 'training ended: %s\n' % self._time_train_end)
        write(f, u'\n')
        # Per-split evaluation metrics plus confusion matrices.
        write(f, u'scores\n')
        print_vec(f, 'train precision', self._score['train']['precision'])
        print_vec(f, 'train recall   ', self._score['train']['recall'])
        print_vec(f, 'train f1       ', self._score['train']['f1'])
        write(f, self._score['train']['conf'])
        if 'dev' in self._score:
            print_vec(f, u'dev precision   ', self._score['dev']['precision'])
            print_vec(f, u'dev recall      ', self._score['dev']['recall'])
            print_vec(f, u'dev f1          ', self._score['dev']['f1'])
            write(f, self._score['dev']['conf'])
        if 'test' in self._score:
            print_vec(f, u'test precision   ', self._score['test']['precision'])
            print_vec(f, u'test recall      ', self._score['test']['recall'])
            print_vec(f, u'test f1          ', self._score['test']['f1'])
            write(f, self._score['test']['conf'])
        # Per-epoch (or per-iteration) metric history, when recorded.
        if 'history' in self._score:
            for label, vec in self._score['history'].items():
                print_vec(f, '%-16s' % label, vec)
            write(f, u'\n')
        if self._training_files:
            write(f, u'\n')
            write(f, u'Training Files\n')
            # Avoid dumping huge file lists; just report the count instead.
            if len(self._training_files) < 200:
                print_files(f, self._training_files)
            else:
                write(f, '\t%d files\n' % len(self._training_files))
            write(f, u'\n')
        write(f, u'-' * 40)
        write(f, u'\n\n')
        # get output as full string
        contents = f.getvalue()
    return contents
def getTile(self, x, y):
    """Return the charted tile at (x, y), or an 'unknown' placeholder tile."""
    key = (x, y)
    if key not in self.tiles:
        return Tile.unknown()
    tile = self.tiles[key]
    # Debug trace of the tile's unknown flag.
    write(bool(tile.unknown))
    return tile
# Recreate the install directory from scratch.
if os.path.isdir(install_dir):
    shutil.rmtree(install_dir)
os.mkdir(install_dir)

# load header
header = template_dir + os.sep + header_file
if os.path.exists(header) and os.path.isfile(header):
    header = tools.read(header)
else:
    header = u''

# copy stylesheet if existing
css = template_dir + os.sep + style_file
if os.path.exists(css) and os.path.isfile(css):
    text = tools.read(css)
    tools.write(install_dir + os.sep + style_file, text, 'ascii') # must save as ascii because Jave CSS import in JEditorPane does not read utf-8

# locate all lookup files
print "locating markdown files"
folders, files = tools.locate(lookup_files)
number = len(files)
print "found %d files" %number

# create new Markdown parser object
md = markdown.Markdown(extensions = ['footnotes'], output_format = 'html4')

# loop over files
for k in range(number):
    f = files[k]
    d = folders[k]
    # NOTE(review): the body of this loop continues beyond this chunk —
    # the remaining statements are not visible here.
def write(self):
    """Render the board as ASCII art via the module-level write() helper.

    Legend: '#' wall, letter = person (their AI's initial; uppercase when
    carrying), hex digit = resource count, space = empty.
    """
    def glyph(tile):
        # Walls beat people beat resources; empty tiles render as a space.
        if tile.isWall:
            return '#'
        if tile.hasPerson():
            initial = tile.person.ai.ai_name[0]
            return initial.upper() if tile.person.isCarrying else initial.lower()
        if tile.hasResources():
            return str(hex(tile.resources))[2]
        return ' '

    border = '+' + ''.join(['-']*len(self.tiles[0])) + '+'
    write()
    write(border)
    for row in self.tiles:
        write('|', False)
        for tile in row:
            write(glyph(tile), False)
        write('|')
    write(border)
def save(self):
    """Write the scraped article to '<title>.txt' when a title exists."""
    if self.title:
        header = '<title>' + self.title + '</title>\n\n'
        body = self.description + self.text
        tools.write(self.title + '.txt', mode='w', buffer=header + body)
def writeBig(self):
    """Render the board at triple width and double height (3x2 chars per tile).

    Same legend as write(): '#' wall, AI initial for a person (uppercase when
    carrying), hex digit for resources, space when empty.
    """
    def glyph(tile):
        if tile.isWall:
            return '#'
        if tile.hasPerson():
            initial = tile.person.ai.ai_name[0]
            return initial.upper() if tile.person.isCarrying else initial.lower()
        if tile.hasResources():
            return str(hex(tile.resources))[2]
        return ' '

    edge = '+' + ''.join(['-']*(len(self.tiles[0]))*3) + '+'
    write()
    write(edge)
    # Each source row prints twice; each tile spans three columns (i//2, j//3).
    for i in range(len(self.tiles)*2):
        row = self.tiles[i//2]
        write('|', False)
        for j in range(len(row)*3):
            write(glyph(row[j//3]), False)
        write('|')
    write(edge)