def search(self):
    """Read a whitespace-separated query from stdin and print, per term,
    the documents found in the on-disk inverted index, plus timing info."""
    index_output = self._config["DEFAULT"]["output"]
    print(
        "Ingrese las palabras a buscar, separadas por blancos para búsqueda múltiple"
    )
    query_terms = str(input()).split()
    started = time.time()
    try:
        engine = Search(index_output)
        hits = engine.search_in_ii(query_terms)
        for term, matches in hits.items():
            print("---- Resultados de la búsqueda %s ----" % term)
            if matches is None:
                print("No se encontraron resultados.")
            else:
                for match in matches:
                    print(match)
                print("Resultados: %s" % len(matches))
        finished = time.time()
        print("La búsqueda demoró %s segundos" % (finished - started))
    except FileNotFoundError:
        # The inverted index has not been generated yet (menu option 2).
        print(
            "No se encontró en disco el índice invertido, debe generarlo con la opción 2"
        )
def test_opencart_search(browser, base_url, value):
    """Search OpenCart for `value` and check every result mentions it."""
    main_page = Search(browser, base_url)
    main_page.go_to_site()
    main_page.enter_word(value)
    main_page.click_on_the_search_button()
    for result in main_page.search_results():
        assert value in result.text
def search():
    """Look up the query typed in the `e1` entry widget and open the matching
    document (or an error page) in a new browser tab.

    Fixes: the original path literal used "D:\SearchEngine\\Dataset\\" which
    contains the invalid escape sequence \S (kept only by CPython leniency and
    a SyntaxWarning on 3.12+); all backslashes are now escaped explicitly.
    Also treats a None/empty result uniformly instead of calling len(None).
    """
    my_search = Search()
    document_name = my_search.searching(e1.get())
    if document_name:
        url = "D:\\SearchEngine\\Dataset\\" + document_name
        webbrowser.open_new_tab(url)
    else:
        webbrowser.open_new_tab("Error.html")
def testComplex(self):
    """Run a multi-word query and dump the hit count and result titles."""
    engine = Search()
    hits = engine.performSearch('WAS iSeries')
    print(len(hits))
    #self.assertTrue(len(hits)>0)
    #self.assertTrue(len(hits)==1)
    for hit in hits:
        print(hit.get_title())
def get(self):
    """Render the results template with a fixed link text."""
    # NOTE(review): Search is constructed but its result is never used, and it
    # is handed the RequestHandler *class*, not an instance — looks suspicious;
    # confirm against the Search constructor before removing.
    search = Search(webapp2.RequestHandler)
    template_values = {'url_linktext': 'test'}
    template = JINJA_ENVIRONMENT.get_template('/resources/www/results.html')
    self.response.write(template.render(template_values))
def update_required_items(spoiler):
    """Compute the "required" (Way of the Hero) locations for each world and
    store them on the spoiler as a {world id: [locations]} dict.

    A location is required when removing its item makes the game unbeatable
    from the current search state.
    """
    worlds = spoiler.worlds

    # get list of all of the progressive items that can appear in hints
    # all_locations: all progressive items. have to collect from these
    # item_locations: only the ones that should appear as "required"/WotH
    all_locations = [
        location for world in worlds
        for location in world.get_filled_locations()
    ]
    # Set to test inclusion against
    item_locations = {
        location
        for location in all_locations
        if location.item.majoritem and not location.locked
        and location.item.name != 'Triforce Piece'
    }

    # if the playthrough was generated, filter the list of locations to the
    # locations in the playthrough. The required locations is a subset of these
    # locations. Can't use the locations directly since they are location to the
    # copied spoiler world, so must compare via name and world id
    if spoiler.playthrough:
        translate = lambda loc: worlds[loc.world.id].get_location(loc.name)
        spoiler_locations = set(
            map(translate,
                itertools.chain.from_iterable(spoiler.playthrough.values())))
        item_locations &= spoiler_locations
        # Skip even the checks
        _maybe_set_light_arrows = lambda _: None
    else:
        _maybe_set_light_arrows = maybe_set_light_arrows

    required_locations = []
    search = Search([world.state for world in worlds])
    for location in search.iter_reachable_locations(all_locations):
        # Try to remove items one at a time and see if the game is still beatable
        if location in item_locations:
            old_item = location.item
            location.item = None
            # copies state! This is very important as we're in the middle of a search
            # already, but beneficially, has search it can start from
            if not search.can_beat_game():
                required_locations.append(location)
            location.item = old_item
        _maybe_set_light_arrows(location)
        # Collect into the owning world's state so the search can keep expanding.
        search.state_list[location.item.world.id].collect(location.item)

    # Filter the required location to only include location in the world
    required_locations_dict = {}
    for world in worlds:
        required_locations_dict[world.id] = list(
            filter(lambda location: location.world.id == world.id,
                   required_locations))
    spoiler.required_locations = required_locations_dict
def main():
    """Build and run the gate-task state machine: SEARCH then MOVE."""
    rospy.init_node('gate_task')
    machine = smach.StateMachine(outcomes=['done', 'fail'])
    with machine:
        machine.userdata.search_object = 'Gate'
        smach.StateMachine.add('SEARCH', Search(),
                               transitions={'Success': 'done',
                                            'Failure': 'fail'})
        machine.userdata.type = 'gateManuever'
        machine.userdata.args = {}
        smach.StateMachine.add('MOVE', Move())
    machine.execute()
def testInitial(self):
    """Each known query must return exactly the expected number of hits."""
    engine = Search()
    for query, expected_count in (('terrible', 1), ('parrafa', 4)):
        hits = engine.performSearch(query)
        self.assertTrue(len(hits) > 0)
        self.assertTrue(len(hits) == expected_count)
        for hit in hits:
            print(hit.get_title())
def getResult(self, component, message, device_id):
    """Dispatch `message` to the requested component and return self.result.

    "search" runs the interest pipeline and stores hits under "results";
    "location" replaces the whole result dict; anything else is a no-op.
    """
    if component == "search":
        interest = InterestManager(message, device_id)
        engine = Search(interest.new_data)
        print(message)
        print(interest.new_data)
        self.result["results"] = engine.searching()
    elif component == "location":
        self.result = Location_finder(message).find()
    return self.result
def __init__(self, file_name_train, file_name_test, trans=False, data_type=""):
    """Load the Ali train/test data, optionally build tf-idf vectors, and
    prepare the e-commerce search backend.

    trans: when truthy, run the tf-idf transformation after loading.
    """
    self.trunc_size = 20
    self.read_ali_data(file_name_train, file_name_test)
    if trans:
        self.trans_tfidf()
    # Cosine similarity rounded to 3 decimals; the small epsilon in the
    # denominator guards against division by zero for zero-norm vectors.
    self.cx = lambda u, v: round(
        np.inner(u, v) / (LA.norm(u) * LA.norm(v) + 0.0000001), 3)
    self.search = Search("../../data/ecommerce")
    self.search_error = 0
    self.data_type = data_type
def set_entrances_based_rules(worlds):
    """Forbid the tunic shop items in shops a player can only reach as child.

    Runs a search with the complete item pool collected, then, for each shop
    location not reachable as adult, forbids Buy Goron/Zora Tunic.
    """
    # Use the states with all items available in the pools for this seed
    complete_itempool = [
        item for world in worlds
        for item in world.get_itempool_with_dungeon_items()
    ]
    search = Search([world.state for world in worlds])
    search.collect_all(complete_itempool)
    search.collect_locations()

    for world in worlds:
        # If All Locations Reachable is off there is nothing to restrict.
        if world.check_beatable_only:
            continue
        for location in world.get_locations():
            if location.type != 'Shop':
                continue
            # Prevent shops only ever reachable as child from containing
            # Buy Goron Tunic and Buy Zora Tunic items.
            if not search.can_reach(location.parent_region, age='adult'):
                forbid_item(location, 'Buy Goron Tunic')
                forbid_item(location, 'Buy Zora Tunic')
def FinalPresentation(Query):
    """Rank the URLs returned by Search for `Query`, fetch each page, and
    print its first <h1> heading followed by the URL.

    Fixes: the original opened "temp.html" for reading without ever closing
    it, leaking one file handle per result URL; both opens now use context
    managers.
    """
    D = Search.Search(Query)
    # Highest-ranked URLs first (D maps url -> rank score).
    Sort_based_on_ranks = sorted(D, key=D.get, reverse=True)
    for key in Sort_based_on_ranks:
        url = key
        code = requests.get(url, stream=True)
        with open("temp.html", "wb") as html:
            html.write(code.content)
        # was: data = open("temp.html", 'r') with no matching close()
        with open("temp.html", 'r') as data:
            soup = BeautifulSoup(data, 'html.parser')
        print(soup.h1.get_text() + "\n" + url + "\n\n\n")
    if len(Sort_based_on_ranks) == 0:
        print("I'm sorry! The webpages included in the project does not have any related information")
def test(self):
    """Search the index, then recover and print each hit's source fragment."""
    # run indexing process
    #indexer = Indexer()
    #indexer.run()
    #print("index completed")

    # perform search
    engine = Search()
    hits = engine.performSearch('WAS iSeries')
    print('hists: ' + str(len(hits)))
    self.assertTrue(len(hits))

    # recover fragments
    recoverer = FragmentRecover()
    for hit in hits:
        print(hit)
        fragment = recoverer.retrieve(hit.get_path(), hit.get_order())
        print(fragment.get_source_file())
        print(fragment.get_order())
        print(fragment.get_text())
        self.assertTrue(fragment.get_text())
def main():
    """Event loop for the search GUI: run a regular or random article search
    and open the found article in the browser."""
    engine = Search()
    height, width = 600, 800
    g = window(width, height)
    while True:
        event, values = g.window.read()
        # See if user wants to quit or window was closed
        if event == sg.WINDOW_CLOSED:
            break
        if event == 'Search':
            result = engine.reg_Search(values[0])
        elif event == 'Random Search':
            result = engine.search_rand()
        else:
            # Ignore any other event.
            continue
        if result is None:
            print("Article not found")
        else:
            print("The article is", result)
            webbrowser.open(result)
from Analyse import Analyser
from Search import Search
from Article import Article

# Scrape CNKI result pages (authenticated via the cookie string below),
# collect each article's summary into one text file, then run the analyser
# over it. Arguments to Search: session cookies, result-page URL template
# (curpage is filled via {}), and the number of pages to crawl.
search = Search(
    """cnkiUserKey=37077a91-56b1-e799-a114-60fa62079318; Ecp_ClientId=5191028193401977778; RsPerPage=20; amid=348964eb-d453-43e3-9711-e86275d43b31; UM_distinctid=16e31049dfb23b-0f39744427f5db-b363e65-144000-16e31049dfc58d; Hm_lvt_6e967eb120601ea41b9d312166416aa6=1573027077,1573262387; LID=WEEvREcwSlJHSldRa1FhdXNXaEhoOGhSL2kwK1J2aFJEakc2UzhWRjFxQT0=$9A4hF_YAuvQ5obgVAqNKPCYcEjKensW4IQMovwHtwkF4VYPoHbKxJw!!; ASP.NET_SessionId=dj0jzj0pzhvqcjstare1xoii; SID_kns=123119; SID_klogin=125141; SID_crrs=125134; KNS_SortType=; Ecp_session=1; _pk_ref=%5B%22%22%2C%22%22%2C1573630944%2C%22https%3A%2F%2Fwww.cnki.net%2F%22%5D; _pk_ses=*; SID_kns_new=123116; __lfcc=1; SID_krsnew=125133""",
    """https://kns.cnki.net/kns/brief/brief.aspx?curpage={}&RecordsPerPage=20&QueryID=5&ID=&turnpage=1&tpagemode=L&dbPrefix=SCDB&Fields=&DisplayMode=listmode&PageName=ASP.brief_default_result_aspx&ctl=746420fc-66a8-4f48-bfb2-dba1c939b543&Param=NVSM%e5%85%b3%e9%94%ae%e8%af%8d+%3d+%27%e5%ae%b6%e5%9b%bd%e6%83%85%e6%80%80%27&isinEn=1&""",
    500)
Links = search.getEnableLink()
with open("Summary_Nation.txt", 'w', encoding='utf-8') as f:
    outPut = ''
    for link in Links:
        art = Article(link)
        # Accumulate every summary into one string.
        outPut = outPut + art.getSummary()
    # NOTE(review): in the whitespace-mangled original it is ambiguous whether
    # this write sat inside the loop (which would duplicate earlier summaries
    # on every iteration); written once after the loop here — confirm intent.
    f.write(outPut)
an = Analyser("Summary_Nation.txt", "Nation_words.txt", "NationSql",
              "NationCloud.html")
# an = Analyser("Summary_Fans.txt", "Fans_words.txt", "FansSql", "FansCloud.html")
an.analyse("(seq > 15)")
def create_playthrough(spoiler):
    """Build the spoiler playthrough: minimal collection spheres of locations
    (and, with entrance shuffle, entrances) needed to beat the game.

    Works on a copy of the worlds; mutates spoiler.playthrough and, when
    entrance shuffle is on, spoiler.entrance_playthrough.
    """
    worlds = spoiler.worlds
    if worlds[0].check_beatable_only and not State.can_beat_game(
            [world.state for world in worlds]):
        raise RuntimeError('Uncopied is broken too.')
    # create a copy as we will modify it
    old_worlds = worlds
    worlds = copy_worlds(worlds)
    # if we only check for beatable, we can do this sanity check first before writing down spheres
    if worlds[0].check_beatable_only and not State.can_beat_game(
            [world.state for world in worlds]):
        raise RuntimeError(
            'Cannot beat game. Something went terribly wrong here!')
    search = RewindableSearch([world.state for world in worlds])
    # Get all item locations in the worlds
    item_locations = search.progression_locations()
    # Omit certain items from the playthrough
    internal_locations = {
        location
        for location in item_locations if location.internal
    }
    # Generate a list of spheres by iterating over reachable locations without collecting as we go.
    # Collecting every item in one sphere means that every item
    # in the next sphere is collectable. Will contain every reachable item this way.
    logger = logging.getLogger('')
    logger.debug('Building up collection spheres.')
    collection_spheres = []
    entrance_spheres = []
    remaining_entrances = set(entrance for world in worlds
                              for entrance in world.get_shuffled_entrances())
    while True:
        search.checkpoint()
        # Not collecting while the generator runs means we only get one sphere at a time
        # Otherwise, an item we collect could influence later item collection in the same sphere
        collected = list(search.iter_reachable_locations(item_locations))
        if not collected:
            break
        # Gather the new entrances before collecting items.
        collection_spheres.append(collected)
        accessed_entrances = set(
            filter(search.spot_access, remaining_entrances))
        entrance_spheres.append(accessed_entrances)
        remaining_entrances -= accessed_entrances
        for location in collected:
            # Collect the item for the state world it is for
            search.state_list[location.item.world.id].collect(location.item)
    logger.info('Collected %d spheres', len(collection_spheres))

    # Reduce each sphere in reverse order, by checking if the game is beatable
    # when we remove the item. We do this to make sure that progressive items
    # like bow and slingshot appear as early as possible rather than as late as possible.
    required_locations = []
    for sphere in reversed(collection_spheres):
        for location in sphere:
            # we remove the item at location and check if the game is still beatable in case the item could be required
            old_item = location.item

            # Uncollect the item and location.
            search.state_list[old_item.world.id].remove(old_item)
            search.unvisit(location)

            # Generic events might show up or not, as usual, but since we don't
            # show them in the final output, might as well skip over them. We'll
            # still need them in the final pass, so make sure to include them.
            if location.internal:
                required_locations.append(location)
                continue

            location.item = None

            # An item can only be required if it isn't already obtained or if it's progressive
            if search.state_list[old_item.world.id].item_count(
                    old_item.name) < old_item.special.get('progressive', 1):
                # Test whether the game is still beatable from here.
                logger.debug('Checking if %s is required to beat the game.',
                             old_item.name)
                if not search.can_beat_game():
                    # still required, so reset the item
                    location.item = old_item
                    required_locations.append(location)

    # Reduce each entrance sphere in reverse order, by checking if the game is beatable when we disconnect the entrance.
    required_entrances = []
    for sphere in reversed(entrance_spheres):
        for entrance in sphere:
            # we disconnect the entrance and check if the game is still beatable
            old_connected_region = entrance.disconnect()

            # we use a new search to ensure the disconnected entrance is no longer used
            sub_search = Search([world.state for world in worlds])

            # Test whether the game is still beatable from here.
            logger.debug(
                'Checking if reaching %s, through %s, is required to beat the game.',
                old_connected_region.name, entrance.name)
            if not sub_search.can_beat_game():
                # still required, so reconnect the entrance
                entrance.connect(old_connected_region)
                required_entrances.append(entrance)

    # Regenerate the spheres as we might not reach places the same way anymore.
    search.reset()  # search state has no items, okay to reuse sphere 0 cache
    collection_spheres = []
    entrance_spheres = []
    remaining_entrances = set(required_entrances)
    collected = set()
    while True:
        # Not collecting while the generator runs means we only get one sphere at a time
        # Otherwise, an item we collect could influence later item collection in the same sphere
        collected.update(search.iter_reachable_locations(required_locations))
        if not collected:
            break
        internal = collected & internal_locations
        if internal:
            # collect only the internal events but don't record them in a sphere
            for location in internal:
                search.state_list[location.item.world.id].collect(
                    location.item)
            # Remaining locations need to be saved to be collected later
            collected -= internal
            continue
        # Gather the new entrances before collecting items.
        collection_spheres.append(list(collected))
        accessed_entrances = set(
            filter(search.spot_access, remaining_entrances))
        entrance_spheres.append(accessed_entrances)
        remaining_entrances -= accessed_entrances
        for location in collected:
            # Collect the item for the state world it is for
            search.state_list[location.item.world.id].collect(location.item)
        collected.clear()
    logger.info('Collected %d final spheres', len(collection_spheres))

    # Then we can finally output our playthrough
    spoiler.playthrough = OrderedDict(
        (str(i + 1), {location: location.item
                      for location in sphere})
        for i, sphere in enumerate(collection_spheres))
    if worlds[0].entrance_shuffle != 'off':
        spoiler.entrance_playthrough = OrderedDict(
            (str(i + 1), list(sphere))
            for i, sphere in enumerate(entrance_spheres))
@return None """ with requests.Session() as s: for url in url_lst: html = s.get(url, headers=headers).text save_pdf(s, html, login=True) if __name__ == '__main__': username = '' password = '' # 搜索得到url text = 'TEST' search = Search(method='Title', category='result') url_lst = [search.run(text, batch=False)] # 根据CNKI导出的endnote文件(txt),解析标题,并搜索url,可能搜索不到标题 # endnote = '' # with open('{}.txt'.format(endnote), 'r', encoding='utf-8') as f: # data = f.read() # title_lst = re.findall(r'%T (.+)\n', data) # search = Search(method='Title', category='result') # url_lst = [ # search.run(text, batch=False) for title in title_lst # ] # 手动设定 # url_lst = [ # 'http://gb.oversea.cnki.net/kcms/detail/detail.aspx?recid=&FileName=JDEW603.003&DbName=CJFD9697&DbCode=CJFD',
async def post(self):
    """Handle a search request: validate the JSON body, run the query, and
    respond with either inline JSON results or the UUID of a generated CSV
    file (optionally laid out as a tree).

    Error responses use code 1 ("数据格式错误" = malformed data) and code 5
    ("搜索无结果" = no results).
    """
    req_data = []
    req_metadata = []
    # Check that 'data' and 'metadata' keys exist in the request body.
    try:
        req_data = json.loads(self.request.body)['data']
        req_data = bson.json_util.loads(json.dumps(req_data))
        req_metadata = json.loads(self.request.body)['metadata']
        req_metadata = bson.json_util.loads(json.dumps(req_metadata))
    except Exception as e:
        # HTTP response payload.
        res = {'code': 1, 'err_msg': '数据格式错误'}
        # Write the error to the output buffer.
        self.write(res)
        # Flush the output buffer to the socket.
        self.flush()
        # End the HTTP request.
        # NOTE(review): execution continues after finish(); the final `else`
        # below would then try to respond a second time — confirm intent.
        self.finish()
    if req_data and req_metadata:
        if 'file' in req_metadata:
            cursor = Search(req_data).to_query(self.settings['db'])
            # Case: caller does not want a file back.
            if not req_metadata['file']:
                # MotorCursor; no I/O happens at this step.
                result = []
                try:
                    # to_list() buffers `length` documents per I/O round-trip.
                    for doc in await cursor.to_list(
                            length=CONFIG.TO_LIST_BUFFER_LENGTH):
                        result += [doc]
                    # Convert BSON results to JSON.
                    result = json.loads(bson.json_util.dumps(result))
                    # HTTP response payload.
                    res = {
                        'code': 0,
                        'data': result,
                        'count': {
                            'n_record': len(result)
                        }
                    }
                    self.write(res)
                    self.flush()
                    self.finish()
                except Exception:
                    res = {'code': 1, 'err_msg': '数据格式错误'}
                    self.write(res)
                    self.flush()
                    self.finish()
            # Case: caller wants a file back.
            else:
                # NOTE(review): the binding of the plain-CSV `else` below is
                # ambiguous in the whitespace-mangled original; reconstructed
                # here as the else of `'tree' in req_metadata` — confirm.
                if 'tree' in req_metadata:
                    if req_metadata['tree']:
                        has_error = False
                        try:
                            res_uuid = helpers.get_UUID()
                            has_result = False
                            # Tree-layout parameters.
                            tree_group_type = req_metadata["tree_group_type"]
                            tree_attr_proj = req_metadata["tree_attr_proj"]
                            tree_path_attr = req_metadata["path"]
                            show_raw_data = req_metadata["show_raw_data"]
                            # CSV file header.
                            fieldnames = [
                                'node_id', 'children', 'is_leaf', 'is_root'
                            ]
                            for attr in tree_attr_proj:
                                for group_type in tree_group_type:
                                    fieldnames.append(
                                        attr.replace('.', '_') + '_' +
                                        group_type)
                        except Exception as e:
                            if not has_result:
                                res = {'code': 1, 'err_msg': '数据格式错误'}
                                has_error = True
                                self.write(res)
                                self.flush()
                                self.finish()
                        if not has_error:
                            try:
                                with open(('files/' + res_uuid + '.csv'),
                                          'w', newline='') as f:
                                    writer = None
                                    # Root nodes of the reconstructed trees.
                                    root_nodes = {}
                                    try:
                                        for doc in await cursor.to_list(
                                                length=CONFIG.
                                                TO_LIST_BUFFER_LENGTH):
                                            doc = json.loads(
                                                bson.json_util.dumps(doc))
                                            # Extract this document's tree path.
                                            doc_tree_path = core.get_path_from_data(
                                                doc, tree_path_attr)
                                            if not doc_tree_path:
                                                raise ValueError("路径不存在")
                                            # First document seen.
                                            if not has_result:
                                                # Document has a non-empty tree path.
                                                if doc_tree_path:
                                                    res = {
                                                        'code': 0,
                                                        'data': {
                                                            'uuid': res_uuid
                                                        }
                                                    }
                                                    self.write(res)
                                                    self.flush()
                                                    self.finish()
                                                    has_result = True
                                                    fieldnames += list(
                                                        doc.keys())
                                                    writer = csv.DictWriter(
                                                        f,
                                                        fieldnames=fieldnames,
                                                        extrasaction='ignore')
                                                    # Write the CSV header row.
                                                    writer.writeheader()
                                                    root_nodes[str(
                                                        doc_tree_path[0]
                                                    )] = TreeNode(
                                                        doc, doc_tree_path,
                                                        tree_attr_proj,
                                                        show_raw_data)
                                                # Specified tree path missing: skip the document.
                                                else:
                                                    pass
                                            # Subsequent documents.
                                            else:
                                                # Document has a non-empty tree path.
                                                if doc_tree_path:
                                                    # Root already exists: insert under it.
                                                    if str(doc_tree_path[0]
                                                           ) in root_nodes:
                                                        root_nodes[str(
                                                            doc_tree_path[0]
                                                        )].insert_data(
                                                            doc,
                                                            doc_tree_path,
                                                            tree_attr_proj,
                                                            show_raw_data)
                                                    # Root does not exist yet: create it.
                                                    else:
                                                        root_nodes[str(
                                                            doc_tree_path[0]
                                                        )] = TreeNode(
                                                            doc,
                                                            doc_tree_path,
                                                            tree_attr_proj,
                                                            show_raw_data)
                                                # Specified tree path missing: skip the document.
                                                else:
                                                    pass
                                        for root_key in root_nodes.keys():
                                            root_nodes[root_key].set_root()
                                            root_nodes[
                                                root_key].recursive_write_tree(
                                                    writer)
                                    except Exception as e:
                                        if not has_result:
                                            res = {
                                                'code': 1,
                                                'err_msg': '数据格式错误'
                                            }
                                            has_error = True
                                            self.write(res)
                                            self.flush()
                                            self.finish()
                            except Exception:
                                # Rare case: two identical UUID1 values within 24h.
                                pass
                            if not has_result:
                                if not has_error:
                                    # HTTP response payload.
                                    res = {'code': 5, 'err_msg': '搜索无结果'}
                                    self.write(res)
                                    self.flush()
                                    self.finish()
                else:
                    res_uuid = helpers.get_UUID()
                    has_result = False
                    has_error = False
                    # Create the flat CSV file.
                    with open(('files/' + res_uuid + '.csv'),
                              'w', newline='') as f:
                        fieldnames = []
                        writer = None
                        try:
                            for doc in await cursor.to_list(
                                    length=CONFIG.TO_LIST_BUFFER_LENGTH):
                                doc = json.loads(bson.json_util.dumps(doc))
                                # First matching document: respond with the file UUID.
                                if not has_result:
                                    res = {
                                        'code': 0,
                                        'data': {
                                            'uuid': res_uuid
                                        }
                                    }
                                    self.write(res)
                                    self.flush()
                                    self.finish()
                                    has_result = True
                                    fieldnames = list(doc.keys())
                                    writer = csv.DictWriter(
                                        f,
                                        fieldnames=fieldnames,
                                        extrasaction='ignore')
                                    # Write the CSV header row.
                                    writer.writeheader()
                                writer.writerow(doc)
                        except Exception:
                            res = {'code': 1, 'err_msg': '数据格式错误'}
                            has_error = True
                            self.write(res)
                            self.flush()
                            self.finish()
                    if not has_result:
                        if not has_error:
                            # HTTP response payload.
                            res = {'code': 5, 'err_msg': '搜索无结果'}
                            self.write(res)
                            self.flush()
                            self.finish()
        else:
            res = {'code': 1, 'err_msg': '数据格式错误'}
            self.write(res)
            self.flush()
            self.finish()
    else:
        res = {'code': 1, 'err_msg': '数据格式错误'}
        self.write(res)
        self.flush()
        self.finish()
def distribute_items_restrictive(window, worlds, fill_locations=None):
    """Place every item from the worlds' pools into locations, most
    constrained groups first (shop, dungeon, song, progression, priority,
    then the rest), verifying beatability along the way.

    Raises FillError if any item/location remains or the seed is unbeatable.
    """
    song_locations = [
        world.get_location(location) for world in worlds for location in [
            'Song from Composers Grave', 'Song from Impa', 'Song from Malon',
            'Song from Saria', 'Song from Ocarina of Time',
            'Song from Windmill', 'Sheik in Forest', 'Sheik at Temple',
            'Sheik in Crater', 'Sheik in Ice Cavern', 'Sheik in Kakariko',
            'Sheik at Colossus'
        ]
    ]
    shop_locations = [
        location for world in worlds
        for location in world.get_unfilled_locations()
        if location.type == 'Shop' and location.price == None
    ]
    # If not passed in, then get a shuffled list of locations to fill in
    if not fill_locations:
        fill_locations = [location for world in worlds for location in world.get_unfilled_locations() \
            if location not in song_locations and \
               location not in shop_locations and \
               location.type != 'GossipStone']
    world_states = [world.state for world in worlds]
    window.locationcount = len(fill_locations) + len(song_locations) + len(
        shop_locations)
    window.fillcount = 0
    # Generate the itempools
    shopitempool = [
        item for world in worlds for item in world.itempool
        if item.type == 'Shop'
    ]
    songitempool = [
        item for world in worlds for item in world.itempool
        if item.type == 'Song'
    ]
    itempool = [
        item for world in worlds for item in world.itempool
        if item.type != 'Shop' and item.type != 'Song'
    ]
    if worlds[0].shuffle_song_items:
        # Songs are shuffled in with everything else: merge pools/locations.
        itempool.extend(songitempool)
        fill_locations.extend(song_locations)
        songitempool = []
        song_locations = []
    # add unrestricted dungeon items to main item pool
    itempool.extend([
        item for world in worlds
        for item in world.get_unrestricted_dungeon_items()
    ])
    dungeon_items = [
        item for world in worlds
        for item in world.get_restricted_dungeon_items()
    ]
    random.shuffle(
        itempool
    )  # randomize item placement order. this ordering can greatly affect the location accessibility bias
    progitempool = [item for item in itempool if item.advancement]
    prioitempool = [
        item for item in itempool if not item.advancement and item.priority
    ]
    restitempool = [
        item for item in itempool
        if not item.advancement and not item.priority
    ]
    cloakable_locations = shop_locations + song_locations + fill_locations
    all_models = shopitempool + dungeon_items + songitempool + itempool
    worlds[0].settings.distribution.fill(
        window, worlds, [shop_locations, song_locations, fill_locations], [
            shopitempool, dungeon_items, songitempool, progitempool,
            prioitempool, restitempool
        ])
    itempool = progitempool + prioitempool + restitempool

    # set ice traps to have the appearance of other random items in the item pool
    ice_traps = [item for item in itempool if item.name == 'Ice Trap']
    # Extend with ice traps manually placed in plandomizer
    ice_traps.extend(
        location.item for location in cloakable_locations
        if (location.name in location_groups['CanSee']
            and location.item is not None
            and location.item.name == 'Ice Trap'
            and location.item.looks_like_item is None))
    junk_items = remove_junk_items.copy()
    junk_items.remove('Ice Trap')
    major_items = [
        item for (item, data) in item_table.items()
        if data[0] == 'Item' and data[1] and data[2] is not None
    ]
    fake_items = []
    if worlds[0].settings.ice_trap_appearance == 'major_only':
        model_items = [item for item in itempool if item.majoritem]
        if len(
                model_items
        ) == 0:  # All major items were somehow removed from the pool (can happen in plando)
            model_items = ItemFactory(major_items)
    elif worlds[0].settings.ice_trap_appearance == 'junk_only':
        model_items = [item for item in itempool if item.name in junk_items]
        if len(model_items) == 0:  # All junk was removed
            model_items = ItemFactory(junk_items)
    else:  # world[0].settings.ice_trap_appearance == 'anything':
        model_items = [item for item in itempool if item.name != 'Ice Trap']
        if len(
                model_items
        ) == 0:  # All major items and junk were somehow removed from the pool (can happen in plando)
            model_items = ItemFactory(major_items) + ItemFactory(junk_items)
    while len(ice_traps) > len(fake_items):
        # if there are more ice traps than model items, then double up on model items
        fake_items.extend(model_items)
    for random_item in random.sample(fake_items, len(ice_traps)):
        ice_trap = ice_traps.pop(0)
        ice_trap.looks_like_item = random_item

    # Start a search cache here.
    search = Search([world.state for world in worlds])

    # We place all the shop items first. Like songs, they have a more limited
    # set of locations that they can be placed in, so placing them first will
    # reduce the odds of creating unbeatable seeds. This also avoids needing
    # to create item rules for every location for whether they are a shop item
    # or not. This shouldn't have much affect on item bias.
    if shop_locations:
        logger.info('Placing shop items.')
        fill_ownworld_restrictive(window, worlds, search, shop_locations,
                                  shopitempool,
                                  itempool + songitempool + dungeon_items,
                                  "shop")
    # Update the shop item access rules
    for world in worlds:
        set_shop_rules(world)
    search.collect_locations()

    # If there are dungeon items that are restricted to their original dungeon,
    # we must place them first to make sure that there is always a location to
    # place them. This could probably be replaced for more intelligent item
    # placement, but will leave as is for now
    if dungeon_items:
        logger.info('Placing dungeon items.')
        fill_dungeons_restrictive(window, worlds, search, fill_locations,
                                  dungeon_items, itempool + songitempool)
        search.collect_locations()

    # places the songs into the world
    # Currently places songs only at song locations. if there's an option
    # to allow at other locations then they should be in the main pool.
    # Placing songs on their own since they have a relatively high chance
    # of failing compared to other item type. So this way we only have retry
    # the song locations only.
    if not worlds[0].shuffle_song_items:
        logger.info('Placing song items.')
        fill_ownworld_restrictive(window, worlds, search, song_locations,
                                  songitempool, progitempool, "song")
        search.collect_locations()
        fill_locations += [
            location for location in song_locations if location.item is None
        ]

    # Put one item in every dungeon, needs to be done before other items are
    # placed to ensure there is a spot available for them
    if worlds[0].one_item_per_dungeon:
        logger.info('Placing one major item per dungeon.')
        fill_dungeon_unique_item(window, worlds, search, fill_locations,
                                 progitempool)
        search.collect_locations()

    # Place all progression items. This will include keys in keysanity.
    # Items in this group will check for reachability and will be placed
    # such that the game is guaranteed beatable.
    logger.info('Placing progression items.')
    fill_restrictive(window, worlds, search, fill_locations, progitempool)
    search.collect_locations()

    # Place all priority items.
    # These items are items that only check if the item is allowed to be
    # placed in the location, not checking reachability. This is important
    # for things like Ice Traps that can't be found at some locations
    logger.info('Placing priority items.')
    fill_restrictive_fast(window, worlds, fill_locations, prioitempool)

    # Place the rest of the items.
    # No restrictions at all. Places them completely randomly. Since they
    # cannot affect the beatability, we don't need to check them
    logger.info('Placing the rest of the items.')
    fast_fill(window, fill_locations, restitempool)

    # Log unplaced item/location warnings
    for item in progitempool + prioitempool + restitempool:
        logger.error('Unplaced Items: %s [World %d]' %
                     (item.name, item.world.id))
    for location in fill_locations:
        logger.error('Unfilled Locations: %s [World %d]' %
                     (location.name, location.world.id))
    if progitempool + prioitempool + restitempool:
        raise FillError('Not all items are placed.')
    if fill_locations:
        raise FillError('Not all locations have an item.')
    if not search.can_beat_game():
        raise FillError('Cannot beat game!')
    worlds[0].settings.distribution.cloak(worlds, [cloakable_locations],
                                          [all_models])
    for world in worlds:
        for location in world.get_filled_locations():
            # Get the maximum amount of wallets required to purchase an advancement item.
            if world.maximum_wallets < 3 and location.price and location.item.advancement:
                if location.price > 500:
                    world.maximum_wallets = 3
                elif world.maximum_wallets < 2 and location.price > 200:
                    world.maximum_wallets = 2
                elif world.maximum_wallets < 1 and location.price > 99:
                    world.maximum_wallets = 1
            # Get Light Arrow location for later usage.
            if location.item and location.item.name == 'Light Arrows':
                location.item.world.light_arrow_location = location
choice = input( "What would you like to do? ('a' = add student, 's' = search student, 'u' = update student, 'd' = delete student, 'v' = view all, 'q' = quit) " ) choices = Choices() #instance of the choices class #add student if choice == 'a': choices.addStudent() continuing = input( "Continue? (y/n) ") #check if user wants to continue if continuing == "n": break #search for student elif choice == "s": new_search = Search() #instance of search class search_choice = input( "What would you like to search by? ('m' = Major, 'g' = GPA, 'a' = Advisor) " ) if search_choice.lower() == "m": major = input("Enter the student's major: ") input_param = (major, ) #insert input into tuple new_search.searchByMajor(input_param) elif search_choice.lower() == "g": gpa = input("Enter the student's GPA: ") input_param = (gpa, ) #insert input into tuple new_search.searchByGpa(input_param) elif search_choice.lower() == "a": advisor = input("Enter the name of the student's advisor: ") input_param = (advisor, ) #insert input into tuple new_search.searchByAdvisor(input_param)
def find_light_arrows(spoiler):
    """Walk every reachable progression location, collecting items as we go,
    and let maybe_set_light_arrows flag the Light Arrows location."""
    searcher = Search([world.state for world in spoiler.worlds])
    progression = searcher.progression_locations()
    for loc in searcher.iter_reachable_locations(progression):
        searcher.collect(loc.item)
        maybe_set_light_arrows(loc)
def __init__(self, logger, translate):
    """Keep the translator and build a Search backed by it and the logger."""
    self.search = Search(logger, translate)
    self.translate = translate
#!/usr/bin/env python import falcon import logging.config from Search import Search logging.config.fileConfig('config/logging.ini') app = falcon.API() app.add_route("/", Search())
if option == 1: startingStation = UI.getStation() # startingStation = { # 'id': '1', # 'latitude': '51.5028', # 'longitude': '-0.2801', # 'name': 'Acton Town' # } goalStation = UI.getStation('goal') # goalStation = { # 'id': '187', # 'latitude': '51.6476', # 'longitude': '-0.1318', # 'name': 'Oakwood' # } searchPriority = UI.getSearchPriority() # searchPriority = 2 searchAlgorithm = Search() successNode = searchAlgorithm.heuristicSearch(searchPriority, startingStation, goalStation) print("\rRoute:") successNode.backTrack() print(f"\nTotal cost: {successNode.cost}") print('\n') input("Press enter to continue...") elif option == 2: exit = True
# coding=utf-8 import re import time from Search import Search if __name__ == '__main__': t1 = time.time() sea = Search() t2 = time.time() print "搜索引擎初始化完毕", t2 - t1 queries = u'site:(站内搜索) 全部搜索 "完整搜索" (包含搜索) -(排除搜索)' '''site:(sohu.com) "罗一笑" (苹果) -(深圳)''' while queries: queries = raw_input().decode('utf-8') try: site_search_url = re.findall('site:\((.*?)\)', queries)[0] queries = queries.replace(unicode('site:(%s)' % site_search_url), '') except IndexError: site_search_url = None try: except_search_query = re.findall('-\((.*?)\)', queries)[0] queries = queries.replace(unicode('-(%s)' % except_search_query), '') except IndexError: except_search_query = None try: include_search_query = re.findall('\((.*?)\)', queries)[0] queries = queries.replace(unicode('(%s)' % include_search_query),
except Exception, e: pass host = '' if hostname.startswith("www") == True: parts = hostname.split(".") for part in parts: if part != 'www': host += part + '.' host = host[:-1] else: host = hostname search = Search(host) search.process() emails = search.get_emails() hosts = search.get_hostnames() full = [] print "\n\n[+] Emails:" print "------------------" if emails == []: print "No emails found" else: for email in emails: print email print "\n[+] Hosts:" print "------------------------------------"
print('p:' + str(PROB)) print('Total Solutions:' + str(total_solutions)) print('Total Time:' + str(total_time)) print('Average Solved:' + str(average_solved)) print('Total Path Length:' + str(total_path_length)) print('Average Path Length:' + str(average_path_length)) """ print("--------------------------------\nA* Euclidean") for x in range(0, ITERATIONS): current_map = Map(DIM, PROB) start_time = time.time() current_map.results = Search(current_map).A_star("euclidean") current_time = round(time.time() - start_time, 20) if (current_map.results['Status'] == 'Found Path'): total_solutions += 1 total_cells_visited += int(current_map.results['# of Visited Cells']) total_time += current_time #if(current_map.results['# of Visited Cells'] != 'n/a'): #total_cells_visited += int(current_map.results['# of Visited Cells']) print("Time: ", current_time) average_solved = round(total_solutions / ITERATIONS, 7) average_cells_visited = round(total_cells_visited / total_solutions, 4) print('p:' + str(PROB))
def main():
    # Turn-based pygame strategy game (Python 2: print statements, and the
    # `/ 50` pixel-to-cell divisions rely on integer division of ints).
    # Map cell codes seen in this code: 'A' = player unit ("enmy"),
    # '.' = empty, 'x' = wall, '$' = source, 'D' = defender, '@' = unit in a
    # source.  Globals used: Mps (map grid), xSiz/ySiz, dx/dy (4-neighbour
    # offsets), dic1/dic2 (cell -> sprite path), locked (units moved this
    # turn), cor (cell -> pixel coords).
    # NOTE(review): reconstructed formatting — the original chunk is
    # collapsed; indentation of the turn-end section below is a best guess.
    flag = 0            # 0 = waiting for a unit pick, 1 = waiting for a destination
    lasx = 0
    lasy = 0
    NumofEnmy = 0       # number of player units still on the board
    NumofTurn = 1
    pygame.init()
    global Mps
    # Convert each map row string into a mutable list of cells.
    for i in range(len(Mps)):
        Mps[i] = list(Mps[i])
    screen = pygame.display.set_mode((xSiz * 50, ySiz * 50), 0, 32)
    # Draw the initial board and count the player's units.
    for i in range(len(Mps)):
        for j in range(len(Mps[0])):
            block = pygame.image.load(dic1[Mps[i][j]]).convert()
            screen.blit(block, cor(j, i))
            if Mps[i][j] == 'A':
                NumofEnmy += 1
    pygame.display.update()
    while True:
        for event in pygame.event.get():
            # NOTE(review): `event == QUIT` compares an Event to a type
            # constant — looks like it should be `event.type == QUIT`;
            # left unchanged here.
            if event == QUIT:
                exit()
            elif event.type == MOUSEBUTTONDOWN:
                # Snap the click to the top-left pixel of its 50px cell.
                localtion_keys = list(event.pos)
                localtion_keys[0] = localtion_keys[0] / 50 * 50
                localtion_keys[1] = localtion_keys[1] / 50 * 50
                if flag == 0:
                    # Phase 0: pick a unit to move (must be 'A' and not
                    # already locked this turn).
                    if Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] == 'A' and (localtion_keys[0] / 50, localtion_keys[1] / 50) not in locked:
                        # Highlight the unit and its reachable neighbours in red.
                        block = pygame.image.load('pic/red_enmy.png').convert()
                        screen.blit(block, localtion_keys)
                        for i in range(4):
                            nexx = localtion_keys[0] / 50 + dx[i]
                            nexy = localtion_keys[1] / 50 + dy[i]
                            if nexx >= 0 and nexx < xSiz and nexy >= 0 and nexy < ySiz and Mps[nexy][nexx] != 'x':
                                block = pygame.image.load(dic2[Mps[nexy][nexx]]).convert()
                                screen.blit(block, [nexx * 50, nexy * 50])
                        flag = 1
                        lasx = localtion_keys[0] / 50
                        lasy = localtion_keys[1] / 50
                elif flag == 1:
                    # Phase 1: pick the destination cell for the selected unit.
                    # First restore the normal sprites around the selection.
                    for i in range(4):
                        nexx = lasx + dx[i]
                        nexy = lasy + dy[i]
                        if nexx >= 0 and nexx < xSiz and nexy >= 0 and nexy < ySiz:
                            block = pygame.image.load(dic1[Mps[nexy][nexx]]).convert()
                            screen.blit(block, [nexx * 50, nexy * 50])
                    if localtion_keys[0] / 50 == lasx and localtion_keys[1] / 50 == lasy:
                        # Clicked the unit itself: stay in place but consume
                        # its move (lock it until next turn).
                        block = pygame.image.load('pic/enmy.png').convert()
                        screen.blit(block, localtion_keys)
                        locked[(localtion_keys[0] / 50, localtion_keys[1] / 50)] = 1
                        flag = 0
                        continue
                    move_success = 0
                    if Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] != 'x' and Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] != 'A':
                        # Destination is not a wall or a friendly unit; it
                        # must also be one of the 4 neighbours of the unit.
                        for i in range(4):
                            nexx = localtion_keys[0] / 50 + dx[i]
                            nexy = localtion_keys[1] / 50 + dy[i]
                            if nexx == lasx and nexy == lasy:
                                if Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] == '.':
                                    # Normal move onto an empty cell.
                                    #print nexx, nexy, localtion_keys
                                    block = pygame.image.load('pic/none.png').convert()
                                    # old cell: 'A' -> '.'
                                    screen.blit(block, [lasx * 50, lasy * 50])
                                    Mps[lasy][lasx] = '.'
                                    block = pygame.image.load('pic/enmy.png').convert()
                                    # new cell: '.' -> 'A'
                                    screen.blit(block, localtion_keys)
                                    Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] = 'A'
                                    locked[(localtion_keys[0] / 50, localtion_keys[1] / 50)] = 1
                                elif Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] == '$':
                                    # Move onto a source, destroying it.
                                    block = pygame.image.load('pic/none.png').convert()
                                    screen.blit(block, [lasx * 50, lasy * 50])
                                    Mps[lasy][lasx] = '.'
                                    block = pygame.image.load('pic/enmy.png').convert()
                                    screen.blit(block, localtion_keys)
                                    Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] = 'A'
                                    locked[(localtion_keys[0] / 50, localtion_keys[1] / 50)] = 1
                                elif Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] == 'D':
                                    # Attack a defender: both pieces are
                                    # removed (mutual destruction).
                                    block = pygame.image.load('pic/none.png').convert()
                                    screen.blit(block, [lasx * 50, lasy * 50])
                                    Mps[lasy][lasx] = '.'
                                    block = pygame.image.load('pic/none.png').convert()
                                    screen.blit(block, localtion_keys)
                                    Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] = '.'
                                    NumofEnmy -= 1
                                elif Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] == '@':
                                    # Attack an enemy standing in a source:
                                    # the source ('$') remains afterwards.
                                    block = pygame.image.load('pic/none.png').convert()
                                    screen.blit(block, [lasx * 50, lasy * 50])
                                    Mps[lasy][lasx] = '.'
                                    block = pygame.image.load('pic/sor.png').convert()
                                    screen.blit(block, localtion_keys)
                                    Mps[localtion_keys[1] / 50][localtion_keys[0] / 50] = '$'
                                    NumofEnmy -= 1
                                move_success = 1
                    if move_success == 0:
                        # Invalid destination: redraw the unit where it was.
                        block = pygame.image.load('pic/enmy.png').convert()
                        screen.blit(block, [lasx * 50, lasy * 50])
                    flag = 0  # back to phase 0: choose the next unit to move
        pygame.display.update()
        if len(locked) == NumofEnmy:
            # Every remaining unit has moved: end of the player's turn.
            if NumofEnmy == 0:
                print 'Game over !'
                break
            print 'computer turn(' + str(NumofTurn) + ')...'
            NumofTurn += 1
            locked.clear()
            # Let the search module compute the computer's move; it returns
            # the whole updated map.
            Mps = Search(Mps)
            #Mps = var.Get_Nex()
            #var.PrintTree(var.root)
            # Recount the player's units and redraw the whole map.
            NumofEnmy = 0
            for i in range(len(Mps)):
                for j in range(len(Mps[0])):
                    if Mps[i][j] == 'A':
                        NumofEnmy += 1
                    block = pygame.image.load(dic1[Mps[i][j]]).convert()
                    screen.blit(block, cor(j, i))
            #Wp = var.root.W
            #Lp = var.root.L
            #print 'rate of wining: ',1.0 * Wp / (1.0 + Wp + Lp) * 100.0, '%'
            print 'your turn...'
            pygame.display.update()
def run():
    """Interactive CLI loop for the Local Indexer.

    Prompts for a search criterion, lists the matching results, and lets the
    user open a result fragment or the full file.  Menu commands:
    (o) show another result, (t) open the full text, (n) start a new search,
    (f) finish (falls out of both loops and returns).

    NOTE(review): reconstructed formatting — the original chunk is collapsed;
    the function appears to end at the final ``ac = 'o'`` but confirm against
    the full file.
    """
    logger = Logger.getInstance()
    logger.logInfo(' --------- Initiating CLI ----------')
    print('============================Local Indexer===============================')
    print('\r\n')
    ac = 'n'
    while (ac.upper() == 'N'):  # outer loop: one pass per search criterion
        print('\r\n')
        criteria = input("ingrese criterio de busqueda :")
        logger.logInfo('criteria: ' + criteria)
        # perform the search
        search = Search()
        results = search.performSearch(criteria)
        ac = 'o'
        while (ac.upper() == 'O'):  # inner loop: browse results of this search
            print('hists: ' + str(len(results)))
            print('criterio: ' + criteria)
            logger.logInfo('hists: ' + str(len(results)))
            # recover fragments
            fr = FragmentRecover()
            i = 0
            for result in results:
                print(str(i) + " | " + result.get_title())
                i = i + 1
            if (i == 0):
                print("No se hallaron resultados")
                ac = "N"  # no hits: return to the outer loop for a new search
                break
            print("-----------------")
            print("\n\r")
            print("\n\r")
            # Result-index input and validation: keep prompting until a
            # numeric value in [0, i) is entered.
            fg = ""
            while (fg == "" or int(fg) >= i):
                fg = input("seleccione resultado: ")
                try:
                    int(fg)
                except ValueError:
                    fg = ""  # non-numeric input: reset so the loop re-prompts
            print("\n\r")
            print("\n\r")
            print('========================================================================')
            # Recover and display the chosen fragment (header printed twice,
            # before and after the text, as a visual frame).
            fgm = fr.recover(results[int(fg)].get_path(), results[int(fg)].get_order())
            print(fg + " | " + results[int(fg)].get_title())
            print('fragmento ' + str(fgm.get_order()) + ' extension chars: ' + str(len(fgm.get_text())))
            print('------------------------------------------------------------------------')
            print(LoinxCLI.display_text(fgm.get_text(), criteria, results[int(fg)].get_path()))
            print('------------------------------------------------------------------------')
            print(fg + " | " + results[int(fg)].get_title())
            print('fragmento ' + str(fgm.get_order()) + ' extension chars: ' + str(len(fgm.get_text())))
            print('------------------------------------------------------------------------')
            print('ver (o)tro resultado (t)exto completo (n)ueva busqueda (f)inalizar ')
            print('========================================================================')
            ac = ""
            while (ac.upper() not in ['O', 'T', 'N', 'F']):
                ac = input()
            if (ac.upper() == 'T'):
                # Open the full file at this fragment, then keep browsing.
                LoinxCLI.openFile(results[int(fg)].get_path(), fgm.get_order())
                ac = 'o'
# Snake solver demo (Python 3: f-strings): build a game, search for a move
# sequence with A*, then replay the moves in the terminal while printing
# score / node-count / timing stats.
import os
import time

from Game import Game
from Search import Search
import Config as cfg

game = Game(cfg.BOARD_SIZE, cfg.SNAKE_LEN)
search = Search(game)
times = []  # per-search durations in seconds
try:
    i = 1
    while True:
        start = time.time()
        # moves = search.depth_first()
        # moves = search.breadth_first()
        moves = search.a_star()
        stop = time.time()
        delta = stop - start
        times.append(delta)
        print(game)
        # Replay the planned moves; pop() consumes from the end of the list,
        # so the search presumably returns them in reverse order — confirm.
        while moves:
            move = moves.pop()
            os.system('clear')
            game.move_snake(move)
            print(f' Score: {i}')
            print(f' Nodes: {len(search.visited_states)}')
            print(f' Time : {delta:0.4f}')
# NOTE(review): truncated — the except/finally matching this `try` and the
# remainder of the loop body are outside this chunk.