def simulate_main(triple):
    """Score two objects against a single comparison aspect.

    Args:
        triple: sequence where triple[1] and triple[2] are the two object
            names and triple[3] is the aspect to compare them on.

    Returns:
        [a_aspect_score, b_aspect_score] — the per-aspect points each object
        received from evaluate(), 0 when the aspect is absent.
    """
    fast_search = 'true'
    obj_a = Argument(triple[1].lower().strip())
    obj_b = Argument(triple[2].lower().strip())
    # Normalize the aspect once so the Aspect we search with and the key we
    # read back out of the result dict always agree.
    aspect_name = triple[3].lower()
    aspects = [Aspect(aspect_name, 5)]
    model = 'bow'
    # model = 'infersent'
    load_config()
    if aspects:
        json_compl_triples = request_es_triple(obj_a, obj_b, aspects)
    json_compl = request_es_ML(fast_search, obj_a, obj_b)
    if aspects:
        all_sentences = extract_sentences(json_compl_triples)
        all_sentences.extend(extract_sentences(json_compl))
    else:
        all_sentences = extract_sentences(json_compl)
    remove_questions(all_sentences)
    prepared_sentences = prepare_sentence_DF(all_sentences, obj_a, obj_b)
    classification_results = classify_sentences(prepared_sentences, model)
    final_dict = evaluate(all_sentences, prepared_sentences,
                          classification_results, obj_a, obj_b, aspects)
    # Bug fix: the aspect was lower-cased when building `aspects`, so look it
    # up with the same lower-cased key — the raw triple[3] missed entries
    # whenever the input contained upper-case characters.
    a_aspect_score = final_dict['object1']['points'].get(aspect_name, 0)
    b_aspect_score = final_dict['object2']['points'].get(aspect_name, 0)
    return [a_aspect_score, b_aspect_score]
def run_astr(p):
    """Run the A* solver with every heuristic over the given puzzles.

    Args:
        p: iterable of puzzle file names (e.g. "4x4_depth_00000.txt").

    Returns:
        The output of process_results("A*", results).
    """
    results = init_results()
    # Bug fix: the original iterated the module-level `puzzles` and ignored
    # the `p` parameter entirely (run_dfs already honours its parameter).
    progress = len(p) * len(heuristics)
    i = 0
    for puzzle in p:
        for heur in heuristics:
            astr = ASTR(search_strategy=heur)
            dim, lay = load_config(os.path.join("puzzles", puzzle))
            astr.model.load_layout(dim, lay)
            t = time()
            path = astr.run()
            t = round((time() - t) * 1000, 3)
            # get the depth from 4x4_depth_00000.txt
            depth = int(puzzle.split('_')[1])
            if path == -1:
                # unsolved/aborted run: record nothing, counter untouched
                continue
            result = {
                "path_length": len(path),
                "frontier": len(astr.frontier) + len(astr.explored),
                "explored": len(astr.explored),
                # NOTE(review): "depth" is recorded as the solution length,
                # not the filename-derived depth — confirm this is intended.
                "depth": len(path),
                "time": t
            }
            results["A*"][heur][depth].append(result)
            print("A* progress: {}%".format(round((i / progress) * 100, 2)),
                  end='\r', flush=True)
            i += 1
    # avg_whole, avg_orders
    return process_results("A*", results)
def setUp(self):
    """Build the mailer, pull-request, comment, and mocked GitHub client fixtures."""
    self.config = main.load_config("data/config.json")
    self.mailer = MailClient(
        self.config['bot_email'], self.config['bot_name'],
        self.config['mailing_list'], self.config['smtp']['host'],
        self.config['smtp']['port'], self.config['smtp']['safe'],
        self.config['imap']['host'], self.config['imap']['port'],
        self.config['imap']['safe'], self.config['account_imap']['user'],
        self.config['account_imap']['password'],
        self.config['account_smtp']['user'],
        self.config['account_smtp']['password'])
    self.pull_request = PullRequest(
        "A modest change", "", "", "pull_author", 1, "A pull request",
        "pull:master", 0, 0, 1, "2017-11-09T09:56:43Z", 1, 2, 3, [
            Commit("commit_author", "author@example", "committer_name",
                   "committer@example", "", "Message of the commit", 2, 3, 5,
                   [])
        ], [])
    self.pull_request.diff_content = "#Here there is a real diff#"
    self.comment = Comment(1, "author_name", "guy@example",
                           "2017-11-09T09:56:43Z",
                           "This is a comment on a pull request", None, None)
    self.github_client = GithubClient("", "", "", "", None)
    # Stub out the network call so tests never touch GitHub.
    self.github_client.send_comment_from_email = MagicMock(return_value=True)
    # (removed a redundant trailing `pass` — it was dead code)
def run_bfs(p):
    """Run the BFS solver with every search order over the given puzzles.

    Args:
        p: iterable of puzzle file names (e.g. "4x4_depth_00000.txt").

    Returns:
        The output of process_results("BFS", results).
    """
    results = init_results()
    # Bug fix: the original iterated the module-level `puzzles` and ignored
    # the `p` parameter entirely (run_dfs already honours its parameter).
    progress = len(p) * len(orders)
    i = 0
    for puzzle in p:
        for order in orders:
            bfs = BFS(search_order=order)
            dim, lay = load_config(os.path.join("puzzles", puzzle))
            bfs.model.load_layout(dim, lay)
            t = time()
            path = bfs.run()
            t = round((time() - t) * 1000, 3)
            # get the depth from 4x4_depth_00000.txt
            depth = int(puzzle.split('_')[1])
            if path == -1:
                # unsolved/aborted run: record nothing, counter untouched
                continue
            result = {
                "path_length": len(path),
                "frontier": len(bfs.frontier) + len(bfs.explored),
                "explored": len(bfs.explored),
                # NOTE(review): "depth" is recorded as the solution length,
                # not the filename-derived depth — confirm this is intended.
                "depth": len(path),
                "time": t
            }
            results["BFS"][order][depth].append(result)
            print("BFS progress: {}%".format(round((i / progress) * 100, 2)),
                  end='\r', flush=True)
            i += 1
    # avg_whole, avg_orders
    return process_results("BFS", results)
def run_dfs(p):
    """Time the DFS solver with every search order over the puzzles in `p`."""
    results = init_results()
    done = 0
    total = len(p) * 8
    for puzzle_file in p:
        for search_order in orders:
            solver = DFS(search_order=search_order)
            dimensions, layout = load_config(
                os.path.join("puzzles", puzzle_file))
            solver.model.load_layout(dimensions, layout)
            started = time()
            path = solver.run()
            elapsed = round((time() - started) * 1000, 3)
            # depth is encoded in the file name, e.g. 4x4_07_00001.txt -> 7
            depth = int(puzzle_file.split('_')[1])
            if path == -1:
                continue  # unsolved run: record nothing
            results["DFS"][search_order][depth].append({
                "path_length": len(path),
                "frontier": len(solver.frontier) + len(solver.explored),
                "explored": len(solver.explored),
                "depth": solver.deepest,
                "time": elapsed
            })
            print("DFS progress: {}%".format(round((done / total) * 100, 2)),
                  end='\r', flush=True)
            done += 1
    return results
def _run(args):
    """Load vocab/config, restore autoencoder weights, start the predict loop."""
    if not args.config_file:
        raise FileNotFoundError("Config path is none")
    id2word, word2id, max_len = load_config(args.config_file)
    autoencoder = AutoEncoder(len(id2word), args.embedding_size,
                              args.hidden_size, max_len)
    autoencoder.model.load_weights(args.model_path)
    predict_loop(PredictModel(autoencoder, (id2word, word2id, max_len)))
def load_model():
    """Open a TF session, restore the model from the checkpoint, return it."""
    config = load_config(FLAGS.config_file)
    logger = get_logger(FLAGS.log_file)
    # limit GPU memory
    session_config = tf.ConfigProto()
    session_config.gpu_options.allow_growth = True
    with tf.Session(config=session_config) as sess:
        model = create_model(sess, Model, FLAGS.ckpt_path, config, logger)
        print("加载成功")
        # NOTE(review): the session closes when this `with` exits, so the
        # returned model's session may no longer be usable — confirm callers
        # re-attach a live session.
        return model
def main(_):
    """Interactive demo: restore the model and tag sentences typed on stdin."""
    config = load_config(FLAGS.config_file)
    logger = get_logger(FLAGS.log_file)
    # limit GPU memory
    session_config = tf.ConfigProto()
    session_config.gpu_options.allow_growth = True
    with open(FLAGS.map_file, "rb") as map_file:
        _, _, tag_to_id, id_to_tag = pickle.load(map_file)
    with tf.Session(config=session_config) as sess:
        model = create_model(sess, Model, FLAGS.ckpt_path, config, logger)
        while True:
            sentence = input("input sentence, please:")
            prepared = input_from_line(sentence, FLAGS.max_seq_len, tag_to_id)
            result = model.evaluate_line(sess, prepared, id_to_tag)
            print(result['entities'])
def on_generate(self, widget):
    """Regenerate templates from commands.yaml and show each output in a tab."""
    if self.project_root is None:
        # Bug fix: the original fell through with `pass` and then crashed in
        # os.chdir(None); bail out early instead.
        # TODO: show dialog saying project root must be set
        return
    os.chdir(self.project_root)
    output = main.generate_templates(*main.load_config('commands.yaml'))
    # Clear existing tabs before rebuilding them.
    for i in range(self.nb.get_n_pages()):
        self.nb.remove_page(-1)
    for k, v in output.items():
        tv = Gtk.TextView()
        tv.get_buffer().set_text(v)
        tv.set_editable(False)
        tv.set_monospace(True)
        tv.modify_font(Pango.FontDescription('Monospace 9'))
        sw = Gtk.ScrolledWindow()
        sw.add(tv)
        self.nb.append_page(sw, Gtk.Label(k))
    self.nb.set_size_request(400, 400)
    self.nb.show_all()
def show_docker_info(client_id):
    """Return JSON-encoded info about the docker client session `client_id`."""
    cli = client_session[client_id]
    print(cli)
    cli_info = main.show_cli_info(cli)
    return json.dumps(cli_info)


# Pull image from docker registry.
# Methods: GET
# Args:
#   [client_id]: Recognize your docker client session.
#   [name]: pull image name, eg:(private.registry/com/os/centos:7).
# return:
#   pull result.
# Bug fix: the CORS header list said 'Content- Type' (stray space) — corrected
# to 'Content-Type' so the header is actually allowed by the browser preflight.
@app.route('/pull/<client_id>/<host>/<folder>/<name>/<tag>', methods=['GET'])
@cross_origin(origin='*', headers=['Content-Type', 'Authorization'])
def pull_image(client_id, host, folder, name, tag):
    """Pull `host/folder/name:tag` via the client session and return the result."""
    cli = client_session[client_id]
    image_name = host + '/' + folder + '/' + name + ':' + tag
    pull_result = main.pull_docker_image(cli, image_name)
    return pull_result


# Start a server from here.
if __name__ == '__main__':
    # load env config
    env = os.environ.get("ENV")
    config = main.load_config(env)
    # NOTE(review): this raises TypeError when ENV is unset (env is None) —
    # confirm ENV is always provided by the deployment environment.
    print("load env is:" + env)
    app.run(host=config['host'], port=config['port'])
def save(obj): """Save obj in the database.""" logger.debug('Saving %r.', obj) session.add(obj) try: session.commit() except Exception as e: logger.exception(e) session.rollback() raise e query = session.query(DBObject) load_config() if config.system_object_id is not None: system_object = query.filter( DBObject.id == config.system_object_id ).one() logger.info('System object: %r.', system_object) else: system_object = DBObject(name='System') save(system_object) config.system_object_id = system_object.id logger.info('Created system object: %r.', system_object) if config.arch_wizard_id is not None: arch_wizard = query.filter( DBObject.player.is_(True),
def setUp(self) -> None:
    """Create a log file in the configured log_dir, filled with 100 generated lines."""
    self.config = main.load_config({}, main.DEFAULT_CONFIG_PATH)
    self.full_log_file_path = os.path.join(self.config["log_dir"],
                                           self.log_file_name)
    with open(self.full_log_file_path, 'w+') as log_file:
        log_file.writelines(gen_lines(100))
def test_read_config(self):
    """The default config file exists and its report_size parses to 1000."""
    self.assertTrue(os.path.exists(main.DEFAULT_CONFIG_PATH))
    loaded = main.load_config({}, main.DEFAULT_CONFIG_PATH)
    self.assertEqual(int(loaded["report_size"]), 1000)
def test_location(self):
    """Scraped location matches the fixture's expected value."""
    self.assertEqual(self.data["location"], self.article.location)


def test_download_date(self):
    # We are faking the download date so I'd rather not test for it.
    pass


def test_meta_lang(self):
    # Not grabbing meta_lang yet.
    pass
    #self.assertEqual(self.data["meta_lang"], self.article.meta_lang)


def test_pub_date(self):
    """Publication dates compare equal when rendered as strings."""
    self.assertEqual(str(self.data["recent_pub_date"]),
                     str(self.article.pub_date))


if __name__ == '__main__':
    config = load_config(path.join("configs", "test-file-generator.json"))
    data = [filename for filename, url in config["args"]["known_good_urls"]]
    print("Working...")
    suites = []
    for fixture_name in data:
        json_file = path.join("test_files", fixture_name)
        print("Loading: %s" % json_file)
        suites.append(BaseSiteTest(json_file))
    runner = unittest.TextTestRunner()
    for suite in suites:
        runner.run(suite)
def test_config_loading():
    """load_config returns a dict even for a nonexistent path."""
    loaded = load_config('doesnt_exist')
    assert isinstance(loaded, dict)
def wis_browser(f, d, progress=None):
    """Drive a Playwright Firefox session against the WIS site.

    Depending on the flags in `f`, this either performs an interactive login
    to store cookies ("auth_to_store_cookies"), grabs the Recruiting Summary
    page ("grab_watched_recruits"), scrapes recruit IDs into the database
    ("scrape_recruit_IDs"), or updates considering info for unsigned recruits
    ("update_considering").

    Args:
        f: flag container tested with `in` — presumably a string or list of
           task names; confirm against callers.
        d: database handle — looks like a Qt QSqlDatabase (databaseName(),
           connectionName(), tables(), isValid()); TODO confirm.
        progress: signal-like object; .emit(1)/.emit(2, ...) report progress,
           .emit(999999) reports failure.

    Returns:
        True/False for the auth and scrape tasks, or the parsed
        BeautifulSoup document (or "" on failure) for grab_watched_recruits.
    """
    # Default settings
    # headless = True
    # NOTE(review): `headless = True` above is commented out, so `headless`
    # is unbound (NameError at the logging line below) whenever the config
    # has no [Browser] headless option — confirm and restore the default.
    browser_pause = False
    timer_expect_navigation = 10000
    timer_incorrect_creds = 2000
    timer_mylocker = 60000
    c = load_config()
    coachid = c['coachid']
    config = c['config']
    # Optional [Browser] headless override from the config file.
    if config.has_section('Browser'):
        logger.info("Config 'Browser' section found")
        if config.has_option('Browser', 'headless'):
            logger.info("'Browser' section has 'headless' option")
            try:
                headless = config.getboolean('Browser', 'headless')
            except Exception as err:
                logger.error(
                    "Error trying to get boolean value from Browser headless option."
                )
    logger.info(f"Setting headless = {headless}")
    logger.info(f"Setting timer_expect_navigation = {timer_expect_navigation}")
    logger.info(f"Setting timer_incorrect_creds = {timer_incorrect_creds}")
    logger.info(f"Setting timer_mylocker = {timer_mylocker}")
    logger.info(f"Setting browser_pause = {browser_pause}")
    global storage_state
    storage_state = check_for_stored_cookies(coachid)
    # No stored cookies (or an explicit re-auth request): delete any stale
    # cookie file and force a visible browser for the interactive login.
    if storage_state == "" or "auth_to_store_cookies" in f:
        file = os.path.join(myconfig.cookies_directory_path,
                            f"browser_cookie_{coachid}.json")
        if path.exists(file):
            logger.info(f"Deleting current cookie file {file}")
            os.remove(file)
        headless = False
        logger.info("Session requires interactive login.")
        logger.info(f"Setting headless = {headless}")
    with sync_playwright() as p:
        # Resolve the bundled Firefox binary per platform.
        if myconfig.os_platform == "Windows":
            logger.debug(
                f"Configuring Playwright Browser Path for {myconfig.os_platform}"
            )
            browser_path = Path(
                sys.modules['playwright'].__file__
            ).parent / 'driver' / 'package' / '.local-browsers' / 'firefox-1316' / 'firefox' / 'firefox.exe'
        elif myconfig.os_platform == "Linux":
            logger.debug(
                f"Configuring Playwright Browser Path for {myconfig.os_platform}"
            )
            browser_path = Path(
                sys.modules['playwright'].__file__
            ).parent / 'driver' / 'package' / '.local-browsers' / 'firefox-1316' / 'firefox' / 'firefox'
        else:
            logger.error(f"{myconfig.os_platform} is not supported!")
            return False
        logger.info(f"Browser path = \n{browser_path}")
        logger.info(f"Browser path is valid? = {browser_path.exists()}")
        if not browser_path.exists():
            logger.error("Browser path is not valid!!!")
            return False
        browser = p.firefox.launch(headless=headless,
                                   executable_path=browser_path)
        custom_headers = {
            'User-Agent': 'gdrecruit',
            'Application-Name':
            f'{myconfig.application_name} ({myconfig.version})'
        }
        # --- Interactive auth path: log in, wait for My Locker, store cookies
        if "auth_to_store_cookies" in f or storage_state == "":
            logger.info(
                f"Opening non-headless browser in order to have user complete auth process and store cookies."
            )
            context = browser.new_context()
            page = context.new_page()
            page.set_extra_http_headers(custom_headers)
            page.set_viewport_size({"width": 1900, "height": 1200})
            try:
                page.goto(f"https://{myconfig.main_url}/locker/",
                          timeout=timer_expect_navigation)
            except TimeoutError as err:
                logger.error(
                    f"Exception during authentication section: {err.__class__}"
                )
                logger.error(f"Exception = {err}")
                return False
            except Exception as err:
                logger.error(f"e.message = {err.message}")
                # NS_BINDING_ABORTED is a benign Firefox navigation abort.
                if err.message == "NS_BINDING_ABORTED":
                    logger.error(f"Ignoring {err} exception")
                    pass
                else:
                    logger.error(
                        f"Exception following WIS Authentication attempt: {err.__class__}"
                    )
                    logger.error(f"Exception = {err}")
                    return False
            # Click text=Login
            page.click("text=Login")
            # Click input[name="username"]
            logger.info("Clicking on WIS username field...")
            page.click("input[name=\"username\"]")
            try:
                logger.info(f"Waiting for My Locker...")
                page.wait_for_selector("h1:has-text(\"My Locker\")",
                                       timeout=timer_mylocker)
            except TimeoutError as err:
                logger.error(f"Timeout waiting for My Locker: {err.__class__}")
                logger.error(f"Exception = {err}")
                logger.debug("progress.emit(999999)")
                progress.emit(999999)
                return False
            except Exception as err:
                if err.message == "NS_BINDING_ABORTED":
                    logger.error(f"Ignoring {err} exception")
                    # NOTE(review): this path writes the cookie file to the
                    # current directory, while the success path below uses
                    # myconfig.cookies_directory_path — confirm intentional.
                    cookiefile = f"browser_cookie_{coachid}.json"
                    logger.info(f"Store cookie state in {cookiefile}")
                    storage_state = context.storage_state()
                    with open(cookiefile, "w") as write_file:
                        json.dump(storage_state, write_file)
                    logger.debug("progress.emit(1)")
                    progress.emit(1)
                else:
                    logger.error(
                        f"Exception during select text 'My Locker' section: {err.__class__}"
                    )
                    logger.error(f"Exception = {err}")
                    logger.debug("progress.emit(999999)")
                    progress.emit(999999)
                    return False
            else:
                logger.info(
                    "Found 'My Locker' so authentication was successful.")
                time.sleep(10)
                cookiefile = os.path.join(myconfig.cookies_directory_path,
                                          f"browser_cookie_{coachid}.json")
                logger.info(f"Store cookie state in {cookiefile}")
                storage_state = context.storage_state()
                with open(cookiefile, "w") as write_file:
                    json.dump(storage_state, write_file)
                logger.debug("progress.emit(1)")
                progress.emit(1)
                return True
            finally:
                context.close()
                browser.close()
                logger.info("Playwright browser closed.")
        # --- Cookie-based path: reuse stored state, then do the requested task
        else:
            context = browser.new_context(storage_state=storage_state)
            page = context.new_page()
            page.set_extra_http_headers(custom_headers)
            page.set_viewport_size({"width": 1900, "height": 1200})
            #page.pause()
            logger.info(
                f"Going to page https://{myconfig.main_url}/locker/ ...")
            page.goto(f"https://{myconfig.main_url}/locker/")
            if page.query_selector("text=Login"):
                page.click("text=Login")
            try:
                logger.info(f"Waiting for My Locker...")
                page.wait_for_selector("h1:has-text(\"My Locker\")",
                                       timeout=timer_mylocker)
            except TimeoutError as err:
                logger.error(f"Timeout waiting for My Locker: {err.__class__}")
                logger.error(f"Exception = {err}")
                return False
            except Exception as err:
                if err.message == "NS_BINDING_ABORTED":
                    logger.error(f"Ignoring {err} exception")
                else:
                    logger.error(
                        f"Exception during select text 'My Locker' section: {err.__class__}"
                    )
                    logger.error(f"Exception = {err}")
                    context.close()
                    browser.close()
                    logger.info("Playwright browser closed.")
                    return False
            else:
                logger.info(
                    "Found 'My Locker' so authentication was successful.")
            openDB(d)
            dbname = d.databaseName()
            logger.info(
                f"Before scraping recruits: Database name = {d.databaseName()} Connection name = {d.connectionName()} Tables = \n{d.tables()}"
            )
            logger.info(f"DB is valid: {d.isValid()}")
            logger.info(f"DB is open: {d.isOpen()}")
            logger.info(f"DB is open error: {d.isOpenError()}")
            # Team id is the 5-digit run embedded in the database file name.
            teamID = re.search(r"(\d{5})", dbname)
            cookie_teamID = {
                'name': 'wispersisted',
                'value': f'gd_teamid={teamID.group()}',
                'domain': f'{myconfig.main_url}',
                'path': '/',
                'expires': -1,
                'httpOnly': False,
                'sameSite': 'None',
                'secure': False
            }
            logger.debug(f"teamid = {teamID}")
            logger.debug(f"teamid.group() = {teamID.group()}")
            try:
                logger.info(f"Setting cookie for teamid = {teamID}")
                logger.debug(f"cookie_teamID = {cookie_teamID}")
                context.add_cookies([cookie_teamID])
            except Exception as e:
                logger.error(
                    f"Exception adding wispersisted cookie: {e.__class__}")
            else:
                cookie_debug = context.cookies(f"https://{myconfig.main_url}")
                logger.debug(
                    f"Browser cookies for {myconfig.main_url} = {cookie_debug}"
                )
                # See closed issue #55. Would be good to add additional checks to confirm the cookie changes were actually made.
                # For instance, iterate through the list of cookies in cookie_debug searching for the expected value of wispersisted.
                # If wispersisted doesn't exist or doesn't match then throw exception.
            #page.pause()
            # Task 1: just grab the Recruiting Summary page and return its DOM.
            if "grab_watched_recruits" in f:
                logger.info("In grab_watched_recruits section of WISBrowser")
                # Thread progress emit signal indicating WIS Auth is complete
                progress.emit(1)
                logger.info("Loading Recruiting Summary page ...")
                try:
                    with page.expect_navigation():
                        page.goto(f"https://{myconfig.main_url}/gd/recruiting")
                    # assert page.url == "https://www.whatifsports.com/gd/recruiting"
                    progress.emit(2)
                    page.wait_for_load_state(state='networkidle')
                    # Click h3:has-text("Recruiting Summary")
                    page.click("h3:has-text(\"Recruiting Summary\")")
                except Exception as e:
                    logger.error(
                        f"Exception loading Recruiting Summary Page: {e.__class__}"
                    )
                    recruit_summary = ""
                else:
                    # Grab page contents to parse and return
                    recruit_summary = BeautifulSoup(page.content(), "lxml")
                    logger.info("Grabbed Recruiting Summary page content")
                finally:
                    context.close()
                    browser.close()
                    logger.info("Playwright browser closed.")
                    if d.isOpen():
                        d.close()
                return recruit_summary
            # Tasks 2/3: advanced recruit search, then scrape or update.
            else:
                team_division = myconfig.wis_gd_df.division[int(
                    teamID.group())]
                # Dictionary lookup based on division and whether or not 'grab higher division' was enabled
                division_to_page_mapping = {
                    'D-IA': {
                        False:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=1&positions=1,2,3,4,5,6,7,8,9,10',
                        True:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=1,2&positions=1,2,3,4,5,6,7,8,9,10'
                    },
                    'D-IAA': {
                        False:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=2&positions=1,2,3,4,5,6,7,8,9,10',
                        True:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=1,2&positions=1,2,3,4,5,6,7,8,9,10'
                    },
                    'D-II': {
                        False:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=3&positions=1,2,3,4,5,6,7,8,9,10',
                        True:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=2,3&positions=1,2,3,4,5,6,7,8,9,10'
                    },
                    'D-III': {
                        False:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=4&positions=1,2,3,4,5,6,7,8,9,10',
                        True:
                        f'https://{myconfig.main_url}/gd/recruiting/Advanced.aspx?divisions=3,4&positions=1,2,3,4,5,6,7,8,9,10'
                    }
                }
                division_emit_progress_mapping = {
                    'D-IA': {
                        False: 1,
                        True: 2
                    },
                    'D-IAA': {
                        False: 3,
                        True: 4
                    },
                    'D-II': {
                        False: 5,
                        True: 6
                    },
                    'D-III': {
                        False: 7,
                        True: 8
                    }
                }
                settings = QSettings()
                dbname_short = dbname.split('\\')[-1]
                # Persist / recall the search URL per database in QSettings.
                if "scrape_recruit_IDs" in f:
                    recruit_search_url = division_to_page_mapping[
                        team_division][myconfig.higher_division_recruits]
                    saved_url = settings.setValue(f"{dbname_short}/searchpage",
                                                  recruit_search_url)
                if "update_considering" in f:
                    recruit_search_url = settings.value(
                        f"{dbname_short}/searchpage",
                        division_to_page_mapping[team_division][True])
                logger.debug(
                    f"Going to advanced recruit search URL = {recruit_search_url}"
                )
                page.goto(recruit_search_url)
                #page.goto("https://www.whatifsports.com/gd/recruiting/advanced.aspx")
                # assert page.url == "https://www.whatifsports.com/gd/recruiting/Search.aspx"
                div = page.query_selector('id=advanced-recruiting-table')
                logger.debug(
                    "Waiting for advanced recruit search table to stabilize..."
                )
                div.wait_for_element_state(state="stable")
                logger.debug("Advanced recruit search table has stabilized.")
                logger.debug(
                    "Grabbing page content from Advanced Search page...")
                contents = page.content()
                #page.pause()
                context.close()
                browser.close()
                logger.info("Playwright browser closed.")
                if "scrape_recruit_IDs" in f:
                    # Thread progress emit signal indicating WIS Auth is complete
                    progress.emit(
                        2, division_emit_progress_mapping[team_division][
                            myconfig.higher_division_recruits])
                    logger.info("Begin scraping recruit IDs...")
                    createRecruitQuery = get_create_recruit_query_object(d)
                    recruitIDs = get_recruitIDs(contents, d,
                                                createRecruitQuery, progress)
                    createRecruitQuery.finish()
                    d.close()
                    logger.info(f"Length of recruitIDs = {len(recruitIDs)}")
                    logger.info(
                        "Recruit initialization in database is complete.")
                    logger.debug(
                        "Saving hash of role ratings used for calculation to registry..."
                    )
                    role_ratings_hash = settings.setValue(
                        f"{dbname_short}/role_ratings_hash",
                        myconfig.role_ratings_df_hash)
                    return True
                if "update_considering" in f:
                    # Thread progress emit signal indicating WIS Auth is complete
                    progress.emit(2, 1)
                    # This section covers unsigned recruits
                    logger.info(
                        "Begin update considering for unsigned recruit IDs...")
                    createUpdateQuery = get_update_considering_query_object(d)
                    recruitIDs = update_considering(contents, d,
                                                    createUpdateQuery,
                                                    progress)
                    createUpdateQuery.finish()
                    d.close()
                    logger.info(
                        f"Number of recruits updated from page = {len(recruitIDs)}"
                    )
                    logger.info(
                        "Recruit update considering in database is complete.")
                    return True
    # This 'return False' should only be reached if there was some coding error above
    return False
def test_load_config(self):
    """Tests whether main.py=>load_config loads test config files"""
    loaded, loaded_path = main_py.load_config()
    assert loaded_path in self.test_config_paths
    assert "input" in loaded
    assert "output" in loaded