def searchDelay(search_work, table_list, item, query_url, db_path, req_date,
                day_limit, day_overdraft, limit_police, hour, res_list, rank,
                label_info, table_results, rate_url, lcdNumber_hour_limit,
                lcdNumber_day_limit, progressBar, value, log, btn_search,
                lineEdit_single_query, btn_input, waiting_queries):
    """Wait until the top of the next hour, then replay every deferred query.

    If *waiting_queries* is non-empty, the UI controls are disabled, the
    process sleeps until the current hour ends, and each deferred item is
    passed back to ``search`` with the same context arguments.

    Returns an empty list so the caller can reset its pending-query state.
    """
    print("searchDelay: ", end='')
    if waiting_queries:
        # Read the clock ONCE so the label and the sleep duration cannot
        # disagree (the original computed the remaining minutes twice).
        minutes_left = 60 - int(datetime.now().strftime('%M'))
        label_info.setText(
            'Запросы отложены!\nПродолжим через ' + str(minutes_left))
        btn_search.setDisabled(True)
        lineEdit_single_query.setDisabled(True)
        btn_input.setDisabled(True)
        print("sleep 10 sec.")
        # time.sleep(10)
        time.sleep(minutes_left * 60.0)
        # NOTE(review): the loop variable deliberately reuses (and shadows)
        # the ``item`` parameter, matching the original behaviour.
        for item in waiting_queries:
            print("searchDelay: for: item is ", end='')
            print(item)
            search(search_work, table_list, item, query_url, db_path,
                   req_date, day_limit, day_overdraft, limit_police, hour,
                   res_list, rank, label_info, table_results, rate_url,
                   lcdNumber_hour_limit, lcdNumber_day_limit, progressBar,
                   value, log, btn_search, lineEdit_single_query, btn_input,
                   waiting_queries)
    # All deferred queries have been replayed; hand back an empty queue.
    # (The stray trailing '''' in the original was removed: it opened an
    # unterminated triple-quoted string.)
    return []
def test_requests(self, mock_get):
    """The bot should relay the mocked DuckDuckGo abstract via say()."""
    fake_response = MagicMock()
    fake_response.json.return_value = {
        "AbstractText": "TestText",
        "AbstractURL": "https://testurl.com",
    }
    mock_get.return_value = fake_response
    self.input.group.return_value = 'test'
    search.search(self.phenny, self.input)
    self.phenny.say.assert_called_with('TestText - https://testurl.com')
def test_requests(self, mock_get):
    """search() must say the abstract text and URL returned by the API."""
    payload = {"AbstractText": "TestText", "AbstractURL": "https://testurl.com"}
    response_stub = MagicMock()
    response_stub.json.return_value = payload
    mock_get.return_value = response_stub
    self.input.group.return_value = 'test'
    search.search(self.phenny, self.input)
    self.phenny.say.assert_called_with('TestText - https://testurl.com')
def main(dork, scan):
    """Run a Yahoo dork search and optionally crawl the result sub-pages.

    dork -- the search expression; an empty string disables the run.
    scan -- scan configuration forwarded to ``search.search``.

    Relies on module-level ``style``, ``search``, ``args``, ``sub`` and
    ``log`` being initialised by the CLI entry point.
    """
    if dork != '':  # was `not dork == ''` -- same test, clearer form
        print("%sUsing the dork: %s%s" % (style.WHITE, style.BLUE, dork))
        url = 'https://search.yahoo.com/search?p=%s' % (dork)
        search.search(url, scan, log, args.verbose, args.tor)
        if sub:  # was `sub == True`; truthiness of the flag is enough
            print('%sSearching in sub pages...' % (style.WHITE))
            time.sleep(1)
            for page in search.sub_pages:
                search.search(page, scan, log, args.verbose, args.tor)
def search_handle(widget):
    """UI handler: run a script search and prepend each hit to the results."""
    import modules.search as search
    query = search_input.value
    hits = search.search(search_url, file_extension, search_local,
                         cache_location, query)
    for hit in hits:
        search_result.data.insert(0, hit)
def search_route():
    """Render the search page, applying any filters passed as query args.

    A logged-in user who supplies no filters gets their personalised
    default results; everyone else gets a filtered global search.
    """
    selections = Selections(DB_NAME)
    initial_selections = selections.get_selections()

    def query_arg(key):
        # Every filter is an optional string query parameter (None if absent).
        return request.args.get(key, default=None, type=str)

    institutions = query_arg("institutions")
    faculties = query_arg("faculties")
    lecturers = query_arg("lecturers")
    courses = query_arg("courses")
    years = query_arg("years")
    types = query_arg("types")
    freetext = query_arg("freetextsearch")

    # NOTE: ``types`` is deliberately excluded from this no-filter check,
    # matching the original behaviour.
    no_filters = (institutions is None and faculties is None
                  and lecturers is None and courses is None
                  and years is None and freetext is None)
    if "username" in session and no_filters:
        search_res = default_search_for_user(DB_NAME, session["username"])
        return render_template("search.html", **initial_selections,
                               search_res=search_res)

    search_res = search(DB_NAME, institutions, faculties, lecturers,
                        courses, years, types, freetext)
    return render_template("search.html", **initial_selections,
                           freetext=freetext, search_res=search_res)
def results():
    """Render the projects matching the requested keyword/filter.

    Shows at most the first 100 matches while still reporting the total
    match count; renders an error page when nothing matches.
    """
    matches = search.search(request.args.get('keyword'),
                            request.args.get('filter'), projectList)
    if not matches:
        return render_template('matchError.html')
    total = len(matches)
    if total > 100:
        # BUG FIX: the original sliced matches[1:100], silently dropping
        # the first match and rendering only 99 rows instead of 100.
        return render_template('projects.html', matches=matches[:100],
                               results=total)
    return render_template('projects.html', matches=matches, results=total)
def post(self, request):
    """Handle the shop search form: look up products by the submitted text."""
    query = request.POST['search-text']
    products = search(query)
    context = {
        'login_form': LoginForm(),
        'signup_form': SignUpForm(),
    }
    if products:
        context['products'] = products
    else:
        messages.error(request, 'No product found.')
    return render(request, 'shop.html', context)
def getSuccessPercentageStats(projectList):
    """Return the overall percentage of successful projects.

    Returns a dict with a single key, "Total Success", holding the share
    of projects in *projectList* whose state is "successful", rounded to
    two decimal places.  Raises ZeroDivisionError on an empty list, as
    the original did.
    """
    successful = search.search("successful", "state", projectList)
    # len() replaces the original manual `+= 1` counting loops; the unused
    # getCategoryStats() call was removed as dead code.
    return {
        "Total Success": round(len(successful) / len(projectList) * 100, 2),
    }
def find_metabolite(search_term, search_attr, discover: bool = True,
                    cache: bool = True) -> Sequence[MetaboliteView]:
    """Look up a metabolite by a single attribute value.

    Checks the local search cache first; on a miss (with *discover* set)
    the id is resolved remotely, otherwise an empty list is returned.
    """
    # todo: @later: also fetch all metabolites
    cached = search('metabolite', search_term, attrs=[search_attr])
    if cached:
        # Cache hit: materialise full views for every matched entity id.
        return get_metabolites([entry.entity_id for entry in cached])
    if discover:
        # Cache miss: resolve the term remotely and wrap the single view.
        return [resolve_single_id(search_attr, search_term)]
    return []
def search_metabolite(search_term, search_attr: list, discover: bool = True,
                      cache: bool = True, verbose=True):
    """Search the cache for a metabolite; on a miss, discover and cache it.

    search_term -- the value to look up.
    search_attr -- attribute(s) to match; a bare scalar is wrapped in a list.
    discover    -- when True, resolve an uncached term remotely.
    cache       -- when True, persist the discovered metabolite and its
                   search entry to the repository.
    verbose     -- forwarded to the remote resolver.

    Returns a list of search items (empty when the term is unknown and
    discovery is disabled).
    """
    # Normalise a scalar attribute into a one-element list; None stays None.
    if not isinstance(search_attr, (list, tuple, set)) and search_attr is not None:
        search_attr = [search_attr]
    result = search('metabolite', search_term, attrs=search_attr)
    if not result and discover:
        # search term is not cached. initiate discovery
        mv, resp = resolve_single_id(search_attr, search_term, verbose=verbose,
                                     cache=cache)
        if cache:
            # persist MetaView to Meta object
            meta = view_to_db(mv)
            # todo: @temporal id -- random 6-letter placeholder until a real
            # id scheme exists
            mv.meta_id = "".join(random.choices(string.ascii_uppercase, k=6))
            meta.meta_id = mv.meta_id
            _meta_repo.create(meta)
            # cache appropriate cache entry as well
            sr = cache_search_metabolite(mv, search_attr)
        #else:
        # fake search, as it's either stored in DB or should be faked anyway
        # NOTE(review): this unconditionally overwrites ``sr`` even when the
        # cache branch above already produced one -- the commented-out
        # ``else:`` suggests it was once conditional; confirm intent.
        sr = SearchItem(search_term=search_term, search_attr=search_attr)
        sr.endpoint, sr.entity_id = mv.search_endpoint
        result = [sr]
    return result
def last_updates():
    """Render rows whose update timestamps fall within ``limit_by_days``.

    Fetches every record, then replaces the two timestamp columns
    (indexes 1 and 2) with ``(formatted_time, show)`` pairs, where
    ``show`` is 1 when the timestamp is recent enough to display.
    """
    search_res = search(DB_NAME, 'all', 'all', 'all', 'all', 'all', 'all', 'all')
    # Removed a leftover debug print of the current date.
    today = datetime.now().date()
    rows = [list(line) for line in search_res]
    for row in rows:
        # Only columns 1 and 2 hold timestamps; iterate them directly
        # instead of scanning every index as the original did.
        for idx in (1, 2):
            if idx >= len(row):
                continue
            stamp = datetime.strptime(row[idx], '%Y-%m-%d %H:%M:%S.%f').date()
            age = today - stamp
            show = 1 if age.days <= limit_by_days else 0
            row[idx] = (parse_file_time(row[idx]), show)
    return render_template('last_updates.html', search_res=rows)
def test_search(self):
    """A search for 'Apertium' must trigger at least one reply via say()."""
    self.input.group.return_value = 'Apertium'
    search.search(self.phenny, self.input)
    self.assertTrue(self.phenny.say.called)
def getMostSuccessfulCategoryStats(projectList):
    """Return the success rate (as a percentage) for every main category.

    For each tracked category, computes
    ``successful projects / total projects * 100`` rounded to two decimal
    places.  A category with zero total projects raises ZeroDivisionError,
    exactly as the original hand-written version did.
    """
    # Ampersand (&) renders badly on the graph, so the data's
    # "Film & Video" is displayed as "Film and Video".
    display_name = {"Film & Video": "Film and Video"}
    categories = ["Comics", "Crafts", "Dance", "Fashion", "Film and Video",
                  "Food", "Journalism", "Games", "Music", "Photography",
                  "Publishing", "Technology", "Theater"]

    # Count successes per category (replaces the 13-branch if/elif chain).
    successCounts = dict.fromkeys(categories, 0)
    for project in search.search("successful", "state", projectList):
        key = display_name.get(project['main_category'],
                               project['main_category'])
        if key in successCounts:  # categories outside the table are ignored
            successCounts[key] += 1

    # Per-category totals form the denominator of each percentage.
    categoryStats = getCategoryStats(projectList)
    return {
        cat: round(successCounts[cat] / categoryStats[cat] * 100, 2)
        for cat in categories
    }
def getFailedTakeoffStats(projectList):
    """Count projects with zero backers, grouped by main category.

    Returns a dict keyed by display category name; categories never seen
    in *projectList* stay at 0.
    """
    # Ampersand (&) renders badly on the graph, so "Film & Video" is
    # displayed as "Film and Video".
    display_name = {"Film & Video": "Film and Video"}
    categories = ["Comics", "Crafts", "Dance", "Fashion", "Film and Video",
                  "Food", "Journalism", "Games", "Music", "Photography",
                  "Publishing", "Technology", "Theater"]

    # Tally per category (replaces the 13-branch if/elif chain).
    failedTakeoffStats = dict.fromkeys(categories, 0)
    for project in search.search("0", "backers", projectList):
        key = display_name.get(project['main_category'],
                               project['main_category'])
        if key in failedTakeoffStats:  # untracked categories are ignored
            failedTakeoffStats[key] += 1
    return failedTakeoffStats
def test_can_search_with_empty_db(self):
    """Searching an empty database must yield no results."""
    hits = sm.search("database.db", "all", "all", "all", "all",
                     "1984", "all", "2+2=5")
    assert hits == []
# NOTE(review): this chunk begins INSIDE a ``try:`` block whose opening
# statement lies before the visible portion of the file; the two leading
# assignments and the bare ``except:`` below belong to it.
    file_extension = config['Remote.file_extension']
    language_selected = config['Languages.selected']
# NOTE(review): bare except swallows every error type; narrowing it
# (e.g. to KeyError) would surface real failures.
except:
    print(
        'Config not able to be imported. Run \"python3 yapi.py config\" to fix the error'
    )

#Main Program
# Dispatch on argument count: no args -> interactive UI; one arg -> the
# 'config' subcommand; two args -> search/download/run/install.
if len(sys.argv) == 1:
    result = interface.start()
elif len(sys.argv) == 2:
    if sys.argv[1] == 'config':
        config_import.update_config()
elif len(sys.argv) == 3:
    if sys.argv[1] == 'search':
        # Look the term up remotely/locally and print every match.
        matches = search.search(search_url, file_extension, search_local,
                                cache_location, sys.argv[2])
        for match in matches:
            print(match)
    elif sys.argv[1] == 'download':
        # Fetch <name><ext> from the remote scripts directory into the cache.
        file_name = sys.argv[2] + file_extension
        file_url = remote_location + os_platform + '/' + remote_branch + '/scripts/' + file_name
        os.chdir(cache_location)
        output = installer.get_file(file_url, file_name)
    elif sys.argv[1] == 'run':
        # Execute a previously cached script.
        file_name = sys.argv[2] + file_extension
        os.chdir(cache_location)
        output = installer.run_script(file_name, cache_boolean)
    elif sys.argv[1] == 'install':
        # Download and run in one step.
        output = installer.full_install(sys.argv[2])
# NOTE(review): fragment of a larger docopt-style CLI dispatcher.  The names
# ``arg``, ``scoring``, ``spellCheck``, ``correct``, ``query``, ``batch``,
# ``confirm``, ``openIndex``, ``indexDir``, ``limit`` and ``wildcard`` are
# defined outside this chunk, and the trailing ``elif arg['index']:``
# continues an enclosing if/elif chain whose header is also outside view.
if arg['--tf-idf']:
    modelScoring = scoring.TF_IDF()
elif arg['--freq']:
    modelScoring = scoring.Frequency()
else:
    # Default ranking model when no scoring flag is given.
    modelScoring = scoring.BM25F()
if spellCheck:
    fixed = correct(query)
    if fixed != query:
        # Batch mode applies the correction silently; otherwise ask the user.
        if batch or confirm("Did you mean: `" + fixed + "`?"):
            query = fixed
print("Searching for `" + query + "`")
index = openIndex(indexDir)
r = search(query, index, modelScoring, limit, wildcard)
for result in r:
    with open(result["path"], "r") as f:
        # Collapse newlines and space runs to build a one-line preview.
        body = re.sub(" +", " ", f.read().replace("\n", " "))
    # Print the doc id plus the first ten words of the body as a snippet.
    print("[%s] %s..." % (result["id"], " ".join(body.split(" ")[:10])))
index.close()
print(
    "Found %d matching documents, displaying the first %d (use --limit to change this)"
    % (len(r), limit))
# NOTE(review): the lines below belong to the enclosing chain and are cut
# off mid-branch; ``if arg['--delete']:`` has no visible body here.
elif arg['index']:
    if arg['--delete']: