def test_buildMap(self):
    """buildMap groups words that are anagrams under their sorted-letter key."""
    finder = Finder("./test.txt")
    finder.lines = ["dog", "cat", "god"]
    finder.buildMap()
    # "dog"/"god" share the sorted key "dgo"; "cat" sorts to "act".
    self.assertEqual({"dgo": ["dog", "god"], "act": ["cat"]}, finder.map)
def test_buildMapEmpty(self):
    """buildMap on an empty line list yields an empty map."""
    finder = Finder("./test.txt")
    finder.lines = []
    finder.buildMap()
    self.assertEqual({}, finder.map)
class HTTPHandler(BaseHTTPRequestHandler):
    """Serve product recommendations over HTTP.

    GET /?sku=<sku>&power=<float> responds with JSON {"rec": <recommendation>}.
    """

    # Shared across all requests; loaded once when the class is created.
    finder = Finder('recommends.csv')

    @staticmethod
    def parse_query_string_(qs):
        """Parse a request path such as '/?sku=X&power=1.5' into (sku, power).

        qs[2:] skips the leading '/?'. Repeated parameters keep only their
        first value. A missing or non-numeric 'power' falls back to 0.
        """
        qs_dict = parse_qs(qs[2:])
        for key in qs_dict:
            # parse_qs yields lists; keep the first value of each parameter.
            qs_dict[key] = qs_dict[key][0]
        sku = qs_dict.get('sku', '')
        try:
            power = float(qs_dict.get('power', 0))
        except ValueError:
            power = 0
        return sku, power

    def do_GET(self):
        """Look up a recommendation for the requested sku/power pair."""
        sku, power = self.parse_query_string_(self.path)
        if not sku:
            # A missing sku is a malformed request: status 400. (The original
            # sent 403 — Forbidden — paired with the 400 reason phrase
            # 'Bad Request', an inconsistent code/message combination.)
            self.send_response(400, message='Bad Request')
            self.end_headers()
        else:
            rec = self.finder.find_rec(sku, power)
            if rec == -1:
                # Finder signals "unknown product" with -1.
                self.send_response(404, message='Product not found')
                self.end_headers()
            else:
                resp_dict = {'rec': rec}
                self.send_response(200, message='OK')
                self.end_headers()
                self.wfile.write(json.dumps(resp_dict).encode())
def createTeam(firstIndex, secondIndex, isRed, first='OffensiveAgent', second='DefensiveDummyAgent'):
    """
    This function should return a list of two agents that will form the
    team, initialized using firstIndex and secondIndex as their agent
    index numbers. isRed is True if the red team is being created, and
    will be False if the blue team is being created.

    As a potentially helpful development aid, this function can take
    additional string-valued keyword arguments ("first" and "second" are
    such arguments in the case of this function), which will come from
    the --redOpts and --blueOpts command-line arguments to capture.py.
    For the nightly contest, however, your team will be created without
    any extra arguments, so you should make sure that the default
    behavior is what you want for the nightly contest.
    """
    # Finder() already runs __init__; the original called
    # locationFinder.__init__() a second time, re-running initialization
    # (and any side effects) redundantly. The single construction suffices.
    locationFinder = Finder()

    # HACK: eval() on the agent-name strings executes arbitrary expressions
    # from --redOpts/--blueOpts; acceptable only because these come from a
    # trusted local command line. A name->class dict would be safer.
    return [
        eval(first)(firstIndex, locationFinder),
        eval(second)(secondIndex, locationFinder)
    ]
def __init__(self, query):
    """Load the stop-word list and tokenize *query*.

    Python 2 era code (uses ``unicode`` and eager ``map``); behavior kept.
    """
    # with-statement closes the handle deterministically; the original
    # leaked the codecs.open file object.
    with codecs.open('stop_words.txt', 'r', 'utf-8') as fh:
        self.stop_words = map(lambda x: unicode(x), fh.read().split())
    self.finder = Finder()
    self.query = query
    # parse() consumes self.query/self.stop_words set above.
    self.tokens = self.parse()
def scores_for(puzzle):
    """Return sorted (score, path_values, path) triples for every path in *puzzle*."""
    normalized = normalize_nums(puzzle)
    finder = Finder(Grid(normalized))
    graph = finder.graph
    triples = []
    for path in finder.find_all_paths():
        score = Score.calculate_from(graph.path_values(path))
        # path_values is invoked again for the tuple, exactly as before.
        triples.append((score, graph.path_values(path), path))
    triples.sort()
    return triples
def on_post(self, req, resp):
    """Handle POST: resolve the devices listed under 'Clients' in the JSON body."""
    device_finder = Finder()
    payload = json.loads(req.stream.read())
    resp.status = falcon.HTTP_200
    # Allow any origin so browser clients can call this endpoint.
    resp.set_header('Access-Control-Allow-Origin', '*')
    resp.body = device_finder.findMultipleDevices(payload['Clients'])
def setUp(self):
    """Build 10k random 10-char lowercase strings, a Finder over them, and a timer."""
    chars_per_string = 10
    total_strings = 10000
    alphabet = string.ascii_lowercase
    random_strings = []
    for _ in range(total_strings):
        # One random.choice call per character, same draw order as before.
        letters = [random.choice(alphabet) for _ in range(chars_per_string)]
        random_strings.append(''.join(letters))
    self.first_array_str = random_strings[0]
    self.finder = Finder(random_strings)
    self.start_time = time.time()
def gather_links(page_url):
    """Fetch *page_url* and return the set of links found on it.

    Non-HTML responses feed an empty string to the finder; any failure
    (network error, bad decode, parse error) yields an empty set.
    """
    html_string = ''
    try:
        # with-statement closes the connection; the original never closed
        # the urlopen response object.
        with urlopen(page_url) as response:
            if 'text/html' in response.getheader('Content-Type'):
                html_bytes = response.read()
                html_string = html_bytes.decode('utf-8')
        finder = Finder(Spider.base_url, page_url)
        finder.feed(html_string)
    except Exception as e:
        # Best-effort crawl: report and continue with no links.
        print(str(e))
        return set()
    return finder.page_links()
def crawl(url):
    """Fetch *url* (unless already crawled), harvest its links and page data."""
    if url in spider.links_crawled:
        return
    request = Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    with urlopen(request) as response:
        raw = response.read()
    parser = Finder()
    parser.baseurl = spider.website_url
    # Decode then unescape HTML entities before parsing.
    parser.feed(html.unescape(raw.decode('utf-8')))
    parser.close()
    links = parser.return_links()
    # Swap the crawled url for the links it produced, then record it.
    spider.links_website.remove(url)
    spider.links_website.extend(links)
    spider.links_crawled.append(url)
    spider.data_dict[url] = parser.return_data()
class Test(unittest.TestCase):
    """Tests for Finder phrase extraction against the test.html fixture."""

    # Class-level fixture: built once at class-creation (import) time.
    finder = Finder()
    html = ""
    path = "test.html"
    phrase = "Tekst"
    finder.setPhrase(phrase)
    finder.setOffset(30)
    params = ["p", "h3"]
    try:
        # with-statement closes the handle; the original leaked it and
        # shadowed the builtin name 'file'. OSError replaces the bare
        # except, which also swallowed unrelated errors.
        with open(path, "r") as fixture:
            html = fixture.read()
    except OSError:
        print("Cannot open file")
        sys.exit()

    def test_find_phrase_with_params(self):
        """Phrase search restricted to the configured tags (p, h3)."""
        soup = BeautifulSoup(self.html, "html.parser")
        self.finder.setParams(self.params)
        a = self.finder.find_phrase_with_params(soup, "test")
        b = [
            'Tekst in paragraph.Tekst in heading',
            'Tekst in paragraph.Tekst in heading3'
        ]
        self.assertEqual(a, b)

    def test_find_phrase_without_params(self):
        """Phrase search over the whole document, no tag restriction."""
        soup = BeautifulSoup(self.html, "html.parser")
        a = self.finder.find_phrase_without_params(soup, "test")
        b = [
            'Tekst in heading1Tekst in paragrap',
            'Tekst in heading1Tekst in paragraph.Tekst in headin',
            'heading1Tekst in paragraph.Tekst in heading3'
        ]
        self.assertEqual(a, b)
async def message(msg: types.Message):
    """Search avito.ru for the incoming message text and reply with the result."""
    query = msg.text
    searcher = Finder(
        'avito.ru',
        query,
        'https://www.avito.ru/rossiya?q=',
        'price-text-1HrJ_',
        'span',
    )
    await bot.send_message(msg.from_user.id, searcher.find())
def on_get(self, req, resp):
    """Handle GET: return every known device, with CORS open to any origin."""
    device_finder = Finder()
    resp.set_header('Access-Control-Allow-Origin', '*')
    resp.status = falcon.HTTP_200
    resp.body = device_finder.findAllDevices()
def index():
    """POST endpoint: run a Finder analysis and persist the result paths.

    Expects a form whose single key is a Python-literal dict with keys
    'email' and 'uuid4', plus optional 'accession', 'protein', 'sequence',
    'similarity' (percent) and 'basepairs'.
    """
    paths = None
    print('Request - 202 - Analysis beginning')
    if request.method == 'POST':
        data = list(request.form.keys())[0]
        # NOTE(review): literal_eval parses client-supplied text; safe against
        # code execution but can still raise on malformed input.
        data = ast.literal_eval(data)
        email = data['email']
        accession = data.get('accession', '')
        protein = data.get('protein', '')
        sequence = data.get('sequence', '')
        similarity = data.get('similarity', '')
        basepairs = data.get('basepairs', '')
        # expect: int = data['evalue'] if 'evalue' in data else 10
        print('Launching finder ... ')
        # send_email(email, data['uuid4'])

        # Build the Finder kwargs once, replacing the original 8-way
        # duplicated branch over (sequence, similarity, basepairs).
        kwargs = {'email': email}
        if sequence:
            kwargs['seq'] = sequence
        else:
            print(similarity, basepairs)  # original debug output, kept
            kwargs['accession'] = accession
            kwargs['coreGene'] = protein
        if similarity:
            # Form sends a percentage; Finder wants a fraction.
            kwargs['similarity'] = float(similarity) / 100
        if basepairs:
            kwargs['bp'] = int(basepairs)
        finder = Finder(**kwargs)

        paths = finder.finder()
        print('Finder completed ... ')
        if paths:
            print('Insert into DB')
            insert_job_number(data['uuid4'])
            insert_job_data(data['uuid4'], paths)
        else:
            print('empty')
    return jsonify('AMI Motivated')
from conf import Cfg
from finder import Finder
from convertor import Convertor
from storage import Storage
import glob
import os
from parser import Parser
import scholarly

cfg = Cfg()
db = Storage(cfg)
db.load()
finder = Finder(cfg)
convertor = Convertor(cfg)


def process_file(path):
    """Register and convert *path* unless its checksum is already stored."""
    digest = Storage.file_checksum(path)
    if digest in db.data:
        print("file {} already processed ({})".format(path, digest))
        return
    db.store(digest, {"pdf": path})
    convertor.convert(path)


for pdf in finder.find_all():
    process_file(pdf)

# Re-read storage so db.data reflects everything stored above.
db.load()
def setUp(self):
    """Give each test a fresh empty directed graph and a Finder bound to it."""
    graph = nx.DiGraph()
    self.g = graph
    self.f = Finder(graph)
import os
from finder import Finder
from flask import Flask, render_template, url_for, request, redirect, session

app = Flask(__name__)
# NOTE(review): hard-coded secret key — acceptable for local development only.
app.secret_key = "37825789567878456784867878680"
# Single shared Finder instance for the whole app.
f = Finder()


@app.context_processor
def override_url_for():
    # Make templates use the cache-busting url_for variant below.
    return dict(url_for=dated_url_for)


def dated_url_for(endpoint, **values):
    """url_for that appends ?q=<file mtime> to static files to defeat caching."""
    if endpoint == 'static':
        filename = values.get('filename', None)
        if filename:
            file_path = os.path.join(app.root_path, endpoint, filename)
            # File modification time changes whenever the asset changes,
            # forcing browsers to refetch it.
            values['q'] = int(os.stat(file_path).st_mtime)
    return url_for(endpoint, **values)


@app.route("/", methods=["POST", "GET"])
def home():
    # NOTE(review): this handler appears truncated at the end of the visible
    # chunk — it reads the form fields and presumably hands them to the
    # Finder; confirm against the full file before relying on this.
    if request.method == "POST":
        url = request.form["url"]
        term = request.form["term"]
        condition = request.form["condition"]
        maxresults = request.form["maxresults"]