def calculate(expr):
    """Search for a calculation expression in google.

    Attempts to search google calculator for the result of an expression
    and scrape the answer out of the result page.

    Args:
        expr: Calculation expression (eg. "cos(25 pi) / 17.4" or
            "157.3kg in grams")

    Returns:
        A populated `CalculatorResult` object.  Fields that cannot be
        scraped are left to whatever the `_get_*` helpers return
        (presumably ``None`` — verify against their definitions).
    """
    url = _get_search_url(expr)
    html = get_html_from_dynamic_site(url)
    # Pin the parser explicitly, consistent with the other scrapers in this
    # file; without it bs4 warns and picks whichever parser is installed,
    # which can change the parse tree between environments.
    bs = BeautifulSoup(html, "html.parser")

    cr = CalculatorResult()
    cr.value = _get_to_value(bs)
    cr.from_value = _get_from_value(bs)
    cr.unit = _get_to_unit(bs)
    cr.from_unit = _get_from_unit(bs)
    cr.expr = _get_expr(bs)
    cr.result = _get_result(bs)
    cr.fullstring = _get_fullstring(bs)

    return cr
def calculate(expr):
    """Search for a calculation expression in google.

    Attempts to search google calculator for the result of an expression.
    Returns a `CalculatorResult` if successful or `None` if it fails.

    Args:
        expr: Calculation expression (eg. "cos(25 pi) / 17.4" or
            "157.3kg in grams")

    Returns:
        CalculatorResult object."""

    page = BeautifulSoup(get_html_from_dynamic_site(_get_search_url(expr)))

    # Every field is extracted from the same parsed page by its own helper;
    # drive the assignments from a table instead of repeating the pattern.
    extractors = (
        ("value", _get_to_value),
        ("from_value", _get_from_value),
        ("unit", _get_to_unit),
        ("from_unit", _get_from_unit),
        ("expr", _get_expr),
        ("result", _get_result),
        ("fullstring", _get_fullstring),
    )

    outcome = CalculatorResult()
    for attribute, extract in extractors:
        setattr(outcome, attribute, extract(page))

    return outcome
# Beispiel #3
# 0
async def search(query,
                 session,
                 pages=1,
                 lang='ru',
                 ncr=False,
                 void=True,
                 time_period=False,
                 sort_by_date=False,
                 first_page=0):
    """Asynchronously scrape Google result pages for *query*.

    Args:
        query: String to search in google.
        session: aiohttp-style client session used to fetch each page.
        pages: Number of pages where results must be taken.
        lang: Interface language forwarded to the search URL.
        ncr: Forwarded to the URL builder.
        void: When True, drop results with no description.
        time_period: Forwarded to the URL builder.
        sort_by_date: Forwarded to the URL builder.
        first_page: First page.

    Returns:
        A list of GoogleResult objects."""

    collected = []
    for page in range(first_page, first_page + pages):
        url = _get_search_url(query,
                              page,
                              lang=lang,
                              ncr=ncr,
                              time_period=time_period,
                              sort_by_date=sort_by_date)
        # Fresh random UA per request to look less like a bot.
        headers = {"User-Agent": UserAgent().random}

        async with session.get(url, headers=headers) as response:
            html = await response.read()

        if not html:
            continue

        soup = BeautifulSoup(html, "html.parser")
        stats_div = soup.find("div", attrs={"id": "resultStats"})
        total_results = _get_number_of_results(stats_div)

        position = 0
        for entry in soup.findAll("div", attrs={"class": "g"}):
            item = GoogleResult()

            item.page = page
            item.index = position

            item.name = _get_name(entry)
            item.link = _get_link(entry)
            item.description = _get_description(entry)
            item.number_of_results = total_results

            # Description-less hits are skipped (and do not advance the
            # index) when void is requested.
            if void is True and item.description is None:
                continue
            collected.append(item)
            position += 1
    return collected
def get_aver_num(query, lang='en'):
    """Returns average number of search results.

    Args:
        query: String to search in google.
        lang: Interface language forwarded to the search URL.

    Returns:
        int number (0 when the page could not be fetched)."""
    url = _get_search_url(query, 0, lang=lang)
    html = get_html(url)

    # No page, no count.
    if not html:
        return 0

    soup = BeautifulSoup(html, "html.parser")
    stats_div = soup.find("div", {"id": "resultStats"})
    return _get_num(stats_div)
def search(query, pages=1, lang='en', void=True, search_engine="", domain=""):
    """Return the first search result whose link contains *domain*.

    Despite the plural name, this variant stops at the first hit whose link
    contains ``domain`` (every link matches when ``domain`` is ``""``) and
    returns it as a single-element list.

    Args:
        query: String to search in google.
        pages: Maximum number of pages to fetch while looking for a match.
        lang: Interface language forwarded to the URL builder.
        void: Unused in this variant; kept for signature compatibility with
            the sibling search() functions.
        search_engine: Search-engine selector forwarded to the helpers.
        domain: Substring the result link must contain.

    Returns:
        A list with the first matching GoogleResult, or an empty list when
        no page yielded a match."""

    # NOTE(review): the original body had unreachable code after an
    # unconditional return/continue pair (a `break` after `return`, plus a
    # dead void-check/append/increment), so its index counter never advanced.
    # That dead code is removed here; observable behavior is preserved:
    # the single returned result always has index 0 and rank 1.
    for i in range(pages):
        url = _get_search_url(query, i, lang=lang, search_engine=search_engine)
        html = get_html(url)
        if not html:
            continue

        soup = BeautifulSoup(html, "html.parser")
        for li in soup.findAll("div", attrs={"class": "g"}):
            link = _get_link(li)
            # Skip results from other domains; only build the full result
            # object for the hit we actually return.
            if str(domain) not in str(link):
                continue

            res = GoogleResult()
            res.page = i
            res.index = 0
            res.name = _get_name(li)
            res.link = link
            res.google_link = _get_google_link(li, search_engine)
            res.description = _get_description(li)
            res.thumb = _get_thumb()
            res.cached = _get_cached(li, search_engine)
            res.rank = 1
            return [res]

    return []
def search(query, pages=1, lang='en', void=True):
    """Returns a list of GoogleResult.

    Args:
        query: String to search in google.
        pages: Number of pages where results must be taken.
        lang: Interface language forwarded to the search URL.
        void: When True, drop results with no description.

    Returns:
        A list of GoogleResult objects."""

    gathered = []
    for page_number in range(pages):
        html = get_html(_get_search_url(query, page_number, lang=lang))

        if not html:
            continue

        soup = BeautifulSoup(html, "html.parser")

        position = 0
        for entry in soup.findAll("div", attrs={"class": "g"}):
            result = GoogleResult()

            result.page = page_number
            result.index = position

            result.name = _get_name(entry)
            result.link = _get_link(entry)
            result.google_link = _get_google_link(entry)
            result.description = _get_description(entry)
            result.thumb = _get_thumb()
            result.cached = _get_cached(entry)

            # Description-less hits are skipped (and do not advance the
            # index) when void is requested.
            if void is True and result.description is None:
                continue
            gathered.append(result)
            position += 1

    return gathered
def search(query, pages=1, lang='en'):
    """Returns a list of GoogleResult.

    Args:
        query: String to search in google.
        pages: Number of pages where results must be taken.
        lang: Interface language forwarded to the search URL.

    Returns:
        A list of GoogleResult objects."""

    collected = []
    for page in range(pages):
        html = get_html(_get_search_url(query, page, lang=lang))

        if not html:
            continue

        soup = BeautifulSoup(html, "html.parser")
        # This variant scrapes <li class="g"> entries; every entry is kept,
        # so the index is just the enumeration position.
        for idx, item in enumerate(soup.findAll("li", attrs={"class": "g"})):
            res = GoogleResult()

            res.page = page
            res.index = idx

            res.name = _get_name(item)
            res.link = _get_link(item)
            res.google_link = _get_google_link(item)
            res.description = _get_description(item)
            res.thumb = _get_thumb()
            res.cached = _get_cached(item)

            collected.append(res)

    return collected
# Beispiel #8
# 0
def search(query,
           pages=1,
           lang='en',
           area='com',
           ncr=False,
           void=True,
           time_period=False,
           sort_by_date=False,
           first_page=0):
    """Returns a list of GoogleResult, printing crude fetch/parse timings.

    Args:
        query: String to search in google.
        pages: Number of pages where results must be taken.
        lang: Interface language forwarded to the URL builder.
        area : Area of google homepages.
        ncr: Forwarded to the URL builder.
        void: When True, drop results with no description.
        time_period: Forwarded to the URL builder.
        sort_by_date: Forwarded to the URL builder.
        first_page : First page.
    TODO: add support to get the google results.
    Returns:
        A list of GoogleResult objects."""

    start = time.time()
    found = []
    for page in range(first_page, first_page + pages):
        url = _get_search_url(query,
                              page,
                              lang=lang,
                              area=area,
                              ncr=ncr,
                              time_period=time_period,
                              sort_by_date=sort_by_date)
        html = get_html(url)

        # Crude progress/timing output kept from the original.
        urls_time = time.time()
        print(f'got html in {urls_time - start}s')

        if not html:
            continue

        soup = BeautifulSoup(html, "html.parser")
        entries = soup.findAll("div", attrs={"class": "g"})

        stats_div = soup.find("div", attrs={"id": "resultStats"})
        total_results = _get_number_of_results(stats_div)

        parse_time = time.time()
        print(f'parsed html in {parse_time - urls_time}s')

        position = 0
        for entry in entries:
            item = GoogleResult()

            item.page = page
            item.index = position

            item.name = _get_name(entry)
            item.link = _get_link(entry)
            item.google_link = _get_google_link(entry)
            item.description = _get_description(entry)
            item.thumb = _get_thumb()
            item.cached = _get_cached(entry)
            item.number_of_results = total_results

            # Description-less hits are skipped (and do not advance the
            # index) when void is requested.
            if void is True and item.description is None:
                continue
            found.append(item)
            position += 1
    return found