from urllib import request
from urllib.parse import urlencode

# NOTE: `headers` (a dict of HTTP request headers) and `_calculate_tk`
# (the token generator for translate.google.com) are assumed to be defined
# elsewhere in this module.


def normal_translate(proxy_url, source, from_lang='auto', to_lang='en', host=None, type_=None):
    data = {"q": source}
    base_url = "http://translate.google.com/translate_a/t?client=webapp&dt=bd&dt=ex&dt=ld&dt=md&dt=qca&dt=rw&dt=rm&dt=ss&dt=t&dt=at&ie=UTF-8&oe=UTF-8&otf=2&ssel=0&tsel=0&kc=1"
    url = '{url}&sl={from_lang}&tl={to_lang}&hl={to_lang}&tk={tk}'.format(
        url=base_url,
        from_lang=from_lang,
        to_lang=to_lang,
        tk=_calculate_tk(source),
    )
    try:
        encoded_data = urlencode(data).encode('utf-8')
        req = request.Request(url=url, headers=headers, data=encoded_data)

        proxies = {'http': proxy_url}
        print("Using HTTP proxy %s" % proxies['http'])

        # An HTTPBasicAuthHandler with no password manager has no
        # credentials to offer, so it does nothing until some are added.
        authinfo = request.HTTPBasicAuthHandler()
        proxy_support = request.ProxyHandler({"http": proxy_url})
        # Build an opener that routes requests through the proxy
        # (CacheFTPHandler only matters for ftp:// URLs and is harmless here).
        opener = request.build_opener(proxy_support, authinfo,
                                      request.CacheFTPHandler)
        # Install it globally so request.urlopen() below uses the proxy.
        request.install_opener(opener)
        resp = request.urlopen(req)
        content = resp.read()
        return content.decode('utf-8')
    except Exception as e:
        print(e)
        return None
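A minimal usage sketch, assuming a local HTTP proxy at 127.0.0.1:8080 (a placeholder address) and the module-level `headers` and `_calculate_tk` noted above:

if __name__ == '__main__':
    # Placeholder proxy URL; substitute a reachable HTTP proxy.
    translated = normal_translate('http://127.0.0.1:8080',
                                  'Hola mundo', from_lang='es', to_lang='en')
    if translated is not None:
        print(translated)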
Example 2
def _get_json5(self, url, host=None, type_=None):
    req = request.Request(url=url, headers=self.headers)
    if host or type_:
        # Scope the proxy to this single request instead of installing
        # a global opener.
        req.set_proxy(host=host, type=type_)
    r = request.urlopen(req)
    content = r.read()
    return content.decode('utf-8')
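Unlike the `install_opener` approach above, `Request.set_proxy` scopes the proxy to this one request, so other requests in the process are unaffected. A hedged sketch of a call, where `Translator` is a hypothetical class exposing this method and the proxy address is a placeholder:

t = Translator()  # hypothetical class defining _get_json5 and self.headers
body = t._get_json5('http://translate.google.com/translate_a/t?client=webapp',
                    host='127.0.0.1:8080', type_='http')
print(body[:200])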
Example 3
def _get_json5(self, url, host=None, type_=None, data=None):
    # Encode the payload only when one is given: urlencode(None) raises,
    # and passing data makes urlopen issue a POST instead of a GET.
    encoded_data = urlencode(data).encode('utf-8') if data is not None else None
    req = request.Request(url=url, headers=self.headers, data=encoded_data)
    if host or type_:
        req.set_proxy(host=host, type=type_)
    resp = request.urlopen(req)
    content = resp.read()
    return content.decode('utf-8')
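The same hypothetical call for this POST variant; supplying `data` form-encodes the payload into the request body:

body = t._get_json5('http://translate.google.com/translate_a/t?client=webapp',
                    host='127.0.0.1:8080', type_='http',
                    data={'q': 'Hola mundo'})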
def normal_google(proxy_url):
    base_url = "http://google.com"
    try:
        req = request.Request(url=base_url, headers=headers)
        proxies = {'http': proxy_url}
        print("Using HTTP proxy %s" % proxies['http'])
        # As above: the bare HTTPBasicAuthHandler carries no credentials,
        # and CacheFTPHandler only affects ftp:// URLs.
        authinfo = request.HTTPBasicAuthHandler()
        proxy_support = request.ProxyHandler({"http": proxy_url})
        opener = request.build_opener(proxy_support, authinfo,
                                      request.CacheFTPHandler)
        # Install the opener globally so request.urlopen() uses the proxy.
        request.install_opener(opener)
        resp = request.urlopen(req)
        content = resp.read()
        return content.decode('utf-8')
    except Exception as e:
        print(e)
        return None
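A quick smoke test of the proxy path, again with a placeholder proxy URL:

if __name__ == '__main__':
    page = normal_google('http://127.0.0.1:8080')  # placeholder proxy
    if page is not None:
        print('fetched %d characters' % len(page))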