Example #1
    def _register_agent(self):
        register_name = self.app.config.get('TRCDASH_REGISTER_AS')
        if not register_name:
            register_name = socket.gethostname()

        url_args = {
            'name': register_name,
            'port': self.app.config.get('TRCDASH_PORT', self.DEFAULT_PORT),
        }
        register_url = '%s/register?%s' % (
            self.app.config['TRCDASH_REGISTER_TO'], urllib.urlencode(url_args))

        if 'TRCDASH_AUTH_USERNAME' in self.app.config and 'TRCDASH_AUTH_PASSWORD' in self.app.config:
            auth_handler = urllib2.HTTPBasicAuthHandler()
            auth_handler.add_password(
                realm='TRCDash login required',
                uri=register_url,
                user=self.app.config['TRCDASH_AUTH_USERNAME'],
                passwd=self.app.config['TRCDASH_AUTH_PASSWORD'])
            opener = urllib2.build_opener(auth_handler)
            urllib2.install_opener(opener)

        try:
            urllib2.urlopen(register_url)
        except urllib2.HTTPError as e:
            logger.error('Failed to register agent to "%s": %s', register_url,
                         e)
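For reference, the same basic-auth registration pattern can be exercised on its own. The sketch below is not part of the project above; the URL and credentials are placeholders:

import urllib2

# Hypothetical endpoint and credentials, for illustration only.
register_url = 'http://dashboard.example.com/register?name=agent1&port=5000'

auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
    realm='TRCDash login required',  # must match the realm the server sends
    uri=register_url,
    user='agent',
    passwd='secret')
urllib2.install_opener(urllib2.build_opener(auth_handler))

try:
    urllib2.urlopen(register_url)
except urllib2.HTTPError as e:
    print('registration failed: %s' % e)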
Example #2
 def init(self):
     self.AUTH_MGR = urllib2.HTTPPasswordMgrWithDefaultRealm()
     self.AUTH_MGR.add_password(None, "https://%s/" % (self.hostname),
                                r'%s\%s' % (self.domain, self.username),
                                self.password)
     self.AUTH = HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(self.AUTH_MGR)
     self._handler = urllib2.HTTPHandler(debuglevel=self.debug)
     self._opener = urllib2.build_opener(self.AUTH)
     urllib2.install_opener(self._opener)
Example #3
def register_openers():
    """Register the streaming http handlers in the global urllib3 default
    opener object.

    Returns the created OpenerDirector object."""
    handlers = [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
    if hasattr(httplib, "HTTPS"):
        handlers.append(StreamingHTTPSHandler)

    opener = urllib2.build_opener(*handlers)

    urllib2.install_opener(opener)

    return opener
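This helper appears to come from the poster streaming-upload library; assuming that package, a typical call site looks roughly like the sketch below (the upload URL and file name are placeholders):

from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import urllib2

register_openers()  # install the streaming handlers once, at startup

# Stream a file upload instead of reading it fully into memory first.
datagen, headers = multipart_encode({'file': open('report.csv', 'rb')})
request = urllib2.Request('http://example.com/upload', datagen, headers)
response = urllib2.urlopen(request)
print(response.read())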
Example #5
def deploy():
    opts, args = parse_opts()
    if not inside_project():
        _log("Error: no Scrapy project found in this location")
        sys.exit(1)

    _delete_old_package()

    urllib2.install_opener(urllib2.build_opener(HTTPRedirectHandler))

    if opts.list_targets:
        for name, target in _get_targets().items():
            print ("%-20s %s" % (name, target['url']))
        return

    if opts.list_projects:
        target = _get_target(opts.list_projects)
        req = urllib2.Request(_url(target, 'listprojects.json'))
        _add_auth_header(req, target)
        f = urllib2.urlopen(req)
        projects = json.loads(f.read())['projects']
        print (os.linesep.join(projects))
        return

    tmpdir = None

    # build egg only
    if opts.build_egg:
        egg, tmpdir = _build_egg()
        _log("Writing egg to %s" % opts.build_egg)
        shutil.copyfile(egg, opts.build_egg)
    elif opts.deploy_all_targets:
        version = None
        for name, target in _get_targets().items():
            if version is None:
                version = _get_version(target, opts)
            _build_egg_and_deploy_target(target, version, opts)
    else: # build egg and deploy
        target_name = _get_target_name(args)
        target = _get_target(target_name)
        version = _get_version(target, opts)
        exitcode, tmpdir = _build_egg_and_deploy_target(target, version, opts)

    if tmpdir:
        if opts.debug:
            _log("Output dir not removed: %s" % tmpdir)
        else:
            shutil.rmtree(tmpdir)
            _delete_old_package()
Example #6
 def linkWithPorxy(self, line):
     lineList = line.split('\t')
     protocol = lineList[2].lower()
     server = protocol + r'://' + lineList[0] + ':' + lineList[1]
     opener = urllib2.build_opener(urllib2.ProxyHandler({protocol: server}))
     urllib2.install_opener(opener)
     try:
         response = urllib2.urlopen(self.URL, timeout=self.timeout)
     except Exception:
         print('%s connect failed' % server)
         return
     else:
         try:
             content = response.read()
         except Exception:
             print('%s connect failed' % server)
             return
         if self.regex.search(content):
             print('%s connect success .........' % server)
             self.aliveList.append(line)
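For a standalone illustration of the proxy-probing idea above, the sketch below builds the same kind of opener from one list entry; it assumes the host<TAB>port<TAB>protocol line layout implied by the split, and the proxy address is a placeholder:

import urllib2

line = '203.0.113.10\t8080\tHTTP'  # placeholder proxy-list entry
host, port, protocol = line.split('\t')
protocol = protocol.lower()
server = '%s://%s:%s' % (protocol, host, port)

# Route all requests made through the default opener via this proxy.
opener = urllib2.build_opener(urllib2.ProxyHandler({protocol: server}))
urllib2.install_opener(opener)
print(urllib2.urlopen('http://example.com/', timeout=5).read()[:100])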
Example #7
import urllib
import urllib2
import cookielib
from bs4 import BeautifulSoup
# Set the proxy IP
proxy_support = urllib2.ProxyHandler({'http':'120.197.234.164:80'})
# Set up cookie handling
cookie_support = urllib2.HTTPCookieProcessor(cookielib.LWPCookieJar())
opener = urllib2.build_opener(proxy_support, cookie_support, urllib2.HTTPHandler)
urllib2.install_opener(opener)
# Starting URL
#hosturl = "http://www.renren.com"
hosturl = "http://mail.163.com/"
# URL that receives the form data
#posturl = "http://www.renren.com/ajaxLogin/login"
posturl = "https://mail.163.com/entry/cgi/ntesdoor?df=mail163_letter&from=web&funcid=loginone&iframe=1&language=-1&passtype=1&product=mail163&net=e&style=-1&race=118_35_39_bj&[email protected]"
# Form data to send
postdata = urllib.urlencode(
  {
  "username":"******",
  "password":"******"
  }
)
# Set request headers
headers = {
  #'User-Agent':'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0/',
  #'Referer':'http://www.renren.com/'
  'User-Agent':"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0",
  'Referer':'http://mail.163.com/'
}
# Build the HTTP request
req = urllib2.Request(posturl, postdata, headers)
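From here the login would be submitted through the installed opener and the reply parsed; a rough continuation (the real 163.com flow may require additional form fields):

response = urllib2.urlopen(req)
html = response.read()
soup = BeautifulSoup(html)  # parse the reply with the BeautifulSoup imported above
print(soup.title)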
Example #8
def UrlRequest(str_symbol,start,end):
    #sym=SymbolCheck(symbol)
    mainurl="http://quotes.money.163.com/service/chddata.html?"
    #http://quotes.money.163.com/service/chddata.html?code=1000593&start=19960312&end=20150623&fields=TCLOSE;HIGH;LOW;TOPEN;LCLOSE;CHG;PCHG;TURNOVER;VOTURNOVER;VATURNOVER;TCAP;MCAP
    options="TCLOSE;HIGH;LOW;TOPEN;LCLOSE;CHG;PCHG;TURNOVER;VOTURNOVER;VATURNOVER;TCAP;MCAP"
    suburl="code=%s&start=%d&end=%d&fields=%s" % (str_symbol, start, end, options)

    #print mainurl+suburl

    #header=False
    header=True
    testpost=False
    if testpost == True:
        url=mainurl
        user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        values = {'code' : str_symbol,
                  'start' : start,
                  'end' : end,
                  'fields' : options }
        headers = { 'User-Agent' : user_agent }

    else :
        url=mainurl+suburl
        i_headers = {"User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.1) Gecko/20090624 Firefox/3.5"}

    Debug=False
    if Debug==True:
        httpHandler = urllib2.HTTPHandler(debuglevel=1)
        httpsHandler = urllib2.HTTPSHandler(debuglevel=1)
        opener = urllib2.build_opener(httpHandler, httpsHandler)
        urllib2.install_opener(opener)

    #useipv4=True
    useipv4=False

    retry =0
    MaxRetry=3
    while True :
        try:

            headers = {
                    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:52.0) Gecko/20100101 Firefox/52.0'
                    }

            requests.packages.urllib3.disable_warnings()
            # print(url)
            r= requests.get(url, headers=headers, verify=False)
            r.encoding='UTF-8'
            page = r.text
            return page

            # NOTE: everything below is unreachable because of the early
            # return above; it is the older urllib2-based download path.
            tout=120
            if useipv4==True:
                urlopen_IPv4= urllib2.build_opener(HTTPHandler_IPv4).open
                response= urlopen_IPv4(url, timeout=tout)
                break

            if header==True:
                if testpost == True:
                    data = urllib.urlencode(values)
                    print(data)
                    req = urllib2.Request(url, data, headers)
                else:
                    req = urllib2.Request(url, headers=i_headers)

                response = urllib2.urlopen(req, timeout=tout)
            else:
                response = urllib2.urlopen(url, timeout=tout)

            break
        except urllib2.HTTPError as e:
            if hasattr(e,'code'):
                print('code:{0}'.format(e.code))
            # raise urllib2.HTTPError

        except urllib2.URLError as e:
            if hasattr(e,'reason'):
                print('reason:{0}'.format(e.reason))
            if hasattr(e,'code'):
                print('code:{0}'.format(e.code))

            retry +=1
            if retry > MaxRetry:
                print('More than max %d' % MaxRetry)
                raise urllib2.URLError('more than %d retries' % MaxRetry)
            else:
                print('Try request again ...')
        else :
            pass
            #print "Down data ok"

    return response
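A usage sketch, mirroring the sample query string in the comment near the top of the function:

page = UrlRequest('1000593', 19960312, 20150623)
print(page[:200])  # first chunk of the quote data downloaded from the 163 service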
Example #9
 def openDebug(self):
     httpHandler = urllib2.HTTPHandler(debuglevel=1)
     httpsHandler = urllib2.HTTPSHandler(debuglevel=1)
     opener = urllib2.build_opener(httpHandler, httpsHandler)
     urllib2.install_opener(opener)
Example #10
 def setProxy(self, proxy_info):
     proxy_handler = urllib2.ProxyHandler({"http": "http://%(host)s:%(port)d" % proxy_info})
     opener = urllib2.build_opener(proxy_handler)
     urllib2.install_opener(opener)