Example 1
import sys
import urllib2
from optparse import OptionParser
from urlgrabber.keepalive import HTTPHandler


def main(argv):
    parser = OptionParser(usage="%prog -c case")
    parser.add_option("-c", "--case", dest="case", help="The test case file path")

    (options, argv) = parser.parse_args(argv)

    if options.case is None:
        parser.error("The test case file path was not given")

    handler = HTTPHandler()
    opener = urllib2.build_opener(handler)
    urllib2.install_opener(opener)

    case = loadTestCase(options.case)

    if not case:
        print options.case + " was not found."
        sys.exit(1)

    target = 0
    checker = None
    log_path = "."
    log_name = "case"
    api_server = "127.0.0.1:8080"
    namespace = "default"
    case_name = "TESTCASE"

    if "api_server" in case:
        api_server = str(case["api_server"])
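
The excerpt stops here, and the loadTestCase helper it calls is not shown. A minimal sketch of what it might look like, assuming the test case file is JSON (only the helper's name comes from the example; the rest is an assumption):

import json
import os


def loadTestCase(path):
    # Return the parsed test case dict, or None when the file is absent.
    if not os.path.isfile(path):
        return None
    with open(path, "rb") as f:
        return json.load(f)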
Example 2
    def setUseKeepAlive(self):
        """Make urllib2 use keep-alive connections.

        If urlgrabber.keepalive.HTTPHandler cannot be imported, a
        warning is issued and the method has no effect.
        """
        try:
            from urlgrabber.keepalive import HTTPHandler
            keepalive_handler = HTTPHandler()
            opener = urllib2.build_opener(keepalive_handler)
            urllib2.install_opener(opener)
        except ImportError:
            warnings.warn("urlgrabber is not installed; keep-alive "
                          "support was not enabled.")
Example 3
    def installHandlers(self):
        # Install support for keep-alive and HTTP/1.1 via urlgrabber
        # (older versions), plus cookielib for easy cookie management.
        # Both handlers go into a single opener: calling install_opener()
        # twice would replace the keep-alive opener with the cookie one.
        keepalive_handler = HTTPHandler()
        self.cj = cookielib.LWPCookieJar()
        opener = urllib2.build_opener(keepalive_handler,
                                      urllib2.HTTPCookieProcessor(self.cj))
        urllib2.install_opener(opener)
Example 4
# ----------------------------------------
# DESCRIPTION
# ===========
# urlgrabber version: 3.1.0
# Tests Python HTTP keep-alive (persistent) connections.
# Testing shows that fetching python.org 20 times is roughly twice as
# fast with keep-alive as without it.
# References:
# http://stackoverflow.com/questions/1037406/python-urllib2-with-keep-alive
# http://urlgrabber.baseurl.org/examples.html
# ----------------------------------------

# built-in, 3rd-party and my modules
import urllib2
import time
from urlgrabber.keepalive import HTTPHandler
keepalive_handler = HTTPHandler()
opener = urllib2.build_opener(keepalive_handler)
urllib2.install_opener(opener)


def time_it(method):
    """Decorator: print how long each call to `method` takes."""
    def timed(*args, **kw):
        start_time = time.time()
        result = method(*args, **kw)
        end_time = time.time()
        print '%r (%r, %r) %2.2f sec' % \
              (method.__name__, args, kw, end_time-start_time)
        return result

    return timed
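
The excerpt ends before the benchmark itself. A minimal sketch of the timing loop, assuming the decorated function simply fetches python.org 20 times as the description above says (the fetch_n_times name is illustrative):

@time_it
def fetch_n_times(url, n=20):
    # With the keep-alive opener installed above, every urlopen()
    # call can reuse the same TCP connection to the host.
    for _ in range(n):
        urllib2.urlopen(url).read()


fetch_n_times('http://www.python.org/')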
Example 5
                conn.send(fw)
                print "SENT:", fw

                res = conn.recv(1024)
                print "RECEIVED:", res
                return hashvar


urlbase = "http://scalews.withings.net/cgi-bin/"
urlonce = urlbase + "once"
urlsess = urlbase + "session"
urlmaint = urlbase + "maint"
urlassoc = urlbase + "association"

opener = urllib2.build_opener(HTTPHandler())  # Keep-alive

##
# Step 1
# MITM -> Server: /cgi-bin/once
##
'''
params = {
    "action": "get"
}

data = craft_params(params)

res = do_request(opener, urlonce, data)
print res
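
The commented-out step above uses two helpers, craft_params and do_request, that fall outside this excerpt. A plausible sketch, assuming craft_params URL-encodes the parameter dict and do_request POSTs it through the keep-alive opener (only the two names come from the example; the bodies are guesses):

import urllib


def craft_params(params):
    # Assumption: the CGI endpoints accept ordinary
    # application/x-www-form-urlencoded bodies.
    return urllib.urlencode(params)


def do_request(opener, url, data):
    # POST `data` to `url` through the given opener and return the body.
    response = opener.open(url, data)
    try:
        return response.read()
    finally:
        response.close()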