Example #1
    def run(self):
        while True:
            IDlength = self.client.recv(1)
            if IDlength:
                length1 = int(IDlength[0])
                ID = self.client.recv(length1)
                Taglength = self.client.recv(1)
                if Taglength:
                    length2 = int(Taglength[0])
                    Tagdata = self.client.recv(length2)
                    Datalength = self.client.recv(1)
                    if Datalength:
                        length3 = int(Datalength[0])
                        Data = self.client.recv(length3)
            if IDlength:
                IDstring = b64encode(ID).decode()
                print(IDstring + '\n')
                Tagstring = str(Tagdata[0])
                print(Tagstring + '\n')
                Datastring = b64encode(Data).decode()
                print(Datastring + '\n')
                config = ConfigHelper('config.yaml')
                client = HttpClient(*config.api)
                public_id = IDstring
                tag = int(Tagstring)
                data = Datastring
                client.main_entry(public_id, tag, data, *config.reference_llh)

            else:
                break
        ip = self.client.getpeername()
        print('start data transform:\n', ip)
        n = bytes([0x34, 0x12])
        self.client.send(n)
        print("close:", self.client.getpeername())
 def __init__(self,
              cert_path,
              key_path,
              insecure=False,
              proxy_configuration=None):
     HttpClient.__init__(self, cert_path, key_path, insecure,
                         proxy_configuration)
 def test_mainfunctionality(self):
     param = PARAM
     test_db = NoSQL(param["database"]["engine"],
                     {"host": param["database"]["host"],
                      "port": param["database"]["port"],
                      "db": param["database"]["db"]["urlcache"]})
     url = "http://www.seas.upenn.edu/~yunkai/"
     # clear cache and other initialization
     del_job = Job(url, {})
     test_db.delete(del_job.identifier)
     # test non-cached url
     job_parameters = {}
     test_job = Job(url, job_parameters)
     h = HttpClient()
     header, _ = h.request(url, method="HEAD")
     header["last-modified"] = "Tue, 19 Apr 2015 02:33:38 GMT"
     cached, result = UrlChecker(test_job, param, header)
     self.assertFalse(cached)
     self.assertEqual(result["url"], url)
     # test cached url
     cached, result = UrlChecker(test_job, param, header)
     self.assertTrue(cached)
     self.assertEqual(result["url"], url)
     header["last-modified"] = "Tue, 21 Apr 2015 02:33:38 GMT"
     cached, result = UrlChecker(test_job, param, header)
     self.assertFalse(cached)
     # test a different url with the same identifier (this is rare)
     test_job.url = """https://alliance.seas.upenn.edu/~cis520/wiki/\
         index.php?n=Lectures.Lectures"""
     cached, result = UrlChecker(test_job, param, header)
     self.assertFalse(cached)
     self.assertEqual(result["url"], test_job.url)
Example #4
    def run(self):
        """ 通过提交委托下载文本 """
        _urls = []
        for v in filter(lambda x: not self.check[x['Subnum'] - 1], self.index):
            _urls.append(self.home + v['Href'])

        try:
            print('run:')
            HClient = HttpClient(_urls)
            for x in HClient.page_generater():
                if x == 0:
                    break
                elif x is not None:
                    _text = get_text(x)
                    self.Text[_text['No']] = _text
                    self.check[int(_text['No']) - 1] = True
        except Exception:
            print('download text failed.')
            raise
        HClient.close()
        if all(self.check):
            return 1
        else:
            return 0
 def test_mainfunctionality(self):
     param = PARAM
     test_db = NoSQL(
         param["database"]["engine"], {
             "host": param["database"]["host"],
             "port": param["database"]["port"],
             "db": param["database"]["db"]["urlcache"]
         })
     url = "http://www.seas.upenn.edu/~yunkai/"
     # clear cache and other initialization
     del_job = Job(url, {})
     test_db.delete(del_job.identifier)
     # test non-cached url
     job_parameters = {}
     test_job = Job(url, job_parameters)
     h = HttpClient()
     header, _ = h.request(url, method="HEAD")
     header["last-modified"] = "Tue, 19 Apr 2015 02:33:38 GMT"
     cached, result = UrlChecker(test_job, param, header)
     self.assertFalse(cached)
     self.assertEqual(result["url"], url)
     # test cached url
     cached, result = UrlChecker(test_job, param, header)
     self.assertTrue(cached)
     self.assertEqual(result["url"], url)
     header["last-modified"] = "Tue, 21 Apr 2015 02:33:38 GMT"
     cached, result = UrlChecker(test_job, param, header)
     self.assertFalse(cached)
     # test a different url with the same identifier (this is rare)
     test_job.url = """https://alliance.seas.upenn.edu/~cis520/wiki/\
         index.php?n=Lectures.Lectures"""
     cached, result = UrlChecker(test_job, param, header)
     self.assertFalse(cached)
     self.assertEqual(result["url"], test_job.url)
def scrape(arr):
    dispFilter = get_dispensary_filter(arr)
    headset_scraper = HeadsetDispensaryScraper(
        HttpClient(), HeadsetCategoryExtractor(HttpClient()))
    result = run(dispFilter.get_state_names(), headset_scraper.produce,
                 headset_scraper.consume)

    return json.dumps(result)
def scrape(arr):
    dispFilter = get_dispensary_filter(arr)
    leaflyScraper = LeaflyDispensaryScraper(
        dispFilter, HttpClient(), LeaflyDetailsExtractor(HttpClient()))
    result = run(dispFilter.get_state_names(), leaflyScraper.produce,
                 leaflyScraper.consume)

    return json.dumps(result)
Example #8
    def setUp(self):
        """Test fixtures."""

        self._http_client = HttpClient()

        self._httpcore_mock = mock()
        self._response_mock = mock()

        when(self._http_client).get_http_core().thenReturn(self._httpcore_mock)
Example #9
    def __init__(self, apikey, name, commands, workers=5):
        self.apikey = apikey
        self.name = name
        self.commands = commands
        self.lastUpdate = 0
        self.updateTimeout = 30

        self.workerPool = concurrent.futures.ThreadPoolExecutor(
            max_workers=workers)
        self.workerSemaphore = Semaphore(workers)

        self.httpClient = HttpClient()
        self.httpClient.userAgent = 'Telegram Bot (@%s)' % (name)
 def __init__(self, client, email, password):
     self._session = client
     self._client = HttpClient(client)
     self._email = email
     self._password = password
     self._finish = False
     self._commenttime = '1970'
     self._imgurl = asyncio.Queue()
def scrape(arr):
    dispFilter = get_dispensary_filter(arr)
    potguide_scraper = PotGuideDispensaryScraper(HttpClient(),
                                                 PotGuideDispInfoExtractor(),
                                                 dispFilter)
    result = run(dispFilter.get_state_names(), potguide_scraper.produce,
                 potguide_scraper.consume)
    return json.dumps(result)
Example #12
def scrape(arr):
    dispFilter = get_dispensary_filter(arr)
    wmScraper = WeedMapsDispensaryScraper(dispFilter, HttpClient(),
                                          WeedMapsDespensaryExtractor())
    result = run(dispFilter.get_state_names(), wmScraper.produce,
                 wmScraper.consume)

    return json.dumps(result)
Example #13
class ProxyHunter:
	def __init__(self):
		self.srcrss='http://www.56ads.com/data/rss/2.xml'
		self.client=HttpClient()
		self.queue=Queue(100)
		self.proxies=[]
		for i in range(10): SpeedTester(self.queue, self.proxies).start()
	
	def hunt(self):
		while self.proxies: self.proxies.pop()
		proxies=set()
		xml=self.client.get(self.srcrss, 'gb2312')
		if xml is None: return
		rss=feedparser.parse(xml)
		for entry in rss.entries:
			if time.time()-time.mktime(entry.published_parsed)<3*24*3600: 
				html=self.client.get(entry.link, 'gb2312')
				if html is None: continue
				for rproxy in rex_proxy.findall(html): 
					proxy=rproxy.replace(' ', ':')
					if proxy in proxies: continue
					proxies.add(proxy)
					self.queue.put(proxy)
		print(len(proxies), "parsed.")
		self.queue.join()
		time.sleep(10)
	
	def save(self, fnm='proxies.inf'):
		print("Saving...")
		#with lock: if len(self.proxies)==0: return
		fw=open(fnm, 'w')
		fw.write('[proxies]\n')
		n=1
		with lock:
			for proxy, speed in sorted(self.proxies, key=lambda x: x[1]): 
				fw.write('proxy%d = %s\nspeed%d = %.2f\n' % (n, proxy, n, speed))
				n+=1
		fw.write('\n\n[default]\nproxies = %d\n' % (n-1))
		fw.close()
	
	def run(self):
		while True:
			self.hunt()
			self.save('proxies-%d.ini' % int(time.time()))
			print(time.ctime(), "Done. Sleeping...")
			time.sleep(4*3600)
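save() writes the tested proxies as numbered proxyN / speedN keys under a [proxies] section, with the total count under [default]. A small sketch of how such a file could be read back with configparser (load_proxies is a hypothetical helper, not part of the original class):

import configparser

def load_proxies(fnm='proxies.inf'):
    """Read back a file written by ProxyHunter.save()."""
    cfg = configparser.ConfigParser()
    cfg.read(fnm)
    count = cfg.getint('default', 'proxies')
    return [(cfg.get('proxies', 'proxy%d' % n),
             cfg.getfloat('proxies', 'speed%d' % n))
            for n in range(1, count + 1)]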
Example #14
 def __init__(self):
     Thread.__init__(self)
     self._run = True
     self.success = 0    # 1 = login succeeded, -1 = failed, 0 = logging in
     self._http = HttpClient.getInstance()
     self._task = Tasks()
     self._event = Event()
     self._event.setDaemon(True)
Example #15
 def __init__(self):
     self._http = HttpClient.getInstance()
     self._url = "http://openapi.baidu.com/public/2.0/bmt/translate?client_id={0}&q={1}&from={2}&to={3}"
     self._client_id = "WuGUDac2hVyNWqFPYGWEPash"
     self._from = "auto"
     self._to = "auto"
     self._error_code = {"52001": "Timeout, please adjust the text length", "52002": "Translation system error", "52003": "Unauthorized user"}
     self._error_msg = ""
    def __voteRecommentation(self, bookId, recVoteNum, userName, bookName, isDebug = 0):
        "This method will get cookies from twill and start to vote use this cookie."
#        <Cookie cmfuToken=500E45B7AD7EDC79BECB50CC9C95D500D5B9F9B68F4563B599ECDB
#B461B0B9DB6ECDFEE2F5D2E231146CCE03EE76F4B527C27D6EDCB9F71EABCBAC4F3F1AF7C50F1732
#6F409366C98A40144AECB8EBC2CC6B7A23B3A0180861A87609677CF3990FECA8E3C71927A8D1F558
#84E25D300BF85C1ECA3D64BAD926BC169D91D5C28E879F6E68CE4CD8AD2FA1D29429E505B96304F7
#B317425AFD807043C7F4425C16A0ACDE2E3280FE10B48BACFAC10650D382F10D058B1C453AAD8247
#B3152B98B6041BC1FBA8DD7D4DEBD987BA62C12E95ED061B609DCDE398E6FBDD37FC28BAB827700C
#341D63D6EC277FB0568FB3048C for .qidian.com/>
#        <Cookie cui=1959856209 for .qidian.com/>
#        <Cookie ll=2009-02-07 18:44:51 for .qidian.com/>
#        <Cookie rt=2008-06-08 21:02:09 for .qidian.com/>

        # get cookie jar
        postCookies = ''
        for cookies in twill.get_browser().cj:
            postCookies = postCookies + re.findall(r'<Cookie (.*?) for', str(cookies))[0] + '; '
        if postCookies:
            postCookies = postCookies[0:-2]

        # add cookie and start to vote.
        httpclient = HttpClient('www.qidian.com', False, isDebug)
        if postCookies:
            httpclient.cookie = postCookies

        # Add headers as below, or the vote will fail.
        headers = {'User-Agent' : 'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.0.6) Gecko/2009011913 Firefox/3.0.6',
    'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language' : 'zh-cn',
    'Accept-Encoding' : 'gzip,deflate',
    'Accept-Charset' : 'gb2312,utf-8;q=0.7,*;q=0.7',
    'Keep-Alive' : '300',
    'Connection' : 'keep-alive',
    'Content-Type' : 'application/x-www-form-urlencoded; charset=UTF-8',
    'CMFUAJAX-Ver' : 'ver1.0',
    'Referer' : 'http://www.qidian.com/Book/{0}.aspx'.format(bookId),
    'Pragma' : 'no-cache',
    'Cache-Control' : 'no-cache'}

        # Begin to vote with the specified recVoteNum.
        for iRec in range(int(recVoteNum)):
            self.__emitStatus(u'Using user {0} to vote for book {1}, vote #{2}...'.format(userName, bookName, iRec + 1))
            response, data = httpclient.post_request('/ajax.aspx?opName=RecomBook', params={'bookId' : bookId}, headers=headers, enable_redirect=False)
            self.__emitVoteOk(userName, bookName, '1', '0')

        httpclient.close()
Example #17
    def check_index(self):
        """ 目录检查,更新check, """
        _urls = (self.home + "/novelview/infotop/ncode/" + self.ncode,
                 self.home + '/' + self.ncode)
        try:
            print('check index:')
            HClient = HttpClient(_urls)
            _pages = HClient.pages
        except Exception:
            print('Error!! Getting the page failed.')
            print('please try again.')
            raise
        if None in _pages:
            print('getting the page failed.')
            print('please try again.')
            return -1

        _info = get_info(_pages[0])
        _index = get_index(_pages[1])

        mysql = My_sqlconnecter()
        _info_sql = mysql.read_info(self.ncode)
        _index_sql = mysql.read_index(self.ncode)
        mysql.disconnect()

        # Check the database: is this a new book?
        if (_info_sql is None) or (_index_sql is None):
            print("New Book:", _info[0]['ncode'], '\n\t', _info[0]['Title'])
            self.info = _info
            self.index = _index
            self.check = [False] * len(_index)
            return 2
        # Check the update time: has the download already finished?
        if (_info_sql.UpDate == _info[0]['update']) and len(_index) == len(_index_sql):
            if all(map(lambda x: x.Check, _index_sql)):
                print('Book:', self.ncode, 'has been downloaded completely.')
                self.info = _info_sql
                self.index = _index_sql
                self.check = list(map(lambda x: x.Check, _index_sql))
                return 0
        # Update check entry by entry
        self.check = []
        for i, v in enumerate(_index):
            if i < len(_index_sql):
                if len(v) == 5:
                    self.check.append(_index_sql[i].Check)
                elif (len(v) == 6) and (v['Spandate'] == _index_sql[i].Spandate):
                    self.check.append(_index_sql[i].Check)
                else:
                    self.check.append(False)
            else:
                self.check.append(False)

        self.info = _info
        self.index = _index
        return 1
Example #18
def update():
    # Start logging
    init_logging()
    # Create dbmanager and httpclient
    db_client = DBManager(db_file, db_logger_name)
    http_client = HttpClient(host, url_paths, http_logger_name)
    
    # Create db if not existing
    if db_client.is_empty():
        db_client.create_db()

    #print(db_client.fetch_products_from_db())
    
    for product_group, url_path in url_paths.items():
        html_file = http_client.fetch_html_file(host, url_path)
        json_file = http_client.parse_html_file(html_file)
        
        db_client.add_products(product_group, json_file)
Example #19
    def setUp(self):
        logging.config.fileConfig(os.path.join(os.getcwd(), "logging.conf"))
        self.file_path = os.path.abspath(os.path.dirname(__file__))
        my_headers = [('User-Agent', 'Mozilla/4.0'), ('X-From', 'UA')]
        my_user_pass = ('kiril', 'supersecret')

        self.client = HttpClient(
            connect_timeout=5,  # socket timeout on connect
            transfer_timeout=3,  # socket timeout on send/recv
            max_redirects=10,
            set_referer=True,
            keep_alive=3,  # Keep-alive socket up to N requests
            headers=my_headers,  # send custom headers
            http_version="1.1",  # use custom http/version
            auth=my_user_pass,  # http auth
            retry=5,
            retry_delay=5)  # wait between retries

        #
        # Enter the path to the cookies file in setting file
        #
        dictionary = self.client.configure_from_file(
            os.path.join(self.file_path, "http_client_setting.ini"))
        self.client.logger = logging.getLogger("httpclient_test")
        os.chdir("../")
        self.children = multiprocessing.Value('i', 0)

        self.p = multiprocessing.Process(target=self.process,
                                         args=(self.children, ),
                                         daemon=False)
        self.p.start()
        self.pid = self.p.pid
        print("slave >> " + str(self.pid))
        print("head  >> " + str(os.getpid()))
        print("child >> " + str(self.children.value))
        self.config = configparser.ConfigParser()
        self.config.read(
            os.path.join(self.file_path, "..", "setting", "setting.ini"))
        print(os.path.join(self.file_path, "..", "setting", "setting.ini"))
        self.ip = self.config['ip_port_setting']["ip"]
        self.port = self.config['ip_port_setting']["port"]
        self.domen = self.ip + ":" + self.port
        self.data_base = DataBese(
            os.path.join(self.file_path, "..", "setting", "setting.ini"))
 def test_normal_functionality(self):
     url = "http://www.seas.upenn.edu/~yunkai/"
     new_job = Job(url, {})
     h = HttpClient()
     resp, content = h.request(url)
     param = PARAM
     SaveAndStatistics(
         new_job,
         content,
         param,
         response_header=resp,
         url_cache={"last-modified": "Wed, 22 Apr 2015 20:13:17 GMT"})
     db = NoSQL(
         param["database"]["engine"], {
             "host": param["database"]["host"],
             "port": param["database"]["port"],
             "db": param["database"]["db"]["content"]
         })
     self.assertEqual(db.dictget(new_job.identifier, "content"), content)
     self.assertEqual(db.dictget(new_job.identifier, "url"), new_job.url)
Example #21
	def __init__(self, apikey, name, commands, workers = 5):
		self.apikey = apikey
		self.name = name
		self.commands = commands
		self.lastUpdate = 0
		self.updateTimeout = 30

		self.workerPool = concurrent.futures.ThreadPoolExecutor(max_workers = workers)
		self.workerSemaphore = Semaphore(workers)

		self.httpClient = HttpClient()
		self.httpClient.userAgent = 'Telegram Bot (@%s)' % (name)
Example #22
class Strato:
	def __init__(self):
		self.r = HttpClient(debug=True)

	def login(self, username, password):
		self.r.GET('https://www.strato.de/apps/CustomerService')
		f = self.r.Page.findForm(action='https://www.strato.de/apps/CustomerService')
		f.input['identifier'].value = username
		f.input['passwd'].value = password
		self.r.submit(f)
		if "versuchen Sie es erneut." in str(self.r.Page):
			return False
		self.sessionID = self.r.Cookie['SK_Session']
		return True

	def getDomains(self):
		self.r.GET('https://www.strato.de/apps/CustomerService?sessionID='+self.sessionID+'&cID=1&node=kds_DomainManagement&source=menu')
		domains = {}
		key = '<strong class="trimDomain">'
		subkey = '<span class="trimSubdomain">'
		p = self.r.Page.find(key)
		while p > -1:
			p += len(key)
			q = self.r.Page.find('</', p)
			domain = str(self.r.Page)[p:q]
			domains[domain] = []
			p_next = self.r.Page.find(key, q)
			if p_next < 0:
				p_next = len(str(self.r.Page))
			x = self.r.Page.find(subkey, p)
			while x > -1 and x < p_next:
				x += len(subkey)
				y = self.r.Page.find('</', x)
				subdomain = str(self.r.Page)[x:y]
				domains[domain].append(subdomain)
				x = self.r.Page.find(subkey, y)
			p = self.r.Page.find(key, q)
		return domains
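getDomains() walks the page by hand, scanning for the trimDomain and trimSubdomain markers and grouping each subdomain under the preceding domain. Roughly the same extraction could be written with a single regular expression; a sketch under the assumption that the markup really is as simple as those markers suggest (extract_domains is an illustrative helper, not part of the original class):

import re

def extract_domains(page_html):
    """Group trimSubdomain entries under the preceding trimDomain entry."""
    token = re.compile(
        r'<(strong class="trimDomain"|span class="trimSubdomain")>(.*?)</', re.S)
    domains, current = {}, None
    for kind, value in token.findall(page_html):
        if kind.startswith('strong'):
            current = value
            domains[current] = []
        elif current is not None:
            domains[current].append(value)
    return domains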
Example #23
class SpeedTester(threading.Thread):
	def __init__(self, queue, proxies):
		threading.Thread.__init__(self)
		self.daemon=True
		self.client=HttpClient()
		self.queue=queue
		self.proxies=proxies

	def test_speed(self, proxy):
		self.client.__init__(proxy)
		html=self.client.get('http://www.baidu.com', 'gbk')
		self.client.__init__()
		if html is None: return -1
		else: return self.client.speed
	
	def run(self):
		while True:
			proxy=self.queue.get()
			speed=self.test_speed(proxy)
			if speed>0 and speed<3:
				print(proxy, speed, self.queue.qsize())
				with lock: self.proxies.append((proxy, speed))
			self.queue.task_done()
 def test_basicConnections(self):
     httpclient = HttpClient()
     # test for available hosts
     available_hosts = ["google.com", "facebook.com", "youtube.com",
                        "baidu.com", "yahoo.com", "wikipedia.org",
                        "amazon.com", "twitter.com", "taobao.com", "qq.com",
                        "google.co.in", "live.com",
                        "linkedin.com", "sina.com.cn", "weibo.com",
                        "yahoo.co.jp", "tmall.com", "google.co.jp",
                        "ebay.com", "t.co"]
     for each_url in available_hosts:
         with self.assertRaises(RelativeURIError):
             resp, content = httpclient.request(each_url, method="HEAD")
     for each_url in available_hosts:
         resp, content = httpclient.request("".join(["http://", each_url]),
                                    method="HEAD")
         # This cannot be enhanced by the library
         # self.assertEqual(len(content), 0)
     # test for unavailable hosts
     not_available = ["goggleb.com", "githude.com", "modlabcc.net"]
     for each_url in not_available:
         with self.assertRaises(RelativeURIError):
             resp, content = httpclient.request(each_url, method="HEAD")
     for each_url in not_available:
         with self.assertRaises(ServerNotFoundError):
             resp, content = httpclient.request("".join(["http://", each_url]),
                                        method="HEAD")
     # test headers
     param = PARAM
     headers = {"User-Agent": "cis455/penn/crawler/0.1",
             "Connection": "keep-alive",
             "Accept-Language": ";".join([",".join(list(param["language"])),
                                         "q=0.9"]),
             "Accept": ";".join([",".join(list(param["filetypes"])),
                                 "q=0.9"]),
             }
     resp, content = httpclient.request("https://www.uber.com/")
     # test redirection url
     resp, content = httpclient.request("http://www.github.com")
     self.assertTrue("content-location" in resp)
     self.assertTrue("http" in resp["content-location"])
     self.assertNotEqual("http://www.github.com", resp["content-location"])
     resp, content = httpclient.request("http://www.seas.upenn.edu/~yunkai/")
     self.assertTrue("content-location" in resp)
     self.assertTrue("http" in resp["content-location"])
     self.assertEqual("http://www.seas.upenn.edu/~yunkai/", resp["content-location"])
Example #25
    def __init__(self):
        self.client = HttpClient()

        # cache
        self.friend_list = {}
        self._group_sig_list = {}
        self._self_info = {}

        self.client_id = 53999199
        self.ptwebqq = ''
        self.psessionid = ''
        self.appid = 0
        self.vfwebqq = ''
        self.qrcode_path = './v.jpg'
        self.username = ''
        self.account = 0
Example #26
    def __init__(self, dir):
        dir = dir if path.isabs(dir) else path.join(os.getcwd(), dir)
        self.path = PathInfo(dir)
        self.url = UrlInfo()
        self.str = StringInfo()
        self.config = parse_config(self.path.config_file)
        self.client = HttpClient(self)
        self.incomplete_cache = (datetime.now().timestamp() <
                                 self.config.counter.end.timestamp())

        comment_dirname = '%s_%s_%s' % (
            self.config.counter.start.strftime('%y%m%d-%H%M%S'),
            self.config.counter.end.strftime('%y%m%d-%H%M%S'),
            self.config.counter.encoding,
        )
        self.path.comment_dir = path.join(self.path.temp_dir, self.str.version,
                                          comment_dirname)

        if not path.isdir(self.path.comment_dir):
            os.makedirs(self.path.comment_dir)
        if not path.isdir(self.path.log_dir):
            os.makedirs(self.path.log_dir)
        basicConfig(
            level=self.config.logging.level,
            format=self.config.logging.format,
            handlers=[
                FileHandler(
                    path.join(
                        self.path.log_dir,
                        datetime.now(
                            tz=TZ).strftime('nicocc-%y%m%d-%H%M%S.log'),
                    ),
                    encoding='utf-8',
                )
            ],
        )
Example #27
 def __init__(self, cert_path, key_path, insecure=False):
     HttpClient.__init__(self, cert_path, key_path, insecure)
 def __init__(self, cert_path, key_path, insecure=False):
     HttpClient.__init__(self, cert_path, key_path, insecure)
Example #29
 def __init__(self):
     self._http = HttpClient.getInstance()
     self._content = re.compile(b'<a href="(.+?)">(\d)(.+?)</a><div class="abs">(.+?)<br\s*/>.+?date">(\d+\-\d+\-\d+)</span>')
     self._next = re.compile(b'<a href="(.+?)">(.+?)</a>  </div>')
     self._url = "http://wap.baidu.com/s?word={0}"
Example #30
    def test_get_http_core(self):
        """Test of the 'get_http_core' method."""

        http_client = HttpClient()

        self.assertTrue(isinstance(http_client.get_http_core(), HttpCore))
Example #31
class HttpClientTest(TestCase):
    """Tests of the HttpClient class."""
    def setUp(self):
        """Test fixtures."""

        self._http_client = HttpClient()

        self._httpcore_mock = mock()
        self._response_mock = mock()

        when(self._http_client).get_http_core().thenReturn(self._httpcore_mock)

    def tearDown(self):
        """Unregisters all stubs."""
        unstub()

    def test_get(self):
        """Test of the 'get' method."""

        when(self._httpcore_mock).do_get_string(any()).thenReturn("response")
        when(self._httpcore_mock).do_get_string("error").thenRaise(Exception)

        when(self._http_client).get_html_page("response").thenReturn("page")

        self.assertEqual("page", self._http_client.get("url"))

        verify(self._httpcore_mock).do_get_string("url")
        verify(self._http_client).get_html_page("response")

        with self.assertRaises(HttpClientException):
            self._http_client.get("error")

    def test_post(self):
        """Test of the 'post' method."""

        when(self._httpcore_mock).do_post_string(any(), any()) \
            .thenReturn("response")
        when(self._httpcore_mock).do_post_string("error", any()) \
            .thenRaise(Exception)

        when(self._http_client).get_html_page("response").thenReturn("page")

        self.assertEqual("page", self._http_client.post("url", "data"))

        verify(self._httpcore_mock).do_post_string("url", "data")
        verify(self._http_client).get_html_page("response")

        with self.assertRaises(HttpClientException):
            self._http_client.post("error", "data")

    def test_get_http_core(self):
        """Test of the 'get_http_core' method."""

        http_client = HttpClient()

        self.assertTrue(isinstance(http_client.get_http_core(), HttpCore))

    def test_get_html_page(self):
        """Test of the 'get_html_page' method."""

        page = self._http_client.get_html_page("content")
        self.assertEqual(page.content_as_string, "content")
Example #32
pinBtn = machine.Pin(39, machine.Pin.IN)

pinTrig.value(1)

wlan = network.WLAN(network.STA_IF)
wlan.active(True)
if not wlan.isconnected():
    print('Connecting to network...')
    wlan.connect('***', '**')
    while not wlan.isconnected():
        time.sleep_ms(50)
        pass
print('Connected to network (', wlan.ifconfig(), ')')

print('Starting QR Code reading')
h = HttpClient()
while True:
    time.sleep_ms(200)

    if pinBtn.value() == 0:
        print('Button Pressed')
        pinTrig.value(0)
    else:
        pinTrig.value(1)

    if uart_qr.any() > 0:
        query_url = uart_qr.readline().decode('ascii').replace('\r',
                                                               '').replace(
                                                                   '\n', '')
        url = 'http://192.168.1.250:6005/office' + query_url
        resp = h.get(url)
 def __init__(self, client):
     self._client = HttpClient(client)
     self._albumq = asyncio.Queue()
     self._photoq = asyncio.Queue()
     self._count = 0
     self._flag = False
Example #34
 def __init__(self):
     self.operate = Operate()
     self._api = OpenApi()
     self._http = HttpClient.getInstance()
     self._pool = ThreadPool(5)    # initialize a pool of 5 worker threads
     print("Task class initialized")
Example #35
	def __init__(self):
		self.r = HttpClient(debug=True)
Example #36
# Callback method for reading and sending the measured values. Exceptions should be caught,
# because otherwise the whole process terminates.
def read_temp_sensor():
    logger.debug("Send TEMP.")
    values = {'temperature': 23.1, 'timestamp': time.time()}
    try:
        client.send_data(values)
    except Exception as e:
        logger.error("Failed to send temperature.")


# Load the configuration settings from config.json. Exceptions should be caught,
# because otherwise the whole process terminates.
config = {}
with open('config.json') as json_file:
    config = json.load(json_file)

client = HttpClient(config['Secret'], logger)
cron = Cronjob(logger)

# Register the individual methods with the Cronjob class.
cron.append_work(id="HEART", action=send_heartbeat, interval=10)
cron.append_work(id="TEMP", action=read_temp_sensor, interval=2)

try:
    cron.start()
except KeyboardInterrupt:
    print("Cancelled")
except Exception:
    logging.exception('Program aborted')
Example #37
#!/usr/bin/python

from httpclient import HttpClient

country = 'DE'
currency = 'EUR'
language = 'de'

http = HttpClient()
http.GET('http://pricelist.skype.com/destinations/1.0/'+country+'/'+currency+'/'+language+'/xml/')

print(str(http.Page))
Example #38
class Test_serv(unittest.TestCase):
    def setUp(self):
        logging.config.fileConfig(os.path.join(os.getcwd(), "logging.conf"))
        self.file_path = os.path.abspath(os.path.dirname(__file__))
        my_headers = [('User-Agent', 'Mozilla/4.0'), ('X-From', 'UA')]
        my_user_pass = ('kiril', 'supersecret')

        self.client = HttpClient(
            connect_timeout=5,  # socket timeout on connect
            transfer_timeout=3,  # socket timeout on send/recv
            max_redirects=10,
            set_referer=True,
            keep_alive=3,  # Keep-alive socket up to N requests
            headers=my_headers,  # send custom headers
            http_version="1.1",  # use custom http/version
            auth=my_user_pass,  # http auth
            retry=5,
            retry_delay=5)  # wait between retries

        #
        # Enter the path to the cookies file in setting file
        #
        dictionary = self.client.configure_from_file(
            os.path.join(self.file_path, "http_client_setting.ini"))
        self.client.logger = logging.getLogger("httpclient_test")
        os.chdir("../")
        self.children = multiprocessing.Value('i', 0)

        self.p = multiprocessing.Process(target=self.process,
                                         args=(self.children, ),
                                         daemon=False)
        self.p.start()
        self.pid = self.p.pid
        print("slave >> " + str(self.pid))
        print("head  >> " + str(os.getpid()))
        print("child >> " + str(self.children.value))
        self.config = configparser.ConfigParser()
        self.config.read(
            os.path.join(self.file_path, "..", "setting", "setting.ini"))
        print(os.path.join(self.file_path, "..", "setting", "setting.ini"))
        self.ip = self.config['ip_port_setting']["ip"]
        self.port = self.config['ip_port_setting']["port"]
        self.domen = self.ip + ":" + self.port
        self.data_base = DataBese(
            os.path.join(self.file_path, "..", "setting", "setting.ini"))

    def process(self, child_pid):
        children = subprocess.Popen(["python3", "twitter.py"], shell=False)
        child_pid.value = children.pid
        print("OLOLO >> ", child_pid.value)

    def tearDown(self):
        sleep(1)
        print("slave >> " + str(self.pid))
        print("head  >> " + str(os.getpid()))
        print("child >> " + str(self.children.value))

        os.kill(self.children.value, signal.SIGINT)
        print("IS_ALIVE >> ", self.p.is_alive())
        self.p.terminate()

        try:
            os.kill(self.children.value, signal.SIGINT)
        except Exception as e:
            print("try to kill child", self.children.value, " but Exception")
            print(e.args)
        try:
            os.kill(self.pid, signal.SIGINT)
        except Exception as e:
            print("try to kill ", self.pid, " but Exception")
            print(e.args)
        print("Delete database ", self.config['database']["DB"])
        print(os.getcwd())
        os.remove(self.config['database']["DB"])

    def test_page(self):
        sleep(1)
        # Register new user
        # And check cookies
        res = self.client.post('http://' + self.domen + '/auth',
                               data={
                                   'register_email': '*****@*****.**',
                                   'password': '******'
                               })
        user_list = list(dict(self.data_base.read_auth_from_sql()).values())
        self.assertIn('*****@*****.**', user_list)
        self.assertIn("." + self.domen, res.cook_dick)

        # Push a new post to twitter
        #
        res = self.client.post('http://' + self.domen + '/',
                               data={
                                   'type_post': 'post_post',
                                   'text': 'Some new post 0'
                               })
        res = self.client.post('http://' + self.domen + '/',
                               data={
                                   'type_post': 'post_post',
                                   'text': 'Some new post 1'
                               })
        post_data_list = [
            el[1]
            for el in self.data_base.read_data_from_sql('*****@*****.**')
        ]
        self.assertIn('Some new post 1', post_data_list)
        self.assertIn('Some new post 0', post_data_list)

        # Try to push a POST with a WRONG cookie
        # The new post must not end up in the database
        res = self.client.post(
            'http://' + self.domen + '/',
            cookie={"twit": "ce538b70a7c30f98ab056cd2dc1151b9"},
            data={
                'type_post': 'post_post',
                'text': 'BLA BLA BLA'
            })
        post_data_list = [
            el[1]
            for el in self.data_base.read_data_from_sql('*****@*****.**')
        ]
        self.assertNotIn('BLA BLA BLA', post_data_list)

        # delete_post
        #
        res = self.client.post('http://' + self.domen + '/',
                               data={
                                   'type_post': 'delete_post',
                                   'elem': '1'
                               })
        post_data_list = [
            el[1]
            for el in self.data_base.read_data_from_sql('*****@*****.**')
        ]
        self.assertNotIn('Some new post 0', post_data_list)

        # Test data filtering
        #
        #
        self.fiter_test_data("<script>alert('test');</script>")
        self.fiter_test_data("<h1>LALKA</h1")
        self.fiter_test_data("<script>alert('INVALID USER &');</script>")
        self.fiter_test_data("<h2>'</h2>")
        self.fiter_test_data('''<h3>'"&<></h3>''')

        # Exit
        #
        res = self.client.post('http://' + self.domen + '/',
                               data={'type_post': 'exit'})
        self.assertNotIn("." + self.domen, res.cook_dick)
        post_data_list = [
            el[1]
            for el in self.data_base.read_data_from_sql('*****@*****.**')
        ]
        self.assertNotIn('Some new post 0', post_data_list)

        # Log in with a wrong e-mail and password
        # Expect a message saying the e-mail or password is incorrect
        #
        res = self.client.post('http://' + self.domen + '/auth',
                               data={
                                   'enter_email': '*****@*****.**',
                                   'password': '******'
                               })
        self.assertIn(b"There is incorrect e-mail or password. Try again",
                      res.body)

        # Try to register a user with the same e-mail
        #
        #
        res = self.client.post('http://' + self.domen + '/auth',
                               data={
                                   'register_email': '*****@*****.**',
                                   'password': '******'
                               })
        user_list = list(dict(self.data_base.read_auth_from_sql()).values())
        self.assertIn(b'There is user with this e-mail. Try another', res.body)

    def fiter_test_data(self, test_str):
        print(test_str)
        res = self.client.post('http://' + self.domen + '/',
                               data={
                                   'type_post': 'post_post',
                                   'text': test_str
                               })

        db_elem = self.data_base.read_data_from_sql('*****@*****.**')
        post_data_list = [el[1] for el in db_elem]
        for elem in post_data_list:
            print(elem)
            for symbols in ["<", ">", "'", '"']:
                self.assertNotIn(symbols, elem)
Example #39
            self.__class = Movie_Video_Cook_Novel(data)
            # print(self.__class)
            self.__setResult(self.__class.getResult())
        if code == 309000:    # hotel
            self.__class = Hotel(data)
            # print(self.__class)
            self.__setResult(self.__class.getResult())
        if code == 311000:    # price
            self.__class = Price(data)
            # print(self.__class)
            self.__setResult(self.__class.getResult())

if __name__ == "__main__":

    baseUrl = "http://www.tuling123.com/openapi/api?key=a24145e743c374d24d19d3e2c0d332a4&info={0}&userid=892768447"
    http = HttpClient.getInstance()
    api = OpenApi()

    # 1. text type
    url = baseUrl.format(quote("你漂亮么"))
    data = http.get(url)
    # print(data)
    api.parse(data)
    print(api.getResult())
    print("-------------------------------------")

    # 2. link type
    url = baseUrl.format(quote("打开百度官网"))
    data = http.get(url)
    # print(data)
    api.parse(data)
Example #40
class TeleBot:
	def __init__(self, apikey, name, commands, workers = 5):
		self.apikey = apikey
		self.name = name
		self.commands = commands
		self.lastUpdate = 0
		self.updateTimeout = 30

		self.workerPool = concurrent.futures.ThreadPoolExecutor(max_workers = workers)
		self.workerSemaphore = Semaphore(workers)

		self.httpClient = HttpClient()
		self.httpClient.userAgent = 'Telegram Bot (@%s)' % (name)

	def request(self, op, params, **kwargs):
		url = 'https://api.telegram.org/bot%s/%s' % (self.apikey, op)

		reply = self.httpClient.getJSON(url, params, **kwargs)
		if not reply['ok']:
			raise ValueError('Telegram replied with an error: %s' % repr(reply))

		return reply['result']

	def get_updates(self, start):
		params = {
			'offset': start,
			'timeout': self.updateTimeout
		}
		try:
			return self.request('getUpdates', params, timeout = self.updateTimeout)
		except socket.timeout:
			return []

	def send_message(self, chat, text, **kwargs):
		params = {
			'chat_id': chat,
			'text': text,
			'reply_to_message_id': kwargs.pop('reply_to', None),
			'parse_mode': kwargs.pop('markup', None)
		}
		return self.request('sendMessage', params)

	def handle_update(self, update):
		workerSemaphore = self.workerSemaphore

		try:
			request = Request(self, update)
		except Exception as e:
			print('Could not parse request: %s' % (repr(e)))
			return

		def async_command(request):
			print('Servicing %s' % (request.readable))

			try:
				request.execute()
			except Exception as e:
				request.reply('Got an exception attempting to execute request: %s' % (repr(e)))

		# Running it on callback ensures it will *always* free the semaphore no matter what the hell happens with the task
		def free_lock(future):
			workerSemaphore.release()

		workerSemaphore.acquire()
		future = self.workerPool.submit(async_command, request)
		future.add_done_callback(free_lock)

	def run_iteration(self):
		updates = []
		try:
			updates = self.get_updates(self.lastUpdate)
		except Exception as e:
			print('Got exception reading server status: ' + str(e))
			time.sleep(3)

		for update in updates:
			self.handle_update(update)
			self.lastUpdate = max(self.lastUpdate, update['update_id'] + 1)

	def run_main(self):
		try:
			while True:
				self.run_iteration()
		except KeyboardInterrupt:
			pass

		print('Shutting down...')
		self.workerPool.shutdown()
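The update handling above caps the number of in-flight commands with a Semaphore and releases it from the future's done-callback, so the slot is freed even when the task raises. A stripped-down sketch of just that pattern (the names here are illustrative, not from the original bot):

import concurrent.futures
from threading import Semaphore

pool = concurrent.futures.ThreadPoolExecutor(max_workers=5)
slots = Semaphore(5)

def submit_bounded(fn, *args):
    slots.acquire()                      # block while 5 tasks are already running
    future = pool.submit(fn, *args)
    # the done-callback always runs, whether fn returns or raises
    future.add_done_callback(lambda _: slots.release())
    return future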
Example #41
	def __init__(self):
		self.srcrss='http://www.56ads.com/data/rss/2.xml'
		self.client=HttpClient()
		self.queue=Queue(100)
		self.proxies=[]
		for i in range(10): SpeedTester(self.queue, self.proxies).start()
Example #42
def start():
    use_user_data_flag = True
    try:
        user_data_url = 'http://169.254.169.254/latest/user-data'
        response = HttpClient(method='GET', url=user_data_url).process
        status_code = response.status_code
        if status_code != 200:
            use_user_data_flag = False
        else:
            conf = response.json()
    except:
        use_user_data_flag = False

    if not use_user_data_flag:
        file = glob.glob("/root/conf*.json")
        global i
        while True:
            try:
                conf = json.load(open(file[0], 'r'))
            except (IOError, ValueError):
                time.sleep(5)
                continue
            else:
                break
    if 'udp_worm' in conf:
        conf1 = conf['udp_worm']
        local_network = conf1['local_network']
        node_number = conf1['node_number']
        KVM_gateway_address = conf1['KVM_gateway_address']
        scan_ip_from = conf1['scan_ip_from']
        scan_ip_to = conf1['scan_ip_to']
        scan_interval = conf1['scan_interval']
        scan_port = conf1['scan_port']
        worm_behavior = conf1['worm_behavior']
        if (i == 0):
            os.system('route del -net default')
            os.system('route add -net default gw %s' % KVM_gateway_address)
        if (worm_behavior):
            cmd = 'nohup ./waf --run "scratch/udpcli-socket --scan_ip_from=%s --scan_ip_to=%s --scan_port=%s --scan_interval=%s --local_network=%s --node_number=%s --KVM_gateway_address=%s" > /root/infection_data.out &' % (
                scan_ip_from, scan_ip_to, scan_port, scan_interval,
                local_network, node_number, KVM_gateway_address)
        else:
            cmd = 'nohup ./waf --run "scratch/udpdst-socket --scan_ip_from=%s --scan_ip_to=%s --scan_port=%s --scan_interval=%s --local_network=%s --node_number=%s --KVM_gateway_address=%s" > /root/infection_data.out &' % (
                scan_ip_from, scan_ip_to, scan_port, scan_interval,
                local_network, node_number, KVM_gateway_address)

    elif 'tcp_background' in conf:
        conf1 = conf['tcp_background']
        local_network = conf1['local_network']
        node_number = conf1['node_number']
        KVM_gateway_address = conf1['KVM_gateway_address']
        service_net_from = conf1['service_net_from']
        service_net_to = conf1['service_net_to']
        service_port = conf1['service_port']
        probability_client = conf1['probability_client']
        cmd = 'nohup ./waf --run "scratch/tcp --service_net_from=%s --service_net_to=%s --service_port=%s --local_network=%s --node_number=%s --KVM_gateway_address=%s --probability_client=%s" > /root/tcp.out &' % (
            service_net_from, service_net_to, service_port, local_network,
            node_number, KVM_gateway_address, probability_client)
    else:
        print('Please set right mode!')
        exit(1)
    os.chdir('/root/ns-allinone-3.21/ns-3.21/')
    os.system(cmd)
Example #43
	def __init__(self, queue, proxies):
		threading.Thread.__init__(self)
		self.daemon=True
		self.client=HttpClient()
		self.queue=queue
		self.proxies=proxies
Example #44
# -*- coding: utf-8 -*-
from httpclient import HttpClient

client = HttpClient()

resp = client.get('http://127.0.0.1/?c=4', query={'a': 1, 'b': 2})
print(resp.status_code)
print(resp.text)

resp = client.get('http://127.0.0.1/?c=4', query={'a': 1, 'b': 2})
print(resp.status_code)
print(resp.text)
Example #45
class TeleBot:
    def __init__(self, apikey, name, commands, workers=5):
        self.apikey = apikey
        self.name = name
        self.commands = commands
        self.lastUpdate = 0
        self.updateTimeout = 30

        self.workerPool = concurrent.futures.ThreadPoolExecutor(
            max_workers=workers)
        self.workerSemaphore = Semaphore(workers)

        self.httpClient = HttpClient()
        self.httpClient.userAgent = 'Telegram Bot (@%s)' % (name)

    def request(self, op, params, **kwargs):
        url = 'https://api.telegram.org/bot%s/%s' % (self.apikey, op)

        reply = self.httpClient.getJSON(url, params, **kwargs)
        if not reply['ok']:
            raise ValueError('Telegram replied with an error: %s' %
                             repr(reply))

        return reply['result']

    def get_updates(self, start):
        params = {'offset': start, 'timeout': self.updateTimeout}
        try:
            return self.request('getUpdates',
                                params,
                                timeout=self.updateTimeout)
        except requests.exceptions.Timeout:
            return []

    def send_message(self, chat, text, **kwargs):
        params = {
            'chat_id': chat,
            'text': text,
            'reply_to_message_id': kwargs.pop('reply_to', None),
            'parse_mode': kwargs.pop('markup', None)
        }
        return self.request('sendMessage', params)

    def handle_update(self, update):
        workerSemaphore = self.workerSemaphore

        try:
            request = Request(self, update)
        except Exception as e:
            print('Could not parse request: %s' % (repr(e)))
            return

        def async_command(request):
            print('Servicing %s' % (request.readable))

            try:
                request.execute()
            except Exception as e:
                request.reply(
                    'Got an exception attempting to execute request: %s' %
                    (repr(e)))

        # Running it on callback ensures it will *always* free the semaphore no matter what the hell happens with the task
        def free_lock(future):
            workerSemaphore.release()

        workerSemaphore.acquire()
        future = self.workerPool.submit(async_command, request)
        future.add_done_callback(free_lock)

    def run_iteration(self):
        updates = []
        try:
            updates = self.get_updates(self.lastUpdate)
        except Exception as e:
            print('Got exception reading server status: ' + str(e))
            time.sleep(3)

        for update in updates:
            self.handle_update(update)
            self.lastUpdate = max(self.lastUpdate, update['update_id'] + 1)

    def run_main(self):
        try:
            while True:
                self.run_iteration()
        except KeyboardInterrupt:
            pass

        print('Shutting down...')
        self.workerPool.shutdown()
Example #46
 def __init__(self, url):
     self.__httpClient = HttpClient(url)
    def __init__(self, dispensary_filter, http_client):
        self._http_client = http_client
        self._weedmaps_disp_extractor = WeedMapsDespensaryExtractor(dispensary_filter, WeedMapsDetailsExtractor(HttpClient()))

        self._url = "https://api-g.weedmaps.com/wm/v2/location?include%5B%5D=regions.listings&region_slug={0}&page_size=150&page={1}"
Example #48
class PtracApi:
    def __init__(self, url):
        self.__httpClient = HttpClient(url)

    # Authenticate and store session token
    def authenticate(self, user, password):
        args = {'user': user, 'password': password}
        session = self.__httpClient.post('/session/createSession', args)
        size = sys.getsizeof(session)
        string = session.decode('UTF-8', 'ignore')
        if session is not None and len(string) > 0:
            resp = json.loads(session)
            token = resp["token"]
            self.__httpClient.set_token(token)
            return resp
        else:
            return None

    # Returns the user groups
    def get_user_groups(self):
        groups = self.__httpClient.post('/user/getUserGroups', {})
        if groups is not None:
            return json.loads(groups)
        else:
            return None

    # Returns the members of a group
    def get_user_group_user_memb(self, group_oid):
        users = self.__httpClient.post('/user/getUserGroupUserMemb', group_oid)
        if users is not None:
            return json.loads(users)
        else:
            return None

    # Lookup a user by OID.
    def get_user(self, user_oid):
        user = self.__httpClient.post('/user/getUser', user_oid)
        if user is not None:
            return json.loads(user)
        else:
            return None

    # Fetch time records for a group in a date range
    def get_time_records_groups(self, date_from, date_to, user_group):
        args = {"from": date_from, "to": date_to, "oids": [user_group]}
        time_records = self.__httpClient.post('/timerec/getTimeRecordsGroups',
                                              args)
        if time_records is not None:
            return json.loads(time_records)
        else:
            return None

    # Lookup a project by the project OID
    def get_project(self, project_oid):
        project = self.__httpClient.post('/project/getProject', project_oid)
        if project is not None:
            return json.loads(project)
        else:
            return None

    # Returns the ptrac version information
    def get_version(self):
        version = self.__httpClient.get('/version', {})
        if len(version) > 0:
            return json.loads(version)
        else:
            return None

    # Exports the project definitions and return the result as a binary file
    def export_projects(self):
        args = {
            "exportCalendars": False,
            "exportGroups": False,
            "exportProjects": True,
            "exportUsers": False,
            "socketId": None
        }
        data = self.__httpClient.post('/expimp/export', args)
        return data

    # Returns the events in the action log.
    def get_action_log(self):
        args = {"from": None, "to": None}
        data = self.__httpClient.post('/log/getEvents', args)
        if data is not None:
            return json.loads(data)
        else:
            return None
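PtracApi wraps a session-token workflow: authenticate() posts the credentials, stores the returned token on the underlying HttpClient, and every later call reuses it. A minimal usage sketch under that assumption (the base URL and credentials below are placeholders, not part of the original example):

api = PtracApi('https://ptrac.example.com/api')
if api.authenticate('alice', 'secret') is not None:
    print(api.get_version())
    for group in api.get_user_groups() or []:
        print(group)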
Example #49
class QQBot(object):
    def __init__(self):
        self.client = HttpClient()

        # cache
        self.friend_list = {}
        self._group_sig_list = {}
        self._self_info = {}

        self.client_id = 53999199
        self.ptwebqq = ''
        self.psessionid = ''
        self.appid = 0
        self.vfwebqq = ''
        self.qrcode_path = './v.jpg'
        self.username = ''
        self.account = 0
    
    def _get_qr_login_status(
            self, qr_validation_url, appid, star_time,
            mibao_css, js_ver, sign, init_url
    ):
        redirect_url = None
        login_result = self.client.get(
            qr_validation_url.format(
                appid,
                date_to_millis(datetime.datetime.utcnow()) - star_time,
                mibao_css,
                js_ver,
                sign
            ),
            init_url
        )
        ret_code = int(find_first_result(login_result, r"\d+?", None))
        redirect_info = re.findall(r"(http.*?)\'", login_result)
        if redirect_info:
            logger.debug("redirect_info match is: %s" % redirect_info)
            redirect_url = redirect_info[0]
        return ret_code, redirect_url
    
    def check_msg(self):

        # Polling for new messages
        response = self.client.post(
            'http://d1.web2.qq.com/channel/poll2',
            {
                'r': json.dumps(
                    {
                        "ptwebqq": self.ptwebqq,
                        "clientid": self.client_id,
                        "psessionid": self.psessionid,
                        "key": ""
                    }
                )
            },
            SMART_QQ_REFER
        )
        logger.debug("Pooling returns response:\n %s" % response)
        if response == "":
            return
        try:
            ret = json.loads(response)
        except ValueError:
            logger.warning("RUNTIMELOG decode poll response error.")
            logger.debug("RESPONSE {}".format(response))
            return

        ret_code = ret['retcode']

        if ret_code in (103, ):
            logger.warning(
                "Pooling received retcode: " + str(ret_code) + ": Check error. 请前往http://w.qq.com/ 手动登陆SmartQQ一次."
            )
        elif ret_code in (121,):
            logger.warning("Pooling error with retcode %s" % ret_code)
        elif ret_code == 0:
            if 'result' not in ret or len(ret['result']) == 0:
                logger.info("Pooling ends, no new message received.")
            else:
                return ret['result']
        elif ret_code == 100006:
            logger.error("Pooling request error, response is: %s" % ret)
        elif ret_code == 116:
            self.ptwebqq = ret['p']
            logger.debug("ptwebqq updated in this pooling")
        else:
            logger.warning("Pooling returns unknown retcode %s" % ret_code)
        return None
    
    def get_self_info2(self):
        """
        Get one's own account information.
        get_self_info2
        {"retcode":0,"result":{"birthday":{"month":1,"year":1989,"day":30},"face":555,"phone":"","occupation":"","allow":1,"college":"","uin":2609717081,"blood":0,"constel":1,"lnick":"","vfwebqq":"68b5ff5e862ac589de4fc69ee58f3a5a9709180367cba3122a7d5194cfd43781ada3ac814868b474","homepage":"","vip_info":0,"city":"青岛","country":"中国","personal":"","shengxiao":5,"nick":"要有光","email":"","province":"山东","account":2609717081,"gender":"male","mobile":""}}
        :return:dict
        """
        if not self._self_info:
            url = "http://s.web2.qq.com/api/get_self_info2"
            response = self.client.get(url)
            rsp_json = json.loads(response)
            if rsp_json["retcode"] != 0:
                return {}
            self._self_info = rsp_json["result"]
        return self._self_info
    
    def get_online_buddies2(self):
        """
        Get the list of online buddies.
        get_online_buddies2
        :return:list
        """
        try:
            logger.info("RUNTIMELOG Requesting the online buddies.")
            online_buddies = json.loads(self.client.get(
                    'http://d1.web2.qq.com/channel/get_online_buddies2?vfwebqq={0}&clientid={1}&psessionid={2}&t={3}'
                        .format(
                            self.vfwebqq,
                            self.client_id,
                            self.psessionid,
                            self.client.get_timestamp()),
            ))
            logger.debug("RESPONSE get_online_buddies2 html:    " + str(online_buddies))
            if online_buddies['retcode'] != 0:
                raise TypeError('get_online_buddies2 result error')
            online_buddies = online_buddies['result']
            return online_buddies

        except:
            logger.warning("RUNTIMELOG get_online_buddies2 fail")
            return None
        
    def _login_by_cookie(self):
        logger.info("Try cookie login...")

        self.client.load_cookie()
        self.ptwebqq = self.client.get_cookie('ptwebqq')

        response = self.client.post(
            'http://d1.web2.qq.com/channel/login2',
            {
                'r': '{{"ptwebqq":"{0}","clientid":{1},"psessionid":"{2}","status":"online"}}'.format(
                    self.ptwebqq,
                    self.client_id,
                    self.psessionid
                )
            },
            SMART_QQ_REFER
        )
        try:
            ret = json.loads(response)
        except ValueError:
            logger.warning("Cookies login fail, response decode error.")
            return
        if ret['retcode'] != 0:
            raise CookieLoginFailed("Login step 1 failed with response:\n %s " % ret)

        response2 = self.client.get(
                "http://s.web2.qq.com/api/getvfwebqq?ptwebqq={0}&clientid={1}&psessionid={2}&t={3}".format(
                        self.ptwebqq,
                        self.client_id,
                        self.psessionid,
                        self.client.get_timestamp()
                ))
        ret2 = json.loads(response2)

        if ret2['retcode'] != 0:
            raise CookieLoginFailed(
                "Login step 2 failed with response:\n %s " % ret
            )

        self.psessionid = ret['result']['psessionid']
        self.account = ret['result']['uin']
        self.vfwebqq = ret2['result']['vfwebqq']

        logger.info("Login by cookie succeed. account: %s" % self.account)
        return True
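    # Cookie login summary (derived from the code above): step 1 POSTs the saved
    # ptwebqq/clientid/psessionid to channel/login2 and yields psessionid and uin;
    # step 2 GETs api/getvfwebqq and yields vfwebqq. Both steps must return
    # retcode 0, otherwise CookieLoginFailed is raised.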
    
    def _login_by_qrcode(self, no_gui):
            logger.info("RUNTIMELOG Trying to login by qrcode.")
            logger.info("RUNTIMELOG Requesting the qrcode login pages...")
            qr_validation_url = 'https://ssl.ptlogin2.qq.com/ptqrlogin?' \
                                'webqq_type=10&remember_uin=1&login2qq=1&aid={0}' \
                                '&u1=http%3A%2F%2Fw.qq.com%2Fproxy.html%3Flogin2qq%3D1%26webqq_type%3D10' \
                                '&ptredirect=0&ptlang=2052&daid=164&from_ui=1&pttype=1&dumy=' \
                                '&fp=loginerroralert&action=0-0-{1}&mibao_css={2}' \
                                '&t=undefined&g=1&js_type=0&js_ver={3}&login_sig={4}'
    
            init_url = "https://ui.ptlogin2.qq.com/cgi-bin/login?" \
                       "daid=164&target=self&style=16&mibao_css=m_webqq" \
                       "&appid=501004106&enable_qlogin=0&no_verifyimg=1" \
                       "&s_url=http%3A%2F%2Fw.qq.com%2Fproxy.html" \
                       "&f_url=loginerroralert&strong_login=1" \
                       "&login_state=10&t=20131024001"
            html = self.client.get(
                init_url,
            )
            appid = find_first_result(
                html,
                r'<input type="hidden" name="aid" value="(\d+)" />', 'Get AppId Error',
                True
            )
            sign = find_first_result(
                html,
                r'g_login_sig=encodeURIComponent\("(.*?)"\)', 'Get Login Sign Error',
            )
            js_ver = find_first_result(
                html,
                r'g_pt_version=encodeURIComponent\("(\d+)"\)',
                'Get g_pt_version Error',
                True,
            )
            mibao_css = find_first_result(
                html,
                r'g_mibao_css=encodeURIComponent\("(.+?)"\)',
                'Get g_mibao_css Error',
                True
            )
    
            start_time = date_to_millis(datetime.datetime.utcnow())
    
            error_times = 0
            ret_code = None
            redirect_url = None
    
            while True:
                error_times += 1
                logger.info("Downloading QRCode file...")
                self.client.download(
                    'https://ssl.ptlogin2.qq.com/ptqrshow?appid={0}&e=0&l=L&s=8&d=72&v=4'.format(appid),
                    self.qrcode_path
                )
                if not no_gui:
                    thread = Thread(target=show_qr, args=(self.qrcode_path, ))
                    thread.daemon = True
                    thread.start()
    
                while True:
                    ret_code, redirect_url = self._get_qr_login_status(
                        qr_validation_url, appid, start_time, mibao_css, js_ver,
                        sign, init_url
                    )
    
                    if ret_code in (
                            QR_CODE_STATUS['succeed'], QR_CODE_STATUS["qr_code_expired"]
                    ):
                        break
                    time.sleep(1)
    
                if ret_code == QR_CODE_STATUS['succeed'] or error_times > 10:
                    break
    
            if os.path.exists(self.qrcode_path):
                os.remove(self.qrcode_path)
    
            login_failed_tips = "QRCode validation response is:\n%s" % login_result
    
            if ret_code is not None and (ret_code != 0):
                raise QRLoginFailed(login_failed_tips)
            elif redirect_url is None:
                raise QRLoginFailed(login_failed_tips)
            else:
                html = self.client.get(redirect_url)
                logger.debug("QR Login redirect_url response: %s" % html)
                return True
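    # QR login summary (derived from the code above): download the QR image,
    # optionally show it in a daemon thread, then poll the ptqrlogin endpoint
    # once per second until it reports success or expiry; after roughly ten
    # expired codes the loop stops, and QRLoginFailed is raised unless a
    # redirect URL was obtained.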
    
    def login(self, no_gui=False):
        try:
            self._login_by_cookie()
        except CookieLoginFailed:
            logger.info("Cookie login failed.")
            while True:
                if self._login_by_qrcode(no_gui):
                    if self._login_by_cookie():
                        break
                time.sleep(4)
        user_info = self.get_self_info2()
        self.get_online_buddies2()
        try:
            self.username = user_info['nick']
            logger.info(
                "User information got: user name is [%s]" % self.username
            )
        except KeyError:
            logger.exception(
                "User info access failed, check your login and response:\n%s"
                % user_info
            )
            exit(1)
        logger.info("RUNTIMELOG QQ:{0} login successfully, Username:{1}".format(self.account, self.username))
        self._self_info = user_info
    
    def getTulin(self, info):
        logger.info("Try Tulin...")

        self.client.load_cookie()
        response = self.client.post(
            'http://www.tuling123.com/openapi/api',
            {
                'key': '46dec4507ea59630889dce242767ca9b',
                'info':info
            }
        )
        try:
            ret = json.loads(response)
        except ValueError:
            logger.warning("Tulin connect fail, response decode error.")
            return
        logger.info("Tulin connect succeed. account: %s" % ret)
        code = ret['code']
        result = ''
        if code == 100000:
            # plain text answer; trim overly long replies
            result = ret['text']
            if len(result) > 250:
                result = result[:100] + '...' + result[-150:]
        elif code == 200000:
            # text answer with a link
            result = ret['text'] + '\n' + ret['url']
        elif code == 302000:
            # list answer: append article/detailurl entries until ~250 chars
            for item in ret['list']:
                result_temp = result + item['article'] + '\n' + item['detailurl'] + '\n'
                if len(result_temp) > 250:
                    break
                result = result_temp
        elif code == 305000:
            # list answer with train schedule fields
            for item in ret['list']:
                result = result + item['trainnum'] + ':' + item['start'] + '-' + item['terminal'] + ' ' + item['starttime'] + '-' + item['endtime']
        elif code == 308000:
            # list answer: keep only the first name/info/detailurl entry
            for item in ret['list']:
                result = result + item['name'] + ':' + item['info'] + '\n' + item['detailurl']
                break
        logger.info("Tuling reply: %s" % result)
        return result
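    # Usage sketch (hedged, `bot` and `incoming_msg` are hypothetical names):
    #
    #     answer = bot.getTulin("hello")
    #     if answer:
    #         bot.reply_msg(incoming_msg, answer)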

    # Send a group message
    def send_qun_msg(self, reply_content, guin, msg_id, fail_times=0):
        fix_content = str(reply_content.replace("\\", "\\\\\\\\").replace("\n", "\\\\n").replace("\t", "\\\\t"))
        rsp = ""
        try:
            logger.info("Starting send group message: %s" % reply_content)
            req_url = "http://d1.web2.qq.com/channel/send_qun_msg2"
            data = (
                ('r',
                 '{{"group_uin":{0}, "face":564,"content":"[\\"{4}\\",[\\"font\\",{{\\"name\\":\\"Arial\\",\\"size\\":\\"10\\",\\"style\\":[0,0,0],\\"color\\":\\"000000\\"}}]]","clientid":{1},"msg_id":{2},"psessionid":"{3}"}}'.format(
                         guin, self.client_id, msg_id, self.psessionid, fix_content)),
                ('clientid', self.client_id),
                ('psessionid', self.psessionid)
            )
            rsp = self.client.post(req_url, data, SMART_QQ_REFER)
            rsp_json = json.loads(rsp)
            logger.debug("RESPONSE send_qun_msg: Reply response: " + str(rsp))
            if 'retcode' in rsp_json and rsp_json['retcode'] != 0:
                raise ValueError("RUNTIMELOG reply group chat error" + str(rsp_json['retcode']))
            logger.info("RUNTIMELOG send_qun_msg: Reply '{}' successfully.".format(reply_content))
            return rsp_json
        except Exception:
            logger.warning("RUNTIMELOG send_qun_msg fail")
            if fail_times < 5:
                logger.warning("RUNTIMELOG send_qun_msg: Response error. Waiting 2s before retry %s." % fail_times)
                logger.debug("RESPONSE send_qun_msg rsp:" + str(rsp))
                time.sleep(2)
                return self.send_qun_msg(reply_content, guin, msg_id, fail_times + 1)
            else:
                logger.warning("RUNTIMELOG send_qun_msg: Response error over 5 retries, giving up. Reply content: " + str(reply_content))
                return False
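    # For reference, the 'r' field above decodes to JSON shaped roughly like the
    # following (all values illustrative):
    #
    #     {"group_uin": 12345, "face": 564,
    #      "content": "[\"hello\", [\"font\", {\"name\": \"Arial\", \"size\": \"10\",
    #                   \"style\": [0, 0, 0], \"color\": \"000000\"}]]",
    #      "clientid": 53999199, "msg_id": 8765432, "psessionid": "..."}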
    
    # Send a private (buddy) message
    def send_buddy_msg(self, reply_content, tuin, msg_id, fail_times=0):
        fix_content = str(reply_content.replace("\\", "\\\\\\\\").replace("\n", "\\\\n").replace("\t", "\\\\t"))
        rsp = ""
        try:
            req_url = "http://d1.web2.qq.com/channel/send_buddy_msg2"
            data = (
                ('r',
                 '{{"to":{0}, "face":594, "content":"[\\"{4}\\", [\\"font\\", {{\\"name\\":\\"Arial\\", \\"size\\":\\"10\\", \\"style\\":[0, 0, 0], \\"color\\":\\"000000\\"}}]]", "clientid":{1}, "msg_id":{2}, "psessionid":"{3}"}}'.format(
                         tuin, self.client_id, msg_id, self.psessionid, fix_content)),
                ('clientid', self.client_id),
                ('psessionid', self.psessionid)
            )
            rsp = self.client.post(req_url, data, SMART_QQ_REFER)
            rsp_json = json.loads(rsp)
            if 'errCode' in rsp_json and rsp_json['errCode'] != 0:
                raise ValueError("reply pmchat error" + str(rsp_json['errCode']))
            logger.info("RUNTIMELOG Reply successfully.")
            logger.debug("RESPONSE Reply response: " + str(rsp))
            return rsp_json
        except Exception:
            if fail_times < 5:
                logger.warning("RUNTIMELOG Response error. Waiting 2s before retry %s." % fail_times)
                logger.debug("RESPONSE " + str(rsp))
                time.sleep(2)
                return self.send_buddy_msg(reply_content, tuin, msg_id, fail_times + 1)
            else:
                logger.warning("RUNTIMELOG Response error over 5 retries, giving up. Reply content: " + str(reply_content))
                return False
    
    def reply_msg(self, msg, reply_content=None, return_function=False):
        """
        :type msg: a QMessage subclass instance, e.g. GroupMsg, PrivateMsg or SessMsg
        :type reply_content: string, the content of the reply.
        :return: the server's response. If return_function is True, a convenience
                 reply function that takes only reply_content is returned instead
                 (see the usage sketch after this method).
        """
        import functools
        assert isinstance(msg, QMessage)
        if isinstance(msg, GroupMsg):
            if return_function:
                return functools.partial(self.send_qun_msg, guin=msg.group_code, msg_id=msg.msg_id+1)
            return self.send_qun_msg(guin=msg.group_code, reply_content=reply_content, msg_id=msg.msg_id+1)
        if isinstance(msg, PrivateMsg):
            if return_function:
                return functools.partial(self.send_buddy_msg, tuin=msg.from_uin, msg_id=msg.msg_id+1)
            return self.send_buddy_msg(tuin=msg.from_uin, reply_content=reply_content, msg_id=msg.msg_id+1)
        if isinstance(msg, SessMsg):
            pass
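    # Usage sketch referenced in the docstring above (hedged, `bot` and `msg` are
    # hypothetical names):
    #
    #     bot.reply_msg(msg, "hello")                       # reply in one call
    #     reply = bot.reply_msg(msg, return_function=True)  # functools.partial
    #     reply("hello")                                    # target and msg_id pre-bound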
def worker(url):
    # Issue a HEAD request against the given host/path using HttpClient
    httpclient = HttpClient()
    httpclient.request("".join(["http://", url]), method="HEAD")
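# Usage sketch (hedged: how `worker` is dispatched is not shown in this fragment;
# a thread pool as below is just one plausible option):
#
#     from concurrent.futures import ThreadPoolExecutor
#     with ThreadPoolExecutor(max_workers=8) as pool:
#         pool.map(worker, ["example.com", "example.org"])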