def download_torrent(self, url):
    """Fetch the torrent page at *url*, re-submit its form, and save the
    returned torrent file into ``self.dir``.

    Returns True on success, False when either HTTP round-trip yields no
    content.
    """
    content = HttpRequest.requestMultTimes(url)
    if not content:
        return False

    # Renamed from `f`: the original reused `f` for this parser AND the
    # output file handle below, shadowing one with the other.
    form = GetIdList()
    form.feed(content)

    # Build a multipart/form-data body by hand with a fixed boundary.
    boundary = "----WebKitFormBoundarydMcOM7W0mij63Igr"
    parts = []
    for key, value in form.formdata.items():
        parts.append('--' + boundary)
        parts.append('Content-Disposition: form-data; name="' + key + '"')
        parts.append('')
        parts.append(value)
    parts.append('--' + boundary + '--')
    parts.append('\r\n')
    postdata = '\r\n'.join(parts)

    parsed = urlparse(url)
    download_url = "http://" + parsed.netloc + "/" + form.action
    content = HttpRequest.requestMultTimes(
        download_url,
        data=postdata,
        headers={"Content-Type": "multipart/form-data; boundary=" + boundary})
    if not content:
        return False

    filename = form.formdata['ref'] + ".torrent"
    # `with` guarantees the handle is closed even if write() raises.
    with open(os.path.join(self.dir, filename), "wb") as torrent_file:
        torrent_file.write(content)
    return True
def initial(self):
    """Fetch ``self.url``, parse out the torrent/picture links, and
    prepare a download directory named after the page title.

    Sets ``self.validpage = False`` and returns early when the page or
    the torrent link is missing.
    """
    self.htmlContent = HttpRequest.requestMultTimes(self.url)
    if not self.htmlContent:
        self.validpage = False
        return

    self.parserhtml.feed(self.htmlContent)
    self.torrent_url = self.parserhtml.url
    if self.torrent_url == "":
        self.validpage = False
        return

    self.pic_url = self.parserhtml.picture
    # Directory name: first token of the title with separators sanitized.
    self.fileDir = self.parserhtml.title.split(' ')[0].replace('/', '-').replace(' ', '-')
    if self.fileDir == "":
        # Fall back to the last query value of the torrent URL.
        self.fileDir = self.torrent_url.split('=')[-1]
    print(self.fileDir)

    dirlist = os.listdir('.')
    # NOTE(review): on Python 3 os.listdir('.') yields str entries, so
    # this bytes-vs-str membership test is always False and mkdir is
    # always attempted -- looks like Python 2 era code; confirm the
    # target interpreter before changing the comparison.
    if self.fileDir.encode("GBK") not in dirlist:
        try:
            os.mkdir(self.fileDir)
        # BUG FIX: was a bare `except:` that also swallowed
        # KeyboardInterrupt/SystemExit; narrow to OS-level failures.
        except OSError:
            # Title-derived name was unusable (e.g. illegal characters);
            # retry with the info-hash from the torrent URL.
            self.fileDir = self.torrent_url.split("hash=")[-1]
            if self.fileDir.encode("GBK") not in dirlist:
                os.mkdir(self.fileDir)
def initial(self):
    """Fetch and parse ``self.url``, then create the download directory
    derived from the page title (duplicate of the sibling version)."""
    self.htmlContent = HttpRequest.requestMultTimes(self.url)
    if not self.htmlContent:
        self.validpage = False
        return

    self.parserhtml.feed(self.htmlContent)
    self.torrent_url = self.parserhtml.url
    if self.torrent_url == "":
        self.validpage = False
        return

    self.pic_url = self.parserhtml.picture
    first_word = self.parserhtml.title.split(' ')[0]
    self.fileDir = first_word.replace('/', '-').replace(' ', '-')
    if self.fileDir == "":
        self.fileDir = self.torrent_url.split('=')[-1]
    print(self.fileDir)

    existing = os.listdir('.')
    if self.fileDir.encode("GBK") not in existing:
        try:
            os.mkdir(self.fileDir)
        except:  # noqa: E722 -- mirrors the original's broad fallback
            # Retry with the info-hash taken from the torrent URL.
            self.fileDir = self.torrent_url.split("hash=")[-1]
            if self.fileDir.encode("GBK") not in existing:
                os.mkdir(self.fileDir)
def download_picture(self):
    """Download up to four preview pictures into ``self.fileDir``.

    Files are named 0.jpg .. 3.jpg in the order the URLs appear in
    ``self.pic_url``.
    """
    for index, url in enumerate(self.pic_url[:4]):
        name = "%d.jpg" % index
        # `with` closes the handle even if the fetch or write raises.
        with open(os.path.join(self.fileDir, name), "wb") as picture:
            picture.write(HttpRequest.requestMultTimes(url))
        # Progress is reported 1-based, matching the original counter.
        print("Download picture %d" % (index + 1) + " success")
def download_picture(self):
    """Save the first four picture URLs as N.jpg under ``self.fileDir``."""
    for count, link in enumerate(self.pic_url[:4], start=1):
        target = os.path.join(self.fileDir, "%d.jpg" % (count - 1))
        handle = open(target, "wb")
        handle.write(HttpRequest.requestMultTimes(link))
        handle.close()
        print("Download picture %d" % count + " success")
def main():
    """Send a batch of test messages to the ``api/message/`` endpoint.

    The optional first CLI argument sets how many messages to send
    (default 1000); a short random pause follows each request.
    """
    argv = sys.argv
    ht = HttpRequest()
    tData = ThreadData()
    num_messages = int(argv[1]) if len(argv) > 1 else 1000

    for idx in range(num_messages):
        print('*' * 20 + ' Sending Message ' + '*' * 20)
        r = ht.post_request('api/message/', tData.data)
        time_sleep = random.randrange(0, 3) / 18
        print('Message #: %s\nWait: %ss' % (idx + 1, time_sleep))
        print('Headers: %s' % r.headers)
        print('Encoding: %s' % r.encoding)
        print('Response : %s' % str(r.text))
        print(r)
        # Sleep messages randomly
        time.sleep(time_sleep)
def command_process(cmd):
    """Dispatch an httpc command line (already split into tokens).

    Supported forms: ``httpc help ...``, ``httpc get ...`` and
    ``httpc post ...``.  get/post requests follow HTTP redirects up to
    five times before giving up.
    """
    # Check if the command is an empty string
    if len(cmd) == 0:
        print("The command line is empty")
        return
    # The command must start with "httpc"
    if cmd[0] != "httpc":
        print(cmd[0] + " is not a valid command.")
        return
    # Check if there is any argument given after "httpc"
    if len(cmd) == 1:
        print("No arguement is given.")
        return
    if cmd[1] == "help":
        help_menu(cmd)
        # BUG FIX: without this return, "help" fell through to the
        # final print and was reported as "not a valid command".
        return
    if cmd[1] == "get" or cmd[1] == "post":
        redirection_ctr = 1
        while True:
            host = HttpRequest(" ".join(cmd)).buildRequest()
            host.sendHttpRequest()
            response = host.receiveHttpResponse()
            if response is None:
                break
            if redirection_ctr > 5:
                print("Too many redirections.")
                return
            print(str(redirection_ctr) + "- Redirecting to " + response)
            # Replace the URL token with the redirect target and retry.
            cmd[-1] = "'" + response + "'"
            redirection_ctr += 1
        return
    print(cmd[1] + " is not a valid command.")
def Parse(self, sRequest):
    """Turn a raw request string into an ``httpreq.HttpRequest``."""
    # Normalize to CRLF separators before splitting into lines.
    normalized = str(sRequest).replace('\n', '\r\n')
    request_lines = normalized.split('\r\n')

    uri = self.ExtractUrl(request_lines)
    method = self.ExtractMethod(request_lines)
    cookies = self.ExtractCookies(request_lines)
    headers = self.ExtractHeaders(request_lines)
    body = self.ExtractBody(request_lines)
    params = self.ExtractParams(request_lines)

    return httpreq.HttpRequest(uri, method, headers, body, cookies, params)
def __init__(self, indexUrl, timeout=1, retryTimes=50):
    """Fetch *indexUrl* and collect the URL list parsed from the page.

    retryTimes caps the retry loop inside HttpRequest.requestMultTimes.
    NOTE(review): `timeout` is currently unused -- confirm whether
    requestMultTimes should receive it.
    """
    self.index = 0
    self.urls = []
    # BUG FIX: the retryTimes parameter was ignored (hard-coded to 50).
    content = HttpRequest.requestMultTimes(indexUrl, retryTimes=retryTimes)
    parserhtml = ParserHtml()
    parserhtml.feed(content)
    self.urls = parserhtml.url_list
def __init__(self, indexUrl, timeout=1, retryTimes=50):
    """Download *indexUrl* and keep the link list found by ParserHtml.

    NOTE(review): `timeout` is accepted but never used -- verify intent.
    """
    self.index = 0
    self.urls = []
    # BUG FIX: forward the caller's retryTimes instead of hard-coding 50.
    content = HttpRequest.requestMultTimes(indexUrl, retryTimes=retryTimes)
    parserhtml = ParserHtml()
    parserhtml.feed(content)
    self.urls = parserhtml.url_list
def httpRequest(self):
    """Hand ``self.call`` to ``HttpRequest.ghNetWork``.

    NOTE(review): ghNetWork's semantics are not visible from here;
    presumably it performs the network request for the stored callback
    -- confirm against the HttpRequest module.
    """
    HttpRequest.ghNetWork(self.call)
# Created on: 29/06/2020
# License: GPL v.3 https://www.gnu.org/copyleft/gpl.html
import sys, os
import math
import time
from urllib import urlencode, quote
from urlparse import parse_qsl

import xbmcgui
import xbmcplugin
import xbmc

import HttpRequest

_BASE_URL = sys.argv[0]
_HANDLE = int(sys.argv[1])

http = HttpRequest.Http()


def try_get(src, getter, expected_type=None):
    """Apply the first getter that works on *src* and return its value.

    *getter* may be one callable or a list/tuple of callables; each is
    tried in order with lookup errors swallowed.  When *expected_type*
    is given, a result of another type is rejected and the next getter
    is tried.  Returns None when nothing matches.
    """
    getters = getter if isinstance(getter, (list, tuple)) else [getter]
    for fn in getters:
        try:
            value = fn(src)
        except (AttributeError, KeyError, TypeError, IndexError):
            continue
        if expected_type is None or isinstance(value, expected_type):
            return value
    return None
def test_no_token(self):
    """Voting without a secretballot token raises ImproperlyConfigured."""
    request = HttpRequest()
    self.assertRaises(ImproperlyConfigured, views.vote, request, Link, 1, 1)
def _req(self):
    """Build a bare request carrying a fixed secretballot token."""
    request = HttpRequest()
    request.secretballot_token = '1.2.3.4'
    return request
import json

from django.test import TestCase, Client
from django.http import HttpRequest, Http404, HttpResponseForbidden
from django.core.exceptions import ImproperlyConfigured
from django.contrib.contenttypes.models import ContentType

from secretballot.middleware import (SecretBallotMiddleware,
                                     SecretBallotIpMiddleware,
                                     SecretBallotIpUseragentMiddleware)
from .models import Link, WeirdLink
from secretballot import views


class MiddlewareTestCase(TestCase):
    """Checks that each middleware attaches the expected token."""

    def test_ip_middleware(self):
        mw = SecretBallotIpMiddleware()
        r = HttpRequest()
        r.META['REMOTE_ADDR'] = '1.2.3.4'
        mw.process_request(r)
        assert r.secretballot_token == '1.2.3.4'

    def test_ip_ua_middleware(self):
        mw = SecretBallotIpUseragentMiddleware()

        # basic token
        r = HttpRequest()
        r.META['REMOTE_ADDR'] = '1.2.3.4'
        r.META['HTTP_USER_AGENT'] = 'Firefox'
        mw.process_request(r)
        ff_token = r.secretballot_token

        # same one
        r = HttpRequest()
        r.META['REMOTE_ADDR'] = '1.2.3.4'
        r.META['HTTP_USER_AGENT'] = 'Firefox'
        mw.process_request(r)
        ff_token2 = r.secretballot_token
        assert ff_token == ff_token2

        # different one
        r = HttpRequest()
        r.META['REMOTE_ADDR'] = '1.2.3.4'
        r.META['HTTP_USER_AGENT'] = 'Chrome'
        mw.process_request(r)
        chrome_token = r.secretballot_token
        assert ff_token != chrome_token

        # blank one
        r = HttpRequest()
        r.META['REMOTE_ADDR'] = '1.2.3.4'
        r.META['HTTP_USER_AGENT'] = ''
        mw.process_request(r)
        blank_token = r.secretballot_token
        assert ff_token != blank_token

    def test_no_token(self):
        mw = SecretBallotMiddleware()
        with self.assertRaises(NotImplementedError):
            mw.process_request(HttpRequest())


class TestVoting(TestCase):
    """Exercises add/remove vote and token-aware querysets on Link."""

    def test_add_vote(self):
        l = Link.objects.create(url='https://google.com')
        assert Link.objects.get().vote_total == 0
        l.add_vote('1.2.3.4', 1)
        assert Link.objects.get().vote_total == 1
        l.add_vote('1.2.3.5', 1)
        assert Link.objects.get().vote_total == 2
        l.add_vote('1.2.3.6', -1)
        assert Link.objects.get().vote_total == 1

    def test_up_and_down(self):
        l = Link.objects.create(url='https://google.com')
        l.add_vote('1.2.3.4', 1)
        l.add_vote('1.2.3.6', -1)
        l = Link.objects.get()
        assert l.total_upvotes == 1
        assert l.total_downvotes == 1
        assert l.vote_total == 0

    def test_remove_vote(self):
        l = Link.objects.create(url='https://google.com')
        assert Link.objects.get().vote_total == 0
        l.add_vote('1.2.3.4', 1)
        l.add_vote('1.2.3.5', 1)
        assert Link.objects.get().vote_total == 2
        l.remove_vote('1.2.3.5')
        assert Link.objects.get().vote_total == 1

    def test_from_token(self):
        b = Link.objects.create(url='https://bing.com')
        g = Link.objects.create(url='https://google.com')
        y = Link.objects.create(url='https://yahoo.com')

        # no vote on bing, +1 on google, -1 yahoo
        g.add_vote('1.2.3.4', 1)
        y.add_vote('1.2.3.4', -1)

        sorted_links = Link.objects.from_token('1.2.3.4').order_by('url')
        assert sorted_links[0].user_vote == None    # bing
        assert sorted_links[1].user_vote == 1       # google
        assert sorted_links[2].user_vote == -1      # yahoo

    def test_from_request(self):
        b = Link.objects.create(url='https://bing.com')
        g = Link.objects.create(url='https://google.com')
        y = Link.objects.create(url='https://yahoo.com')

        # no vote on bing, +1 on google, -1 yahoo
        g.add_vote('1.2.3.4', 1)
        y.add_vote('1.2.3.4', -1)