class BaseCarRider(object):
    """Base taxi-client protocol (original docstring was Russian:
    'Базовый протокол такси-клиента'). Logs in, streams fake location
    updates in a loop, and (nominally) logs out."""

    def __init__(self, host, port):
        # Target server; all endpoints hang off http://host:port/.
        self.host = host
        self.port = port
        self.url = 'http://{0}:{1}/'.format(self.host, self.port)
        # CookieAgent keeps the session cookie from /login across requests.
        self.agent = CookieAgent(Agent(reactor), CookieJar())

    @defer.inlineCallbacks
    def login(self):
        # GET /login; require HTTP 200 and a {"result": "ok"} JSON body.
        response = yield self.agent.request('GET', self.url + "login")
        assert response.code == 200
        body = yield readBody(response)
        data = json.loads(body)
        assert data["result"] == "ok"
        log.msg("Loggin in")  # sic: typo kept — runtime log string

    @defer.inlineCallbacks
    def logout(self):
        # GET /logout with the same success contract as login().
        response = yield self.agent.request('GET', self.url + "logout")
        assert response.code == 200
        body = yield readBody(response)
        data = json.loads(body)
        assert data["result"] == "ok"
        log.msg("Logged out")

    @defer.inlineCallbacks
    def send_location(self, latitude, longitude):
        # Coordinates are formatted to 6 decimal places.
        url = self.url + "data?latitude={0:.6f}&longitude={1:.6f}".format(latitude, longitude)
        response = yield self.agent.request('GET', url)
        assert response.code == 200
        body = yield readBody(response)
        data = json.loads(body)
        assert data["result"] == "ok"
        log.msg("Location update success!")

    @defer.inlineCallbacks
    def work(self):
        # Driver loop: login, then random-walk the position forever,
        # clamped to a fixed bounding box (micro-degree units; the values
        # sent are w/1e6 and l/1e6).
        yield task.deferLater(reactor, 0, self.login)
        w = 56835567
        l = 60590891
        while True:
            w += random.randint(-10000000, 10000000)/1000.0
            l += random.randint(-10000000, 10000000)/1000.0
            if w < 56838388:
                w = 56838388
            if w > 56839803:
                w = 56839803
            if l < 60552843:
                l = 60552843
            if l > 60574815:
                l = 60574815
            yield task.deferLater(reactor, 0, self.send_location, w/1000000.0, l/1000000.0)
            yield sleep(settings.WORKER_SLEEP_TIME)
        # NOTE(review): the loop above never breaks, so this logout is
        # unreachable — confirm whether a stop condition was intended.
        yield task.deferLater(reactor, 0, self.logout)
def handleGetMData(self, data):
    # data = (result callback, target ccid)
    pprint(data)

    # Renamed the inner Deferred from `defer` to `done` so it no longer
    # shadows the twisted.internet.defer module name.
    def on_response(response):
        done = Deferred()
        done.addCallback(data[0])
        response.deliverBody(DataPrinter(done, "getmdata"))
        return NOT_DONE_YET

    request_args = ("get_mdata", str(self.ccid), data[1])
    salt = self.processCookie("/pboxes")
    auth_hash = self.client_id.genHashArgs(request_args, salt)
    body = FileBodyProducer(StringIO(auth_hash))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    url = ('http://localhost:8000/pboxes/?method=get_mdata&ccid='
           + self.ccid + "&tgtccid=" + data[1])
    d = agent.request('GET', url, headers, body)
    d.addCallback(on_response)
    return NOT_DONE_YET
def handlePutFile(self, line): print "Encrypting file..." s = line.split() file = open(s[2], 'r') enc_file = open("enc_fileout", 'w') crd = self.client_id.encryptFileSym(file, enc_file) args = ("putfile", str(self.ccid), os.path.basename(s[2])) salt = self.processCookie("/files") dataq = [] dataq.append( self.client_id.genHashArgs(args, salt)) dataq.append( self.client_id.encryptData(crd[0], self.client_id.pub_key)) dataq.append( self.client_id.encryptData(crd[1]) ) agent = CookieAgent(Agent(reactor), self.cookie_jar) #print crd[1] # print "debugging:key, iv putfile" # print dataq[1] # print len(dataq[1]) # print dataq[2] # print len(dataq[2]) print "Uploading file..." enc_file = open("enc_fileout", 'r') body = _FileProducer(enc_file ,dataq) headers = http_headers.Headers() d = agent.request( 'PUT', 'http://localhost:8000/files/?method=putfile&ccid=' + self.ccid + "&name=" + os.path.basename(s[2]), headers, body) d.addCallback(self.printPutReply_cb) return NOT_DONE_YET
def updateShared_cb(iv): print "Updating file..." args = ("updateshared", str(self.ccid), os.path.basename(s[3]), s[2]) salt = self.processCookie("/shares") dataq = [] dataq.append( self.client_id.genHashArgs(args, salt)) dataq.append( iv ) # print "debugging:ticket, iv updatefile" # print dataq[0] # print dataq[1] # print len(dataq[1]) print "Uploading file..." agent = CookieAgent(Agent(reactor), self.cookie_jar) enc_file = open("enc_fileout", 'r') body = _FileProducer(enc_file ,dataq) headers = http_headers.Headers() d = agent.request( 'POST', 'http://localhost:8000/shares/?method=updateshared&ccid=' + self.ccid + "&name=" + os.path.basename(s[3]) + "&fileid=" + s[2] , headers, body) d.addCallback(self.printPutReply_cb) return NOT_DONE_YET
def handlePutFile(self, line): print "Encrypting file..." s = line.split() file = open(s[2], 'r') enc_file = open("enc_fileout", 'w') crd = self.client_id.encryptFileSym(file, enc_file) args = ("putfile", str(self.ccid), os.path.basename(s[2])) salt = self.processCookie("/files") dataq = [] dataq.append(self.client_id.genHashArgs(args, salt)) dataq.append(self.client_id.encryptData(crd[0], self.client_id.pub_key)) dataq.append(self.client_id.encryptData(crd[1])) agent = CookieAgent(Agent(reactor), self.cookie_jar) #print crd[1] # print "debugging:key, iv putfile" # print dataq[1] # print len(dataq[1]) # print dataq[2] # print len(dataq[2]) print "Uploading file..." enc_file = open("enc_fileout", 'r') body = _FileProducer(enc_file, dataq) headers = http_headers.Headers() d = agent.request( 'PUT', 'http://localhost:8000/files/?method=putfile&ccid=' + self.ccid + "&name=" + os.path.basename(s[2]), headers, body) d.addCallback(self.printPutReply_cb) return NOT_DONE_YET
def handleGetShared(self, s): def handleGetShared_cb(response, f): finished = Deferred() finished.addCallback(self.writeFile_cb, s) cons = FileConsumer(f) response.deliverBody(FileDownload(finished, cons)) print "Downloading file..." return finished fileId = s[2] args = ("getshared", str(self.ccid), str(fileId)) salt = self.processCookie("/shares") body = FileBodyProducer(StringIO(self.client_id.genHashArgs(args, salt))) agent = CookieAgent(Agent(reactor), self.cookie_jar) headers = http_headers.Headers() d = agent.request( 'GET', 'http://localhost:8000/shares/?method=getshared&ccid=' + self.ccid + '&fileid=' + fileId, headers, body) f = open(fileId, "w") d.addCallback(handleGetShared_cb, f) return NOT_DONE_YET
def updateShared_cb(iv):
    # Callback run once the shared file has been re-encrypted: uploads the
    # new ciphertext with its IV. `self` and `s` (command tokens) come from
    # the enclosing scope; s[3] is the local path, s[2] the file id.
    print "Updating file..."
    args = ("updateshared", str(self.ccid), os.path.basename(s[3]), s[2])
    salt = self.processCookie("/shares")
    dataq = []
    # dataq[0]: request-authentication hash; dataq[1]: the new IV.
    dataq.append(self.client_id.genHashArgs(args, salt))
    dataq.append(iv)
    print "Uploading file..."
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    enc_file = open("enc_fileout", 'r')
    body = _FileProducer(enc_file, dataq)
    headers = http_headers.Headers()
    d = agent.request(
        'POST',
        'http://localhost:8000/shares/?method=updateshared&ccid=' + self.ccid + "&name=" + os.path.basename(s[3]) + "&fileid=" + s[2],
        headers,
        body)
    d.addCallback(self.printPutReply_cb)
    return NOT_DONE_YET
def upload(self, folder, filename, comment):
    # Log in to nyaa.se and upload folder/filename as a new torrent;
    # fires back (info_link, download_link). inlineCallbacks-style generator.
    exception = self.master.modules["commands"].exception
    user = yield self.config.get("user")
    passwd = yield self.config.get("pass")
    twitters = yield self.config.get("twitter", {"jdp": "johnnydickpants"})
    # Fresh cookie jar: the login session lives only for this upload.
    nyaagent = CookieAgent(self.master.agent, cookielib.CookieJar())
    if user is None or passwd is None:
        raise exception(u"No NT username or password in config")
    response = yield nyaagent.request(
        "POST", "http://www.nyaa.se/?page=login",
        Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
        FileBodyProducer(StringIO(urllib.urlencode({"method": "1", "login": user, "password": passwd}))))
    body = yield self.master.modules["utils"].returnBody(response)
    # Nyaa reports login failure in page text, not via status codes.
    if "Login successful" not in body:
        raise exception(u"Couldn't login to Nyaa.")
    name, twitter = random.choice(twitters.items())
    twitter_list = self.master.modules["utils"].rheinbowify('[b]Follow [u][url="https://twitter.com/RHExcelion"]@RHExcelion[/url][/u], [u][url="https://twitter.com/{}"]@{}[/url][/u], and the rest of Commie at [u][url="https://twitter.com/RHExcelion/commie-devs"]@Commie-Devs[/url][/u].[/b]'.format(twitter, name))
    comment = "\n\n" + comment if comment else ""
    post_data = self.master.modules["multipart"].MultiPartProducer(
        {"torrent": os.path.join(folder, filename)},
        {
            "name": filename,
            "catid": "1_37",
            "info": "#[email protected]",
            "description": "Visit us at [url]http://commiesubs.com[/url] for the latest updates and news.\n{}{}".format(twitter_list, comment),
            "remake": "0",
            "anonymous": "0",
            "hidden": "0",
            "rules": "1",
            "submit": "Upload"
        })
    response = yield nyaagent.request(
        "POST", "http://www.nyaa.se/?page=upload",
        Headers({'Content-Type': ['multipart/form-data; boundary={}'.format(post_data.boundary)]}),
        post_data)
    if response.code != 200:
        # NOTE(review): the source was line-broken inside this string; the
        # most plausible original had a newline before "Error" — confirm.
        raise exception(u"Couldn't upload torrent to Nyaa. \nError #{:d}: {}".format(response.code, self.codes[response.code]))
    body = yield self.master.modules["utils"].returnBody(response)
    match = re.search("http://www.nyaa.se/\?page=view&tid=[0-9]+", body)
    if not match:
        raise exception(u"Couldn't find torrent link in Nyaa's response.")
    # NOTE(review): replace("&", "&") is a no-op; likely was
    # replace("&amp;", "&") before HTML-unescaping mangled it — confirm.
    info_link = match.group(0).replace("&", "&")
    download_link = info_link.replace("view", "download")
    returnValue((info_link, download_link))
class Fetcher(Service, NamedAdapter):
    # Polls the game server's JSON endpoint on a LoopingCall timer and
    # feeds each parsed response into the drawing engine.
    implements(IWebWorld)

    def __init__(self, client):
        NamedAdapter.__init__(self, client)
        # %%s leaves a literal %s placeholder for the query-string suffix.
        self.url = "http://%(host)s:%(port)s/play/%(game)s/json%%s" % vars(self.config)
        self.log("Fetching from: %s" % self.url)
        self._loop = LoopingCall(self.update)
        self.interval = 0.11  # seconds between polls (~9 Hz)
        self.agent = CookieAgent(Agent(reactor, pool=http_pool), CookieJar())
        self.verbose = client.config.verbose
        self.debug = client.config.debug
        registerGlobal(self, IWebWorld)

    def startService(self):
        if not self.running:
            self._loop.start(self.interval)
        Service.startService(self)

    def stopService(self):
        if self.running:
            self._loop.stop()
        Service.stopService(self)

    def update(self, args=None):
        # One poll. When args are given they are form-encoded into the
        # query string and a form content-type header is sent.
        headers = None
        body = None
        url = self.url % ""
        if args is not None:
            url = self.url % "?"
            url += urllib.urlencode(args)
            headers = Headers({'content-type': ['application/x-www-form-urlencoded']})
            self.log("Args fetch: %s" % url)
        if self.verbose:
            self.log("Fetch %s" % url)
        # Errors not handled by self.err stop the reactor (second errback).
        return self.agent.request(
            'GET', url, headers, body
        ).addCallback(
            self.fetched
        ).addErrback(
            self.err
        ).addErrback(
            lambda _: reactor.stop()
        )

    def logLoaded(self, data):
        self.log("Loaded response:\n%s" % pformat(data))
        return data

    def fetched(self, response):
        # Buffer the body, JSON-decode it, then hand it to the drawing engine.
        d = Deferred()
        response.deliverBody(BodyReturner(d, verbose=self.verbose))
        d.addCallback(json.loads)
        if self.verbose or self.debug:
            d.addBoth(self.logLoaded)
        return d.addCallback(IDrawingEngine(self).update)
def main():
    # Fetch google.com once, show the cookies we collected, then stop.
    jar = CookieJar()
    agent = CookieAgent(Agent(reactor), jar)
    request = agent.request('GET', 'http://www.google.com/')
    request.addCallback(displayCookies, jar)
    request.addErrback(log.err)
    request.addCallback(lambda ignored: reactor.stop())
    reactor.run()
def http_request(method, url, params=None, data=None, headers=None, cookies=None, timeout=30, ignore_errors=True):
    """Perform an asynchronous HTTP request; returns a Deferred firing a
    Response.

    params/headers: optional dicts (never mutated); cookies: a CookieJar or
    a plain name->value dict; data: dict (form-encoded) or raw string body.
    With ignore_errors, failures resolve to an empty Response instead of
    erring back.
    """
    # BUG FIX: params/headers were mutable default arguments ({}) mutated in
    # place — encoded values leaked between calls and the caller's dicts
    # were modified. Work on private copies instead.
    params = dict(params) if params else {}
    headers = dict(headers) if headers else {}

    # Urlencode does not accept unicode, so convert to str first.
    url = url.encode('utf-8') if isinstance(url, unicode) else url
    for k, v in params.items():
        params[k] = v.encode('utf-8') if isinstance(v, unicode) else v
    for k, v in headers.items():
        headers[k] = v.encode('utf-8') if isinstance(v, unicode) else v

    # Merge any additional params into the url's query string.
    url_parts = list(urlparse.urlparse(url))
    query = dict(urlparse.parse_qsl(url_parts[4]))
    query.update(params)
    url_parts[4] = urllib.urlencode(query, doseq=True)
    url = urlparse.urlunparse(url_parts)

    # Handle cookies: reuse a passed-in jar, or build one from a plain dict.
    if isinstance(cookies, cookielib.CookieJar):
        cookiejar = cookies
    else:
        cookiejar = cookielib.CookieJar()
        for name, value in (cookies or {}).iteritems():
            cookiejar.set_cookie(create_cookie(name=name, value=value))

    # Urlencode the data, if needed.
    if isinstance(data, dict):
        data = urllib.urlencode(data)
        headers['Content-Type'] = 'application/x-www-form-urlencoded'

    agent = Agent(reactor, connectTimeout=timeout)
    cookie_agent = CookieAgent(agent, cookiejar)
    body = FileBodyProducer(StringIO(data)) if data else None
    d = cookie_agent.request(method, url,
                             Headers({k: [v] for k, v in headers.iteritems()}),
                             body)

    def handle_response(response, cookiejar):
        # Refuse to download multimedia payloads.
        if 'audio/mpeg' in response.headers.getRawHeaders('content-type')[-1]:
            raise Exception('reponse contains a multimedia file')
        d = defer.Deferred()
        response.deliverBody(BodyReceiver(response.code,
                                          dict(response.headers.getAllRawHeaders()),
                                          cookiejar, d))
        return d

    def handle_error(error):
        if isinstance(error, _WrapperException):
            reason = ', '.join(error.reasons)
        else:
            reason = error.getErrorMessage()
        logger = logging.getLogger(__name__)
        logger.error('Failed to GET %s (reason: %s)', url, reason)
        # Swallow the failure and resolve to an empty Response.
        return Response(0, {}, cookielib.CookieJar(), '')

    d.addCallback(handle_response, cookiejar)
    if ignore_errors:
        d.addErrback(handle_error)
    return d
def main():
    # Hit httpbin's cookie-setting endpoint, display the jar, then stop.
    jar = compat.cookielib.CookieJar()
    agent = CookieAgent(Agent(reactor), jar)
    request = agent.request(b"GET", b"http://httpbin.org/cookies/set?some=data")
    request.addCallback(displayCookies, jar)
    request.addErrback(log.err)
    request.addCallback(lambda ignored: reactor.stop())
    reactor.run()
def upload(self, link):
    # Log in to TokyoTosho and submit `link` as a new torrent entry.
    # inlineCallbacks-style generator; raises a command exception on failure.
    exception = self.master.modules["commands"].exception
    user = yield self.config.get("user")
    passwd = yield self.config.get("pass")
    # Fresh cookie jar per upload so the login session is isolated.
    ttagent = CookieAgent(self.master.agent, cookielib.CookieJar())
    if user is None or passwd is None:
        raise exception(u"No TT username or password in config")
    response = yield ttagent.request(
        "POST",
        "http://tokyotosho.info/login.php",
        Headers({"Content-Type": ["application/x-www-form-urlencoded"]}),
        FileBodyProducer(StringIO(urllib.urlencode({"username": user, "password": passwd, "submit": "Submit"}))),
    )
    body = yield self.master.modules["utils"].returnBody(response)
    # TT signals success in page text, not via status codes.
    if "Logged in." not in body:
        raise exception(u"Couldn't login to TT.")
    response = yield ttagent.request(
        "POST",
        "http://tokyotosho.info/new.php",
        Headers({"Content-Type": ["application/x-www-form-urlencoded"]}),
        FileBodyProducer(
            StringIO(
                urllib.urlencode(
                    {
                        "type": "1",
                        "url": link,
                        "comment": "#[email protected]",
                        "website": "http://www.commiesubs.com/",
                        "send": "Submit New Torrent",
                    }
                )
            )
        ),
    )
    body = yield self.master.modules["utils"].returnBody(response)
    if "Torrent Submitted" not in body:
        raise exception(u"Couldn't upload torrent to TT.")
class Fetcher(Service, NamedAdapter):
    # Service that repeatedly GETs the game's JSON endpoint and pushes each
    # decoded response into the drawing engine.
    implements(IWebWorld)

    def __init__(self, client):
        NamedAdapter.__init__(self, client)
        # The trailing %%s keeps a literal %s slot for the query suffix.
        self.url = "http://%(host)s:%(port)s/play/%(game)s/json%%s" % vars(
            self.config)
        self.log("Fetching from: %s" % self.url)
        self._loop = LoopingCall(self.update)
        self.interval = 0.11  # poll period in seconds
        self.agent = CookieAgent(Agent(reactor, pool=http_pool), CookieJar())
        self.verbose = client.config.verbose
        self.debug = client.config.debug
        registerGlobal(self, IWebWorld)

    def startService(self):
        if not self.running:
            self._loop.start(self.interval)
        Service.startService(self)

    def stopService(self):
        if self.running:
            self._loop.stop()
        Service.stopService(self)

    def update(self, args=None):
        # Single poll; optional args become a form-encoded query string.
        headers = None
        body = None
        url = self.url % ""
        if args is not None:
            url = self.url % "?"
            url += urllib.urlencode(args)
            headers = Headers(
                {'content-type': ['application/x-www-form-urlencoded']})
            self.log("Args fetch: %s" % url)
        if self.verbose:
            self.log("Fetch %s" % url)
        # Unhandled fetch errors stop the reactor (final errback).
        return self.agent.request('GET', url, headers, body).addCallback(
            self.fetched).addErrback(
            self.err).addErrback(lambda _: reactor.stop())

    def logLoaded(self, data):
        self.log("Loaded response:\n%s" % pformat(data))
        return data

    def fetched(self, response):
        # Buffer body -> json.loads -> drawing engine update.
        d = Deferred()
        response.deliverBody(BodyReturner(d, verbose=self.verbose))
        d.addCallback(json.loads)
        if self.verbose or self.debug:
            d.addBoth(self.logLoaded)
        return d.addCallback(IDrawingEngine(self).update)
def upload(self, link):
    """Log in to TokyoTosho and submit `link` as a new torrent entry."""
    exception = self.master.modules["commands"].exception
    user = yield self.config.get("user")
    passwd = yield self.config.get("pass")
    ttagent = CookieAgent(self.master.agent, cookielib.CookieJar())
    if user is None or passwd is None:
        raise exception(u"No TT username or password in config")

    login_form = urllib.urlencode({
        "username": user,
        "password": passwd,
        "submit": "Submit"
    })
    response = yield ttagent.request(
        "POST",
        "http://tokyotosho.info/login.php",
        Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
        FileBodyProducer(StringIO(login_form)))
    body = yield self.master.modules["utils"].returnBody(response)
    # Success is reported in page text, not in the status code.
    if "Logged in." not in body:
        raise exception(u"Couldn't login to TT.")

    submit_form = urllib.urlencode({
        "type": "1",
        "url": link,
        "comment": "#[email protected]",
        "website": "http://www.commiesubs.com/",
        "send": "Submit New Torrent"
    })
    response = yield ttagent.request(
        "POST",
        "http://tokyotosho.info/new.php",
        Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
        FileBodyProducer(StringIO(submit_form)))
    body = yield self.master.modules["utils"].returnBody(response)
    if "Torrent Submitted" not in body:
        raise exception(u"Couldn't upload torrent to TT.")
def deleteFile_cb():
    # Issue an authenticated DELETE for the file id in s[2] (closure vars:
    # self, s, printDeleteReply_cb).
    hash_args = ("delete", str(self.ccid), s[2])
    salt = self.processCookie("/files")
    auth_hash = self.client_id.genHashArgs(hash_args, salt)
    body = FileBodyProducer(StringIO(auth_hash))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    url = ('http://localhost:8000/files/?method=delete&ccid='
           + self.ccid + "&fileid=" + s[2])
    d = agent.request('DELETE', url, headers, body)
    d.addCallback(printDeleteReply_cb)
def startSession_cb(result):
    # Open the session: PUT the signed nonce plus the encrypted password.
    # `result` is the (signedNonce, nonceid) pair from the signing step
    # (the original used a Python 2 tuple parameter for the same thing).
    signedNonce, nonceid = result
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    extra = [signedNonce]
    body = _FileProducer(
        StringIO(self.client_id.encryptData(self.client_id.password)),
        extra)
    headers = http_headers.Headers()
    url = ('http://localhost:8000/session/?method=startsession&ccid='
           + self.ccid + '&nonceid=' + str(nonceid))
    d = agent.request('PUT', url, headers, body)
    d.addCallback(procResponse_cb)
    return NOT_DONE_YET
def handleListShares(self):
    """List every share owned by this ccid; reply goes to handleList_cb."""
    hash_args = ("list", str(self.ccid))
    salt = self.processCookie("/shares")
    auth_hash = self.client_id.genHashArgs(hash_args, salt)
    body = FileBodyProducer(StringIO(auth_hash))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    url = 'http://localhost:8000/shares/?method=list&ccid=' + self.ccid
    d = agent.request('GET', url, headers, body)
    d.addCallback(self.handleList_cb)
    return NOT_DONE_YET
def startSession_cb((signedNonce, nonceid)):
    # Callback for the nonce-signing step (Python 2 tuple parameter):
    # PUT the signed nonce together with the encrypted password to open
    # the session. `self` and `procResponse_cb` come from the enclosing scope.
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    dataq = []
    dataq.append(signedNonce)
    body = _FileProducer(StringIO(self.client_id.encryptData(self.client_id.password)), dataq)
    headers = http_headers.Headers()
    d = agent.request(
        'PUT',
        'http://localhost:8000/session/?method=startsession&ccid=' + self.ccid + '&nonceid=' + str(nonceid),
        headers,
        body)
    d.addCallback(procResponse_cb)
    return NOT_DONE_YET
def register_cb((signedNonce, nonceid)): agent = CookieAgent(Agent(reactor), self.cookie_jar) dataq = [] dataq.append(signedNonce) dataq.append(self.client_id.encryptData(self.client_id.password)) # Sending the Certificate and the Sub CA to the server if self.pin is None: print "ERROR! Check the pin!" reactor.stop() cert = cc.get_certificate(cc.CERT_LABEL, self.pin) #print type(cert.as_pem()) #print cert.as_pem() if cert is None: print "ERROR! Check the pin" reactor.stop() subca = cc.get_certificate(cc.SUBCA_LABEL, self.pin) #print type(subca.as_pem()) #print subca.as_pem() if subca is None: print "ERROR! Check the pin" reactor.stop() enc_cert = b64encode(cert.as_pem()) #print "cert len: ", len(enc_cert) dataq.append(enc_cert) enc_subca = b64encode(subca.as_pem()) #print "sub ca len: ", len(enc_subca) dataq.append(enc_subca) dataq.append(self.client_id.pub_key.exportKey('PEM')) ext_key = self.client_id.pub_key.exportKey('PEM') if self.pin is None: print "ERROR! Check the pin or the CC" reactor.stop() signed_ext_key = cc.sign(ext_key, cc.KEY_LABEL, self.pin) enc_sek = b64encode(signed_ext_key) #print "encoded ext key: ", enc_sek #print "len encoded: ", len(enc_sek) dataq.append(enc_sek) body = FileProducer2(dataq) headers = http_headers.Headers() #print "Password:"******"LEN:", len(self.client_id.encryptData(self.client_id.password)) d = agent.request( 'PUT', 'http://localhost:8000/pboxes/?method=register' + '&nonceid=' + str(nonceid), headers, body) d.addCallback(procResponse_cb, checkClientReg_cb)
def _sendVortexMsgLater(self, vortexMsgs: VortexMsgList):
    """POST the queued vortex messages to the server, retrying on failure.

    Generator-based coroutine; yields None first so the inlineCallbacks
    machinery can schedule it, then returns the request deferred.
    """
    yield None
    assert self._server
    assert vortexMsgs

    def ebSendAgain(failure):
        # Transport error: flag that we are retrying and reschedule the
        # whole send after RETRY_DELAY seconds.
        self._retrying = True
        logger.debug("Retrying send of %s messages : %s",
                     len(vortexMsgs), failure.value)
        return task.deferLater(reactor, self.RETRY_DELAY,
                               self._sendVortexMsgLater, vortexMsgs)

    def cbRequest(response):
        if response.code != 200:
            msg = "Connection to vortex %s:%s failed" % (self._server, self._port)
            logger.error(msg)
            return Failure(Exception(msg))
        elif self._retrying:
            # First successful request after one or more retries.
            logger.info("VortexServer client %s:%s reconnected",
                        self._server, self._port)
            self._retrying = False
        # Stream the response body through a fresh payload protocol.
        self.__protocol = VortexPayloadHttpClientProtocol(logger, vortexClient=self)
        response.deliverBody(self.__protocol)
        return True

    bodyProducer = _VortexClientPayloadProducer(vortexMsgs)
    agent = CookieAgent(Agent(reactor), self._cookieJar)
    args = {'vortexUuid': self._vortexUuid, 'vortexName': self._vortexName}
    uri = ("http://%s:%s/vortex?%s" % (self._server, self._port,
                                       urlencode(args))).encode("UTF-8")
    d = agent.request(
        b'POST',
        uri,
        Headers({
            b'User-Agent': [b'Synerty VortexServer Client'],
            b'Content-Type': [b'text/plain']
        }),
        bodyProducer)
    d.addCallback(cbRequest)
    d.addErrback(ebSendAgain)  # Must be after cbRequest
    return d
def authenticate(self): #self.session_id = cb_authenticate(self.auth_url) self.logger.info("Authenticating") cookieJar = CookieJar() agent = CookieAgent(Agent(self.reactor), cookieJar) data = '{"key": "' + self.auth_key + '"}' print "self.auth_url is", self.auth_url d = agent.request( 'POST', self.auth_url, Headers({'User-Agent': ['Twisted Web Client Example'], 'content-type': ['application/json']}), StringProducer(data)) d.addCallback(self.handleAuthResponse, cookieJar) d.addErrback(self.handleAuthFailed)
def deleteShare_cb():
    # DELETE the share of file s[2] with recipient s[3] (closure vars:
    # self, s, printDeleteReply_cb).
    hash_args = ("delete", str(self.ccid), s[2], s[3])
    salt = self.processCookie("/shares")
    auth_hash = self.client_id.genHashArgs(hash_args, salt)
    body = FileBodyProducer(StringIO(auth_hash))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    url = ('http://localhost:8000/shares/?method=delete&ccid='
           + self.ccid + "&fileid=" + s[2] + "&rccid=" + s[3])
    d = agent.request('DELETE', url, headers, body)
    d.addCallback(printDeleteReply_cb)
def handleListShares(self):
    # GET the list of shares for this ccid; the body carries the hash that
    # authenticates the request against the session-cookie salt.
    args = ("list", str(self.ccid))
    salt = self.processCookie("/shares")
    body = FileBodyProducer(StringIO(self.client_id.genHashArgs(args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    d = agent.request(
        'GET',
        'http://localhost:8000/shares/?method=list&ccid=' + self.ccid,
        headers,
        body)
    d.addCallback(self.handleList_cb)
    return NOT_DONE_YET
def handleUpdateSharePerm(self, s):
    """POST a permission change: file s[2], recipient ccid s[3],
    writeable flag s[4]."""
    hash_args = ("updateshareperm", str(self.ccid), s[3], s[2], s[4])
    salt = self.processCookie("/shares")
    auth_hash = self.client_id.genHashArgs(hash_args, salt)
    body = FileBodyProducer(StringIO(auth_hash))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    url = ('http://localhost:8000/shares/?method=updateshareperm&ccid='
           + self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2]
           + "&writeable=" + s[4])
    d = agent.request('POST', url, headers, body)
    d.addCallback(self.printPutReply_cb)
    return NOT_DONE_YET
def main(): c = Cookie(None, 'sid', '157272379', '443', '443', "10.0.199.8", None, None, '/', None, False, False, 'TestCookie', None, None, None) cj = CookieJar() cj.set_cookie(c) print ">>> cj:", cj contextFactory = WebClientContextFactory() agent = CookieAgent(RedirectAgent(Agent(reactor, contextFactory)), cj) d = agent.request('GET', 'https://10.0.199.8/datetime_get_request_periodic') d.addCallbacks(getBody, log.err) d.addCallback(lambda x: reactor.stop()) reactor.run()
def handleUpdateSharePerm(self, s):
    # POST a share-permission change: s[2]=fileid, s[3]=recipient ccid,
    # s[4]=writeable flag. Reply handled by printPutReply_cb.
    args = ("updateshareperm", str(self.ccid), s[3], s[2], s[4])
    salt = self.processCookie("/shares")
    body = FileBodyProducer(
        StringIO(self.client_id.genHashArgs(args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    d = agent.request(
        'POST',
        'http://localhost:8000/shares/?method=updateshareperm&ccid=' + self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2] + "&writeable=" + s[4],
        headers,
        body)
    d.addCallback(self.printPutReply_cb)
    return NOT_DONE_YET
def shareFile_cb(): args = ("delete", str(self.ccid), s[3], s[2]) salt = self.processCookie("/shares") dataq = [] dataq.append(self.client_id.genHashArgs(args, salt)) dataq.append(enc_sym_key) print "Uploading symkey..." agent = CookieAgent(Agent(reactor), self.cookie_jar) body = _FileProducer(StringIO(""), dataq) headers = http_headers.Headers() d = agent.request( 'PUT', 'http://localhost:8000/shares/?method=sharefile&ccid=' + self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2], headers, body) d.addCallback(self.printPutReply_cb) return d
def shareFile_cb():
    # Upload the symmetric key re-encrypted for the share recipient.
    # `s` and `enc_sym_key` come from the enclosing scope.
    # NOTE(review): the hash-args method token is "delete", not
    # "sharefile" — looks copy-pasted; confirm the server hashes it
    # the same way before changing it.
    args = ("delete", str(self.ccid), s[3], s[2])
    salt = self.processCookie("/shares")
    dataq = []
    dataq.append(self.client_id.genHashArgs(args, salt))
    dataq.append(enc_sym_key)
    print "Uploading symkey..."
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    body = _FileProducer(StringIO(""), dataq)
    headers = http_headers.Headers()
    d = agent.request(
        'PUT',
        'http://localhost:8000/shares/?method=sharefile&ccid=' + self.ccid + "&rccid=" + s[3] + "&fileid=" + s[2],
        headers,
        body)
    d.addCallback(self.printPutReply_cb)
    return d
class Client():
    """HTTP client with a persistent connection pool, gzip decoding,
    redirect following and a shared cookie jar."""

    def __init__(self):
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.pool.maxPersistentPerHost = 5  # default keeps only 2 connections per host
        self.pool.cachedConnectionTimeout = 50  # default is 240 seconds
        contextFactory = WebClientContextFactory()
        raw_agent = Agent(reactor, contextFactory, pool=self.pool)
        agent = RedirectAgent(
            ContentDecoderAgent(raw_agent, [('gzip', GzipDecoder)]))
        self.cookieJar = CookieJar()
        self.agent = CookieAgent(agent, self.cookieJar)
        # Baseline request headers merged into every getpage() call.
        self.headers = {'User-agent': ['Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1'],
                        'Accept-Language': ['zh-Hans-CN,zh-Hans;q=0.5'],
                        'Accept': ['text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'],
                        'Accept-Encoding': ['gb2313,utf-8;q=0.7,*;q=0.7'],
                        'Cache-Control': ['max-age=0']}

    def getpage(self, url, headers=None):
        """GET `url`, merging `headers` (if any) with the client defaults.

        BUG FIX: `headers` was a mutable default argument ({}) that was
        updated in place — the default dict (and any caller-supplied dict)
        accumulated self.headers on every call. Merge into a copy instead;
        as before, the client defaults win on key conflicts.
        """
        merged = dict(headers) if headers else {}
        merged.update(self.headers)
        return self.agent.request('GET', url, Headers(merged), None)
class TwitterReactorChallengeResponse(ChallengeResponse):
    """Challenge/response flow against Twitter, built on a cookie-aware,
    connection-pooling Twisted web agent. getChallenge/getResponse are
    still stubs."""

    def __init__(self, user_handle, user_data):
        self._challenge = ChallengeDataHolder()
        self._response = ResponseDataHolder()
        self._deferred = None
        # One jar shared by all requests so the session carries across them.
        self._cookieJar = cookiejar.CookieJar()
        self._contextFactory = WebClientContextFactory()
        self._pool = HTTPConnectionPool(reactor)
        # Web agent that handles cookies and keeps persistent connections.
        self._agent = CookieAgent(
            Agent(reactor, self._contextFactory, pool=self._pool),
            self._cookieJar)

    def getChallenge(self):
        # extract challenge, store in self._challenge
        pass

    def getResponse(self):
        pass

    def requestGET(self, url):
        self._deferred = self._agent.request("GET", url)
        self._deferred.addCallback(self.doneRequest)
        return self._deferred

    def doneRequest(self, response):
        finished = Deferred()
        # BUG FIX: the original passed self.someDataReceved (missing 'i'),
        # which would raise AttributeError at runtime — the method defined
        # below is someDataReceived.
        response.deliverBody(ResponseBodyHolder(finished, self.someDataReceived))

    def someDataReceived(self, bReceived):
        # We probably don't want to look at data until the response completes.
        pass
def handleGetShareMData(self, data):
    """Fetch metadata of a shared file; data = (result callback, fileid)."""
    def on_response(response):
        done = Deferred()
        done.addCallback(data[0])
        response.deliverBody(DataPrinter(done, "getmdata"))
        return NOT_DONE_YET

    hash_args = ("get_mdata", str(self.ccid), data[1])
    salt = self.processCookie("/shares")
    auth_hash = self.client_id.genHashArgs(hash_args, salt)
    body = FileBodyProducer(StringIO(auth_hash))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    url = ('http://localhost:8000/shares/?method=get_mdata&ccid='
           + self.ccid + "&fileid=" + data[1])
    d = agent.request('GET', url, headers, body)
    d.addCallback(on_response)
    return NOT_DONE_YET
def async_request(self, method, data=None): global cj # agent = CookieAgent(RedirectAgent(Agent(reactor)), cj) agent = CookieAgent(Agent(reactor), cj) print ">>> request %s %s\n" % (method, self.target) # print ">>> cookies: %s\n" % cj # print ">>> data: %s\n" % data from twisted.web.http_headers import Headers body = "" if data: body = "&".join("%s=%s" % (k, v[0]) for k,v in data.items()) # print ">>> body: %s\n" % body d = agent.request(method, self.target, Headers({}), StringProducer(body)) d.addCallback(self.get_body) d.addErrback(errback) return NOT_DONE_YET
def perform_request(self, url, request_type='GET', raw_data='', headers=None):
    """Perform an HTTP request and return a Deferred firing the body text.

    2xx success codes (200/201/206) resolve to the body; anything else
    errbacks with RequestError carrying the response and its body.
    """
    if headers:
        # BUG FIX: the original wrapped each header value in a list by
        # mutating the caller's dict in place; build the Headers from a
        # fresh mapping instead.
        headers = Headers({key: [value] for key, value in headers.items()})

    def _on_error_response(response, response_str):
        raise RequestError(response, response_str)

    def _on_response(response):
        if response.code in (http.OK, http.CREATED, http.PARTIAL_CONTENT):
            return readBody(response)
        # Read the body anyway so the error carries the server's message.
        return readBody(response).addCallback(
            lambda response_str: _on_error_response(response, response_str))

    self._logger.debug("Performing %s request to %s", request_type, url)
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    data_producer = None if request_type == 'GET' else POSTDataProducer(raw_data)
    deferred = agent.request(request_type, url, headers, data_producer)
    deferred.addCallback(_on_response)
    return deferred
def handleGetShared(self, s):
    # Download a file shared with us; s[2] is the file id. The body is
    # streamed into a local file named after the id; completion handled
    # by writeFile_cb.
    def handleGetShared_cb(response, f):
        finished = Deferred()
        finished.addCallback(self.writeFile_cb, s)
        cons = FileConsumer(f)
        response.deliverBody(FileDownload(finished, cons))
        print "Downloading file..."
        return finished

    fileId = s[2]
    args = ("getshared", str(self.ccid), str(fileId))
    salt = self.processCookie("/shares")
    body = FileBodyProducer(
        StringIO(self.client_id.genHashArgs(args, salt)))
    agent = CookieAgent(Agent(reactor), self.cookie_jar)
    headers = http_headers.Headers()
    d = agent.request(
        'GET',
        'http://localhost:8000/shares/?method=getshared&ccid=' + self.ccid + '&fileid=' + fileId,
        headers,
        body)
    # NOTE(review): the handle is opened before the request completes and
    # leaks if the request errs — confirm FileDownload closes it on success.
    f = open(fileId, "w")
    d.addCallback(handleGetShared_cb, f)
    return NOT_DONE_YET
class Browser(object):
    # HTTP scraping client: cookie jar (optionally persisted to disk),
    # gzip decoding, persistent connection pool, optional page archiving.

    _HEADERS = {
        'User-Agent' : ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17', ]
    }
    _POST_HEADERS = {
        'Content-Type' : ['application/x-www-form-urlencoded', ]
    }

    def __init__(self, page_archiver, cookie_file=None):
        self._logger = logging.getLogger(__name__)
        self._page_archiver = page_archiver
        self._logger.debug('Using page archiver: %s. Cookie file: %s',
                           page_archiver is not None, cookie_file)
        if cookie_file:
            # Create the cookie file with restrictive permissions (umask
            # 077), then restore the previous umask.
            umask = os.umask(077)
            self._cj = LWPCookieJar(cookie_file)
            try:
                self._cj.load()
            except LoadError:
                self._logger.warning('Cannot load cookies from %s' % (cookie_file, ))
            os.umask(umask)
        else:
            self._cj = CookieJar()
        pool = HTTPConnectionPool(reactor, persistent=True)
        pool.maxPersistentPerHost = 10
        self._agent = CookieAgent(ContentDecoderAgent(Agent(reactor, pool=pool), [('gzip', GzipDecoder)]), self._cj)
        # Serializes page-archiver access from reactor worker threads.
        self._lock = Lock()

    def save_cookies(self):
        # Best effort: LWPCookieJar.save may raise LoadError; ignore it.
        try:
            self._cj.save()
        except LoadError:
            pass
        else:
            self._logger.debug('Cookies saved')

    @defer.deferredGenerator
    def _request(self, request_type, url, referer=None, body=None):
        # Core fetch: optional Referer header, urlencoded POST body.
        # A 3xx answer to a POST is manually re-fetched with GET.
        self._logger.debug('Fetching %s', url)
        headers = dict(self._HEADERS)
        if referer:
            headers['Referer'] = [referer, ]
        body_prod = None
        if body:
            headers.update(self._POST_HEADERS)
            body_prod = StringProducer(body)
        d = defer.waitForDeferred(self._agent.request(request_type, url, Headers(headers), body_prod))
        yield d
        response = d.getResult()
        receiver = MemoryReceiver()
        response.deliverBody(receiver)
        if request_type == 'POST' and (response.code >= 300 and response.code < 400):
            # Redirect after POST: resolve Location relative to the url's
            # directory and GET that instead.
            new_location = '%s/%s' % (
                os.path.split(url)[0],
                response.headers.getRawHeaders('location')[0])
            d = defer.waitForDeferred(self.get(new_location, referer))
            yield d
            yield d.getResult()
            return
        else:
            d = defer.waitForDeferred(receiver.finished)
            yield d
            page = d.getResult()
            if self._page_archiver:
                # Archive off the reactor thread.
                reactor.callInThread(self._archive_page, page, url, body, referer)
            yield page

    def _archive_page(self, page, url, body, referer):
        # Runs in a worker thread; the lock serializes archiver access.
        with self._lock:
            self._page_archiver.archive(page, url, body, referer)

    def get(self, url, referer=None):
        return self._request('GET', url, referer)

    def post(self, url, data, referer=None):
        self._logger.debug('Posting to %s: %s', url, data)
        encoded_data = urlencode(data)
        return self._request('POST', url, referer, encoded_data)
class MetricServiceRequest(object):
    """
    A convience class for fetching metrics from CentralQuery that can be used
    by twisted daemons.
    """

    # use a shared cookie jar so all Metric requests can share the same session
    cookieJar = CookieJar()

    def __init__(self, userAgent):
        self._aggMapping = AGGREGATION_MAPPING
        urlstart = getGlobalConfiguration().get('metric-url', 'http://localhost:8080')
        self._metric_url = '%s/%s' % (urlstart, METRIC_URL_PATH)
        self._metric_url_v2 = '%s/%s' % (urlstart, WILDCARD_URL_PATH)
        # Basic-auth header built from the global-conf credentials.
        creds = IAuthorizationTool(None).extractGlobalConfCredentials()
        auth = base64.b64encode('{login}:{password}'.format(**creds))
        self.agent = CookieAgent(Agent(reactor, pool=getPool(), connectTimeout=30), self.cookieJar)
        self._headers = Headers({
            'Authorization': ['basic %s' % auth],
            'content-type': ['application/json'],
            'User-Agent': ['Zenoss: %s' % userAgent]
        })
        self.onMetricsFetched = None

    def getMetrics(self, uuid, dpNames, cf='AVERAGE', rate=False, downsample="1h-avg", start=None, end=None, deviceId=None, returnSet="EXACT"):
        """POST a CentralQuery (v1) request for the given datapoints.

        :param uuid: context UUID used to tag each metric
        :param dpNames: a datapoint name or list of names
        :return: Deferred firing with the twisted response object
        """
        metrics = []
        if isinstance(dpNames, basestring):
            dpNames = [dpNames]
        for dpName in dpNames:
            # TODO find callers
            name = ensure_prefix(deviceId, dpName)
            metrics.append(dict(
                metric=name,
                aggregator=self._aggMapping.get(cf.lower(), cf.lower()),
                rpn='',
                rate=rate,
                format='%.2lf',
                tags=dict(contextUUID=[uuid]),
                name='%s_%s' % (uuid, dpName)
            ))
        request = dict(
            returnset=returnSet,
            start=start,
            end=end,
            downsample=downsample,
            metrics=metrics
        )
        body = FileBodyProducer(StringIO(json.dumps(request)))
        d = self.agent.request('POST', self._metric_url, self._headers, body)
        return d

    def fetchMetrics(self, metrics, start="1h-ago", end=None, returnSet="EXACT"):
        """
        Uses the CentralQuery V2 api to fetch metrics. Mainly that means wild
        cards can be used to fetch all metrics with the same name grouped by a
        tag. Usually used to retrieve a specific metric for all component on a
        device

        :param metrics: dictionary with required keys of metricName, tags and
        optional rpn defaults to empty, cf defatults to average, rate defaults
        to false, downsample defaults to 5m-avg
        :param start:
        :param end:
        :param returnSet:
        :return: deferred
        """
        metricQueries = []
        # BUG FIX: `downsample` used to be bound only inside the loop, so an
        # empty `metrics` list raised NameError when building the request.
        # For non-empty input the request-level value still mirrors the last
        # metric's downsample, exactly as before.
        downsample = '5m-avg'
        for metric in metrics:
            log.info("fetchMetrics metrics %s", metric)
            cf = metric.get('cf', 'average')
            rpn = metric.get('rpn', '')
            rate = metric.get('rate', False)
            tags = metric['tags']
            downsample = metric.get('downsample', '5m-avg')
            metricName = metric['metricName']
            metricQueries.append(dict(
                metric=metricName,
                downsample=downsample,
                aggregator=self._aggMapping.get(cf.lower(), cf.lower()),
                rpn=rpn,
                rate=rate,
                format='%.2lf',
                tags=tags,
                name=metricName
            ))
        request = dict(
            returnset=returnSet,
            start=start,
            end=end,
            downsample=downsample,
            queries=metricQueries
        )
        body = FileBodyProducer(StringIO(json.dumps(request)))
        log.info("POST %s %s %s", self._metric_url_v2, self._headers, json.dumps(request))
        d = self.agent.request('POST', self._metric_url_v2, self._headers, body)
        return d
def request(self, method, url, **kwargs):
    """
    See :func:`treq.request()`.
    """
    method = method.encode('ascii').upper()
    stacklevel = kwargs.pop('_stacklevel', 2)

    # Normalise whatever the caller handed us into a DecodedURL.
    if isinstance(url, DecodedURL):
        target = url
    elif isinstance(url, EncodedURL):
        target = DecodedURL(url)
    elif isinstance(url, six.text_type):
        target = DecodedURL.from_text(url)
    else:
        target = DecodedURL.from_text(url.decode('ascii'))

    # Fold any `params` argument into the URL's existing query string.
    params = kwargs.pop('params', None)
    if params:
        target = target.replace(
            query=target.query + tuple(_coerced_query_params(params)))
    url = target.to_uri().to_text().encode('ascii')

    # Accept either a plain dict or a ready-made Headers instance.
    raw_headers = kwargs.pop('headers', None)
    if isinstance(raw_headers, dict):
        converted = Headers({})
        for field, value in raw_headers.items():
            if isinstance(value, (bytes, six.text_type)):
                converted.addRawHeader(field, value)
            elif isinstance(value, list):
                converted.setRawHeaders(field, value)
        headers = converted
    elif raw_headers:
        headers = raw_headers
    else:
        headers = Headers({})

    bodyProducer, contentType = self._request_body(
        data=kwargs.pop('data', None),
        files=kwargs.pop('files', None),
        json=kwargs.pop('json', _NOTHING),
        stacklevel=stacklevel,
    )
    if contentType is not None:
        headers.setRawHeaders(b'Content-Type', [contentType])

    # Merge per-request cookies into the session jar, then stack the agent
    # wrappers: cookies -> redirects -> gzip decoding -> auth.
    cookies = kwargs.pop('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    cookies = merge_cookies(self._cookiejar, cookies)
    agent = CookieAgent(self._agent, cookies)

    browser_like = kwargs.pop('browser_like_redirects', False)
    if kwargs.pop('allow_redirects', True):
        redirect_cls = BrowserLikeRedirectAgent if browser_like else RedirectAgent
        agent = redirect_cls(agent)

    agent = ContentDecoderAgent(agent, [(b'gzip', GzipDecoder)])

    auth = kwargs.pop('auth', None)
    if auth:
        agent = add_auth(agent, auth)

    d = agent.request(method, url, headers=headers, bodyProducer=bodyProducer)

    reactor = kwargs.pop('reactor', None)
    if reactor is None:
        from twisted.internet import reactor

    timeout = kwargs.pop('timeout', None)
    if timeout:
        canceller = reactor.callLater(timeout, d.cancel)

        def _stop_timer(result):
            # Don't fire the cancellation once a result has arrived.
            if canceller.active():
                canceller.cancel()
            return result

        d.addBoth(_stop_timer)

    if not kwargs.pop('unbuffered', False):
        d.addCallback(_BufferedResponse)

    # Anything left in kwargs is unsupported; warn rather than fail (for now).
    if kwargs:
        warnings.warn(
            ("Got unexpected keyword argument: {}."
             " treq will ignore this argument,"
             " but will raise TypeError in the next treq release.").format(
                ", ".join(repr(k) for k in kwargs)),
            DeprecationWarning,
            stacklevel=stacklevel,
        )

    return d.addCallback(_Response, cookies)
def upload(self, folder, filename, comment):
    """Log in to nyaa.se and upload ``folder/filename`` as a torrent.

    Builds the description from a randomly chosen twitter handle plus the
    optional ``comment``, then scrapes the resulting page for the torrent's
    info link.  Returns (via ``returnValue``) an ``(info_link,
    download_link)`` tuple; raises the bot's command exception on failure.
    """
    exception = self.master.modules["commands"].exception
    user = yield self.config.get("user")
    passwd = yield self.config.get("pass")
    twitters = yield self.config.get("twitter", {"jdp": "johnnydickpants"})
    # Fresh cookie jar per upload so login sessions don't leak between calls.
    nyaagent = CookieAgent(self.master.agent, cookielib.CookieJar())
    if user is None or passwd is None:
        raise exception(u"No NT username or password in config")
    response = yield nyaagent.request(
        "POST", "http://www.nyaa.se/?page=login",
        Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
        FileBodyProducer(
            StringIO(
                urllib.urlencode({
                    "method": "1",
                    "login": user,
                    "password": passwd
                }))))
    body = yield self.master.modules["utils"].returnBody(response)
    if "Login successful" not in body:
        raise exception(u"Couldn't login to Nyaa.")
    name, twitter = random.choice(twitters.items())
    twitter_list = self.master.modules["utils"].rheinbowify(
        '[b]Follow [u][url="https://twitter.com/RHExcelion"]@RHExcelion[/url][/u], [u][url="https://twitter.com/{}"]@{}[/url][/u], and the rest of Commie at [u][url="https://twitter.com/RHExcelion/commie-devs"]@Commie-Devs[/url][/u].[/b]'
        .format(twitter, name))
    comment = "\n\n" + comment if comment else ""
    # NOTE(review): the "info" value below looks mangled by e-mail
    # obfuscation in transit — confirm the original IRC channel string.
    post_data = self.master.modules["multipart"].MultiPartProducer(
        {"torrent": os.path.join(folder, filename)}, {
            "name": filename,
            "catid": "1_37",
            "info": "#[email protected]",
            "description":
            "Visit us at [url]http://commiesubs.com[/url] for the latest updates and news.\n{}{}"
            .format(twitter_list, comment),
            "remake": "0",
            "anonymous": "0",
            "hidden": "0",
            "rules": "1",
            "submit": "Upload"
        })
    response = yield nyaagent.request(
        "POST", "http://www.nyaa.se/?page=upload",
        Headers({
            'Content-Type': [
                'multipart/form-data; boundary={}'.format(
                    post_data.boundary)
            ]
        }), post_data)
    if response.code != 200:
        raise exception(
            u"Couldn't upload torrent to Nyaa. Error #{:d}: {}".format(
                response.code, self.codes[response.code]))
    body = yield self.master.modules["utils"].returnBody(response)
    # BUG FIX: the link is scraped out of HTML, where the ampersand is
    # entity-escaped; match "&amp;" and unescape it afterwards (the old
    # .replace("&", "&") was a no-op).
    match = re.search("http://www.nyaa.se/\?page=view&amp;tid=[0-9]+", body)
    if not match:
        raise exception(u"Couldn't find torrent link in Nyaa's response.")
    info_link = match.group(0).replace("&amp;", "&")
    download_link = info_link.replace("view", "download")
    returnValue((info_link, download_link))
def request(self, method, url, **kwargs):
    """Issue an HTTP request and return a Deferred firing with a response.

    Supports query ``params``, dict or Headers ``headers``, form ``data``,
    multipart ``files``, ``cookies``, ``auth``, redirects and a ``timeout``.
    """
    method = method.upper()

    # Fold any `params` argument into the URL's query string.
    params = kwargs.get('params')
    if params:
        url = _combine_query_params(url, params)

    # Accept either a plain dict or a ready-made Headers instance.
    supplied_headers = kwargs.get('headers')
    if not supplied_headers:
        headers = Headers({})
    elif isinstance(supplied_headers, dict):
        headers = Headers({})
        for field, value in supplied_headers.iteritems():
            if isinstance(value, str):
                headers.addRawHeader(field, value)
            else:
                headers.setRawHeaders(field, value)
    else:
        headers = supplied_headers

    # Pick a body producer appropriate for the payload.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    if files:
        # Files and/or large objects are best served by multipart/form-data.
        files = list(_convert_files(files))
        boundary = uuid.uuid4()
        headers.setRawHeaders(
            'content-type', [
                'multipart/form-data; boundary=%s' % (boundary,)])
        fields = _convert_params(data) if data else []
        bodyProducer = multipart.MultiPartProducer(
            fields + files, boundary=boundary)
    elif data:
        # Smaller payloads stick to x-www-form-urlencoded, which is
        # generally faster.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                'content-type', ['application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = IBodyProducer(data)

    # Merge per-request cookies into the session jar, then stack the agent
    # wrappers: cookies -> redirects -> gzip decoding -> auth.
    cookies = kwargs.get('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    cookies = merge_cookies(self._cookiejar, cookies)
    agent = CookieAgent(self._agent, cookies)

    if kwargs.get('allow_redirects', True):
        agent = RedirectAgent(agent)

    agent = ContentDecoderAgent(agent, [('gzip', GzipDecoder)])

    auth = kwargs.get('auth')
    if auth:
        agent = add_auth(agent, auth)

    d = agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        canceller = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def _stop_timer(result):
            # Don't fire the cancellation once a result has arrived.
            if canceller.active():
                canceller.cancel()
            return result

        d.addBoth(_stop_timer)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
class Module(object):
    """Crunchyroll bot module: logs in, caches the simulcast show list from
    the site's RSS feeds, and rips episodes (video via RTMP/HTTP, subtitles
    via the player XML) for upload over FTP."""

    def __init__(self, master):
        self.master = master
        self.config = master.modules["config"].interface("crunchy")
        # Dedicated cookie jar so the Crunchyroll session is isolated.
        self.agent = CookieAgent(master.agent, cookielib.CookieJar())
        self.shows = {}            # {series name: {episode key: episode dict}}
        self.cache_loop = None     # LoopingCall driving cache(), see start()
        self.logged_in = False
        self.start()

    @inlineCallbacks
    def start(self):
        """Log in, then refresh the show cache every 15 minutes."""
        yield self.login()
        self.cache_loop = LoopingCall(self.cache)
        self.cache_loop.start(900)

    def stop(self):
        """Stop the periodic cache refresh, if it is running."""
        if self.cache_loop is not None and self.cache_loop.running:
            self.cache_loop.stop()
        self.cache_loop = None

    @inlineCallbacks
    def login(self):
        """Authenticate against Crunchyroll's form handler with the
        configured credentials and mark the module as logged in."""
        self.logged_in = False
        user = yield self.config.get("user")
        passwd = yield self.config.get("pass")
        url = 'https://www.crunchyroll.com/?a=formhandler'
        headers = Headers({
            'Content-Type': ['application/x-www-form-urlencoded'],
            'Referer': ['https://www.crunchyroll.com'],
            'User-Agent': ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:17.0) Gecko/17.0 Firefox/17.0']
        })
        data = FileBodyProducer(StringIO(urllib.urlencode({
            'formname': 'RpcApiUser_Login',
            'next_url': '',
            'fail_url': '/login',
            'name': user,
            'password': passwd
        })))
        # NOTE(review): the response is never checked, so a failed login
        # still sets logged_in = True — confirm whether that is intended.
        response = yield self.agent.request("POST", url, headers, data)
        self.logged_in = True

    @inlineCallbacks
    def cache(self):
        """Scrape the simulcast listing pages and each show's RSS feed,
        rebuilding self.shows and logging new/updated episodes."""
        if not self.logged_in:
            return
        body = ""
        for PAGE in range(3):  # Pages 0, 1, 2 - Should be plenty
            response = yield self.agent.request("GET", "http://www.crunchyroll.com/videos/anime/simulcasts/ajax_page?pg={:d}".format(PAGE))
            body += yield self.master.modules["utils"].returnBody(response)
        soup = BeautifulSoup(body, from_encoding="utf8")
        epoch = datetime(1970, 1, 1)
        shows = {}
        deferreds = []  # NOTE(review): unused
        for element in soup("a", token="shows-portraits"):
            key = element["href"].lstrip("/")
            name = element["title"]
            shows[name] = {}
            response = yield self.agent.request("GET", "http://www.crunchyroll.com/{}.rss".format(key))
            body = yield self.master.modules["utils"].returnBody(response)
            xml = BeautifulSoup(body, "xml", from_encoding="utf8")
            for item in xml("item"):
                airtime = item.find("premiumPubDate")
                episode = item.find("episodeNumber")
                title = item.find("episodeTitle")
                duration = item.find("duration")
                link = item.find("link")
                media_id = item.find("mediaId")
                if not airtime or not episode or not link or not media_id:
                    continue
                # Seconds since epoch; relies on platform strptime accepting
                # the feed's %Z zone name — TODO confirm.
                airtime = int((datetime.strptime(airtime.string, "%a, %d %b %Y %H:%M:%S %Z") - epoch).total_seconds())
                try:
                    e = int(episode.string)
                    key = "{:02d}".format(e)
                except:
                    # Non-numeric episode labels (e.g. specials) keep the raw
                    # string as the key and sort as episode 0.
                    key = episode.string
                    e = 0
                episode = e
                title = title.string if title and title.string else u""
                duration = int(duration.string) if duration else 0
                link = link.string
                media_id = media_id.string
                # Python 2 integer division: seconds -> "M:SS".
                duration = "{:d}:{:02d}".format(duration / 60, duration % 60)
                if not link or not media_id:
                    continue
                shows[name][key] = {
                    "series": name,
                    "episode": episode,
                    "title": title,
                    "duration": duration,
                    "airtime": airtime,
                    "link": link,
                    "media_id": media_id
                }
        # Diff against the previous cache purely for logging purposes.
        for series, episodes in shows.items():
            if series not in self.shows:
                self.log(u"Found {} (Series) [{:,d} episodes]", series, len(shows[series]))
                continue
            for episode, show in episodes.items():
                if episode not in self.shows[series]:
                    self.log(u"Found {} #{}", series, episode)
                elif self.shows[series][episode] != show:
                    self.log(u"Updated {} #{}", series, episode)
        self.shows = shows

    def resolve(self, name):
        """Resolve a (case-insensitive, possibly partial) show name to a
        ShowObject; raises the bot's command exception when ambiguous or
        unknown."""
        exception = self.master.modules["commands"].exception
        matches = []
        if not name:
            raise exception(u"Show name not specified.")
        name = name.lower()
        for s in self.shows.keys():
            if s.lower() == name:
                return self.nameToObject(s)
            if s.lower().count(name):
                matches.append(s)
        if len(matches) > 1:
            if len(matches) > 5:
                extra = "and {:d} more.".format(len(matches) - 5)
                matches = matches[:5] + [extra]
            raise exception(u"Show name not specific, found: {}".format(u", ".join(matches)))
        elif not matches:
            raise exception(u"Show name not found.")
        return self.nameToObject(matches[0])

    def nameToObject(self, name):
        """Wrap a cached show's episode dicts into a ShowObject, or return
        None when the show is not cached."""
        if name not in self.shows:
            return None
        data = self.shows[name]
        episodes = {}
        for k, v in data.items():
            episodes[k] = EpisodeObject(**v)
        return ShowObject(name, episodes)

    @inlineCallbacks
    def rip(self, guid, show, quality, video, subs):
        """Download an episode's video and/or subtitles into ``guid`` and
        upload the results over FTP.

        :param guid: working directory for this job
        :param show: episode object (series, episode, link, media_id)
        :param quality: key into the module-level ``qualities`` map
        :param video: rip the video stream when true
        :param subs: rip and convert subtitles when true
        """
        exception = self.master.modules["commands"].exception
        if not self.logged_in:
            raise exception(u"Not logged in to CrunchyRoll")
        if quality not in qualities:
            raise exception(u"Invalid quality, must be one of: {}".format(u", ".join(qualities.keys())))
        # NOTE(review): replace(u":", u":") looks like a no-op; the second
        # argument was probably a fullwidth colon (U+FF1A) lost in transit
        # when sanitising the filename — confirm.
        filename = u"[CR] {} - {:02d} [{}p]".format(show.series, show.episode, quality).replace(u":", u":").replace(u"/", u" \u2044 ")
        format = qualities[quality]  # (shadows builtin `format`)
        url = xml_url.format(show.media_id, format[0], format[1])
        headers = Headers({
            'Content-Type': ['application/x-www-form-urlencoded'],
            'Referer': ['https://www.crunchyroll.com'],
            'User-Agent': ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:17.0) Gecko/17.0 Firefox/17.0']
        })
        data = FileBodyProducer(StringIO(urllib.urlencode({
            'current_page': show.link
        })))
        response = yield self.agent.request("POST", url, headers, data)
        xml = yield self.master.modules["utils"].returnBody(response)
        soup = BeautifulSoup(xml, from_encoding="utf8")
        # Raises AttributeError if the player tag is missing — presumably the
        # stream_info check below is expected to fire first; verify.
        player_url = soup.find('default:chromelessplayerurl').string
        stream_info = soup.find('stream_info')
        subtitles = soup.find('subtitles')
        if not stream_info:
            raise exception(u"Could not parse XML")
        stream = {}
        stream['url'] = stream_info.host.string
        stream['token'] = stream_info.token.string
        stream['file'] = stream_info.file.string
        stream['swf_url'] = swf_url+player_revision+"/"+player_url
        if subs:
            if not subtitles:
                raise exception(u"Could not find subtitles")
            # Decode Crunchyroll's encrypted subtitle payload to ASS.
            decoded = Decoder(xml)
            formatted = decoded.fancy
            with open(os.path.join(guid, filename.encode("utf8") + ".ass"), 'wb') as subfile:
                subfile.write(codecs.BOM_UTF8)
                subfile.write(formatted.encode('utf-8'))
            yield self.master.modules["ftp"].put(guid, filename + ".ass")
        if video:
            parsed_url = urlparse.urlparse(stream['url'])
            if parsed_url.netloc.endswith("fplive.net"):
                ### START NEW CDN RIP & CONVERT ###
                # New CDN exposes a direct HTTP download of the mp4.
                inner_path, _, args = parsed_url.path.partition("?")
                if not args and parsed_url.query:
                    args = parsed_url.query
                elif parsed_url.query:
                    args += "&" + parsed_url.query
                ddl_url = "http://v.lvlt.crcdn.net{}/{}?{}".format(inner_path, stream['file'][4:], args)
                response = yield self.agent.request("GET", ddl_url)
                if response.code != 200:
                    self.log(u"DDL URL: {}".format(ddl_url))
                    self.log(u"RESPONSE CODE: {:d}".format(response.code))
                    raise exception(u"Failed to download FLV")
                # Stream the response body straight to disk via the ftp
                # module's protocol, then remux to MKV.
                done = Deferred()
                downloader = self.master.modules["ftp"].downloader(os.path.join(guid, filename.encode("utf8") + '.mp4'), done)
                response.deliverBody(downloader)
                yield done
                mkvmergeargs = ["-o", os.path.join(guid, filename.encode("utf8") + ".mkv"),
                                os.path.join(guid, filename.encode("utf8") + ".mp4")]
                out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("mkvmerge"), args=mkvmergeargs, env=os.environ)
                # mkvmerge exit code 2 = hard error (1 is warnings only).
                if code == 2:
                    raise exception(u"Failed to mux MKV")
                ### END NEW CDN RIP & CONVERT ###
            else:
                ### START OLD CDN RIP & CONVERT ###
                # Old CDN requires rtmpdump; retry while it reports an
                # incomplete transfer (exit code 2 supports resuming via -e).
                rtmpargs = ["-e", "-r", stream['url'], "-y", stream['file'],
                            "-W", stream['swf_url'], "-T", stream['token'],
                            "-o", os.path.join(guid, filename.encode("utf8") + '.flv')]
                retries = 15
                out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("rtmpdump"), args=rtmpargs, env=os.environ)
                while code == 2 and retries:
                    retries -= 1
                    out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("rtmpdump"), args=rtmpargs, env=os.environ)
                if code != 0:
                    self.log(u"RTMPDUMP CMDLINE:\nrtmpdump " + u" ".join(rtmpargs))
                    self.log(u"RTMPDUMP STDOUT:\n" + out)
                    self.log(u"RTMPDUMP STDERR:\n" + err)
                    raise exception(u"Failed to download FLV")
                try:
                    # Split the FLV into raw .264/.aac/.txt streams for muxing.
                    self.master.modules["flv"].FLVFile(os.path.join(guid, filename.encode("utf8") + ".flv")).ExtractStreams(True, True, True, True)
                except:
                    self.err(u"FLVFile failed to extract streams")
                    raise exception(u"Failed to extract streams from FLV")
                mkvmergeargs = ["-o", os.path.join(guid, filename.encode("utf8") + ".mkv"),
                                "--forced-track", "0:yes", "--compression", "0:none",
                                "--timecodes", "0:" + os.path.join(guid, filename.encode("utf8") + ".txt"),
                                "-d", "0", "-A", "-S", os.path.join(guid, filename.encode("utf8") + ".264"),
                                "--forced-track", "0:yes", "-a", "0", "-D", "-S",
                                os.path.join(guid, filename.encode("utf8") + ".aac")]
                out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("mkvmerge"), args=mkvmergeargs, env=os.environ)
                if code == 2:
                    raise exception(u"Failed to mux MKV")
                ### END OLD CDN RIP & CONVERT ###
            yield self.master.modules["ftp"].put(guid, filename + ".mkv")
            yield self.master.modules["ftp"].upload()
class Module(object):
    """Crunchyroll bot module (treq variant): logs in, caches the simulcast
    show list from the site's RSS feeds, and rips episodes (video via
    RTMP/HTTP, subtitles via the player XML) for upload over FTP."""

    def __init__(self, master):
        self.master = master
        self.config = master.modules["config"].interface("crunchy")
        # Dedicated cookie jar so the Crunchyroll session is isolated.
        self.agent = CookieAgent(master.agent, cookielib.CookieJar())
        self.shows = {}            # {series name: {episode key: episode dict}}
        self.cache_loop = None     # LoopingCall driving cache(), see start()
        self.logged_in = False
        self.start()

    @inlineCallbacks
    def start(self):
        """Log in, then refresh the show cache every 15 minutes."""
        yield self.login()
        self.cache_loop = LoopingCall(self.cache)
        self.cache_loop.start(900)

    def stop(self):
        """Stop the periodic cache refresh, if it is running."""
        if self.cache_loop is not None and self.cache_loop.running:
            self.cache_loop.stop()
        self.cache_loop = None

    @inlineCallbacks
    def login(self):
        """Authenticate against Crunchyroll's form handler with the
        configured credentials and mark the module as logged in."""
        self.logged_in = False
        user = yield self.config.get("user")
        passwd = yield self.config.get("pass")
        url = 'https://www.crunchyroll.com/?a=formhandler'
        headers = Headers({
            'Content-Type': ['application/x-www-form-urlencoded'],
            'Referer': ['https://www.crunchyroll.com'],
            'User-Agent': ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:17.0) Gecko/17.0 Firefox/17.0']
        })
        data = FileBodyProducer(StringIO(urllib.urlencode({
            'formname': 'RpcApiUser_Login',
            'next_url': '',
            'fail_url': '/login',
            'name': user,
            'password': passwd
        })))
        # NOTE(review): the response is never checked, so a failed login
        # still sets logged_in = True — confirm whether that is intended.
        response = yield self.agent.request("POST", url, headers, data)
        self.logged_in = True

    @inlineCallbacks
    def cache(self):
        """Scrape the simulcast listing pages and each show's RSS feed,
        rebuilding self.shows and logging new/updated episodes."""
        if not self.logged_in:
            return
        body = ""
        for PAGE in range(3):  # Pages 0, 1, 2 - Should be plenty
            response = yield self.agent.request("GET", "http://www.crunchyroll.com/videos/anime/simulcasts/ajax_page?pg={:d}".format(PAGE))
            body += yield self.master.modules["utils"].returnBody(response)
        soup = BeautifulSoup(body, from_encoding="utf8")
        epoch = datetime(1970, 1, 1)
        shows = {}
        deferreds = []  # NOTE(review): unused
        for element in soup("a", token="shows-portraits"):
            key = element["href"].lstrip("/")
            name = element["title"]
            shows[name] = {}
            response = yield self.agent.request("GET", "http://www.crunchyroll.com/{}.rss".format(key))
            body = yield self.master.modules["utils"].returnBody(response)
            xml = BeautifulSoup(body, "xml", from_encoding="utf8")
            for item in xml("item"):
                airtime = item.find("premiumPubDate")
                episode = item.find("episodeNumber")
                title = item.find("episodeTitle")
                duration = item.find("duration")
                link = item.find("link")
                media_id = item.find("mediaId")
                if not airtime or not episode or not link or not media_id:
                    continue
                # Seconds since epoch; relies on platform strptime accepting
                # the feed's %Z zone name — TODO confirm.
                airtime = int((datetime.strptime(airtime.string, "%a, %d %b %Y %H:%M:%S %Z") - epoch).total_seconds())
                try:
                    e = int(episode.string)
                    key = "{:02d}".format(e)
                except:
                    # Non-numeric episode labels (e.g. specials) keep the raw
                    # string as the key and sort as episode 0.
                    key = episode.string
                    e = 0
                episode = e
                title = title.string if title and title.string else u""
                duration = int(duration.string) if duration else 0
                link = link.string
                media_id = media_id.string
                # Python 2 integer division: seconds -> "M:SS".
                duration = "{:d}:{:02d}".format(duration / 60, duration % 60)
                if not link or not media_id:
                    continue
                shows[name][key] = {
                    "series": name,
                    "episode": episode,
                    "title": title,
                    "duration": duration,
                    "airtime": airtime,
                    "link": link,
                    "media_id": media_id
                }
        # Diff against the previous cache purely for logging purposes.
        for series, episodes in shows.items():
            if series not in self.shows:
                self.log(u"Found {} (Series) [{:,d} episodes]", series, len(shows[series]))
                continue
            for episode, show in episodes.items():
                if episode not in self.shows[series]:
                    self.log(u"Found {} #{}", series, episode)
                elif self.shows[series][episode] != show:
                    self.log(u"Updated {} #{}", series, episode)
        self.shows = shows

    def resolve(self, name):
        """Resolve a (case-insensitive, possibly partial) show name to a
        ShowObject; raises the bot's command exception when ambiguous or
        unknown."""
        exception = self.master.modules["commands"].exception
        matches = []
        if not name:
            raise exception(u"Show name not specified.")
        name = name.lower()
        for s in self.shows.keys():
            if s.lower() == name:
                return self.nameToObject(s)
            if s.lower().count(name):
                matches.append(s)
        if len(matches) > 1:
            if len(matches) > 5:
                extra = "and {:d} more.".format(len(matches) - 5)
                matches = matches[:5] + [extra]
            raise exception(u"Show name not specific, found: {}".format(u", ".join(matches)))
        elif not matches:
            raise exception(u"Show name not found.")
        return self.nameToObject(matches[0])

    def nameToObject(self, name):
        """Wrap a cached show's episode dicts into a ShowObject, or return
        None when the show is not cached."""
        if name not in self.shows:
            return None
        data = self.shows[name]
        episodes = {}
        for k, v in data.items():
            episodes[k] = EpisodeObject(**v)
        return ShowObject(name, episodes)

    @inlineCallbacks
    def rip(self, guid, show, quality, video, subs):
        """Download an episode's video and/or subtitles into ``guid`` and
        upload the results over FTP.

        :param guid: working directory for this job
        :param show: episode object (series, episode, link, media_id)
        :param quality: key into the module-level ``qualities`` map
        :param video: rip the video stream when true
        :param subs: rip and convert subtitles when true
        """
        exception = self.master.modules["commands"].exception
        if not self.logged_in:
            raise exception(u"Not logged in to CrunchyRoll")
        if quality not in qualities:
            raise exception(u"Invalid quality, must be one of: {}".format(u", ".join(qualities.keys())))
        # NOTE(review): replace(u":", u":") looks like a no-op; the second
        # argument was probably a fullwidth colon (U+FF1A) lost in transit
        # when sanitising the filename — confirm.
        filename = u"[CR] {} - {:02d} [{}p]".format(show.series, show.episode, quality).replace(u":", u":").replace(u"/", u" \u2044 ")
        format = qualities[quality]  # (shadows builtin `format`)
        url = xml_url.format(show.media_id, format[0], format[1])
        headers = Headers({
            'Content-Type': ['application/x-www-form-urlencoded'],
            'Referer': ['https://www.crunchyroll.com'],
            'User-Agent': ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:17.0) Gecko/17.0 Firefox/17.0']
        })
        data = FileBodyProducer(StringIO(urllib.urlencode({
            'current_page': show.link
        })))
        response = yield self.agent.request("POST", url, headers, data)
        xml = yield self.master.modules["utils"].returnBody(response)
        soup = BeautifulSoup(xml, from_encoding="utf8")
        # Raises AttributeError if the player tag is missing — presumably the
        # stream_info check below is expected to fire first; verify.
        player_url = soup.find('default:chromelessplayerurl').string
        stream_info = soup.find('stream_info')
        subtitles = soup.find('subtitles')
        if not stream_info:
            raise exception(u"Could not parse XML")
        stream = {}
        stream['url'] = stream_info.host.string
        stream['token'] = stream_info.token.string
        stream['file'] = stream_info.file.string
        stream['swf_url'] = swf_url+player_revision+"/"+player_url
        if subs:
            if not subtitles:
                raise exception(u"Could not find subtitles")
            # Decode Crunchyroll's encrypted subtitle payload to ASS.
            decoded = Decoder(xml)
            formatted = decoded.fancy
            with open(os.path.join(guid, filename.encode("utf8") + ".ass"), 'wb') as subfile:
                subfile.write(codecs.BOM_UTF8)
                subfile.write(formatted.encode('utf-8'))
            yield self.master.modules["ftp"].put(guid, filename + ".ass")
        if video:
            parsed_url = urlparse.urlparse(stream['url'])
            if parsed_url.netloc.endswith("fplive.net"):
                ### START NEW CDN RIP & CONVERT ###
                # New CDN exposes a direct HTTP download of the mp4.
                inner_path, _, args = parsed_url.path.partition("?")
                if not args and parsed_url.query:
                    args = parsed_url.query
                elif parsed_url.query:
                    args += "&" + parsed_url.query
                ddl_url = "http://v.lvlt.crcdn.net{}/{}?{}".format(inner_path, stream['file'][4:], args)
                response = yield self.agent.request("GET", ddl_url)
                if response.code != 200:
                    self.log(u"DDL URL: {}".format(ddl_url))
                    self.log(u"RESPONSE CODE: {:d}".format(response.code))
                    raise exception(u"Failed to download FLV")
                # Stream the response body straight to disk with treq, then
                # remux to MKV.
                try:
                    with open(os.path.join(guid, filename.encode("utf8") + '.mp4'), "wb") as f:
                        yield treq.collect(response, f.write)
                except Exception as e:
                    self.err(u"Failed to download FLV")
                    raise exception(u"Failed to download FLV")
                mkvmergeargs = ["-o", os.path.join(guid, filename.encode("utf8") + ".mkv"),
                                os.path.join(guid, filename.encode("utf8") + ".mp4")]
                out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("mkvmerge"), args=mkvmergeargs, env=os.environ)
                # mkvmerge exit code 2 = hard error (1 is warnings only).
                if code == 2:
                    raise exception(u"Failed to mux MKV")
                ### END NEW CDN RIP & CONVERT ###
            else:
                ### START OLD CDN RIP & CONVERT ###
                # Old CDN requires rtmpdump; retry while it reports an
                # incomplete transfer (exit code 2 supports resuming via -e).
                rtmpargs = ["-e", "-r", stream['url'], "-y", stream['file'],
                            "-W", stream['swf_url'], "-T", stream['token'],
                            "-o", os.path.join(guid, filename.encode("utf8") + '.flv')]
                retries = 15
                out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("rtmpdump"), args=rtmpargs, env=os.environ)
                while code == 2 and retries:
                    retries -= 1
                    out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("rtmpdump"), args=rtmpargs, env=os.environ)
                if code != 0:
                    self.log(u"RTMPDUMP CMDLINE:\nrtmpdump " + u" ".join(rtmpargs))
                    self.log(u"RTMPDUMP STDOUT:\n" + out)
                    self.log(u"RTMPDUMP STDERR:\n" + err)
                    raise exception(u"Failed to download FLV")
                try:
                    # Split the FLV into raw .264/.aac/.txt streams for muxing.
                    self.master.modules["flv"].FLVFile(os.path.join(guid, filename.encode("utf8") + ".flv")).ExtractStreams(True, True, True, True)
                except:
                    self.err(u"FLVFile failed to extract streams")
                    raise exception(u"Failed to extract streams from FLV")
                mkvmergeargs = ["-o", os.path.join(guid, filename.encode("utf8") + ".mkv"),
                                "--forced-track", "0:yes", "--compression", "0:none",
                                "--timecodes", "0:" + os.path.join(guid, filename.encode("utf8") + ".txt"),
                                "-d", "0", "-A", "-S", os.path.join(guid, filename.encode("utf8") + ".264"),
                                "--forced-track", "0:yes", "-a", "0", "-D", "-S",
                                os.path.join(guid, filename.encode("utf8") + ".aac")]
                out, err, code = yield getProcessOutputAndValue(self.master.modules["utils"].getPath("mkvmerge"), args=mkvmergeargs, env=os.environ)
                if code == 2:
                    raise exception(u"Failed to mux MKV")
                ### END OLD CDN RIP & CONVERT ###
            yield self.master.modules["ftp"].put(guid, filename + ".mkv")
            yield self.master.modules["ftp"].upload()
class MetricServiceRequest(object):
    """
    A convience class for fetching metrics from CentralQuery that can be used
    by twisted daemons.
    """

    # use a shared cookie jar so all Metric requests can share the same session
    cookieJar = CookieJar()

    def __init__(self, userAgent):
        self._aggMapping = AGGREGATION_MAPPING
        urlstart = getGlobalConfiguration().get('metric-url', 'http://localhost:8080')
        self._metric_url = '%s/%s' % (urlstart, METRIC_URL_PATH)
        self._metric_url_v2 = '%s/%s' % (urlstart, WILDCARD_URL_PATH)
        # Basic-auth header built from the global-conf credentials.
        creds = IAuthorizationTool(None).extractGlobalConfCredentials()
        auth = base64.b64encode('{login}:{password}'.format(**creds))
        self.agent = CookieAgent(
            Agent(reactor, pool=getPool(), connectTimeout=30), self.cookieJar)
        self._headers = Headers({
            'Authorization': ['basic %s' % auth],
            'content-type': ['application/json'],
            'User-Agent': ['Zenoss: %s' % userAgent]
        })
        self.onMetricsFetched = None

    def getMetrics(self, uuid, dpNames, cf='AVERAGE', rate=False, downsample="1h-avg", start=None, end=None, deviceId=None, returnSet="EXACT"):
        """POST a CentralQuery (v1) request for the given datapoints.

        :param uuid: context UUID used to tag each metric
        :param dpNames: a datapoint name or list of names
        :return: Deferred firing with the twisted response object
        """
        metrics = []
        if isinstance(dpNames, basestring):
            dpNames = [dpNames]
        for dpName in dpNames:
            # TODO find callers
            name = ensure_prefix(deviceId, dpName)
            metrics.append(
                dict(metric=name,
                     aggregator=self._aggMapping.get(cf.lower(), cf.lower()),
                     rpn='',
                     rate=rate,
                     format='%.2lf',
                     tags=dict(contextUUID=[uuid]),
                     name='%s_%s' % (uuid, dpName)))
        request = dict(returnset=returnSet,
                       start=start,
                       end=end,
                       downsample=downsample,
                       metrics=metrics)
        body = FileBodyProducer(StringIO(json.dumps(request)))
        d = self.agent.request('POST', self._metric_url, self._headers, body)
        return d

    def fetchMetrics(self, metrics, start="1h-ago", end=None, returnSet="EXACT"):
        """
        Uses the CentralQuery V2 api to fetch metrics. Mainly that means wild
        cards can be used to fetch all metrics with the same name grouped by a
        tag. Usually used to retrieve a specific metric for all component on a
        device

        :param metrics: dictionary with required keys of metricName, tags and
        optional rpn defaults to empty, cf defatults to average, rate defaults
        to false, downsample defaults to 5m-avg
        :param start:
        :param end:
        :param returnSet:
        :return: deferred
        """
        metricQueries = []
        # BUG FIX: `downsample` used to be bound only inside the loop, so an
        # empty `metrics` list raised NameError when building the request.
        # For non-empty input the request-level value still mirrors the last
        # metric's downsample, exactly as before.
        downsample = '5m-avg'
        for metric in metrics:
            log.info("fetchMetrics metrics %s", metric)
            cf = metric.get('cf', 'average')
            rpn = metric.get('rpn', '')
            rate = metric.get('rate', False)
            tags = metric['tags']
            downsample = metric.get('downsample', '5m-avg')
            metricName = metric['metricName']
            metricQueries.append(
                dict(metric=metricName,
                     downsample=downsample,
                     aggregator=self._aggMapping.get(cf.lower(), cf.lower()),
                     rpn=rpn,
                     rate=rate,
                     format='%.2lf',
                     tags=tags,
                     name=metricName))
        request = dict(returnset=returnSet,
                       start=start,
                       end=end,
                       downsample=downsample,
                       queries=metricQueries)
        body = FileBodyProducer(StringIO(json.dumps(request)))
        log.info("POST %s %s %s", self._metric_url_v2, self._headers,
                 json.dumps(request))
        d = self.agent.request('POST', self._metric_url_v2, self._headers, body)
        return d
class BayeuxMessageSender(object):
    """Responsible for sending messages to the bayeux server from the client.

    Attributes:
        agent: The twisted agent to use to send the data
        client_id: The client id to use when sending messages
        msg_id: A message id counter
        server: The bayeux server to send messages to
        receiver: The message receiver
    """

    def __init__(self, server, receiver, extra_headers=None):
        """Initialize the message sender.

        Args:
            server: The bayeux server to send messages to
            receiver: The message receiver to pass the responses to
            extra_headers: Optional dict of extra HTTP headers to send
                with every request
        """
        self.cookie_jar = CookieJar()
        self.agent = CookieAgent(
            Agent(reactor, pool=HTTPConnectionPool(reactor)),
            self.cookie_jar)
        self.extra_headers = extra_headers
        self.client_id = -1  # Will be set upon receipt of the handshake response
        self.msg_id = 0
        self.server = server
        self.receiver = receiver

    def connect(self, errback=None):
        """Sends a connect request message to the server

        Args:
            errback: Optional callback issued if there is an error during
                sending.
        """
        message = '{{"channel":"{0}","clientId":"{1}","id":"{2}",\
"connectionType":"long-polling"}}'.format(
            bayeux_constants.CONNECT_CHANNEL,
            self.client_id,
            self.get_next_id())
        logging.debug('connect: %s' % message)
        self.send_message(message, errback)

    def disconnect(self, errback=None):
        """Sends a disconnect request message to the server.

        Args:
            errback: Optional callback issued if there is an error during
                sending.
        """
        message = '{{"channel":"{0}","clientId":"{1}","id":"{2}"}}'.format(
            bayeux_constants.DISCONNECT_CHANNEL,
            self.client_id,
            self.get_next_id())
        logging.debug('disconnect: %s' % message)
        self.send_message(message, errback)

    def get_next_id(self):
        """Increments and returns the next msg id to use.

        Returns:
            The next message id to use
        """
        self.msg_id += 1
        return self.msg_id

    def handshake(self, errback=None):
        """Sends a handshake request to the server.

        Args:
            errback: Optional callback issued if there is an error during
                sending.
        """
        message = '''[{{"channel":"{0}","id":"{1}",
            "supportedConnectionTypes":["long-polling"],
            "version":"1.0","minimumVersion":"1.0"}}]'''.format(
            bayeux_constants.HANDSHAKE_CHANNEL,
            self.get_next_id())
        logging.debug('handshake: %s' % message)
        self.send_message(message, errback)

    def send_message(self, message, errback=None):
        """Helper method to send a message.

        Args:
            message: The message to send
            errback: Optional callback issued if there is an error during
                sending.
        """
        def do_send():
            headers = {
                'Content-Type': ['application/json']
            }
            if self.extra_headers:
                headers.update(self.extra_headers)
            logging.debug("Request Headers: %s", headers)
            d = self.agent.request('POST',
                                   self.server,
                                   Headers(headers),
                                   BayeuxProducer(str(message)))

            def cb(response):
                response.deliverBody(self.receiver)
                return d

            def error(reason):
                logging.error('Error sending msg: %s' % reason)
                logging.error(reason.getErrorMessage())
                #logging.debug(reason.value.reasons[0].printTraceback())
                if errback is not None:
                    errback(reason)

            d.addCallback(cb)
            d.addErrback(error)

        # Make sure that our send happens on the reactor thread
        reactor.callFromThread(do_send)

    def set_client_id(self, client_id):
        """Sets the client id to use for request messages that are sent.

        The client id is embedded in all request messages sent by the client
        to the server. This must be set prior to sending any request other
        than a handshake request. The client id is returned by the server in
        response to a handshake request.

        Args:
            client_id: The client id to use when sending requests to the
                server
        """
        self.client_id = client_id

    def subscribe(self, subscription, errback=None):
        """Sends a subscribe request to the server.

        Args:
            subscription: The subscription path (e.g. '/foo/bar')
            errback: Optional callback issued if there is an error during
                sending
        """
        message = '{{"channel":"{0}","clientId":"{1}","id":"{2}",\
"subscription":"{3}"}}'.format(
            bayeux_constants.SUBSCRIBE_CHANNEL,
            self.client_id,
            self.get_next_id(),
            subscription)
        logging.debug('subscribe: %s' % message)
        self.send_message(message, errback)

    def unsubscribe(self, subscription, errback=None):
        """Sends an unsubscribe request to the server.

        Args:
            subscription: The subscription path (e.g. '/foo/bar')
            errback: Optional callback issued if there is an error during
                sending
        """
        # BUG FIX: this method previously referenced self.clientId
        # (AttributeError: the attribute is client_id) and the undefined name
        # `subscriptions` (NameError: the parameter is `subscription`), and
        # emitted a "subscriptions" JSON field where the Bayeux
        # /meta/unsubscribe message uses "subscription" (matching subscribe()
        # above).
        message = '{{"channel":"{0}","clientId":"{1}","id":"{2}",\
"subscription":"{3}"}}'.format(
            bayeux_constants.UNSUBSCRIBE_CHANNEL,
            self.client_id,
            self.get_next_id(),
            subscription)
        logging.debug('unsubscribe: %s' % message)
        self.send_message(message, errback)
class BayeuxMessageSender(object):
    """Responsible for sending messages to the bayeux server from the client.

    Attributes:
        agent: The twisted agent to use to send the data
        client_id: The client id to use when sending messages
        msg_id: A message id counter
        server: The bayeux server to send messages to
        receiver: The message receiver
        oauth_header: if authorization is required
    """

    def __init__(self, server, receiver, oauth_header=None):
        """Initialize the message sender.

        Args:
            server: The bayeux server to send messages to
            receiver: The message receiver to pass the responses to
            oauth_header: Optional Authorization header value
        """
        self.cookie_jar = CookieJar()
        self.agent = CookieAgent(
            Agent(reactor, pool=HTTPConnectionPool(reactor)),
            self.cookie_jar)
        self.client_id = -1  # Will be set upon receipt of the handshake response
        self.msg_id = 0
        self.server = server
        self.receiver = receiver
        self.oauth_header = oauth_header

    def connect(self, errback=None):
        """Sends a connect request message to the server

        Args:
            errback: Optional callback issued if there is an error during
                sending.
        """
        message = 'message={{"channel":"{0}","clientId":"{1}","id":"{2}",\
"connectionType":"long-polling"}}'.format(
            bayeux_constants.CONNECT_CHANNEL,
            self.client_id,
            self.get_next_id())
        logging.debug('connect: %s' % message)
        self.send_message(message, errback)

    def disconnect(self, errback=None):
        """Sends a disconnect request message to the server.

        Args:
            errback: Optional callback issued if there is an error during
                sending.
        """
        message = 'message={{"channel":"{0}","clientId":"{1}","id":"{2}"}}'.format(
            bayeux_constants.DISCONNECT_CHANNEL,
            self.client_id,
            self.get_next_id())
        logging.debug('disconnect: %s' % message)
        self.send_message(message, errback)

    def get_next_id(self):
        """Increments and returns the next msg id to use.

        Returns:
            The next message id to use
        """
        self.msg_id += 1
        return self.msg_id

    def handshake(self, errback=None):
        """Sends a handshake request to the server.

        Args:
            errback: Optional callback issued if there is an error during
                sending.
        """
        message = '''message={{"channel":"{0}","id":"{1}",
            "supportedConnectionTypes":["callback-polling", "long-polling"],
            "version":"1.0","minimumVersion":"1.0"}}'''.format(
            bayeux_constants.HANDSHAKE_CHANNEL,
            self.get_next_id())
        logging.debug('handshake: %s' % message)
        self.send_message(message, errback)

    def send_message(self, message, errback=None):
        """Helper method to send a message.

        Args:
            message: The message to send
            errback: Optional callback issued if there is an error during
                sending.
        """
        def do_send():
            # NOTE(review): self.server[8:] assumes the server URL starts
            # with an 8-character scheme prefix ("https://"); an http:// URL
            # would produce a wrong Host header — confirm with callers.
            headers_dict = {
                'Content-Type': ['application/x-www-form-urlencoded'],
                'Host': [self.server[8:]]
            }
            if not self.oauth_header is None:
                headers_dict['Authorization'] = [self.oauth_header]
            logging.debug("headers dictionary: %s", headers_dict)
            logging.debug("message: %s", str(message))
            headers = Headers(headers_dict)
            logging.debug("headers object:")
            for header in headers.getAllRawHeaders():
                logging.debug("> %s", header)
            d = self.agent.request('POST',
                                   self.server,
                                   headers,
                                   BayeuxProducer(str(message)))
            logging.debug("send_message.do_send(): d object:\n%s", str(d))

            def cb(response):
                logging.debug(
                    "send_message.do_send.cb(): response version: %s",
                    response.version)
                logging.debug("send_message.do_send.cb(): response code: %s",
                              response.code)
                logging.debug("send_message.do_send.cb(): response phrase: %s",
                              response.phrase)
                logging.debug(
                    "send_message.do_send.cb(): response headers:\n%s",
                    pprint.pformat(list(response.headers.getAllRawHeaders())))
                response.deliverBody(self.receiver)
                return d

            def error(reason):
                logging.error('Error sending msg: %s' % reason)
                logging.error(reason.getErrorMessage())
                logging.debug(reason.value.reasons[0].printTraceback())
                if errback is not None:
                    errback(reason)

            d.addCallback(cb)
            d.addErrback(error)

        # Make sure that our send happens on the reactor thread
        reactor.callFromThread(do_send)

    def set_client_id(self, client_id):
        """Sets the client id to use for request messages that are sent.

        The client id is embedded in all request messages sent by the client
        to the server. This must be set prior to sending any request other
        than a handshake request. The client id is returned by the server in
        response to a handshake request.

        Args:
            client_id: The client id to use when sending requests to the
                server
        """
        self.client_id = client_id

    def subscribe(self, subscription, errback=None):
        """Sends a subscribe request to the server.

        Args:
            subscription: The subscription path (e.g. '/foo/bar')
            errback: Optional callback issued if there is an error during
                sending
        """
        message = 'message={{"channel":"{0}","clientId":"{1}","id":"{2}",\
"subscription":"{3}"}}'.format(bayeux_constants.SUBSCRIBE_CHANNEL,
                               self.client_id,
                               self.get_next_id(),
                               subscription)
        logging.debug('subscribe: %s' % message)
        self.send_message(message, errback)

    def unsubscribe(self, subscription, errback=None):
        """Sends an unsubscribe request to the server.

        Args:
            subscription: The subscription path (e.g. '/foo/bar')
            errback: Optional callback issued if there is an error during
                sending
        """
        # BUG FIX: this method previously referenced self.clientId
        # (AttributeError: the attribute is client_id) and emitted a
        # "subscriptions" JSON field where the Bayeux /meta/unsubscribe
        # message uses "subscription" (matching subscribe() above).
        message = 'message={{"channel":"{0}","clientId":"{1}","id":"{2}",\
"subscription":"{3}"}}'.format(
            bayeux_constants.UNSUBSCRIBE_CHANNEL,
            self.client_id,
            self.get_next_id(),
            subscription)
        logging.debug('unsubscribe: %s' % message)
        self.send_message(message, errback)
def request(self, method, url, **kwargs):
    """
    See :func:`treq.request()`.

    Builds and issues an HTTP request through a stack of wrapping agents
    (cookies, redirects, content decoding, optional auth), choosing a body
    producer from the ``files`` / ``data`` / ``json`` keyword arguments.
    Returns a deferred that fires with a wrapped response object.
    """
    # HTTP methods are ASCII bytes on the wire; normalize case here.
    method = method.encode('ascii').upper()

    if isinstance(url, unicode):
        parsed_url = URL.from_text(url)
    else:
        parsed_url = URL.from_text(url.decode('ascii'))

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params)))

    url = parsed_url.to_uri().to_text().encode('ascii')

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.items():
                # Single string values become one raw header; lists are
                # taken as the full set of raw values for that name.
                if isinstance(v, (bytes, unicode)):
                    h.addRawHeader(k, v)
                elif isinstance(v, list):
                    h.setRawHeaders(k, v)
            headers = h
        # NOTE(review): a non-dict truthy `headers` (e.g. an existing
        # Headers instance) is passed through unchanged — presumably
        # intentional; confirm against treq's documented contract.
    else:
        headers = Headers({})

    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    # since json=None needs to be serialized as 'null', we need to
    # explicitly check kwargs for this key
    has_json = 'json' in kwargs

    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        # Random boundary; the chance of it colliding with payload bytes
        # is negligible.
        boundary = str(uuid.uuid4()).encode('ascii')
        headers.setRawHeaders(
            b'content-type', [
                b'multipart/form-data; boundary=' + boundary])
        if data:
            data = _convert_params(data)
        else:
            data = []

        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                b'content-type', [b'application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = self._data_to_body_producer(data)
    elif has_json:
        # If data is sent as json, set Content-Type as 'application/json'
        headers.setRawHeaders(
            b'content-type', [b'application/json; charset=UTF-8'])
        content = kwargs['json']
        # Compact separators keep the serialized payload minimal.
        json = json_dumps(content, separators=(u',', u':')).encode('utf-8')
        bodyProducer = self._data_to_body_producer(json)

    cookies = kwargs.get('cookies', {})

    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)

    # Merge request-scoped cookies into the session jar so they persist.
    cookies = merge_cookies(self._cookiejar, cookies)
    wrapped_agent = CookieAgent(self._agent, cookies)

    if kwargs.get('allow_redirects', True):
        if kwargs.get('browser_like_redirects', False):
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)

    # Transparently decompress gzip-encoded response bodies.
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])

    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        # Cancel the request if it has not completed within `timeout`
        # seconds; the delayed call itself is cancelled on completion.
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
class HttpPostPublisher(BasePublisher):
    """
    Publish metrics via HTTP POST
    """

    def __init__(self, username, password,
                 url='https://localhost:8443/api/metrics/store',
                 buflen=defaultMetricBufferSize,
                 pubfreq=defaultPublishFrequency):
        super(HttpPostPublisher, self).__init__(buflen, pubfreq)
        self._username = username
        self._password = password
        self._needsAuth = False
        self._authenticated = False
        if self._username:
            self._needsAuth = True
        self._cookieJar = CookieJar()
        self._agent = CookieAgent(Agent(reactor), self._cookieJar)
        self._url = url
        # BUG FIX: the previous sys.argv[0].rstrip(".py") stripped the
        # *characters* '.', 'p' and 'y' from the right end (e.g. "happy.py"
        # -> "ha"), not the ".py" suffix. Strip the suffix explicitly.
        if sys.argv[0]:
            script = os.path.basename(sys.argv[0])
            if script.endswith(".py"):
                script = script[:-3]
            self._agent_suffix = script
        else:
            self._agent_suffix = "python"
        reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)

    def _metrics_published(self, response, llen, remaining=0):
        """
        Callback for a POST: raise on non-200 (clearing the session on 401 so
        the next request re-authenticates), then drain the response body.
        """
        if response.code != 200:
            if response.code == UNAUTHORIZED:
                self._authenticated = False
                self._cookieJar.clear()
            raise IOError("Expected HTTP 200, but received %d from %s" %
                          (response.code, self._url))
        if self._needsAuth:
            self._authenticated = True
        log.debug("published %d metrics and received response: %s",
                  llen, response.code)
        finished = defer.Deferred()
        response.deliverBody(ResponseReceiver(finished))
        if remaining:
            # More metrics are queued; schedule another unscheduled push.
            reactor.callLater(0, self._put, False)
        return finished

    def _response_finished(self, result):
        # The most likely result is the HTTP response from a successful POST,
        # which should be JSON formatted.
        if isinstance(result, str):
            log.debug("response was: %s", json.loads(result))
        # We could be called back because _publish_failed was called before us
        elif isinstance(result, int):
            log.info("queue still contains %d metrics", result)
        # Or something strange could have happend
        else:
            log.warn("Unexpected result: %s", result)

    def _shutdown(self):
        """Flush any queued metrics once before the reactor stops."""
        log.debug('shutting down [publishing]')
        if len(self._mq):
            self._make_request()

    def _make_request(self):
        """
        Pop up to HTTP_BATCH metrics off the queue and POST them as one JSON
        payload. Returns a deferred for the request, or an already-fired
        deferred when the queue is empty.
        """
        metrics = []
        for x in xrange(HTTP_BATCH):
            if not self._mq:
                break
            metrics.append(self._mq.popleft())
        if not metrics:
            return defer.succeed(None)

        serialized_metrics = json.dumps({"metrics": metrics})
        body_writer = StringProducer(serialized_metrics)

        headers = Headers({
            'User-Agent': ['Zenoss Metric Publisher: %s' % self._agent_suffix],
            'Content-Type': ['application/json']})

        if self._needsAuth and not self._authenticated:
            log.info("Adding auth for metric http post %s", self._url)
            headers.addRawHeader('Authorization',
                                 basic_auth_string_content(self._username,
                                                           self._password))

        d = self._agent.request('POST', self._url, headers, body_writer)

        d.addCallbacks(self._metrics_published, errback=self._publish_failed,
                       callbackArgs=[len(metrics), len(self._mq)],
                       errbackArgs=[metrics])
        d.addCallbacks(self._response_finished, errback=self._publish_failed,
                       errbackArgs=[metrics])
        return d

    def _put(self, scheduled):
        """
        Push the buffer of metrics to the specified Redis channel
        @param scheduled: scheduled invocation?
        """
        if scheduled:
            self._reschedule_pubtask(scheduled)

        if len(self._mq) == 0:
            return defer.succeed(0)

        log.debug('trying to publish %d metrics', len(self._mq))
        return self._make_request()
class CouchDB(object):
    """
    CouchDB client: hold methods for accessing a couchDB.
    """

    def __init__(self, host, port=5984, dbName=None,
                 username=None, password=None,
                 protocol='http', disable_log=False,
                 version=(1, 0, 1), cache=None):
        """
        Initialize the client for given host.

        @param host: address of the server.
        @type host: C{str}

        @param port: if specified, the port of the server.
        @type port: C{int}

        @param dbName: if specified, all calls needing a database name will
        use this one by default. Note that only lowercase characters (a-z),
        digits (0-9), or any of the characters _, $, (, ), +, -, and / are
        allowed.
        @type dbName: C{str}

        @param username: the username
        @type username: C{unicode}

        @param password: the password
        @type password: C{unicode}
        """
        if disable_log:
            # since this is the db layer, and we generate a lot of logs,
            # let people disable them completely if they want to.
            levels = ['trace', 'debug', 'info', 'warn', 'error', 'exception']

            class FakeLog(object):
                pass

            def nullfn(self, *a, **k):
                pass

            self.log = FakeLog()
            for level in levels:
                # BUG FIX: types.Methodtype does not exist (AttributeError);
                # the correct name is types.MethodType.
                self.log.__dict__[level] = types.MethodType(nullfn, self.log)
        else:
            self.log = logging.getLogger('paisley')

        from twisted.internet import reactor  # t.w.c imports reactor
        from twisted.web.client import Agent
        try:
            from twisted.web.client import CookieAgent
            self.log.debug('using twisted.web.client.CookieAgent')
        except ImportError:
            # older twisted without CookieAgent: fall back to our shim
            from paisley.tcompat import CookieAgent
            self.log.debug('using paisley.tcompat.CookieAgent')

        agent = Agent(reactor)
        self.client = CookieAgent(agent, http.cookiejar.CookieJar())
        self.host = host
        self.port = int(port)
        self.username = username
        self.password = password
        self._cache = cache

        self._authenticator = None
        self._authLC = None  # looping call to keep us authenticated

        self._session = {}

        self.url_template = "%s://%s:%s%%s" % (protocol, self.host, self.port)
        if dbName is not None:
            self.bindToDB(dbName)

        self.log.debug("[%s%s:%s/%s] init new db client",
                       '%s@' % (username, ) if username else '',
                       host, port, dbName if dbName else '')
        self.version = version

    def parseResult(self, result):
        """
        Parse JSON result from the DB.
        """
        return json.loads(result)

    def bindToDB(self, dbName):
        """
        Bind all operations asking for a DB name to the given DB.
        """
        for methname in ["createDB", "deleteDB", "infoDB", "listDoc",
                         "openDoc", "saveDoc", "deleteDoc", "openView",
                         "tempView"]:
            method = getattr(self, methname)
            newMethod = partial(method, dbName)
            setattr(self, methname, newMethod)

    # Database operations

    def createDB(self, dbName):
        """
        Creates a new database on the server.

        @type dbName: str
        """
        # Responses: {u'ok': True}, 409 Conflict, 500 Internal Server Error,
        # 401 Unauthorized
        # 400 {"error":"illegal_database_name","reason":"Only lowercase
        # characters (a-z), digits (0-9), and any of the characters _, $, (,
        # ), +, -, and / are allowed. Must begin with a letter."}
        return self.put("/%s/" % (_namequote(dbName), ), "",
                        descr='CreateDB').addCallback(self.parseResult)

    def cleanDB(self, dbName):
        """
        Clean old view indexes for the database on the server.

        @type dbName: str
        """
        # Responses: 200, 404 Object Not Found
        return self.post("/%s/_view_cleanup" % (_namequote(dbName), ), "",
                         descr='cleanDB').addCallback(self.parseResult)

    def compactDB(self, dbName):
        """
        Compacts the database on the server.

        @type dbName: str
        """
        # Responses: 202 Accepted, 404 Object Not Found
        return self.post("/%s/_compact" % (_namequote(dbName), ), "",
                         descr='compactDB').addCallback(self.parseResult)

    def compactDesignDB(self, dbName, designName):
        """
        Compacts the database on the server.

        @type dbName: str
        @type designName: str
        """
        # Responses: 202 Accepted, 404 Object Not Found
        return self.post("/%s/_compact/%s" % (_namequote(dbName), designName),
                         "",
                         descr='compactDesignDB').addCallback(self.parseResult)

    def deleteDB(self, dbName):
        """
        Deletes the database on the server.

        @type dbName: str
        """
        # Responses: {u'ok': True}, 404 Object Not Found
        return self.delete("/%s/" % (_namequote(dbName), )
                           ).addCallback(self.parseResult)

    def listDB(self):
        """
        List the databases on the server.
        """
        # Responses: list of db names
        return self.get("/_all_dbs", descr='listDB').addCallback(
            self.parseResult)

    def getVersion(self):
        """
        Returns the couchDB version.
        """
        # Responses: {u'couchdb': u'Welcome', u'version': u'1.1.0'}
        # Responses: {u'couchdb': u'Welcome', u'version': u'1.1.1a1162549'}
        d = self.get("/", descr='version').addCallback(self.parseResult)

        def cacheVersion(result):
            self.version = self._parseVersion(result['version'])
            return result
        return d.addCallback(cacheVersion)

    def _parseVersion(self, versionString):
        """
        Parse a dotted version string into a tuple of leading ints;
        a component with no leading digits becomes None.
        """
        def onlyInt(part):
            import re
            # raw string avoids relying on "\d" surviving as a literal
            intRegexp = re.compile(r"^(\d+)")
            m = intRegexp.search(part)
            if not m:
                return None
            return int(m.expand('\\1'))
        ret = tuple(onlyInt(_) for _ in versionString.split('.'))
        return ret

    def infoDB(self, dbName):
        """
        Returns info about the couchDB.
        """
        # Responses: {u'update_seq': 0, u'db_name': u'mydb', u'doc_count': 0}
        # 404 Object Not Found
        return self.get("/%s/" % (_namequote(dbName), ), descr='infoDB'
                        ).addCallback(self.parseResult)

    # Document operations

    def listDoc(self, dbName, reverse=False, startkey=None, endkey=None,
                include_docs=False, limit=-1, **obsolete):
        """
        List all documents in a given database.
        """
        # Responses: {u'rows': [{u'_rev': -1825937535, u'_id': u'mydoc'}],
        # u'view': u'_all_docs'}, 404 Object Not Found
        import warnings
        if 'count' in obsolete:
            warnings.warn("listDoc 'count' parameter has been renamed to "
                          "'limit' to reflect changing couchDB api",
                          DeprecationWarning)
            limit = obsolete.pop('count')
        if obsolete:
            raise AttributeError("Unknown attribute(s): %r" % (
                obsolete.keys(), ))
        uri = "/%s/_all_docs" % (_namequote(dbName), )
        args = {}
        if reverse:
            args["reverse"] = "true"
        if startkey:
            args["startkey"] = json.dumps(startkey)
        if endkey:
            args["endkey"] = json.dumps(endkey)
        if include_docs:
            args["include_docs"] = True
        if limit >= 0:
            args["limit"] = int(limit)
        if args:
            uri += "?%s" % (urlencode(args), )
        return self.get(uri, descr='listDoc').addCallback(self.parseResult)

    def openDoc(self, dbName, docId, revision=None, full=False,
                attachment=""):
        """
        Open a document in a given database.

        @type docId: C{unicode}

        @param revision: if specified, the revision of the document desired.
        @type revision: C{unicode}

        @param full: if specified, return the list of all the revisions of
        the document, along with the document itself.
        @type full: C{bool}

        @param attachment: if specified, return the named attachment from the
        document.
        @type attachment: C{str}
        """
        # Responses: {u'_rev': -1825937535, u'_id': u'mydoc', ...}
        # 404 Object Not Found
        docIdUri = docId.encode('utf-8')
        # on special url's like _design and _local no slash encoding is
        # needed, and doing so would hit a 301 redirect
        if not docIdUri.startswith('_'):
            docIdUri = _namequote(docIdUri)
        uri = "/%s/%s" % (_namequote(dbName), docIdUri)
        if revision is not None:
            uri += "?%s" % (urlencode({"rev": revision.encode('utf-8')}), )
        elif full:
            uri += "?%s" % (urlencode({"full": "true"}), )
        elif attachment:
            uri += "/%s" % quote(attachment)
            # No parsing
            return self.get(uri, descr='openDoc', isJson=False)
        # just the document
        if self._cache:
            try:
                return self._cache.get(docId)
            except:
                # cache miss (or any cache failure): fall through to the DB
                pass
        return self.get(uri, descr='openDoc').addCallback(
            self.parseResult).addCallback(
            self._cacheResult, docId)

    def _cacheResult(self, value, docId):
        """Store a freshly-fetched document in the cache, if any."""
        if self._cache:
            self._cache.store(docId, value)
        return value

    def addAttachments(self, document, attachments):
        """
        Add attachments to a document, before sending it to the DB.

        @param document: the document to modify.
        @type document: C{dict}

        @param attachments: the attachments to add.
        @type attachments: C{dict}
        """
        document.setdefault("_attachments", {})
        for name, data in attachments.items():
            data = b64encode(data)
            document["_attachments"][name] = {"type": "base64", "data": data}

    def saveDoc(self, dbName, body, docId=None):
        """
        Save/create a document to/in a given database.

        @param dbName: identifier of the database.
        @type dbName: C{str}

        @param body: content of the document.
        @type body: C{str} or any structured object

        @param docId: if specified, the identifier to be used in the database.
        @type docId: C{unicode}
        """
        # Responses: {'rev': '1-9dd776365618752ddfaf79d9079edf84',
        # 'ok': True, 'id': '198abfee8852816bc112992564000295'}
        # 404 Object not found (if database does not exist)
        # 409 Conflict, 500 Internal Server Error
        if not isinstance(body, str):
            body = json.dumps(body)
        if docId is not None:
            d = self.put("/%s/%s" % (_namequote(dbName),
                                     _namequote(docId.encode('utf-8'))),
                         body, descr='saveDoc')
        else:
            d = self.post("/%s/" % (_namequote(dbName), ), body,
                          descr='saveDoc')
        return d.addCallback(self.parseResult)

    def deleteDoc(self, dbName, docId, revision):
        """
        Delete a document on given database.

        @param dbName: identifier of the database.
        @type dbName: C{str}

        @param docId: the document identifier to be used in the database.
        @type docId: C{unicode}

        @param revision: the revision of the document to delete.
        @type revision: C{unicode}
        """
        # Responses: {u'_rev': 1469561101, u'ok': True}
        # 500 Internal Server Error
        return self.delete("/%s/%s?%s" % (
            _namequote(dbName),
            _namequote(docId.encode('utf-8')),
            urlencode({'rev': revision.encode('utf-8')}))).addCallback(
            self.parseResult)

    # View operations

    def openView(self, dbName, docId, viewId, **kwargs):
        """
        Open a view of a document in a given database.
        """
        # Responses:
        # 500 Internal Server Error (illegal database name)
        def buildUri(dbName=dbName, docId=docId, viewId=viewId,
                     kwargs=kwargs):
            return "/%s/_design/%s/_view/%s?%s" % (
                _namequote(dbName), _namequote(docId.encode('utf-8')),
                viewId, urlencode(kwargs))

        # if there is a "keys" argument, remove it from the kwargs
        # dictionary now so that it doesn't get double JSON-encoded
        body = None
        if "keys" in kwargs:
            body = json.dumps({"keys": kwargs.pop("keys")})

        # encode the rest of the values with JSON for use as query
        # arguments in the URI
        for k, v in kwargs.items():
            if k == 'keys':  # already popped above; kept for safety
                pass
            else:
                kwargs[k] = json.dumps(v)

        # we keep the paisley API, but couchdb uses limit now
        if 'count' in kwargs:
            kwargs['limit'] = kwargs.pop('count')

        # If there's a list of keys to send, POST the
        # query so that we can upload the keys as the body of
        # the POST request, otherwise use a GET request
        if body:
            return self.post(
                buildUri(), body=body, descr='openView').addCallback(
                self.parseResult)
        else:
            return self.get(
                buildUri(), descr='openView').addCallback(
                self.parseResult)

    def addViews(self, document, views):
        """
        Add views to a document.

        @param document: the document to modify.
        @type document: C{dict}

        @param views: the views to add.
        @type views: C{dict}
        """
        document.setdefault("views", {})
        for name, data in views.items():
            document["views"][name] = data

    def tempView(self, dbName, view):
        """
        Make a temporary view on the server.
        """
        if not isinstance(view, str):
            view = json.dumps(view)
        d = self.post("/%s/_temp_view" % (_namequote(dbName), ), view,
                      descr='tempView')
        return d.addCallback(self.parseResult)

    def getSession(self):
        """
        Get a session from the server using the supplied credentials.
        """
        self.log.debug("[%s:%s%s] POST %s",
                       self.host, self.port, '_session', 'getSession')
        postdata = "name=%s&password=%s" % (
            self.username.encode('utf-8'), self.password.encode('utf-8'))
        self.log.debug("[%s:%s%s] POST data %s",
                       self.host, self.port, '_session', 'getSession')
        # NOTE(review): 'application/x-www-form-urlencodeddata' is not the
        # standard 'application/x-www-form-urlencoded' type — CouchDB appears
        # to accept it, but confirm before normalizing.
        d = self._getPage("/_session",
                          method="POST",
                          postdata=postdata, isJson=False,
                          headers={
                              'Content-Type': [
                                  'application/x-www-form-urlencodeddata', ],
                              'Accept': ['*/*', ],
                          })
        d.addCallback(self.parseResult)

        def getSessionCb(result):
            # save the response of getSession, including roles
            # {u'ok': True, u'name': u'user/[email protected]', u'roles':
            # [u'xbnjwxg', u'confirmed', u'hoodie:read:user/xbnjwxg',
            # u'hoodie:write:user/xbnjwxg']}
            self.log.debug("[%s:%s%s] POST result %r",
                           self.host, self.port, '_session', result)
            self._session = result
            return result
        d.addCallback(getSessionCb)
        return d

    def getSessionRoles(self):
        """
        @rtype: C{list} of C{unicode}
        """
        if self._session:
            return self._session['roles']
        return []

    # Basic http methods

    def _getPage(self, uri, method="GET", postdata=None, headers=None,
                 isJson=True):
        """
        C{getPage}-like.
        """
        def cb_recv_resp(response):
            d_resp_recvd = Deferred()
            content_type = response.headers.getRawHeaders(
                'Content-Type', [''])[0].lower().strip()
            decode_utf8 = 'charset=utf-8' in content_type or \
                content_type == 'application/json'
            response.deliverBody(ResponseReceiver(d_resp_recvd,
                                                  decode_utf8=decode_utf8))
            return d_resp_recvd.addCallback(cb_process_resp, response)

        def cb_process_resp(body, response):
            # twisted.web.error imports reactor
            from twisted.web import error as tw_error

            # Emulate HTTPClientFactory and raise t.w.e.Error
            # and PageRedirect if we have errors.
            if response.code > 299 and response.code < 400:
                raise tw_error.PageRedirect(response.code, body)

            # When POST'ing to replicate, CouchDB can return 404
            # instead of 401, with error: unauthorized in the body
            if response.code in [401, 404]:
                error = None
                if response.code == 404:
                    try:
                        b = json.loads(body)
                        error = b['error']
                    except:
                        # body isn't JSON (or lacks 'error'): treat as
                        # a plain 404 below
                        pass
                if response.code == 401 or error == 'unauthorized':
                    if self._authenticator:
                        self.log.debug("401, authenticating")
                        d = self._authenticator.authenticate(self)
                        d.addCallback(lambda _: self._startLC())
                        # retry the original request once authenticated
                        d.addCallback(lambda _: self._getPage(
                            uri, method, postdata, headers, isJson))
                        return d

            if response.code > 399:
                raise tw_error.Error(response.code, body)

            return body

        url = uri.encode('utf-8')

        if not headers:
            headers = {}

        if isJson:
            headers["Accept"] = ["application/json"]
            headers["Content-Type"] = ["application/json"]

        headers["User-Agent"] = ["paisley"]

        url = (self.url_template % (uri,)).encode('utf-8')
        if self.username:
            headers["Authorization"] = ["Basic %s" % b64encode(
                "%s:%s" % (self.username, self.password))]

        body = StringProducer(postdata) if postdata else None

        d = self.client.request(method, url, Headers(headers), body)

        d.addCallback(cb_recv_resp)

        return d

    def _startLC(self):
        self.log.debug("startLC")
        # start a looping call to keep us authenticated with cookies
        if self._authLC:
            self._authLC.stop()

        def loop():
            self.log.debug('looping authentication')
            self.get('')

        # FIXME: can we query this value instead ?
        AUTH_WINDOW = 300  # half of default

        self._authLC = task.LoopingCall(loop)
        self._authLC.start(AUTH_WINDOW)

    def get(self, uri, descr='', isJson=True):
        """
        Execute a C{GET} at C{uri}.
        """
        self.log.debug("[%s:%s%s] GET %s",
                       self.host, self.port, short_print(uri), descr)
        return self._getPage(uri, method="GET", isJson=isJson)

    def post(self, uri, body, descr=''):
        """
        Execute a C{POST} of C{body} at C{uri}.
        """
        self.log.debug("[%s:%s%s] POST %s: %s",
                       self.host, self.port, short_print(uri), descr,
                       short_print(repr(body)))
        return self._getPage(uri, method="POST", postdata=body)

    def put(self, uri, body, descr=''):
        """
        Execute a C{PUT} of C{body} at C{uri}.
        """
        self.log.debug("[%s:%s%s] PUT %s: %s",
                       self.host, self.port, short_print(uri), descr,
                       short_print(repr(body)))
        return self._getPage(uri, method="PUT", postdata=body)

    def delete(self, uri, descr=''):
        """
        Execute a C{DELETE} at C{uri}.
        """
        self.log.debug("[%s:%s%s] DELETE %s",
                       self.host, self.port, short_print(uri), descr)
        return self._getPage(uri, method="DELETE")

    # map to an object

    def map(self, dbName, docId, objectFactory, *args, **kwargs):
        """
        @type docId: unicode
        """
        # return cached version if in cache
        try:
            return self._cache.getObject(docId)
        except (KeyError, AttributeError):
            # KeyError when docId does not exist
            # AttributeError when we don't have a cache
            d = self.openDoc(dbName, docId)

            def cb(doc):
                obj = objectFactory(*args, **kwargs)
                obj.fromDict(doc)
                self.mapped(docId, obj)
                return obj
            d.addCallback(cb)
            return d

    def mapped(self, key, obj):
        """Record an already-constructed object in the cache, if any."""
        if self._cache:
            self._cache.mapped(key, obj)
def command(self, user, channel, msg):
    """
    Release an episode end-to-end: locate (or build) the completed file on
    the staff FTP, verify it, then distribute it (XDCC, seedbox, Nyaa, TT)
    and announce it (blog post, showtimes, channel topic).

    NOTE(review): the body uses bare ``yield``s on Deferreds, so this is
    presumably decorated with ``@defer.inlineCallbacks`` above this chunk —
    confirm against the full file.

    :param user:    IRC nick that issued the command (gets progress notices).
    :param channel: IRC channel the command came from (gets error messages).
    :param msg:     tokenized command arguments: ``[name_filter, showname...,
                    optional --flags]``.
    """
    if len(msg) < 2:
        self.msg(channel, "Need a filter and a showname")
        return
    name_filter = msg[0]
    # Offset relative to the show's current episode counter; --previous
    # re-releases the episode already marked current.
    offset = 1
    while msg[-1][:2] == "--":
        arg = msg.pop()
        if arg == "--previous":
            offset = 0
    show = self.factory.resolve(" ".join(msg[1:]), channel)
    if show is None:
        # resolve() already reported the failure to the channel.
        return
    if not show["folder"]:
        self.msg(channel, "No FTP folder given for {}".format(show["series"]))
        return
    if not show["xdcc_folder"]:
        self.msg(channel, "No XDCC folder given for {}".format(show["series"]))
        return
    episode = show["current_ep"] + offset
    # Scratch directory named by a fresh UUID; loop guards against the
    # (unlikely) collision with an existing path.
    guid = uuid.uuid4().hex
    while os.path.exists(guid):
        guid = uuid.uuid4().hex
    os.mkdir(guid)
    # Step 1: Search FTP for complete episode, or premux + xdelta
    ftp = yield ClientCreator(reactor, FTPClient,
                              self.factory.config.ftp_user,
                              self.factory.config.ftp_pass).connectTCP(
                                  self.factory.config.ftp_host,
                                  self.factory.config.ftp_port)
    ftp.changeDirectory("/{}/{:02d}/".format(show["folder"], episode))
    filelist = FTPFileListProtocol()
    yield ftp.list(".", filelist)
    # Regular files only; directories are skipped.
    files = [x["filename"] for x in filelist.files if x["filetype"] != "d"]
    # "[[]Commie[]]" is fnmatch-escaped "[Commie]" (literal brackets).
    complete = fnmatch.filter(files, "[[]Commie[]]*{}*.mkv".format(name_filter))
    xdelta = fnmatch.filter(files, "*{}*.xdelta".format(name_filter))
    premux = fnmatch.filter(files, "*{}*.mkv".format(name_filter))
    if complete:
        # Step 1a: Download completed file
        if len(complete) > 1:
            self.msg(channel, "Too many completed files match the filter: {}".format(", ".join(complete)))
            return
        else:
            complete = complete[0]
        self.notice(user, "Found complete file: {}".format(complete))
        complete_len = [x["size"] for x in filelist.files if x["filename"] == complete][0]
        complete_downloader = Downloader("{}/{}".format(guid, complete))
        yield ftp.retrieveFile(complete, complete_downloader)
        # Compare byte counts to detect a truncated transfer.
        if complete_downloader.done() != complete_len:
            self.msg(channel, "Aborted releasing {}: Download of complete file had incorrect size.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
    elif xdelta and premux:
        # Step 1b: Download premux + xdelta, merge into completed file
        if len(premux) > 1:
            self.msg(channel, "Too many premux files match the filter: {}".format(", ".join(premux)))
            return
        else:
            premux = premux[0]
        if len(xdelta) > 1:
            self.msg(channel, "Too many xdelta files match the filter: {}".format(", ".join(xdelta)))
            return
        else:
            xdelta = xdelta[0]
        self.notice(user, "Found xdelta and premux: {} and {}".format(xdelta, premux))
        # Use a locally cached premux when available; otherwise download it
        # into the cache first (cache() reports size-verified success).
        if not os.path.isfile("{}/{}".format(self.factory.config.premux_dir, premux)):
            premux_len = [x["size"] for x in filelist.files if x["filename"] == premux][0]
            success = yield cache(self, user, ftp, premux, premux_len)
            if not success:
                self.msg(channel, "Aborted releasing {}: Download of premux file had incorrect size.".format(show["series"]))
                yield ftp.quit()
                ftp.fail(None)
                return
        shutil.copyfile("{}/{}".format(self.factory.config.premux_dir, premux),
                        "{}/{}".format(guid, premux))
        xdelta_len = [x["size"] for x in filelist.files if x["filename"] == xdelta][0]
        xdelta_downloader = Downloader("{}/{}".format(guid, xdelta))
        yield ftp.retrieveFile(xdelta, xdelta_downloader)
        if xdelta_downloader.done() != xdelta_len:
            self.msg(channel, "Aborted releasing {}: Download of xdelta file had incorrect size.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
        # Apply the xdelta patch (-f force, -d decode) to produce the
        # completed file next to the premux inside the scratch dir.
        code = yield getProcessValue(getPath("xdelta3"),
                                     args=["-f", "-d", "{}/{}".format(guid, xdelta)],
                                     env=os.environ)
        if code != 0:
            self.msg(channel, "Aborted releasing {}: Couldn't merge premux and xdelta.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
        self.notice(user, "Merged premux and xdelta")
        complete = fnmatch.filter(os.listdir(guid), "[[]Commie[]]*.mkv")
        if not complete:
            self.msg(channel, "No completed file found")
            return
        elif len(complete) > 1:
            self.msg(channel, "Too many completed files found after merging: {}".format(", ".join(complete)))
            return
        else:
            complete = complete[0]
        # NOTE(review): this check looks unreachable — every branch above
        # either returned or assigned a non-empty filename; kept as-is.
        if not complete:
            self.msg(channel, "Aborted releasing {}: Couldn't find completed file after merging.".format(show["series"]))
            yield ftp.quit()
            ftp.fail(None)
            return
    else:
        self.msg(channel, "Aborted releasing {}: Couldn't find completed episode.".format(show["series"]))
        yield ftp.quit()
        ftp.fail(None)
        return
    yield ftp.quit()
    ftp.fail(None)
    # Step 1c: Verify CRC
    crc = complete[-13:-5]  # Extract CRC from filename
    # NOTE(review): bare except — any read failure (not just a missing file)
    # aborts the release here.
    try:
        with open("{}/{}".format(guid, complete), "rb") as f:
            # Mask to 32 bits so the result is stable across platforms.
            calc = "{:08X}".format(binascii.crc32(f.read()) & 0xFFFFFFFF)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't open completed file for CRC verification.".format(show["series"]))
        return
    if crc != calc:
        self.msg(channel, "Aborted releasing {}: CRC failed verification. Filename = '{}', Calculated = '{}'.".format(show["series"], crc, calc))
        return
    # Step 1d: Determine version number
    match = re.search("(v\d+)", complete)
    version = match.group(1) if match is not None else ""
    # Step 2: Create torrent
    try:
        torrent = makeTorrent(complete, guid)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't create torrent.".format(show["series"]))
        raise
    self.notice(user, "Created torrent")
    # Step 3: Upload episode to XDCC server
    try:
        ftp = yield ClientCreator(reactor, FTPClient,
                                  self.factory.config.xdcc_user,
                                  self.factory.config.xdcc_pass).connectTCP(
                                      self.factory.config.xdcc_host,
                                      self.factory.config.xdcc_port)
        # storeFile returns (Deferred firing with the sender protocol,
        # Deferred firing when the transfer completes).
        store, finish = ftp.storeFile("./{}/{}/{}".format(self.factory.config.xdcc_folder, show["xdcc_folder"], complete))
        sender = yield store
        # NOTE(review): reads the whole file into memory before writing.
        with open("{}/{}".format(guid, complete), "rb") as f:
            sender.transport.write(f.read())
        sender.finish()
        yield finish
        yield ftp.quit()
        ftp.fail(None)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't upload completed episode to XDCC server.".format(show["series"]))
        raise
    self.notice(user, "Uploaded to XDCC")
    # Step 4: Upload episode to seedbox
    try:
        ftp = yield ClientCreator(reactor, FTPClient,
                                  self.factory.config.seed_user,
                                  self.factory.config.seed_pass).connectTCP(
                                      self.factory.config.seed_host,
                                      self.factory.config.seed_port)
        store, finish = ftp.storeFile("./{}/{}".format(self.factory.config.seed_file_folder, complete))
        sender = yield store
        with open("{}/{}".format(guid, complete), "rb") as f:
            sender.transport.write(f.read())
        sender.finish()
        yield finish
        yield ftp.quit()
        ftp.fail(None)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't upload completed episode to seedbox.".format(show["series"]))
        raise
    self.notice(user, "Uploaded to seedbox")
    # Step 5: Start seeding torrent
    try:
        ftp = yield ClientCreator(reactor, FTPClient,
                                  self.factory.config.seed_user,
                                  self.factory.config.seed_pass).connectTCP(
                                      self.factory.config.seed_host,
                                      self.factory.config.seed_port)
        # Dropping the .torrent into the watched folder triggers seeding.
        store, finish = ftp.storeFile("./{}/{}".format(self.factory.config.seed_torrent_folder, torrent))
        sender = yield store
        with open("{}/{}".format(guid, torrent), "rb") as f:
            sender.transport.write(f.read())
        sender.finish()
        yield finish
        yield ftp.quit()
        ftp.fail(None)
    except:
        self.msg(channel, "Aborted releasing {}: Couldn't upload torrent to seedbox.".format(show["series"]))
        raise
    self.notice(user, "Seeding started")
    # Step 6: Upload torrent to Nyaa
    nyaagent = CookieAgent(Agent(reactor), cookielib.CookieJar())
    response = yield nyaagent.request("POST", "http://www.nyaa.eu/?page=login",
                                      Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
                                      FileBodyProducer(StringIO(urllib.urlencode({"loginusername": self.factory.config.nyaa_user, "loginpassword": self.factory.config.nyaa_pass}))))
    body = yield returnBody(response)
    # Nyaa gives no structured status for login; sniff the HTML.  On failure
    # the page is saved into the scratch dir for post-mortem debugging.
    if "Login successful" not in body:
        self.msg(channel, "Aborted releasing {}: Couldn't login to Nyaa.".format(show["series"]))
        with open("{}/{}".format(guid, "nyaa_login.html"), "wb") as f:
            f.write(body)
        return
    twitter_list = rheinbowify('Follow [url="https://twitter.com/RHExcelion"]@RHExcelion[/url], [url="https://twitter.com/johnnydickpants"]@jdp[/url], and the rest of Commie at [url="https://twitter.com/RHExcelion/commie-devs"]@Commie-Devs[/url].')
    post_data = MultiPartProducer({"torrent": "{}/{}".format(guid, torrent)}, {
        "name": complete,
        "catid": "1_37",
        "info": "#[email protected]",
        "description": "Visit us at [url]http://commiesubs.com[/url] for the latest updates and news.\n{}".format(twitter_list),
        "remake": "0",
        "anonymous": "0",
        "hidden": "0",
        "rules": "1",
        "submit": "Upload"
    })
    response = yield nyaagent.request("POST", "http://www.nyaa.eu/?page=upload",
                                      Headers({'Content-Type': ['multipart/form-data; boundary={}'.format(post_data.boundary)]}),
                                      post_data)
    if response.code != 200:
        # Nyaa's upload endpoint signals failures through custom HTTP codes.
        nyaa_codes = {
            418: "I'm a teapot (You're doing it wrong)",
            460: "Missing Announce URL",
            461: "Already Exists",
            462: "Invalid File",
            463: "Missing Data",
            520: "Configuration Broken"
        }
        # NOTE(review): an unlisted code raises KeyError here — confirm
        # whether that is acceptable (it would escape this handler).
        self.msg(channel, "Aborted releasing {}: Couldn't upload torrent to Nyaa. Error #{:d}: {}".format(show["series"], response.code, nyaa_codes[response.code]))
        return
    self.notice(user, "Uploaded to Nyaa")
    # Step 7: Get torrent link from Nyaa
    body = yield returnBody(response)
    match = re.search("http://www.nyaa.eu/\?page=view&tid=[0-9]+", body)
    if not match:
        self.msg(channel, "Aborted releasing {}: Couldn't find torrent link in Nyaa's response.".format(show["series"]))
        with open("{}/{}".format(guid, "nyaa_submit.html"), "wb") as f:
            f.write(body)
        return
    # NOTE(review): this replace is a no-op ("&" -> "&") — it was most
    # likely "&amp;" -> "&" before an HTML-escaping mangle; verify upstream.
    info_link = match.group(0).replace("&", "&")
    download_link = info_link.replace("view", "download")
    self.notice(user, "Got Nyaa torrent link")
    # Step 8: Upload torrent link to TT
    ttagent = CookieAgent(Agent(reactor), cookielib.CookieJar())
    response = yield ttagent.request("POST", "http://tokyotosho.info/login.php",
                                     Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
                                     FileBodyProducer(StringIO(urllib.urlencode({"username": self.factory.config.tt_user, "password": self.factory.config.tt_pass, "submit": "Submit"}))))
    body = yield returnBody(response)
    # TT failures are non-fatal: the release continues either way.
    if "Logged in." not in body:
        self.msg(channel, "Couldn't login to TT. Continuing to release {} regardless.".format(show["series"]))
        with open("{}/{}".format(guid, "tt_login.html"), "wb") as f:
            f.write(body)
    else:
        response = yield ttagent.request("POST", "http://tokyotosho.info/new.php",
                                         Headers({'Content-Type': ['application/x-www-form-urlencoded']}),
                                         FileBodyProducer(StringIO(urllib.urlencode({
                                             "type": "1",
                                             "url": download_link,
                                             "comment": "#[email protected]",
                                             "website": "http://www.commiesubs.com/",
                                             "send": "Submit New Torrent"
                                         }))))
        body = yield returnBody(response)
        if "Torrent Submitted" not in body:
            self.msg(channel, "Couldn't upload torrent to TT. Continuing to release {} regardless.".format(show["series"]))
            with open("{}/{}".format(guid, "tt_submit.html"), "wb") as f:
                f.write(body)
        else:
            self.notice(user, "Uploaded to TT")
    # Step 9: Create blog post
    blog = Proxy("http://commiesubs.com/xmlrpc.php")
    # Second-to-last path component of the blog link is the category slug.
    slug = show["blog_link"].split("/")[-2]
    categories = ["The Bread Lines"]
    result = yield blog.callRemote("wp.getTerms", 0, self.factory.config.blog_user, self.factory.config.blog_pass, "category")
    for term in result:
        if term["slug"] == slug:
            categories.append(term["name"])
    try:
        yield blog.callRemote("wp.newPost",
                              0,  # Blog ID
                              self.factory.config.blog_user,  # Username
                              self.factory.config.blog_pass,  # Password
                              {  # Content
                                  "post_type": "post",
                                  "post_status": "publish",
                                  "comment_status": "open",
                                  "post_title": "{} {:02d}{}".format(show["series"], episode, version),
                                  "post_content": "<a href=\"{}\">Torrent</a>".format(info_link),
                                  "terms_names": {"category": categories}
                              })
        self.notice(user, "Created blog post")
    except:
        # Blog failure is non-fatal for the release.
        self.msg(channel, "Couldn't create blog post. Continuing to release {} regardless.".format(show["series"]))
    # Step 10: Mark show finished on showtimes
    data = yield self.factory.load("show", "update", data={"id": show["id"], "method": "next_episode"})
    if "status" in data and not data["status"]:
        self.msg(channel, data["message"])
    self.msg(channel, "{} released. Torrent @ {}".format(show["series"], info_link))
    # Step 11: Update the topic
    self.factory.update_topic()
    # Step 12: Clean up
    # ignore_errors=True: best-effort removal of the scratch directory.
    shutil.rmtree(guid, True)
def request(self, method, url, **kwargs):
    """
    Issue an HTTP request through the wrapped Twisted Agent, returning a
    Deferred firing with a response object.

    Recognized keyword arguments (all optional): ``params`` (extra query
    parameters), ``headers`` (dict or Headers), ``data`` (form payload),
    ``files`` (multipart uploads), ``json`` (JSON payload), ``cookies``,
    ``auth``, ``allow_redirects``, ``browser_like_redirects``, ``timeout``,
    ``reactor`` and ``unbuffered``.  Python 2 code: uses ``unicode``.
    """
    method = method.encode('ascii').upper()
    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        url = _combine_query_params(url, params)
    if isinstance(url, unicode):
        # Normalize text URLs to an ASCII-safe URI form.
        url = URL.fromText(url).asURI().asText().encode('ascii')
    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.items():
                # A scalar value becomes a single raw header; a list is
                # passed through as multiple values for the same name.
                if isinstance(v, (bytes, unicode)):
                    h.addRawHeader(k, v)
                elif isinstance(v, list):
                    h.setRawHeaders(k, v)
            headers = h
    else:
        headers = Headers({})
    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    # since json=None needs to be serialized as 'null', we need to
    # explicitly check kwargs for this key
    has_json = 'json' in kwargs
    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = str(uuid.uuid4()).encode('ascii')
        headers.setRawHeaders(
            b'content-type', [
                b'multipart/form-data; boundary=' + boundary])
        if data:
            data = _convert_params(data)
        else:
            data = []
        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                b'content-type', [b'application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = self._data_to_body_producer(data)
    elif has_json:
        # If data is sent as json, set Content-Type as 'application/json'
        headers.setRawHeaders(
            b'content-type', [b'application/json; charset=UTF-8'])
        content = kwargs['json']
        # Compact separators keep the wire format minimal.
        json = json_dumps(content, separators=(u',', u':')).encode('utf-8')
        bodyProducer = self._data_to_body_producer(json)
    # Merge per-call cookies over the session jar; the merged jar is also
    # handed to the response wrapper so Set-Cookie updates are captured.
    cookies = kwargs.get('cookies', {})
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    cookies = merge_cookies(self._cookiejar, cookies)
    # Agent wrapping order matters: cookies -> redirects -> gzip -> auth.
    wrapped_agent = CookieAgent(self._agent, cookies)
    if kwargs.get('allow_redirects', True):
        if kwargs.get('browser_like_redirects', False):
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])
    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)
    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)
    timeout = kwargs.get('timeout')
    if timeout:
        # Cancel the request after `timeout` seconds; the callLater itself
        # is cancelled as soon as the Deferred fires either way.
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result
        d.addBoth(gotResult)
    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)
    return d.addCallback(_Response, cookies)
def request(
    self,
    method,
    url,
    *,
    params=None,
    headers=None,
    data=None,
    files=None,
    json=_NOTHING,  # sentinel: distinguishes "no json kwarg" from json=None
    auth=None,
    cookies=None,
    allow_redirects=True,
    browser_like_redirects=False,
    unbuffered=False,
    reactor=None,
    timeout=None,
    _stacklevel=2,
):
    """
    See :func:`treq.request()`.

    Builds the final URL (merging ``params``), the Headers object and the
    body producer, wraps the session agent with cookie / redirect / gzip /
    auth layers, issues the request and returns a Deferred firing with a
    response wrapper.  ``timeout`` cancels the request after that many
    seconds; ``unbuffered=True`` skips response buffering.
    """
    method = method.encode('ascii').upper()
    # Normalize every accepted URL form to an EncodedURL.
    if isinstance(url, DecodedURL):
        parsed_url = url.encoded_url
    elif isinstance(url, EncodedURL):
        parsed_url = url
    elif isinstance(url, str):
        # We use hyperlink in lazy mode so that users can pass arbitrary
        # bytes in the path and querystring.
        parsed_url = EncodedURL.from_text(url)
    else:
        # Assume a bytes URL.
        parsed_url = EncodedURL.from_text(url.decode('ascii'))
    # Join parameters provided in the URL
    # and the ones passed as argument.
    if params:
        parsed_url = parsed_url.replace(
            query=parsed_url.query + tuple(_coerced_query_params(params)))
    url = parsed_url.to_uri().to_text().encode('ascii')
    # _stacklevel + 1 keeps deprecation warnings pointing at the caller.
    headers = self._request_headers(headers, _stacklevel + 1)
    bodyProducer, contentType = self._request_body(data, files, json,
                                                   stacklevel=_stacklevel + 1)
    if contentType is not None:
        headers.setRawHeaders(b'Content-Type', [contentType])
    # Merge per-call cookies over the session jar; the merged jar is also
    # handed to the response wrapper so Set-Cookie updates are captured.
    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)
    cookies = merge_cookies(self._cookiejar, cookies)
    # Agent wrapping order matters: cookies -> redirects -> gzip -> auth.
    wrapped_agent = CookieAgent(self._agent, cookies)
    if allow_redirects:
        if browser_like_redirects:
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)
    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)
    d = wrapped_agent.request(method, url, headers=headers,
                              bodyProducer=bodyProducer)
    if reactor is None:
        # Fall back to the global reactor when none was supplied.
        from twisted.internet import reactor
    if timeout:
        # Cancel the request after `timeout` seconds; the callLater itself
        # is cancelled as soon as the Deferred fires either way.
        delayedCall = reactor.callLater(timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result
        d.addBoth(gotResult)
    if not unbuffered:
        d.addCallback(_BufferedResponse)
    return d.addCallback(_Response, cookies)
class RestApiTestMixin(object):
    """
    Test mixin that runs a live REST API server on an ephemeral local port
    and provides a Deferred-based HTTP client helper (``call_api``) with
    cookie persistence and optional basic-auth credentials.
    """

    def start_rest_api(self):
        """Start the API site on 127.0.0.1 with an OS-assigned port."""
        root_page = resource.Resource()
        root_page.putChild('api', make_rest_api())
        site = AuthenticatedSite(root_page)
        site.credentialFactories = [
            BasicCredentialFactory("CloudMailing API"),
            DigestCredentialFactory("md5", "CloudMailing API")
        ]
        site.credentialsCheckers = [AdminChecker()]
        site.sessionFactory = UTSession
        # Port 0: let the OS pick a free port, then read it back for URLs.
        self.p = reactor.listenTCP(0, site, interface="127.0.0.1")
        self.port = self.p.getHost().port
        self.api_base_url = 'http://127.0.0.1:%d/api/' % self.port
        self.agent = None

    def stop_rest_api(self):
        """Stop the listening port; returns the stopListening() Deferred."""
        # BUG FIX: originally `self.agent = None` sat *after* the return and
        # was unreachable, so the cookie agent leaked across tests.  Reset
        # the cached agent before stopping the listener.
        self.agent = None
        return self.p.stopListening()

    def log(self, msg):
        """Pass-through logger usable inside a Deferred callback chain."""
        print(msg)  # parenthesized form works on both Python 2 and 3
        return msg

    @staticmethod
    def cb_decode_json(body):
        """Decode a JSON response body into Python objects."""
        return json.loads(body)

    def call_api(self, verb, url, expected_status_code=http_status.HTTP_200_OK,
                 headers=None, data=None, pre_read_body_cb=None,
                 credentials=None):
        """
        Issue an HTTP request against the running test API.

        :param verb: HTTP method, e.g. ``'GET'``.
        :param url: path appended to ``self.api_base_url``.
        :param expected_status_code: response code asserted via
            ``self.assertEqual`` (pass a falsy value to skip the check).
        :param headers: extra headers merged over the defaults.
        :param data: optional payload, sent through ``JsonProducer``.
        :param pre_read_body_cb: optional callback applied to the response
            before the body is read.
        :param credentials: optional ``(user, password)`` tuple for HTTP
            basic auth.
        :returns: Deferred firing with the JSON-decoded body (or the raw
            empty body for 204 No Content).
        """
        def cbResponse(response):
            # Assert the status code early so failures point at the request.
            if expected_status_code:
                self.assertEqual(
                    expected_status_code, response.code,
                    "Bad result code for request '%s %s'" % (verb, url))
            return response

        def cb_load_body(response):
            d = readBody(response)
            # 204 has an empty body by definition: skip JSON decoding.
            if response.code != http_status.HTTP_204_NO_CONTENT:
                d.addCallback(RestApiTestMixin.cb_decode_json)
            return d

        _headers = {'User-Agent': ['Twisted Web Client Example']}
        if credentials is not None:
            _headers['authorization'] = [
                'basic %s' % base64.encodestring('%s:%s' % credentials)
            ]
        if headers:
            _headers.update(headers)
        # Lazily create one CookieAgent and reuse it across calls so the
        # session cookie persists for the whole test.
        if self.agent is None:
            self.agent = CookieAgent(Agent(reactor), CookieJar())
        body = JsonProducer(data) if data is not None else None
        d = self.agent.request(verb, self.api_base_url + url,
                               Headers(_headers), body)
        d.addCallback(cbResponse)
        if pre_read_body_cb:
            d.addCallback(pre_read_body_cb)
        d.addCallback(cb_load_body)
        return d