def do(node_id, msg):
    """Deliver *msg* to *node_id*: directly via a relay link when one
    exists, otherwise through the DHT HTTP "put" endpoint.

    Returns 1 on success, 0 on failure.
    """
    # Fast path: push straight into the peer's relay-link inbox.
    if node_id in self.relay_links:
        relay_link = self.relay_links[node_id]
        msg = self.build_dht_response(self.serialize_message(msg))
        relay_link.protocol.messages_received.put_nowait(msg)
        return 1
    try:
        # Send a message directly to a node in the "DHT"
        call = dht_msg_endpoint + "?call=put&"
        call += urlencode({"dest_node_id": node_id}) + "&"
        msg = self.serialize_message(msg)
        call += urlencode({"msg": msg}) + "&"
        call += urlencode({"node_id": self.node_id}) + "&"
        call += urlencode({"password": self.password}) + "&"
        # NOTE(review): `list_pop` is not defined in this function;
        # presumably captured from an enclosing scope — confirm.
        call += urlencode({"list_pop": list_pop})
        # Make API call.
        ret = requests.get(call, timeout=5)
        self.handles.append(ret)
        if "success" not in ret.text:
            return 0
        return 1
    except Exception as e:
        # Best-effort failure path: log and report failure to the caller.
        self.debug_print("DHT PUT TIMED OUT")
        self.debug_print(e)
        self.debug_print("Rescheduling DHT PUT")
        self.debug_print("PUT FAILED")
        return 0
def do(args):
    """Fetch this node's neighbours from the DHT server and publish them.

    Converts each server entry into a KadNode (skipping invalid IPs or
    ports), stores the result on self.neighbours and signals
    self.is_neighbours_ready.
    """
    # Ask the server for our neighbour list (original comment wrongly
    # said "mutex" — this is the find_neighbours call).
    call = dht_msg_endpoint + "?call=find_neighbours&"
    call += urlencode({"node_id": self.node_id}) + "&"
    call += urlencode({"password": self.password}) + "&"
    call += urlencode({"network_id": self.network_id})
    # Make API call.
    ret = requests.get(call, timeout=5).text
    ret = json.loads(ret)
    # A single neighbour may come back as a bare object; normalise to list.
    if type(ret) == dict:
        ret = [ret]
    # Convert to kademlia neighbours.
    neighbours = []
    for neighbour in ret:
        if not is_ip_valid(neighbour["ip"]):
            continue
        neighbour["port"] = int(neighbour["port"])
        if not is_valid_port(neighbour["port"]):
            continue
        # Node IDs are hex-encoded on the wire; KadNode wants raw bytes.
        knode = KadNode(id=binascii.unhexlify(
            neighbour["id"].encode("ascii")),
            ip=neighbour["ip"],
            port=neighbour["port"],
            can_test=int(neighbour["can_test"]))
        neighbours.append(knode)
    self.neighbours = neighbours
    self.is_neighbours_ready.set()
    return 0
def put(self, node_id, msg, no=1):
    """Send *msg* to *node_id*, via a relay link when one exists,
    otherwise through the DHT HTTP "put" endpoint.

    :param no: retry counter; on failure the call sleeps one second and
        recurses, giving up after ~300 attempts.
    """
    self.debug_print("Sim DHT Put " + str(node_id) + ": " + str(msg))
    # Fast path: deliver straight into the relay link's inbox.
    if node_id in self.relay_links:
        relay_link = self.relay_links[node_id]
        msg = self.build_dht_response(msg)
        print("in relay link put")
        relay_link.protocol.messages_received.put_nowait(msg)
        return
    # Retry bookkeeping: give up past the cap, otherwise count this attempt.
    if no >= 300:
        return
    else:
        no += 1
    try:
        # Send a message directly to a node in the "DHT"
        call = dht_msg_endpoint + "?call=put&"
        call += urlencode({"node_id": node_id}) + "&"
        call += urlencode({"msg": str(msg)})
        # Make API call.
        response = requests.get(call, timeout=5).text
        self.debug_print(response)
        return
    except Exception as e:
        # Reschedule call.
        self.debug_print("DHT PUT TIMED OUT")
        self.debug_print(e)
        time.sleep(1)
        self.debug_print("Rescheduling DHT PUT")
        self.put(node_id, msg, no)
        # NOTE(review): printed after every failed attempt, even when the
        # recursive retry above eventually succeeded.
        self.debug_print("PUT FAILED")
def getLoginURL(self, redirect_url):
    """Build the OAuth consent URL the user should be redirected to.

    When *redirect_url* is given, it is carried through the flow inside
    the urlencoded ``state`` parameter.
    """
    params = {
        "redirect_uri": self.loginUri,
        "client_id": self.clientId,
        "response_type": "code",
    }
    if redirect_url is not None:
        params["state"] = urlencode({"redirect": redirect_url})
    params.update(self.authUriAdditionalParams)
    full_url = "%s?%s" % (self.authUri, urlencode(params))
    return defer.succeed(full_url)
def do(args):
    """Ping the DHT server so it marks this node as recently alive."""
    query_parts = [
        urlencode({"node_id": self.node_id}),
        urlencode({"password": self.password}),
    ]
    call = dht_msg_endpoint + "?call=last_alive&" + "&".join(query_parts)
    # Fire-and-forget keep-alive; the response body is not inspected.
    requests.get(call, timeout=5)
    return 0
def getLoginURL(self, redirect_url):
    """ Returns the url to redirect the user to for user consent

    When *redirect_url* is given, it is carried through the OAuth flow
    inside the urlencoded ``state`` parameter.
    """
    oauth_params = {'redirect_uri': self.loginUri,
                    'client_id': self.clientId, 'response_type': 'code'}
    if redirect_url is not None:
        oauth_params['state'] = urlencode(dict(redirect=redirect_url))
    # Provider-specific extras (e.g. scopes) may override the defaults above.
    oauth_params.update(self.authUriAdditionalParams)
    return defer.succeed("%s?%s" % (self.authUri, urlencode(oauth_params)))
def aws_federate(self):
    """Open (or emit) an AWS web-console federation URL for the current
    credentials.

    Exchanges the STS credentials for a SigninToken at the AWS
    federation endpoint, then builds the console login URL.  Depending
    on flags the URL is printed, handed to the web state, or opened in
    a browser tab.  Returns the URL, or None if AWS was unreachable.
    """
    logger.debug("Attempting to open AWS console.")
    creds = {
        "sessionId": self.credentials["AccessKeyId"],
        "sessionKey": self.credentials["SecretAccessKey"],
        "sessionToken": self.credentials["SessionToken"],
    }
    query = urlencode({
        "Action": "getSigninToken",
        "Session": json.dumps(creds),
    })
    logger.debug("Web Console params: {}".format(query))
    url_tuple = urlparse("https://signin.aws.amazon.com/federation")
    url = urlunparse(url_tuple._replace(query=query))
    try:
        # NOTE(review): no timeout — this can hang indefinitely if AWS
        # does not answer; consider requests.get(url, timeout=...).
        token = requests.get(url).json()
    except requests.exceptions.ConnectionError as e:
        self.exit(
            "Unable to contact AWS to open web console : {}".format(e))
        return None
    # role_arn format: arn:aws:iam::<account_id>:role/<name>
    account_id = self.role_arn.split(":")[4]
    account_alias = get_alias(self.role_map, account_id)
    role = self.role_arn.split(":")[5].split("/")[-1]
    issuer_url_query = urlencode({"account": account_alias, "role": role})
    issuer_url = urlunparse(
        ("https", self.issuer_domain, "/", "", issuer_url_query, ""))
    query = urlencode({
        "Action": "login",
        "Destination": "https://console.aws.amazon.com/",
        "SigninToken": token["SigninToken"],
        "Issuer": issuer_url,
    })
    url = urlunparse(url_tuple._replace(query=query))
    logger.debug("Web browser console URL: {}".format(url))
    if self.print_url:
        print(url)
        self.state = "finished"
    elif self.opened_tab:
        # A tab is already open: hand the URL to the web front-end instead.
        self.state = "aws_federate"
        self.web_state["awsFederationUrl"] = url
    else:
        self.opened_tab = True
        webbrowser.open_new_tab(url)
        self.state = "finished"
    return url
def upload(self, info, localpath):
    """Upload the file at *localpath* into the iCloud zone described by
    *info* (keys: 'zone', 'docwsid').

    Two steps: reserve an upload slot via /upload/web, POST the bytes to
    the returned URL, then register the document via /update/documents.
    """
    # Example slot request payload:
    # {"filename":"...jpg","type":"FILE","content_type":"image/jpeg","size":13797}
    url = '%s/ws/%s/upload/web?%s' % (self._download_root, info['zone'],
                                      urlencode(self.params_token))
    filename = os.path.basename(localpath)
    statinfo = os.stat(localpath)
    data = {
        "filename": filename,
        "type": "FILE",
        "content_type": mimetypes.guess_type(filename)[0],
        "size": os.path.getsize(localpath)
    }
    ress = self.session.post(url, data=json.dumps(data)).json()
    if len(ress) and 'url' in ress[0]:
        res = ress[0]
        print("I'm post file %s" % filename)
        response = requests.options(res['url'])
        # Fixed: open in binary mode and close the handle after the upload.
        # Text mode ("r") corrupts binary payloads and the handle leaked.
        with open(localpath, "rb") as fh:
            files = {'files': (filename, fh)}
            res = requests.post(res['url'], files=files).json()
        mtime = int(statinfo.st_mtime * 1000)
        data = {
            "data": {
                "signature": res['singleFile']['fileChecksum'],
                "wrapping_key": res['singleFile']['wrappingKey'],
                "reference_signature": res['singleFile']['referenceChecksum'],
                "receipt": res['singleFile']['receipt'],
                "size": res['singleFile']['size']
            },
            "command": "add_file",
            "document_id": None,
            "path": {
                "starting_document_id": info['docwsid'],
                "path": filename
            },
            "allow_conflict": True,
            "file_flags": {
                "is_writable": True,
                "is_executable": False,
                "is_hidden": False
            },
            "mtime": mtime,
            "btime": mtime
        }
        print("%s" % json.dumps(data))
        # Fixed: the second query separator must be '&', not a second '?'.
        url = '%s/ws/%s/update/documents?errorBreakdown=true&%s' % (
            self._download_root, info['zone'], urlencode(self.params_token))
        response = self.session.post(url, data=json.dumps(data))
def do(args):
    """Ask the DHT server whether this node holds the mutex.

    On a recognised reply, stores it on self.has_mutex and signals
    self.is_mutex_ready so waiters can proceed.
    """
    call = dht_msg_endpoint + "?call=get_mutex&"
    call += urlencode({"node_id": self.node_id}) + "&"
    call += urlencode({"password": self.password})
    # Make API call.
    ret = requests.get(call, timeout=5).text
    # NOTE(review): substring test — any reply *containing* "1" or "2"
    # (e.g. "12", an error page) matches, and int(ret) would then raise.
    # A sibling variant of this closure accepts "1" or "0" instead;
    # confirm which protocol values are intended.
    if "1" in ret or "2" in ret:
        self.has_mutex = int(ret)
    self.is_mutex_ready.set()
    return 0
def do(args):
    """Ask the DHT server whether this node holds the mutex.

    On a recognised reply, stores it on self.has_mutex and signals
    self.is_mutex_ready so waiters can proceed.
    """
    call = dht_msg_endpoint + "?call=get_mutex&"
    call += urlencode({"node_id": self.node_id}) + "&"
    call += urlencode({"password": self.password})
    # Make API call.
    ret = requests.get(call, timeout=5).text
    # NOTE(review): substring test — any reply *containing* "1" or "0"
    # (e.g. "10", an error page) matches, and int(ret) would then raise.
    # A sibling variant of this closure accepts "1" or "2" instead;
    # confirm which protocol values are intended.
    if "1" in ret or "0" in ret:
        self.has_mutex = int(ret)
    self.is_mutex_ready.set()
    return 0
def get_stream_server_rtmfp(self, server, relayer):
    """Resolve RTMFP stream endpoints for the stream described by the
    JSON string *relayer*.

    Picks a load-balanced server (verified with a 5 s TCP probe, up to
    10 attempts) and returns a tuple of (primary, fallback) /stream
    URLs joined onto *server*.
    """
    stream_info = json.loads(relayer)
    data = {
        "name": stream_info.get("name"),
        "peer": "false",
        # Hard-coded hex blob decoded into the referer string.
        "referer": a2b_hex("6d6f6264726f2e6d65").decode("utf-8"),
        "casting": "false",
        "wifi": "false",
        "token": self.get_token(),
        "playpath": stream_info.get("playpath"),
    }
    for tries in range(10):
        try:
            lb_info = self.api_request("/utils/loadbalancer", data)
        except ValueError:
            # Unparseable reply: give up on the load balancer entirely.
            lb_info = {}
            break
        except RequestException:
            # Transient transport error: retry.
            lb_info = {}
            continue
        try:
            # Probe the advertised server before trusting it.
            test = socket.socket()
            test.settimeout(5)
            test.connect(
                (lb_info.get("server"), stream_info.get("port", 80)))
            break
        except Exception:
            lb_info = {}
        finally:
            # NOTE(review): if socket.socket() itself raised, `test` is
            # unbound here and close() raises NameError.
            test.close()
    # Fall back to the server embedded in the stream info when the load
    # balancer gave nothing usable.
    rtmfp_url = "rtmfp://{0}/{1}".format(
        lb_info.get("server", stream_info.get("server")),
        stream_info.get("playpath"))
    rtmfp_netgroup = stream_info.get("netgroup")
    fallbackurl = "rtmfp://{0}/{1}".format(
        lb_info.get("fallbackServer", stream_info.get("server")),
        stream_info.get("fallbackUrl").replace(" ", ""),
    )
    path = "/stream?{0}".format(
        urlencode({
            "url": rtmfp_url,
            "netgroup": rtmfp_netgroup,
            "fallbackurl": fallbackurl
        }))
    fallback_path = "/stream?{0}".format(urlencode({"url": fallbackurl}))
    return (urljoin(server, path), urljoin(server, fallback_path))
def dataset_list(self, **kwargs):
    """Return relevant information concerning the datasets of your project

    Queries the project's /datasets endpoint (JSON media type) and wraps
    each entry's URL in a Dataset object.

    raise
    -----
    HTTPError
        Dataset not accessible (re-raised after writing a diagnostic
        message to stderr).
    """
    # Scheme + host of the SITools server, extracted from self.url.
    sitools_url = self.url.split("/")[0] + "//" + self.url.split(
        "//")[1].split("/")[0]
    kwargs.update({'media': 'json'})
    url = self.url + '/datasets' + '?' + urlencode(kwargs)
    data = []
    try:
        result = load(urlopen(url))
        # Fixed: dropped the unused enumerate() index and the redundant
        # non-empty guard (iterating an empty list is a no-op).
        for dataset in result['data']:
            data.append(Dataset(sitools_url + dataset['url']))
    except HTTPError:
        out_mess = ("Error in Project.dataset_list() :\nCannot access dataset list %s"
                    "\nContact [email protected] and report that issue\n" % url)
        stderr.write(out_mess)
        raise
    return data
def test_poll(self):
    """End-to-end test of the reporting server's /poll endpoint: serve a
    file from a temp dir and assert its content comes back as JSON."""
    # NOTE(review): `dir` shadows the builtin; kept as-is since the
    # nested class closes over it.
    with temporary_dir() as dir:
        class TestPantsHandler(PantsHandler):
            def __init__(self, request, client_address, server):
                # TODO(6071): BaseHTTPServer.BaseHTTPRequestHandler is an old-style class, so we must
                # invoke its __init__ like this.
                # This will become unnecessary when we no longer support python2.
                PantsHandler.__init__(
                    self,
                    settings=ReportingServer.Settings(
                        info_dir=dir,
                        template_dir=dir,
                        assets_dir=dir,
                        root=dir,
                        allowed_clients=['ALL'],
                    ),
                    renderer=None,
                    request=request,
                    client_address=client_address,
                    server=server,
                )
        safe_file_dump(os.path.join(dir, "file"), "hello")
        with http_server(TestPantsHandler) as port:
            # /poll takes a JSON list of {id, path} requests in the "q" param.
            response = requests.get("http://127.0.0.1:{}/poll?{}".format(
                port,
                urlencode({"q": json.dumps([{"id": "0", "path": "file"}])}),
            ))
            self.assertEqual(response.json(), {"0": "hello"})
def test_make_seasons_list(self, mock_listitem):
    """make_seasons_list adds a directory entry whose URL matches the
    expected plugin URL (query-parameter order ignored)."""
    # Stub the token and seasons HTTP endpoints.
    responses.add(responses.POST, config.TOKEN_URL,
                  body=json.dumps({'token': 'abcdef'}),
                  status=200)
    responses.add(responses.GET, config.SEASONS_URL,
                  body=self.SEASONS_JSON,
                  status=200)
    mock_listitem.side_effect = fakes.FakeListItem
    mock_plugin = fakes.FakePlugin()
    # Import inside the patched module table so index binds the fake
    # xbmcplugin module.
    with mock.patch.dict('sys.modules', xbmcplugin=mock_plugin):
        import resources.lib.index as index
        index.make_seasons_list()
        expected_url = 'plugin://{addonid}/?{params}'.format(
            addonid='plugin.video.afl-video',
            params=unquote_plus(
                urlencode({
                    'season': 'CD_S2020014',
                    'current_round': 'CD_R202001401',
                    'name': 'AFL Premiership 2020'
                })))
        observed_url = mock_plugin.directory[0].get('url')
        expected = urlparse(expected_url)
        observed = urlparse(observed_url)
        # Compare each of the six urlparse components; component 4 is the
        # query string, compared as a dict so parameter order is irrelevant.
        for x in range(6):
            if x == 4:
                self.assertEqual(dict(parse_qsl(expected[x])),
                                 dict(parse_qsl(observed[x])))
            else:
                self.assertEqual(expected[x], observed[x])
def get_vod_list(self):
    """POST the authenticated VOD-list request and return the parsed
    JSON body.

    Raises requests.HTTPError for non-2xx responses.
    """
    list_url = urljoin(self.api_url, "vods.nettv/")
    post_data = {
        "user_id": self.user,
        "check": "5",
        "version": self.apk_build,
        "key": self.rapi_key,
    }
    # Trailing '&' kept deliberately — the Content-Length header below is
    # computed over this exact string, mimicking the original client.
    post_encoded = urlencode(post_data) + "&"
    content_length = len(post_encoded)
    # Headers are hand-ordered (and replace requests' defaults wholesale)
    # to mimic the original client.
    headers = OrderedDict([
        ("Referer", self.config["SXNpc2VrZWxvX3Nlc2lzdGltdV95ZXppbm9tYm9sbzAw"]),
        ("Meta", self.api_key),
        ("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8"),
        ("User-Agent", self.user_agent),
        ("Connection", "Keep-Alive"),
        ("Accept-Encoding", "gzip"),
        ("Content-Length", str(content_length)),
    ])
    req = requests.Request("POST", list_url, data=post_encoded)
    prepped = self.s.prepare_request(req)
    prepped.headers = headers
    # NOTE(review): verify=False disables TLS certificate verification.
    r = self.s.send(prepped, timeout=5, verify=False)
    r.raise_for_status()
    return r.json()
def play(c_id):
    """Resolve and play live channel *c_id*, prompting the user to pick a
    stream when more than one is available."""
    mytv.update_live_channels()
    image_headers = {"User-Agent": mytv.user_agent}
    stream_list = mytv.get_streams_by_channel_id(int(c_id))
    if stream_list.count() > 1:
        select_list = []
        for stream in stream_list:
            select_list.append("Stream {0} {1}".format(stream.token,
                                                       stream.stream_id))
        dialog = xbmcgui.Dialog()
        ret = dialog.select("Choose Stream", select_list)
        # Fixed: Dialog.select() returns -1 when the user cancels, which
        # previously picked the *last* stream via negative indexing.
        if ret < 0:
            return
        selected_stream = stream_list[ret]
    else:
        selected_stream = stream_list[0]
    resolved_stream = mytv.resolve_stream(selected_stream)
    image = xbmc_curl_encode(
        (selected_stream.livechannel.image_path, image_headers))
    title = selected_stream.livechannel.name
    li = ListItem(title, path=xbmc_curl_encode(resolved_stream))
    li.setArt({"thumb": image, "icon": image})
    if "playlist.m3u8" in resolved_stream[0]:
        li.setContentLookup(False)
        li.setMimeType("application/vnd.apple.mpegurl")
        if addon.getSetting("inputstream") == "true":
            # Property name differs between Kodi on py2 and py3.
            if sys.version_info[0] == 2:
                li.setProperty("inputstreamaddon", "inputstream.adaptive")
            else:
                li.setProperty("inputstream", "inputstream.adaptive")
            li.setProperty("inputstream.adaptive.manifest_type", "hls")
            li.setProperty("inputstream.adaptive.stream_headers",
                           urlencode(resolved_stream[1]))
    xbmcplugin.setResolvedUrl(plugin.handle, True, li)
def delete(self):
    """Mark this asset record as deleted in the PrimarySync zone.

    Posts an atomic records/modify operation that sets the record's
    isDeleted field to 1 and returns the raw HTTP response.
    """
    recordName = self._asset_record['recordName']
    recordType = self._asset_record['recordType']
    recordChangeTag = self._master_record['recordChangeTag']
    # Fixed: removed a dead first assignment to json_data (a
    # CheckIndexingState query) that was immediately overwritten.
    json_data = ('{"operations":[{'
                 '"operationType":"update",'
                 '"record":{'
                 '"recordName":"%s","recordType":"%s",'
                 '"recordChangeTag":"%s",'
                 '"fields":{"isDeleted":{"value":1}'
                 '}}}],'
                 '"zoneID":{'
                 '"zoneName":"PrimarySync"'
                 '},"atomic":true}' % (recordName, recordType,
                                       recordChangeTag))
    endpoint = self._service._service_endpoint
    params = urlencode(self._service.params)
    url = ('%s/records/modify?%s' % (endpoint, params))
    return self._service.session.post(
        url, data=json_data, headers={'Content-type': 'text/plain'}
    )
def test_get_verification_url(self):
    """get_verification_url with and without a redirect_url."""
    # Without redirect_url only the verification key appears in the query.
    url_without_redirect = get_verification_url(
        redirect_url=None,
        request=self.custom_request,
        verification_key=self.verification_key,
    )
    self.assertEqual(
        url_without_redirect,
        ('http://testserver/api/v1/profiles/verify_email?'
         'verification_key=%s' % self.verification_key),
    )
    # With redirect_url both parameters are urlencoded into the query.
    url_with_redirect = get_verification_url(
        redirect_url=self.redirect_url,
        request=self.custom_request,
        verification_key=self.verification_key,
    )
    expected_query = urlencode({
        'verification_key': self.verification_key,
        'redirect_url': self.redirect_url
    })
    self.assertEqual(
        url_with_redirect,
        'http://testserver/api/v1/profiles/verify_email?%s' % expected_query,
    )
def parse(self, text):
    """Parses a text with a running GATE server.

    Starts the server lazily, POSTs nothing — the text travels in the
    GET query string — and returns the parsed result (or None when the
    server sends an empty reply).  Any error stops the server so the
    next call restarts it, then re-raises.
    """
    if not self.server:
        self.__start_server()
    # Debug dump of the raw input; /dev/shm is assumed writable (Linux).
    with open('/dev/shm/text-{}'.format(os.getpid()), 'wt') as outf:
        print(text, file=outf)
    url = 'http://{}/process?{}'.format(
        self.gate_url,
        urlencode({
            'run': self.modules,
            'text': text.encode('utf-8')
        }))
    try:
        reply = self.__send_request(url)
        if reply:
            # Debug dump of the server's raw XML reply.
            with open('/dev/shm/xml-{}'.format(os.getpid()), 'wb') as outf:
                outf.write(reply)
            parsed = self.parser.parse_gate_xml(reply, self.get_anas)
            # Periodically restart the GATE server after processing
            # restart_every parsed items (presumably to bound its
            # resource usage — TODO confirm).
            if self.restart_every:
                self.parsed += len(parsed)
                if self.parsed >= self.restart_every:
                    self.restart_server()
            return parsed
    except GateError as ge:
        self.__stop_server()
        raise
    except:
        # Unknown failure: also stop the server before propagating.
        self.__stop_server()
        raise
def build_obtain_token_url(self, client_id, redirect_uri, scope):
    """Return the OAuth authorize URL for obtaining a token.

    *scope* is an iterable of scope names joined with spaces.
    """
    query = urlencode({
        "client_id": client_id,
        "redirect_uri": redirect_uri,
        "scope": " ".join(scope),
    })
    return "{}/oauth/authorize?{}".format(config['SP_MONEY_URL'], query)
def move_picture_to_recently_deleted(icloud, photo):
    """Soft-delete *photo*: post an atomic records/modify operation that
    flags its CPLAsset record as deleted, moving it to Recently Deleted."""
    endpoint = icloud.photos._service_endpoint
    url = '{}/records/modify?{}'.format(endpoint,
                                        urlencode(icloud.photos.params))
    asset = photo._asset_record
    record = {
        'fields': {'isDeleted': {'value': 1}},
        'recordChangeTag': asset['recordChangeTag'],
        'recordName': asset['recordName'],
        'recordType': 'CPLAsset',
    }
    payload = json.dumps({
        'atomic': True,
        'desiredKeys': ['isDeleted'],
        'operations': [{'operationType': 'update', 'record': record}],
        'zoneID': {'zoneName': 'PrimarySync'},
    })
    icloud.photos.session.post(
        url, data=payload, headers={'Content-type': 'text/plain'}).json()
def _build_request_url(self, path, params=None):
    """Compose the full API URL for *path*, appending urlencoded query
    *params* when any are given."""
    base = "http://%s%s/%s" % (self.API_SERVER, self.API_BASE, path)
    if not params:
        return base
    return base + "?" + urlencode(params)
def refresh(self, refresh_token=None):
    """Gets fresh access_token and refresh_token

    :param refresh_token: Refresh Token
    :raises ValueError: if Refresh Token value not specified
    :raises `intuitlib.exceptions.AuthClientError`: if response status != 200
    """
    # Fall back to the token stored on the client when none is passed.
    token = refresh_token or self.refresh_token
    if token is None:
        raise ValueError('Refresh token not specified')
    request_headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Authorization': get_auth_header(self.client_id, self.client_secret),
    }
    payload = urlencode({'grant_type': 'refresh_token',
                         'refresh_token': token})
    send_request('POST', self.token_endpoint, request_headers, self,
                 body=payload, session=self)
def url_replace(context, **kwargs):
    """
    Rebuild the current request's query string, keeping every existing
    GET parameter and overriding/adding only the ones given as keyword
    arguments.  Returns the urlencoded result.
    """
    merged = dict(context['request'].GET.dict(), **kwargs)
    return urlencode(merged)
def on_task_output(self, task, config):
    """Mark all accepted episodes as acquired on MyEpisodes"""
    if not task.accepted:
        # Nothing accepted, don't do anything
        return

    username = config['username']
    password = config['password']

    cookiejar = http.cookiejar.CookieJar()
    opener = request.build_opener(request.HTTPCookieProcessor(cookiejar))
    baseurl = request.Request('http://www.myepisodes.com/login.php?')
    # Fixed: urlopen POST data must be bytes on Python 3.
    loginparams = parse.urlencode({'username': username,
                                   'password': password,
                                   'action': 'Login'}).encode('utf-8')
    try:
        logincon = opener.open(baseurl, loginparams)
        # Fixed: read() returns bytes; decode before the `str in` check
        # below, which would otherwise raise TypeError on Python 3.
        loginsrc = logincon.read().decode('utf-8', 'replace')
    except URLError as e:
        log.error('Error logging in to myepisodes: %s' % e)
        return

    if str(username) not in loginsrc:
        raise plugin.PluginWarning(('Login to myepisodes.com failed, please check '
                                    'your account data or see if the site is down.'), log)

    for entry in task.accepted:
        try:
            self.mark_episode(task, entry, opener)
        except plugin.PluginWarning as w:
            log.warning(str(w))
def delete(self):
    """Deletes the photo.

    Posts an atomic records/modify operation that sets the asset
    record's isDeleted field to 1 in the PrimarySync zone and returns
    the raw HTTP response.
    """
    # Fixed: removed a dead first assignment to json_data (a
    # CheckIndexingState query) that was immediately overwritten.
    json_data = ('{"operations":[{'
                 '"operationType":"update",'
                 '"record":{'
                 '"recordName":"%s",'
                 '"recordType":"%s",'
                 '"recordChangeTag":"%s",'
                 '"fields":{"isDeleted":{"value":1}'
                 "}}}],"
                 '"zoneID":{'
                 '"zoneName":"PrimarySync"'
                 '},"atomic":true}' % (
                     self._asset_record["recordName"],
                     self._asset_record["recordType"],
                     self._master_record["recordChangeTag"],
                 ))
    endpoint = self._service.service_endpoint
    params = urlencode(self._service.params)
    url = "%s/records/modify?%s" % (endpoint, params)
    return self._service.session.post(
        url, data=json_data, headers={"Content-type": "text/plain"})
def __init__(self, service_root, session, params):
    """Set up the iCloud Photos service and verify library indexing.

    Posts a CheckIndexingState query against the private photos
    database and raises if the library has not finished indexing.

    :param service_root: base URL of the iCloud web service.
    :param session: authenticated requests-like session.
    :param params: base query parameters; copied, then extended.
    """
    self.session = session
    self.params = dict(params)
    self._service_root = service_root
    self.service_endpoint = (
        "%s/database/1/com.apple.photos.cloud/production/private"
        % self._service_root)
    self._albums = None
    self.params.update({"remapEnums": True, "getCurrentSyncToken": True})
    url = "%s/records/query?%s" % (self.service_endpoint,
                                   urlencode(self.params))
    json_data = ('{"query":{"recordType":"CheckIndexingState"},'
                 '"zoneID":{"zoneName":"PrimarySync"}}')
    request = self.session.post(url,
                                data=json_data,
                                headers={"Content-type": "text/plain"})
    response = request.json()
    indexing_state = response["records"][0]["fields"]["state"]["value"]
    # Until indexing finishes, queries against the library are unreliable.
    if indexing_state != "FINISHED":
        raise PyiCloudServiceNotActivatedException(
            "iCloud Photo Library not finished indexing. "
            "Please try again in a few minutes.")
    # TODO: Does syncToken ever change?  # pylint: disable=fixme
    # self.params.update({
    #     'syncToken': response['syncToken'],
    #     'clientInstanceId': self.params.pop('clientId')
    # })
    self._photo_assets = {}
def test_poll(self):
    """End-to-end test of the reporting server's /poll endpoint: serve a
    file from a temp dir and assert its content comes back as JSON."""
    # NOTE(review): `dir` shadows the builtin; kept as-is since the
    # nested class closes over it.
    with temporary_dir() as dir:
        class TestPantsHandler(PantsHandler):
            def __init__(self, request, client_address, server):
                # TODO(6071): BaseHTTPServer.BaseHTTPRequestHandler is an old-style class, so we must
                # invoke its __init__ like this.
                # This will become unnecessary when we no longer support python2.
                PantsHandler.__init__(
                    self,
                    settings=ReportingServer.Settings(
                        info_dir=dir,
                        template_dir=dir,
                        assets_dir=dir,
                        root=dir,
                        allowed_clients=['ALL'],
                    ),
                    renderer=None,
                    request=request,
                    client_address=client_address,
                    server=server,
                )
        safe_file_dump(os.path.join(dir, "file"), "hello")
        with http_server(TestPantsHandler) as port:
            # /poll takes a JSON list of {id, path} requests in the "q" param.
            response = requests.get("http://127.0.0.1:{}/poll?{}".format(
                port,
                urlencode({"q": json.dumps([{
                    "id": "0",
                    "path": "file"
                }])}),
            ))
            self.assertEqual(response.json(), {"0": "hello"})
def list_channels(cat=None):
    """Populate the Kodi directory with playable channels for category
    *cat*.

    Channel/stream dictionaries use obfuscated base64-like keys that are
    resolved through new_channels.custom_base64.
    """
    list_items = []
    for channel in channel_list.get("eY2hhbm5lbHNfbGlzdA=="):
        if channel.get("cat_id") == cat:
            # Skip channels that have no stream whose token we implement.
            if (len([
                    stream for stream in channel.get("Qc3RyZWFtX2xpc3Q=")
                    if stream.get("AdG9rZW4=", "0") in new_channels.implemented
            ]) == 0):
                continue
            title = new_channels.custom_base64(channel.get("ZY19uYW1l"))
            # [1:] drops the leading byte of the encoded logo URL
            # (presumably a marker byte — TODO confirm the scheme).
            icon = new_channels.custom_base64(channel.get("abG9nb191cmw=")[1:])
            # Kodi image URL with piped User-Agent header.
            image = "{0}|{1}".format(icon,
                                     urlencode({"User-Agent": user_agent}))
            c_id = channel.get("rY19pZA==")
            li = ListItem(title, offscreen=True)
            li.setProperty("IsPlayable", "true")
            li.setInfo(type="Video",
                       infoLabels={
                           "Title": title,
                           "mediatype": "video"
                       })
            li.setArt({"thumb": image, "icon": image})
            li.setContentLookup(False)
            url = plugin.url_for(play, c_id=c_id)
            list_items.append((url, li, False))
    xbmcplugin.addDirectoryItems(plugin.handle, list_items)
    xbmcplugin.setContent(plugin.handle, "videos")
    xbmcplugin.endOfDirectory(plugin.handle)
def generate_xero_refresh_token(authorization_code: str) -> str:
    """
    Exchange a Xero OAuth authorization code for a refresh token.

    :param authorization_code: code returned to the configured redirect URI
    :returns: the refresh token string
    :raises InvalidTokenError: on 401 (wrong secret/token) or any other
        unexpected status code
    :raises InternalServerError: on 500 from Xero
    """
    api_data = {
        'grant_type': 'authorization_code',
        'code': authorization_code,
        'redirect_uri': settings.XERO_REDIRECT_URI
    }
    # HTTP Basic credentials: base64("client_id:client_secret").
    auth = '{0}:{1}'.format(settings.XERO_CLIENT_ID, settings.XERO_CLIENT_SECRET)
    auth = base64.b64encode(auth.encode('utf-8'))

    request_header = {
        'Accept': 'application/json',
        'Content-type': 'application/x-www-form-urlencoded',
        'Authorization': 'Basic {0}'.format(str(auth.decode()))
    }

    token_url = settings.XERO_TOKEN_URI
    response = requests.post(url=token_url, data=urlencode(api_data), headers=request_header)

    if response.status_code == 200:
        return json.loads(response.text)['refresh_token']
    elif response.status_code == 401:
        raise InvalidTokenError('Wrong client secret or/and refresh token', response.text)
    elif response.status_code == 500:
        raise InternalServerError('Internal server error', response.text)
    # Fixed: any other status (e.g. 400 invalid grant) previously fell
    # off the end and silently returned None despite the -> str annotation.
    raise InvalidTokenError(
        'Failed to generate refresh token (status {0})'.format(response.status_code),
        response.text)
def __init__(self, service_root, session, params):
    """Set up the iCloud Photos service and verify library indexing.

    Posts a CheckIndexingState query against the private photos
    database and raises if the library has not finished indexing.

    :param service_root: base URL of the iCloud web service.
    :param session: authenticated requests-like session.
    :param params: base query parameters; copied, then extended.
    """
    self.session = session
    self.params = dict(params)
    self._service_root = service_root
    self._service_endpoint = \
        ('%s/database/1/com.apple.photos.cloud/production/private'
         % self._service_root)
    self._albums = None
    self.params.update({'remapEnums': True, 'getCurrentSyncToken': True})
    url = ('%s/records/query?%s' %
           (self._service_endpoint, urlencode(self.params)))
    json_data = ('{"query":{"recordType":"CheckIndexingState"},'
                 '"zoneID":{"zoneName":"PrimarySync"}}')
    request = self.session.post(url,
                                data=json_data,
                                headers={'Content-type': 'text/plain'})
    response = request.json()
    indexing_state = response['records'][0]['fields']['state']['value']
    # Until indexing finishes, queries against the library are unreliable.
    # NOTE(review): the exception class name is misspelled ("Errror");
    # it matches the declaration elsewhere in the project, so it is not
    # renamed here.
    if indexing_state != 'FINISHED':
        raise PyiCloudServiceNotActivatedErrror(
            ('iCloud Photo Library not finished indexing. Please try '
             'again in a few minutes'), None)
    # TODO: Does syncToken ever change?
    # self.params.update({
    #     'syncToken': response['syncToken'],
    #     'clientInstanceId': self.params.pop('clientId')
    # })
    self._photo_assets = {}
def play_vod():
    """Resolve and play the VOD channel given in plugin.args, prompting
    the user to pick a quality when several streams are available."""
    mytv.update_vod_channels()
    image_headers = {"User-Agent": mytv.user_agent}
    channel = int(plugin.args["channel"][0])
    stream_list = mytv.get_vodstreams_by_channel_id(channel)
    if stream_list.count() > 1:
        select_list = []
        for stream in stream_list:
            select_list.append(stream.quality)
        dialog = xbmcgui.Dialog()
        ret = dialog.select("Choose Stream", select_list)
        # Fixed: Dialog.select() returns -1 when the user cancels, which
        # previously picked the *last* stream via negative indexing.
        if ret < 0:
            return
        selected_stream = stream_list[ret]
    else:
        selected_stream = stream_list[0]
    resolved_stream = mytv.resolve_stream(selected_stream)
    image = xbmc_curl_encode((selected_stream.vodchannel.image_path,
                              image_headers))
    title = selected_stream.vodchannel.name
    li = ListItem(title, path=xbmc_curl_encode(resolved_stream))
    li.setArt({"thumb": image, "icon": image})
    if "playlist.m3u8" in resolved_stream[0]:
        li.setContentLookup(False)
        li.setMimeType("application/vnd.apple.mpegurl")
        if addon.getSetting("inputstream") == "true":
            # Property name differs between Kodi major versions.
            if int(xbmc.__version__[0]) < 3:
                li.setProperty("inputstreamaddon", "inputstream.adaptive")
            else:
                li.setProperty("inputstream", "inputstream.adaptive")
            li.setProperty("inputstream.adaptive.manifest_type", "hls")
            li.setProperty("inputstream.adaptive.stream_headers",
                           urlencode(resolved_stream[1]))
    xbmcplugin.setResolvedUrl(plugin.handle, True, li)
def test_get_verification_url(self):
    """get_verification_url with and without a redirect_url."""
    base = 'http://testserver/api/v1/profiles/verify_email?'
    common = {
        "request": self.custom_request,
        "verification_key": self.verification_key,
    }

    # Without redirect_url only the verification key appears in the query.
    plain_url = get_verification_url(redirect_url=None, **common)
    self.assertEqual(plain_url,
                     base + 'verification_key=%s' % self.verification_key)

    # With redirect_url both parameters are urlencoded into the query.
    redirect_url = get_verification_url(redirect_url=self.redirect_url,
                                        **common)
    query = urlencode({
        'verification_key': self.verification_key,
        'redirect_url': self.redirect_url
    })
    self.assertEqual(redirect_url, base + query)
def get_bearer_token(self, auth_code, realm_id=None):
    """Gets access_token and refresh_token using authorization code

    :param auth_code: Authorization code received from redirect_uri
    :param realm_id: Realm ID/Company ID of the QBO company
    :raises `intuitlib.exceptions.AuthClientError`: if response status != 200
    """
    # Remember the realm on the client when supplied or already known.
    effective_realm = realm_id or self.realm_id
    if effective_realm is not None:
        self.realm_id = effective_realm

    request_headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Authorization': get_auth_header(self.client_id, self.client_secret),
    }
    payload = urlencode({
        'grant_type': 'authorization_code',
        'code': auth_code,
        'redirect_uri': self.redirect_uri,
    })
    send_request('POST', self.token_endpoint, request_headers, self,
                 body=payload, session=self)
def handle_POST(self):
    """Proxy a login POST to the remote Splunkbase login endpoint and
    return the resulting session token as JSON or XML.

    Raises splunk.RESTException on HTTP errors from the remote end and
    splunk.AuthenticationFailed on any other failure.
    """
    self.verifyAllowRemote()
    try:
        post_args = urlencode(self.request["form"])
        if sys.version_info >= (3, 0):
            # urlopen on py3 requires bytes for POST data.
            post_args = post_args.encode()
        logger.debug("Logging into %s" % self._login)
        bundle_paths.BundleInstaller().validate_server_cert(
            self._login, self._sslpol)
        # Forward post arguments, including username and password.
        with closing(urlopen(self._login, post_args, URLOPEN_TIMEOUT)) as f:
            root = safe_lxml.parse(f).getroot()
            token = root.xpath("a:id", namespaces=NSMAP)[0].text
        if self.request["output_mode"] == "json":
            self.response.setHeader('content-type', 'application/json')
            sessDict = {"response": {"sessionKey": token}}
            self.response.write(json.dumps(sessDict))
        else:
            # Generate response.
            response = etree.Element("response")
            sessionKey = etree.SubElement(response, "sessionKey")
            sessionKey.text = token
            self.response.setHeader('content-type', 'text/xml')
            self.response.write(
                etree.tostring(response, pretty_print=True))
        logger.debug("Login successful")
    except HTTPError as e:
        if e.code in [401, 405]:
            # Returning 401 logs off current session
            # Splunkbase returns 405 when only password is submitted
            raise splunk.RESTException(400, e.msg)
        raise splunk.RESTException(e.code, e.msg)
    except Exception as e:
        logger.exception(e)
        raise splunk.AuthenticationFailed
def on_task_output(self, task, config):
    """Queue every accepted entry's NZB URL with the configured SABnzbd
    instance, failing entries that SABnzbd rejects or that cannot be
    reached."""
    for entry in task.accepted:
        if task.options.test:
            log.info('Would add into sabnzbd: %s' % entry['title'])
            continue

        params = self.get_params(config)
        # allow overriding the category
        if 'category' in entry:
            # Dirty hack over the next few lines to strip out non-ascii
            # chars. We're going to urlencode this, which causes
            # serious issues in python2.x if it's not ascii input.
            params['cat'] = ''.join([x for x in entry['category'] if ord(x) < 128])
        params['name'] = ''.join([x for x in entry['url'] if ord(x) < 128])
        # add cleaner nzb name (undocumented api feature)
        params['nzbname'] = ''.join([x for x in entry['title'] if ord(x) < 128])

        request_url = config['url'] + urlencode(params)
        log.debug('request_url: %s' % request_url)
        try:
            response = task.requests.get(request_url)
        except RequestException as e:
            log.critical('Failed to use sabnzbd. Requested %s' % request_url)
            log.critical('Result was: %s' % e.args[0])
            entry.fail('sabnzbd unreachable')
            if task.options.debug:
                log.exception(e)
            continue

        # SABnzbd signals rejection in the body text rather than the status.
        if 'error' in response.text.lower():
            entry.fail(response.text.replace('\n', ''))
        else:
            log.info('Added `%s` to SABnzbd' % (entry['title']))
def log_event(settings, event, source, sourcetype, host, index):
    """Send *event* to Splunk's simple receiver endpoint.

    :param settings: mapping providing 'server_uri' and 'session_key'.
    :returns: True when the receiver answered with a 2xx status,
        False otherwise (including when *event* is None or on error).
    """
    if event is None:
        sys.stderr.write("ERROR No event provided\n")
        return False

    query = [('source', source), ('sourcetype', sourcetype), ('index', index)]
    if host:
        query.append(('host', host))
    url = '%s/services/receivers/simple?%s' % (settings.get('server_uri'),
                                               urlencode(query))
    try:
        # Fixed: `unicode` does not exist on Python 3 (NameError);
        # str() handles both string and non-string events.
        encoded_body = str(event).encode('utf-8')
        req = Request(
            url, encoded_body,
            {'Authorization': 'Splunk %s' % settings.get('session_key')})
        res = urlopen(req)
        if 200 <= res.code < 300:
            sys.stderr.write(
                "DEBUG receiver endpoint responded with HTTP status=%d\n" %
                res.code)
            return True
        else:
            sys.stderr.write(
                "ERROR receiver endpoint responded with HTTP status=%d\n" %
                res.code)
            return False
    except HTTPError as e:
        sys.stderr.write("ERROR Error sending receiver request: %s\n" % e)
    except URLError as e:
        sys.stderr.write("ERROR Error sending receiver request: %s\n" % e)
    except Exception as e:
        sys.stderr.write("ERROR Error %s\n" % e)
    return False
def list(self, node_id=None, password=None):
    """Fetch messages addressed to this node from the DHT server.

    Rate-limited by self.check_interval. Returns a (possibly empty)
    list of parsed DHT responses; returns [] when networking is off,
    when throttled, or on any error.
    """
    if not self.networking:
        return []
    node_id = node_id or self.node_id
    password = password or self.password
    try:
        # Limit check time to prevent DoSing check server.
        current = time.time()
        if self.last_check:
            elapsed = current - self.last_check
            if elapsed >= self.check_interval:
                self.last_check = current
            else:
                return []
        else:
            self.last_check = current
        # Record DHT list.
        self.debug_print("In sim DHT list")
        # Get messages sent to us in the "DHT".
        call = dht_msg_endpoint + "?call=list&"
        call += urlencode({"node_id": node_id}) + "&"
        call += urlencode({"password": password})
        # Make API call.
        messages = requests.get(call, timeout=5).text
        messages = json.loads(messages)
        self.debug_print("DHT MSG: " + str(messages))
        # A single message may come back as a bare object; normalise.
        if type(messages) == dict:
            messages = [messages]
        # Return a list of responses.
        ret = []
        if type(messages) == list:
            for msg in messages:
                dht_response = self.build_dht_response(msg)
                ret.append(dht_response)
        return ret
    except Exception as e:
        # Best-effort: swallow errors and behave like an empty inbox.
        self.debug_print("Exception in dht msg list")
        print(e)
        return []
def search(self, task, entry, config=None):
    """Search newzleech.com for each of the entry's search strings and
    return matching, complete NZBs as Entry objects (size recorded in
    megabytes, used for search_sort so the largest file wins)."""
    txheaders = {
        'User-Agent': 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Language': 'en-us,en;q=0.5',
        'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'Keep-Alive': '300',
        'Connection': 'keep-alive',
    }

    nzbs = set()
    for search_string in entry.get('search_strings', [entry['title']]):
        # Fixed: use the current search string — the loop previously
        # ignored it and always queried entry['title'].
        query = search_string
        url = u'http://newzleech.com/?%s' % str(urlencode(
            {'q': query.encode('latin1'), 'm': 'search', 'group': '',
             'min': 'min', 'max': 'max', 'age': '', 'minage': '',
             'adv': ''}))
        # log.debug('Search url: %s' % url)

        page = task.requests.get(url, headers=txheaders)
        soup = get_soup(page.text)

        for item in soup.find_all('table', attrs={'class': 'contentt'}):
            subject_tag = item.find('td', attrs={'class': 'subject'}).__next__
            subject = ''.join(subject_tag.find_all(text=True))
            complete = item.find('td', attrs={'class': 'complete'}).contents[0]
            size = item.find('td', attrs={'class': 'size'}).contents[0]
            nzb_url = 'http://newzleech.com/' + item.find(
                'td', attrs={'class': 'get'}).next.get('href')

            # generate regexp from entry title and see if it matches subject
            regexp = query
            wildcardize = [' ', '-']
            for wild in wildcardize:
                regexp = regexp.replace(wild, '.')
            regexp = '.*' + regexp + '.*'
            # log.debug('Title regexp: %s' % regexp)

            if re.match(regexp, subject):
                log.debug('%s matches to regexp' % subject)
                if complete != u'100':
                    log.debug('Match is incomplete %s from newzleech, skipping ..' % query)
                    continue
                log.info('Found \'%s\'' % query)

                try:
                    size_num = float(size[:-3])
                except (ValueError, TypeError):
                    log.error('Failed to parse_size %s' % size)
                    size_num = 0
                # convert into megabytes
                if 'GB' in size:
                    size_num *= 1024
                if 'KB' in size:
                    size_num /= 1024

                # choose largest file
                nzbs.add(Entry(title=subject, url=nzb_url,
                               content_size=size_num, search_sort=size_num))
    return nzbs
def query_string(self):
    """Return ``self.query`` encoded as an x-www-form-urlencoded string.

    ``self.query`` maps each key to a list of values; the flattening
    preserves repeated keys (e.g. {'a': ['1', '2']} -> 'a=1&a=2').
    """
    # `.items()` replaces the six/Python-2 `iteritems` shim — identical
    # behaviour on Python 3, no external compatibility helper needed.
    pairs = [
        (key, value)
        for key, values in self.query.items()
        for value in values
    ]
    return urlencode(pairs)
def compute_attributes(self, **kwargs):
    """Compute_attributes builds value for instance Project.

    Queries the project API (forcing JSON output) and populates
    ``self.name`` and ``self.description`` from the response.
    """
    # Always request the JSON representation of the project.
    kwargs['media'] = 'json'
    query = urlencode(kwargs)
    response = load(urlopen(self.url + '?' + query))
    project = response['project']
    self.name = project['name']
    self.description = project['description']
def getUserAvatar(self, email, size, defaultAvatarUrl):
    """Redirect the request to the Gravatar image for *email*.

    :param email: user e-mail address (keyed on its lower-cased md5).
    :param size: requested image size in pixels.
    :param defaultAvatarUrl: fallback image url, overridden by
        ``self.default`` unless that is the sentinel "url".
    :raises resource.Redirect: always — carries the gravatar URL.
    """
    # BUG FIX: hashlib.md5 requires bytes on Python 3; the original
    # passed the (unicode) e-mail string directly, raising TypeError.
    email_lc = email.lower()
    if not isinstance(email_lc, bytes):
        email_lc = email_lc.encode('utf-8')
    # construct the url
    gravatar_url = "//www.gravatar.com/avatar/"
    gravatar_url += hashlib.md5(email_lc).hexdigest() + "?"
    if self.default != "url":
        defaultAvatarUrl = self.default
    gravatar_url += urlencode({'s': str(size), 'd': defaultAvatarUrl})
    raise resource.Redirect(gravatar_url)
def _build_url(self, **kwargs):
    """Builds the url with query parameters from the arguments.

    Defaults come from ``self.params``; keyword arguments override them.
    """
    merged = self.params.copy()
    merged.update(kwargs)
    log.debug('Configured parameters: {}'.format(merged))
    base = '{}/api?'.format(self.base_url)
    return base + urlencode(merged)
def lookup_myepisodes_id(self, entry, opener, session):
    """Populates myepisodes_id field for an entry, and returns the id.

    Call will also set entry field `myepisode_id` if successful.

    Return: myepisode id

    Raises:
        LookupError if entry does not have field series_name
    """
    # Don't need to look it up if we already have it.
    if entry.get('myepisodes_id'):
        return entry['myepisodes_id']

    if not entry.get('series_name'):
        raise LookupError('Cannot lookup myepisodes id for entries without series_name')
    series_name = entry['series_name']

    # First check if we already have a myepisodes id stored for this series
    myepisodes_info = session.query(MyEpisodesInfo).\
        filter(MyEpisodesInfo.series_name == series_name.lower()).first()
    if myepisodes_info:
        entry['myepisodes_id'] = myepisodes_info.myepisodes_id
        return myepisodes_info.myepisodes_id

    # Get the series name from thetvdb to increase match chance on myepisodes
    if entry.get('tvdb_series_name'):
        query_name = entry['tvdb_series_name']
    else:
        try:
            series = lookup_series(name=series_name, tvdb_id=entry.get('tvdb_id'))
            query_name = series.seriesname
        except LookupError as e:
            log.warning('Unable to lookup series `%s` from tvdb, using raw name.'
                        % series_name)
            query_name = series_name

    baseurl = request.Request('http://www.myepisodes.com/search.php?')
    params = parse.urlencode({'tvshow': query_name, 'action': 'Search myepisodes.com'})
    try:
        con = opener.open(baseurl, params)
        # BUG FIX: .read() returns bytes under Python 3, but the regex
        # below is a str pattern; decode before matching.
        txt = con.read().decode('utf-8', 'ignore')
    except URLError as e:
        log.error('Error searching for myepisodes id: %s' % e)
        # BUG FIX: the original fell through with `txt` unbound, which
        # raised NameError at the re.search below; give up instead.
        return None

    matchObj = re.search(r'&showid=([0-9]*)">' + query_name + '</a>', txt,
                         re.MULTILINE | re.IGNORECASE)
    if matchObj:
        myepisodes_id = matchObj.group(1)
        db_item = session.query(MyEpisodesInfo).\
            filter(MyEpisodesInfo.myepisodes_id == myepisodes_id).first()
        if db_item:
            log.info('Changing name to `%s` for series with myepisodes_id %s' %
                     (series_name.lower(), myepisodes_id))
            db_item.series_name = series_name.lower()
        else:
            session.add(MyEpisodesInfo(series_name.lower(), myepisodes_id))
        entry['myepisodes_id'] = myepisodes_id
        return myepisodes_id
    # No match found on myepisodes: implicit None (unchanged behaviour).
def extract(self, text, service, timeout, mode="text"):
    """Extract named entities from document with 'service'.

    'service' can be any of the constants defined in this module.
    Three-step flow: submit the document, request an annotation for it,
    then fetch the resulting entities. Raises Exception on any non-2xx
    HTTP status.
    """
    def _post_json(path, fields):
        # POST form-encoded fields, enforce a 2xx status, decode JSON body.
        self.http.request("POST", path, urlencode(fields), self._headers)
        resp = self.http.getresponse()
        if int(resp.status / 100) != 2:
            raise Exception("%s %s" % (resp.status, resp.reason))
        return json_loads(resp.read().decode("utf-8"))

    # submit document
    document = _post_json("/api/document", {mode: text, "key": self.api_key})
    id_document = document["idDocument"]

    # annotate document
    annotation = _post_json("/api/annotation",
                            {"extractor": service,
                             "idDocument": id_document,
                             "timeout": timeout,
                             "key": self.api_key})
    id_annotation = annotation["idAnnotation"]

    # get extraction from the annotation
    self.http.request("GET",
                      "/api/entity" + "?key=%s&idAnnotation=%s" % (self.api_key, id_annotation),
                      headers=self._headers)
    resp = self.http.getresponse()
    if int(resp.status / 100) != 2:
        raise Exception("%s %s" % (resp.status, resp.reason))
    return json_loads(resp.read().decode("utf-8"))
def result(*args, **kwargs):
    """Build a URL under `url_root`: positional args become percent-encoded
    path segments, keyword args become the query string."""
    url = url_root
    for component in args:
        # Encode every character (safe="") so "/" inside a component
        # cannot create an extra path level.
        segment = quote("%s" % component, safe="")
        if url.endswith("/"):
            url += segment
        else:
            url += "/" + segment
    if kwargs:
        url += "?" + urlencode(kwargs)
    return url
def test_get_verification_email_data_with_verification_and_redirect_urls(
        self):
    """The verification e-mail body must contain the url-encoded
    verification key and redirect url."""
    email_data = self._get_email_data(include_redirect_url=True)
    encoded_url = urlencode({
        'verification_key': self.verification_key,
        'redirect_url': self.redirect_url
    })
    # BUG FIX: the original asserted on encoded_url.replace('&', '&'),
    # a no-op (dead code, probably a mangled '&amp;' escape); assert on
    # the encoded url directly. NOTE(review): if message_txt actually
    # HTML-escapes ampersands, the intended call was
    # .replace('&', '&amp;') — confirm against the message template.
    self.assertIn(encoded_url, email_data.get('message_txt'))
def testOperationHistory(self):
    """operation_history forwards its options as the request body and
    returns the stubbed (empty) history list."""
    opts = {"foo": "bar", "foo2": "bar2"}
    self.addResponse("operation-history", [])
    result = self.api.operation_history(opts)
    self.assertEqual(result, [])
    # The options must have been sent form-encoded in the request body.
    self.assertEqual(responses.calls[0].request.body, urlencode(opts))
def list(self, node_id=None, password=None, list_pop=1, timeout=None):
    """Fetch messages queued for this node in the simulated DHT.

    :param node_id: node to list messages for (defaults to self.node_id).
    :param password: auth password (defaults to self.password).
    :param list_pop: whether the server should pop returned messages.
    :param timeout: request timeout; None selects long-polling (no
        timeout) when LONG_POLLING is enabled, otherwise 4 seconds.
    :returns: list of DHT response objects, [] on error or when
        networking is disabled.
    """
    if not self.networking:
        return []

    node_id = node_id or self.node_id
    password = password or self.password
    try:
        # Build the "list" API call for the simulated DHT.
        call = (dht_msg_endpoint + "?call=list&"
                + urlencode({"node_id": node_id}) + "&"
                + urlencode({"password": password}) + "&"
                + urlencode({"list_pop": list_pop}))

        # Pick the default timeout based on the long-polling setting.
        if timeout is None:
            timeout = None if LONG_POLLING else 4

        # Make API call; keep the handle alive for later cleanup.
        handle = requests.get(call, timeout=timeout)
        self.handles.append(handle)
        # Kept from the original: starts consuming the streamed body.
        content_gen = handle.iter_content()
        messages = json.loads(handle.text)

        # Normalise a single message (dict) into a one-element list.
        if type(messages) is dict:
            messages = [messages]

        # Wrap each raw message in a DHT response object.
        results = []
        if type(messages) is list:
            for msg in messages:
                results.append(self.build_dht_response(msg))
        return results
    except Exception as e:
        print("EXCEPTION IN DHT MSG LIST")
        self.debug_print("Exception in dht msg list")
        print(e)
        return []
def get_service_url(client, service, parameters):
    """
    Figure out the URL for the given service call.

    This isn't publicly available from the ObsPy client, we need to use
    internal APIs, so those messy details are encapsulated here.

    :param client: FDSN client whose private URL builder is used.
    :param service: service name (e.g. "dataselect").
    :param parameters: dict of query parameters for the call.
    :returns: the service URL, or a "<base_url> <service> <query>"
        fallback string when the private API is missing or fails.
    """
    try:
        return client._create_url_from_parameters(service, {}, parameters)
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; only genuine errors should trigger the fallback.
    except Exception:
        return "%s %s %s" % (
            client.base_url, service, urlencode(parameters)
        )
def request(self, api_query, url=None):
    """
    e.g. {'action': 'query', 'meta': 'userinfo'}. format=json not required.
    Function returns a python dict that resembles the api's json response
    """
    # Force JSON output; note this mutates the caller's dict in place
    # (unchanged from the original behaviour).
    api_query['format'] = 'json'
    target = url or self.base_url
    endpoint = target + "/api.php?" + urlencode(api_query)
    return self.mwoauth.post(endpoint, content_type="text/plain").data
def add_query_params(url, params):
    """Return *url* with the entries of *params* merged into its query string.

    Only truthy values are added; a key already present in the url's query
    is overwritten. Existing path, fragment and other url parts are kept.

    :param url: the url to extend.
    :param params: mapping of query parameter name -> value.
    :returns: the rebuilt url string.
    """
    scheme, netloc, path, query_string, fragment = urlsplit(url)
    query_params = parse_qs(query_string)
    # `.items()` replaces the six/Python-2 `iteritems` shim — identical
    # behaviour on Python 3, no compatibility helper needed.
    for name, value in params.items():
        if value:
            query_params[name] = [value]
    new_query_string = urlencode(query_params, doseq=True)
    return urlunsplit((scheme, netloc, path, new_query_string, fragment))
def build_config(self, config):
    """Normalise the plugin configuration, deriving `url` when absent.

    Maps the 'tv' category to newznab's 'tvsearch', and composes the API
    url from 'website' + 'apikey' when no explicit 'url' is configured.
    """
    log.debug(type(config))
    if config['category'] == 'tv':
        config['category'] = 'tvsearch'
    # Only derive a url when none was given and we have the ingredients.
    if 'url' not in config and 'apikey' in config and 'website' in config:
        params = {
            't': config['category'],
            'apikey': config['apikey'],
            'extended': 1,
        }
        config['url'] = config['website'] + '/api?' + urlencode(params)
    return config
def getUserAvatar(self, email, size, defaultAvatarUrl):
    """Redirect to the Gravatar image for *email*.

    Always raises resource.Redirect carrying the constructed gravatar url.
    """
    # Gravatar keys on the md5 digest of the lower-cased e-mail address.
    digest = hashlib.md5(unicode2bytes(email.lower())).hexdigest()
    if self.default != "url":
        defaultAvatarUrl = self.default
    # Sort parameters by key so the generated url is deterministic.
    params = {'d': defaultAvatarUrl, 's': str(size)}
    query = urlencode(sorted(params.items()))
    gravatar_url = "//www.gravatar.com/avatar/" + digest + "?" + query
    raise resource.Redirect(gravatar_url)
def testResponsePayment(self):
    """process_payment posts the options form-encoded and returns the
    stubbed success payload."""
    self.addResponse("process-payment", {"status": "success"})
    opts = {"foo": "bar", "foo2": "bar2"}
    result = self.api.process_payment(opts)
    self.assertEqual(result, {"status": "success"})
    # The options must have been sent form-encoded in the request body.
    self.assertEqual(responses.calls[0].request.body, urlencode(opts))