def query_hph(method='get', **argvs):
    """Issue a signed request to the HPH API.

    Query parameters are sorted by key, joined with the shared
    ``private_key`` and a unix timestamp, and signed with MD5; the
    resulting ``sign``/``ts`` pair is appended to the request.

    :param method: HTTP method to use (default ``'get'``).
    :param argvs: query parameters to send.
    :return: the ``data`` payload of the JSON response on success,
        ``False`` on any request/response failure or non-zero API code.
    """
    data = OrderedDict(sorted(argvs.items(), key=lambda t: t[0]))
    time_s = int(time.time())
    # unquote() undoes the percent-encoding so the signature is computed
    # over the raw "k=v&k=v" string, as the server expects.
    data_str = unquote(urlencode(data))
    hash_str = 'private_key={0}&ts={1}&{2}'.format(private_key, time_s, data_str)
    sign = hashlib.md5(hash_str.encode('utf-8')).hexdigest()
    data.update({'sign': sign, 'ts': str(time_s)})
    par_str = unquote(urlencode(data))
    url = build_url(url=URL, qs=par_str)
    try:
        # Honor the ``method`` argument; previously it was ignored and the
        # request was always issued as GET.
        response = requests.request(method, url)
        response_dict = response.json()
    except requests.RequestException:
        return False
    response_code = response_dict.get('code')
    if response_code == 0:
        return response_dict['data']
    else:
        print('no info')
        return False
def rtmp_publish_url(self, stream_id, app_name=None):
    """Build the RTMP publish URL for *stream_id* under *app_name*.

    Falls back to the configured application name when *app_name* is
    omitted; appends an auth token for AUTH_TYPE_A, and rejects any
    other non-empty auth type.
    """
    if app_name is None:
        app_name = self._app_name
    path = '/' + urljoin(quote_plus(app_name) + '/', quote_plus(stream_id))
    params = {'vhost': self._play_domain}
    if not self._auth_type:
        pass  # no authentication configured: plain URL
    elif self._auth_type == self.AUTH_TYPE_A:
        params['auth_key'] = self._calc_auth_token(path)
    else:
        raise IVRError('Unknown auth type {0}'.format(self._auth_type))
    return 'rtmp://{0}{1}?{2}'.format(self._publish_domain, path, urlencode(params))
def _encode_params(data):
    """Encode parameters in a piece of data.

    Will successfully encode parameters when passed as a dict or a list of
    2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
    if parameters are supplied as a dict.
    """
    # NOTE: ``basestring`` makes this Python-2-oriented code.
    if data is None:
        return ''
    elif isinstance(data, basestring):
        # Already a pre-encoded string: pass through untouched.
        return data
    elif hasattr(data, 'read'):
        # File-like object: let the transport stream it.
        return data
    elif hasattr(data, '__iter__'):
        result = []
        # lower_items() lower-cases keys; sorting makes output deterministic.
        for k, vs in sorted(CaseInsensitiveDict(data).lower_items()):
            if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                vs = [vs]  # normalise scalar values to one-element lists
            for v in vs:
                if v is not None:  # pairs with a None value are dropped
                    result.append(
                        (k.encode('utf-8') if isinstance(k, str) else k,
                         v.encode('utf-8') if isinstance(v, str) else v))
        return urlencode(result, doseq=True)
    else:
        return data
def build_url(*parts, **query):
    """Assemble an https URL on QUANTOPIAN_HOST from path *parts* and
    keyword *query* parameters."""
    encoded = urlencode(query)
    path = '/'.join(part.lstrip('/') for part in parts)
    suffix = '?' + encoded if encoded else ''
    return 'https://{0}/{1}{2}'.format(settings.QUANTOPIAN_HOST, path, suffix)
def _send(self): """POST the user's question and all required information to the Cleverbot service Cleverbot obfuscates how it generates the 'icognocheck' token. The token is currently the md5 checksum of the 10th through 36th characters of the encoded data. This may change in the future. """ # Set data as appropriate if self.conversation: linecount = 1 for line in reversed(self.conversation): linecount += 1 self.data['vText' + str(linecount)] = line if linecount == 8: break # Generate the token enc_data = urlencode(self.data) digest_txt = enc_data[9:35] token = hashlib.md5(digest_txt.encode('utf-8')).hexdigest() self.data['icognocheck'] = token # POST the data to Cleverbot and return return self.session.post(self.SERVICE_URL, data=self.data, headers=self.headers)
def _get_auth(self):
    """Log in to put.io with username/password and extract an OAuth access
    token from the redirect chain.

    Returns the token string, or None if any step fails.
    """
    self.url = 'https://api.put.io/login'
    self.session.headers['Accept'] = 'application/json'
    # Where the login form should send us next: the OAuth2 authorize
    # endpoint, which in turn redirects to redirect_uri with the token
    # carried in the URL fragment.
    next_params = {
        'client_id': self.client_id,
        'response_type': 'token',
        'redirect_uri': self.redirect_uri
    }
    post_data = {
        'name': self.username,
        'password': self.password,
        'next': '/v2/oauth2/authenticate?' + urlencode(next_params)
    }
    try:
        # Don't follow redirects automatically so the 'location' headers
        # can be inspected at each hop.
        response = self.session.post(self.url, data=post_data, allow_redirects=False)
        response.raise_for_status()
        response = self.session.get(response.headers['location'], allow_redirects=False)
        response.raise_for_status()
        # The token rides in the fragment of the final redirect URL.
        resulting_uri = '{redirect_uri}#access_token=(.*)'.format(
            redirect_uri=re.escape(self.redirect_uri))
        self.auth = re.search(resulting_uri, response.headers['location']).group(1)
    except Exception as error:
        helpers.handle_requests_exception(error)
        self.auth = None
    return self.auth
def _get(self, method, format, params=None, headers=None, raw=False):
    """Performs GET request"""
    from requests.compat import urlencode
    # Build the endpoint URL and append the query string when present.
    endpoint = self._build_url(method, format)
    if params:
        endpoint = endpoint + '?' + urlencode(params, True)
    return self._request(endpoint, None, headers, format, raw)
async def _send(self): """POST the user's question and all required information to the Cleverbot service Cleverbot obfuscates how it generates the 'icognocheck' token. The token is currently the md5 checksum of the 10th through 36th characters of the encoded data. This may change in the future. """ # Set data as appropriate if self.conversation: linecount = 1 for line in reversed(self.conversation): linecount += 1 self.data['vText' + str(linecount)] = line if linecount == 8: break # Generate the token enc_data = urlencode(self.data) digest_txt = enc_data[9:35] token = hashlib.md5(digest_txt.encode('utf-8')).hexdigest() self.data['icognocheck'] = token # POST the data to Cleverbot and return resp = await self.session.post(self.SERVICE_URL, data=self.data, headers=self.headers) return resp
def get_url_from_form(self, td):
    """Resolve the submit URL of the form inside *td*, carrying along all
    hidden input fields as query parameters."""
    form = td.form
    hidden_inputs = form.find_all('input', {'type': 'hidden'})
    query = urlencode({field['name']: field['value'] for field in hidden_inputs})
    relative = form['action'] + '?' + query
    return urljoin(self.base_url, relative)
def updateCache(self):
    """Refresh the provider cache from a fixed set of TV usenet groups."""
    # check if we should update
    if not self.shouldUpdate():
        return
    # clear cache
    self._clearCache()
    # set updated
    self.setLastUpdate()
    cl = []
    for group in ["alt.binaries.hdtv", "alt.binaries.hdtv.x264", "alt.binaries.tv",
                  "alt.binaries.tvseries", "alt.binaries.teevee"]:
        urlArgs = {"max": 50, "g": group}
        url = self.provider.urls['rss'] + "?" + urlencode(urlArgs)
        logger.log("Cache update URL: {}".format(url), logger.DEBUG)
        # Collect parsed DB actions for every entry of every group's feed.
        for item in self.getRSSFeed(url)["entries"] or []:
            ci = self._parseItem(item)
            if ci:
                cl.append(ci)
    # Apply all accumulated cache rows in a single mass action.
    if len(cl) > 0:
        cache_db_con = self._getDB()
        cache_db_con.mass_action(cl)
def wrapper(self):
    """Drive a simulated Cloudflare IUAM challenge: register the challenge
    page, the expected answer-submission redirect, and the final page, then
    run *test* against the scraper."""
    html = fixtures(filename).decode('utf-8')
    params = OrderedDict()
    # Newer challenge pages carry an extra hidden "s" value; include it
    # only when the fixture actually has one.
    s = re.search(r'name="s"\svalue="(?P<s_value>[^"]+)', html)
    if s:
        params['s'] = s.group('s_value')
    params['jschl_vc'] = re.search(r'name="jschl_vc" value="(\w+)"', html).group(1)
    params['pass'] = re.search(r'name="pass" value="(.+?)"', html).group(1)
    params['jschl_answer'] = jschl_answer
    submit_uri = '{}/cdn-cgi/l/chk_jschl?{}'.format(
        url, urlencode(params))
    responses.add(ChallengeResponse(url=url, body=fixtures(filename)))
    def onRedirect(request):
        # We don't register the last response unless the redirect occurs
        responses.add(DefaultResponse(url=url, body=requested_page))
    responses.add(RedirectResponse(url=submit_uri, callback=onRedirect))
    return test(self, **cfscrape_kwargs)
def get_authorize_url(self, client_id, audience=None, state=None, redirect_uri=None,
                      response_type='code', scope='openid', quote_via=quote_plus):
    """Build the OAuth2 /authorize URL for this domain.

    Use ``quote_via=urllib.quote`` to urlencode spaces as ``%20``;
    the default (``quote_plus``) encodes them as ``+``.
    """
    params = {
        'client_id': client_id,
        'audience': audience,
        'response_type': response_type,
        'scope': scope,
        'state': state,
        'redirect_uri': redirect_uri
    }
    # urlencode() only gained the ``quote_via`` keyword in Python 3.5;
    # older interpreters get a hand-rolled equivalent.
    # NOTE(review): ``_ver`` is compared as a *string* against '34' --
    # verify this still holds for the version format in use
    # (e.g. '310' < '34' lexically).
    query = urlencode(params, doseq=True, quote_via=quote_via) \
        if _ver > '34' \
        else '&'.join(['{}={}'.format(quote_via(k, safe=''), quote_via(v, safe=''))
                       for k, v in params.items()])
    return urlunparse(
        ['https', self.domain, '/authorize', None, query, None])
def _send(self):
    """POST the user's question and all required information to the
    Cleverbot API.

    Cleverbot tries to prevent unauthorized access to its API by
    obfuscating how it generates the 'icognocheck' token. The token is
    currently the md5 checksum of the 10th through 36th characters of
    the encoded data. This may change in the future.

    TODO: Order is not guaranteed when urlencoding dicts. This hasn't
    been a problem yet, but let's look into ordered dicts or tuples
    instead.
    """
    # Mirror the last 7 exchanges back as vText2..vText8, newest first
    # (vText1 is the current question).
    if self.conversation:
        for index, line in enumerate(reversed(self.conversation), start=2):
            if index > 8:
                break
            self.data['vText' + str(index)] = line
    # Token: md5 of characters 10-35 of the urlencoded payload.
    encoded = urlencode(self.data)
    self.data['icognocheck'] = hashlib.md5(encoded[9:35].encode('utf-8')).hexdigest()
    return self.session.post(Cleverbot.API_URL, data=self.data,
                             headers=Cleverbot.headers)
def reply(self, url, special_reply_content=None):
    """Post a reply to the cc98 topic at *url*.

    :param special_reply_content: will use this reply instead of self.replies
    :return: if success return True, else return False
    """
    assert self.logged, 'Did not login successfully!'
    assert self._reply_contents or special_reply_content, 'No reply text!'
    resp = self.get(url)
    # The hidden "followup" token embedded in the topic page is required
    # by the fast-reply endpoint.
    followup_value = self.pat_followup_value.search(resp.text).group(1).strip(r'\"')
    # Why not parse_qs?  Because the "boardid" key in cc98 URLs is sometimes
    # upper-case and sometimes lower-case -- it is not consistent.
    # Why not just lower-case the keys?  Because I only thought of that after
    # writing the sentence above, and wanted to keep that sentence.
    qs_list = parse_qsl(urlparse(url).query)
    boardid = qs_list[0][1]  # first query parameter: board id
    rootid = qs_list[1][1]   # second query parameter: topic root id
    reply_url = self.REPLY_BASE_URL + '?' + urlencode((('method', 'fastreply'), ('BoardID', boardid)))
    # The site expects the (cookie-stored) password to be posted back.
    cookies_password = parse_qs(resp.request.headers['cookie']).get('password')[0]
    post_reply = special_reply_content if special_reply_content else random.choice(self._reply_contents)
    post_form = {
        'followup': followup_value,
        'RootID': rootid,
        'star': '1',
        'UserName': self.username,
        'passwd': cookies_password,
        'Expression': 'face7.gif',
        'Content': post_reply,
        'signflag': 'yes',
    }
    self._reply_resp = self.post(reply_url, data=post_form)
    return self._reply_resp.ok
def params(self):
    """Return the encoded query string for an instant-interval data request
    on the configured station."""
    fields = [
        ('cbtt', self.getvalue('station')),
        ('interval', 'instant'),
        ('format', 2),
        ('back', 360),
    ]
    return urlencode(OrderedDict(fields))
def search(self, search_strings, age=0, ep_obj=None):  # pylint: disable=too-many-locals
    """Search the (anime-only) provider and return result tuples of
    (title, download_url, size, seeders, leechers), sorted by seeders."""
    results = []
    if self.show and not self.show.is_anime:
        return results
    for mode in search_strings:
        items = []
        logger.log(u"Search Mode: {}".format(mode), logger.DEBUG)
        for search_string in search_strings[mode]:
            if mode != 'RSS':
                logger.log(u"Search string: {}".format(search_string.decode("utf-8")), logger.DEBUG)
            params = {
                "page": 'rss',
                "cats": '1_0',  # All anime
                "sort": 2,  # Sort Descending By Seeders
                "order": 1
            }
            if mode != 'RSS':
                params["term"] = search_string
            search_url = self.url + '?' + urlencode(params)
            logger.log(u"Search URL: %s" % search_url, logger.DEBUG)
            # Seeders/leechers/size/verified flag are embedded in the RSS
            # summary text and must be scraped out with a regex.
            # (``ur"..."`` literals are Python-2-only syntax.)
            summary_regex = ur"(\d+) seeder\(s\), (\d+) leecher\(s\), \d+ download\(s\) - (\d+.?\d* [KMGT]iB)(.*)"
            s = re.compile(summary_regex, re.DOTALL)
            # NOTE(review): re-initialising ``results`` inside the
            # search-string loop discards anything accumulated so far --
            # looks unintended; confirm against sibling providers.
            results = []
            for curItem in self.cache.getRSSFeed(search_url)['entries'] or []:
                title = curItem['title']
                download_url = curItem['link']
                if not all([title, download_url]):
                    continue
                seeders, leechers, torrent_size, verified = s.findall(curItem['summary'])[0]
                size = convert_size(torrent_size) or -1
                # Filter unseeded torrent
                # NOTE(review): seeders/leechers are regex captures (str)
                # compared against ints -- Python-2-only comparison; confirm.
                if seeders < self.minseed or leechers < self.minleech:
                    if mode != 'RSS':
                        logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format
                                   (title, seeders, leechers), logger.DEBUG)
                    continue
                if self.confirmed and not verified and mode != 'RSS':
                    logger.log(u"Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it", logger.DEBUG)
                    continue
                item = title, download_url, size, seeders, leechers
                if mode != 'RSS':
                    logger.log(u"Found result: %s with %s seeders and %s leechers" % (title, seeders, leechers), logger.DEBUG)
                items.append(item)
        # For each search mode sort all the items by seeders if available
        items.sort(key=lambda tup: tup[3], reverse=True)
        results += items
    return results
def _webhooks_hub_request(self, topic_url: str, mode: str, params: dict = None, method: str = 'POST') -> requests.Response:
    """Send request to Twitch Webhooks Hub.

    :param topic_url: Subscribing topic url.
    :param mode: Subscription mode.
    :param params: Subscription params.
    :param method: Request method.
    :return: Received response.
    """
    url = join_urls(self.BASE_URL, self.WEBHOOKS_HUB_ENDPOINT)
    # NOTE(review): ``params`` defaults to None, and urlencode(None)
    # raises TypeError -- confirm every caller supplies params.
    urlencoded_params = urlencode(params)
    # Callback carries the session id so the hub's calls can be matched
    # back to this subscription.
    cb_url = join_urls(self._callback_url, self._session_id)
    return requests.request(
        method, url,
        data={
            'hub.mode': mode,
            'hub.topic': f'{topic_url}?{urlencoded_params}',
            'hub.callback': cb_url,
            'hub.lease_seconds': self.LEASE_SECONDS
            # TODO: support hub.secret for production
            # "hub.secret":"s3cRe7",
        },
        headers=self._headers)
def updateCache(self):
    """Refresh the provider cache from a fixed set of TV usenet groups."""
    # check if we should update
    if not self.shouldUpdate():
        return
    # clear cache
    self._clearCache()
    # set updated
    self.setLastUpdate()
    cl = []
    for group in ["alt.binaries.hdtv", "alt.binaries.hdtv.x264", "alt.binaries.tv",
                  "alt.binaries.tvseries", "alt.binaries.teevee"]:
        urlArgs = {"max": 50, "g": group}
        url = self.provider.urls['rss'] + "?" + urlencode(urlArgs)
        logger.log("Cache update URL: {}".format(url), logger.DEBUG)
        # Collect parsed DB actions for every entry of every group's feed.
        for item in self.getRSSFeed(url)["entries"] or []:
            ci = self._parseItem(item)
            if ci:
                cl.append(ci)
    # Apply all accumulated cache rows in a single mass action.
    if len(cl) > 0:
        cache_db_con = self._getDB()
        cache_db_con.mass_action(cl)
def _get_auth(self):
    """Log in with username/password and extract an OAuth access token
    from the redirect chain.

    Returns the token string, or None if any step fails.
    """
    # Where the login form should send us next: the OAuth2 authorize
    # endpoint, which redirects to redirect_uri with the token in the
    # URL fragment.
    next_params = {
        'client_id': self.client_id,
        'response_type': 'token',
        'redirect_uri': self.redirect_uri
    }
    post_data = {
        'name': self.username,
        'password': self.password,
        'next': '/v2/oauth2/authenticate?' + urlencode(next_params)
    }
    try:
        # ``self.url`` is the login endpoint, set elsewhere on this object.
        # Redirects are not followed so 'location' can be inspected.
        response = self.session.post(self.url, data=post_data, allow_redirects=False)
        response.raise_for_status()
        response = self.session.get(response.headers['location'], allow_redirects=False)
        response.raise_for_status()
        # The token rides in the fragment of the final redirect URL.
        resulting_uri = '{redirect_uri}#access_token=(.*)'.format(
            redirect_uri=re.escape(self.redirect_uri))
        self.auth = re.search(resulting_uri, response.headers['location']).group(1)
    except Exception as error:
        helpers.handle_requests_exception(error)
        self.auth = None
    return self.auth
def _encode_params(data):
    """Encode parameters in a piece of data.

    Will successfully encode parameters when passed as a dict or a list of
    2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
    if parameters are supplied as a dict.
    """
    if data is None:
        return ''
    elif isinstance(data, six.string_types):
        # Already a pre-encoded string: pass through untouched.
        return data
    elif hasattr(data, 'read'):
        # File-like object: let the transport stream it.
        return data
    elif hasattr(data, '__iter__'):
        result = []
        # lower_items() lower-cases keys; sorting makes output deterministic.
        for k, vs in sorted(CaseInsensitiveDict(data).lower_items()):
            if isinstance(vs, six.string_types) or not hasattr(vs, '__iter__'):
                vs = [vs]  # normalise scalar values to one-element lists
            for v in vs:
                if v is not None:  # pairs with a None value are dropped
                    result.append(
                        (k.encode('utf-8') if isinstance(k, str) else k,
                         v.encode('utf-8') if isinstance(v, str) else v))
        return urlencode(result, doseq=True)
    else:
        return data
def __call__(self, req):
    """Sign the request.

    Computes an HMAC-SHA1 signature over the canonical string-to-sign.
    When ``self._expires`` is set the signature is folded into the query
    string (pre-signed URL style); otherwise it is sent as a UCloud
    Authorization header.
    """
    req = self.fill_all_headers(req)
    str_to_sign = self.gen_str_to_sign(req)
    logger.debug("string to sign is:\n{0}".format(str_to_sign))
    hmac_sig = hmac.HMAC(self._private_key, utils.to_bytes(str_to_sign), hashlib.sha1)
    signature = base64.b64encode(hmac_sig.digest()).decode("utf8")
    if self._expires:
        # Pre-signed URL flavour: merge Expires/public key/signature into
        # the existing query parameters and rebuild the URL.
        url = urlsplit(req.url)
        params = dict(parse_qsl(url.query))
        params.update(
            dict(Expires=self._expires,
                 UCloudPublicKey=self._public_key,
                 Signature=signature))
        req.url = "{0}://{1}{2}?{3}".format(url.scheme, url.netloc, url.path,
                                            urlencode(params))
    else:
        req.headers["Authorization"] = "UCloud {0}:{1}".format(
            self._public_key, signature)
    # remove empty header
    for key, val in req.headers.copy().items():
        if not val:
            logger.debug("deleting empty header key: {0}".format(key))
            del req.headers[key]
    return req
def authentication_url(self):
    """Return the OAuth authorization URL for this client (implicit
    token flow)."""
    query = urlencode({
        'client_id': self.client_id,
        'response_type': 'token',
        'redirect_uri': self.redirect_uri,
    })
    return '{0}?{1}'.format(self.auth_path, query)
def scrape_attachments_page(self, agenda_item_id, attachments_page_url):
    """Yield attachment records for one agenda item's attachments page.

    Yields ('404', info) when the page reports a missing attachment
    (German error text), otherwise ('OK', info) per attachment form.
    """
    print("scrape Attachment " + attachments_page_url)
    html = requests.get(attachments_page_url).text
    soup = BeautifulSoup(html)
    txt = soup.get_text()
    # German error message: "the attachment could not be accessed or it
    # no longer exists".
    if "Auf die Anlage konnte nicht zugegriffen werden oder Sie existiert nicht mehr." in txt:
        print("Zu TOP " + agenda_item_id + " fehlt mindestens eine Anlage")
        yield ('404', {'agenda_item_id': agenda_item_id,
                       'attachmentsPageURL': attachments_page_url})
    else:
        # Each attachment is exposed as a form whose hidden inputs carry
        # the download parameters.
        for forms in soup.find_all('form'):
            title = forms.get_text()
            parameters = {form['name']: form['value']
                          for form in forms.find_all('input', {'type': 'hidden'})}
            file_with_parameters = '?'.join((forms['action'], urlencode(parameters)))
            url = self.base_url + file_with_parameters
            yield ('OK', {'sid': self.meeting_id,
                          'agenda_item_id': agenda_item_id,
                          'attachment_title': title,
                          'attachment_file_url': url})
def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KODI"):  # pylint: disable=too-many-arguments
    """Handles communication to KODI servers via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the KODI API via HTTP
        host: KODI webserver host:port
        username: KODI webserver username
        password: KODI webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error
    """
    # fill in omitted parameters
    if not username:
        username = sickbeard.KODI_USERNAME
    if not password:
        password = sickbeard.KODI_PASSWORD
    if not host:
        logger.log(u'No %s host passed, aborting update' % dest_app, logger.WARNING)
        return False
    # UTF-8 encode every text value so urlencode yields a clean query string.
    for key in command:
        if isinstance(command[key], text_type):
            command[key] = command[key].encode('utf-8')
    enc_command = urlencode(command)
    logger.log(u"%s encoded API command: %r" % (dest_app, enc_command), logger.DEBUG)
    # url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)  # maybe need for old plex?
    url = 'http://%s/kodiCmds/kodiHttp/?%s' % (host, enc_command)
    try:
        req = Request(url)
        # if we have a password, use authentication
        if password:
            # NOTE(review): base64.encodestring is Python-2 only
            # (removed in Python 3.9) -- confirm target interpreter.
            base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
            authheader = "Basic %s" % base64string
            req.add_header("Authorization", authheader)
            logger.log(u"Contacting %s (with auth header) via url: %s" % (dest_app, ss(url)), logger.DEBUG)
        else:
            logger.log(u"Contacting %s via url: %s" % (dest_app, ss(url)), logger.DEBUG)
        try:
            response = urlopen(req)
        except (BadStatusLine, URLError) as e:
            logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG)
            return False
        result = response.read().decode(sickbeard.SYS_ENCODING)
        response.close()
        logger.log(u"%s HTTP response: %s" % (dest_app, result.replace('\n', '')), logger.DEBUG)
        return result
    except Exception as e:
        logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG)
        return False
def parse(self, data, mode):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS

    :return: A list of items found
    """
    items = []
    json_data = data.get('data', {})
    torrent_rows = json_data.get('torrents', [])
    for row in torrent_rows:
        try:
            title = row.pop('name', '')
            # NOTE(review): urls['download'] is concatenated directly with
            # the query string (no '?'), so it must already end in a
            # separator -- confirm against the provider's url definitions.
            download_url = '{0}{1}'.format(
                self.urls['download'],
                urlencode({
                    'id': row.pop('id', ''),
                    'passkey': self.passkey
                }))
            if not all([title, download_url]):
                continue
            seeders = try_int(row.pop('seeders', 0))
            leechers = try_int(row.pop('leechers', 0))
            # Filter unseeded torrent
            # NOTE(review): min(self.minseed, 1) caps the threshold at 1,
            # so only 0-seeder results are ever discarded regardless of
            # the configured minimum -- confirm intended.
            if seeders < min(self.minseed, 1):
                if mode != 'RSS':
                    log.debug(
                        "Discarding torrent because it doesn't meet the"
                        " minimum seeders: {0}. Seeders: {1}",
                        title, seeders)
                continue
            size = convert_size(row.pop('size', -1), -1)
            item = {
                'title': title,
                'link': download_url,
                'size': size,
                'seeders': seeders,
                'leechers': leechers,
                'pubdate': None,
            }
            if mode != 'RSS':
                log.debug(
                    'Found result: {0} with {1} seeders and {2} leechers',
                    title, seeders, leechers)
            items.append(item)
        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
            log.exception('Failed parsing provider.')
    return items
def _send_to_kodi(command, host=None, username=None, password=None, dest_app="KODI"):  # pylint: disable=too-many-arguments
    """Handles communication to KODI servers via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the KODI API via HTTP
        host: KODI webserver host:port
        username: KODI webserver username
        password: KODI webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error
    """
    # fill in omitted parameters
    if not username:
        username = app.KODI_USERNAME
    if not password:
        password = app.KODI_PASSWORD
    if not host:
        logger.log(u'No %s host passed, aborting update' % dest_app, logger.WARNING)
        return False
    # UTF-8 encode every text value so urlencode yields a clean query string.
    for key in command:
        if isinstance(command[key], text_type):
            command[key] = command[key].encode('utf-8')
    enc_command = urlencode(command)
    logger.log(u"%s encoded API command: %r" % (dest_app, enc_command), logger.DEBUG)
    # url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)  # maybe need for old plex?
    url = 'http://%s/kodiCmds/kodiHttp/?%s' % (host, enc_command)
    try:
        req = Request(url)
        # if we have a password, use authentication
        if password:
            # NOTE(review): base64.encodestring is Python-2 only
            # (removed in Python 3.9) -- confirm target interpreter.
            base64string = base64.encodestring('%s:%s' % (username, password))[:-1]
            authheader = "Basic %s" % base64string
            req.add_header("Authorization", authheader)
            logger.log(u"Contacting %s (with auth header) via url: %s" % (dest_app, ss(url)), logger.DEBUG)
        else:
            logger.log(u"Contacting %s via url: %s" % (dest_app, ss(url)), logger.DEBUG)
        try:
            response = urlopen(req)
        except (BadStatusLine, URLError) as e:
            logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG)
            return False
        result = response.read().decode(app.SYS_ENCODING)
        response.close()
        logger.log(u"%s HTTP response: %s" % (dest_app, result.replace('\n', '')), logger.DEBUG)
        return result
    except Exception as e:
        logger.log(u"Couldn't contact %s HTTP at %r : %r" % (dest_app, url, ex(e)), logger.DEBUG)
        return False
def buildAuthUrl(self):
    """Return the provider's OAuth authorization URL for this client
    (authorization-code flow)."""
    query = {
        'client_id': self.client_id,
        'redirect_uri': self.redirect_uri,
        'scope': ' '.join(self.SCOPE),
        'response_type': 'code',
    }
    return '{0}?{1}'.format(self.AUTHORIZATION_URL, urlencode(query))
def _searchFieldsFormatter(self, searchFields):
    """Create an encoded URL fragment for searching for a value within a
    specific field; multiple fields are joined into a single string."""
    fragments = []
    for field, value in searchFields.records():
        # Only fields the API declares searchable are forwarded.
        if field in settings.searchable_fields:
            fragments.append(urlencode({field: value}))
    return '&'.join(fragments)
def _get_title_and_url(self, item): title = item.get('name', '').replace(' ', '.') url = self.urls['download'] + urlencode({ 'id': item['id'], 'passkey': self.passkey }) return title, url
def _searchFieldsFormatter(self, searchFields):
    """
    Creates an encoded URL fragment for searching for a value within a
    specific field. If multiple fields are specified, a single string
    is returned.
    """
    fragments = []
    for field, value in searchFields.items():
        # Only fields the API declares searchable are forwarded.
        if field in settings.searchable_fields:
            fragments.append(urlencode({field: value}))
    return '&'.join(fragments)
def get_url(self, path='/', websocket=False, remote=True, attach_api_key=True, userId=None, pass_uid=False, **query):
    '''construct a url for an emby request

    Parameters
    ----------
    path : str
        uri path (excluding domain and port) of the request
    websocket : bool, optional
        if true, use the ws(s) scheme instead of http(s)
    remote : bool, optional
        if true, prefer the remote address (default True)
    attach_api_key : bool, optional
        if true, append api_key/deviceId to the query (default True)
    userId : str, optional
        uid to use; falls back to the default user id
    pass_uid : bool, optional
        if true, also add userId to the query (default False)
    query : keyword dict
        additional query parameters (part of the url after the `?`)

    Returns
    -------
    str
        the full url
    '''
    uid = userId or self.userid
    if attach_api_key and self.api_key:
        query['api_key'] = self.api_key
        query['deviceId'] = self.device_id
    if pass_uid:
        query['userId'] = uid
    base = (self.urlremote or self.url) if remote else self.url
    # Switch to the websocket scheme matching the http(s) scheme.
    scheme = {'http': 'ws', 'https': 'wss'}[base.scheme] if websocket else base.scheme
    netloc = base.netloc + '/emby'
    full = urlunparse((scheme, netloc, path, '', '{params}', '')).format(
        UserId=uid,
        ApiKey=self.api_key,
        DeviceId=self.device_id,
        params=urlencode(query),
    )
    # Drop a dangling '?' when there were no query parameters at all.
    return full[:-1] if full[-1] == '?' else full
def _build_endpoint(self, api_path, params=None, query_params=None):
    """
    Helper function to form API URL.

    The base URL is '<protocol>://<hostname>[:<port>]/<api base url>'
    e.g. 'https://to.somedomain.net/api/0.1/'

    :param api_path: The path to the API end-point that you want to call which does not
        include the base URL e.g. ``user/login``, ``servers``, etc. This string can contain
        substitution parameters as denoted by a valid field_name replacement field
        specification as per :meth:`str.format` e.g. ``cachegroups/{id}`` or
        ``cachegroups/{id:d}``
    :type api_path: str
    :param params: If :meth:`str.format` field_name replacement field specifications exist
        in the ``api_path`` use this dictionary to perform replacements of the
        specifications with the value(s) in the dictionary that match the parameter name(s)
        e.g. ``{param_id}`` or ``{param_id:d}`` in ``api_string`` is replaced by value in
        ``params['param_id']``.
    :type params: Union[Dict[str, Any], None]
    :param query_params: URL query params to provide to the end-point e.g.
        ``{ 'sort': 'asc', 'maxresults': 200 }`` which translates to something like
        ``?sort=asc&maxresults=200`` which is appended to the request URL
    :type query_params: Union[Dict[str, Any], None]
    :return: The base url plus the passed and possibly substituted ``api_path`` to form a
        complete URL to the API resource to request
    :rtype: str
    :raises: ValueError
    """
    new_api_path = api_path
    # Replace all parameters in the new_api_path path, if required
    try:
        # Make the parameters values safe for adding to URLs
        url_params = {k: compat.quote(str(v)) if isinstance(v, str)
                      else v for k, v in iteritems(params)}
        log_with_debug_info(logging.DEBUG,
                            u'URL parameters are: [{0}]'.format(url_params))
        qparams = u''
        if query_params:
            # Process the URL query parameters
            qparams = u'?{0}'.format(compat.urlencode(query_params))
            log_with_debug_info(logging.DEBUG,
                                u'URL query parameters are: [{0}]'.format(qparams))
        new_api_path = api_path.format(**url_params) + qparams
    except KeyError as e:
        # A format placeholder had no matching entry in ``params``.
        msg = (u'Expecting a value for keyword argument [{0}] for format field '
               u'specification [{1!r}]')
        msg = msg.format(e, api_path)
        log_with_debug_info(logging.ERROR, msg)
        raise ValueError(msg)
    except ValueError as e:
        # A supplied value did not satisfy the format specification.
        msg = (u'One or more values do not match the format field specification '
               u'[{0!r}]; Supplied values: {1!r} ')
        msg = msg.format(api_path, params)
        log_with_debug_info(logging.ERROR, msg)
        raise ValueError(msg)
    retval = compat.urljoin(self.api_base_url, new_api_path)
    log_with_debug_info(logging.DEBUG,
                        u'Built end-point to return: {0}'.format(retval))
    return retval
def get_user_by_samaccountname(self, username):
    """Look up a user id by its Active Directory samAccountName via the
    users search API; returns the first match's id."""
    # quote (not quote_plus) keeps spaces as %20 in the search expression.
    encoded = urlencode(
        {'search': f"profile.samAccountName eq \"{username}\""},
        quote_via=quote)
    endpoint = urljoin(self.base_url, f"api/v1/users?{encoded}")
    matches = self._get(endpoint)
    return matches[0]["id"]
def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
    """Send a notification through the Prowl public API.

    Returns True when Prowl accepts the notification, False otherwise
    (disabled, missing API key, transport error, or non-200 status).
    """
    if not sickbeard.USE_PROWL and not force:
        return False
    if prowl_api is None:
        prowl_api = sickbeard.PROWL_API
        if len(prowl_api) == 0:
            return False
    if prowl_priority is None:
        prowl_priority = sickbeard.PROWL_PRIORITY
    title = sickbeard.PROWL_MESSAGE_TITLE
    logger.log(
        "PROWL: Sending notice with details: title=\"{0}\" event=\"{1}\", message=\"{2}\", priority={3}, api={4}"
        .format(title, event, message, prowl_priority, prowl_api), logger.DEBUG)
    http_handler = HTTPSConnection("api.prowlapp.com")
    data = {
        'apikey': prowl_api,
        'application': title,
        'event': event,
        'description': message.encode('utf-8'),
        'priority': prowl_priority
    }
    try:
        # Form-encoded POST to the public add endpoint.
        http_handler.request(
            "POST", "/publicapi/add",
            headers={'Content-type': "application/x-www-form-urlencoded"},
            body=urlencode(data))
    except (SSLError, HTTPException, socket.error):
        logger.log("Prowl notification failed.", logger.ERROR)
        return False
    response = http_handler.getresponse()
    request_status = response.status
    if request_status == 200:
        logger.log("Prowl notifications sent.", logger.INFO)
        return True
    elif request_status == 401:
        logger.log("Prowl auth failed: {0}".format(response.reason), logger.ERROR)
        return False
    else:
        logger.log("Prowl notification failed.", logger.ERROR)
        return False
def _getRSSData(self):
    """Fetch the provider's RSS feed used to refresh the cache."""
    search_params = {
        'user': provider.username,
        'api': provider.api_key,
        'eng': 1,
        'catid': '19,20'  # SD,HD
    }
    # NOTE(review): credentials come from the module-level ``provider``
    # while the URL comes from ``self.provider`` -- confirm both refer
    # to the same object.
    rss_url = self.provider.urls['rss'] + '?' + urlencode(search_params)
    logger.log(u"Cache update URL: %s" % rss_url, logger.DEBUG)
    return self.getRSSFeed(rss_url)
def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
    """Send a notification through the Prowl public API.

    Returns True when Prowl accepts the notification, False otherwise
    (disabled, missing API key, transport error, or non-200 status).
    """
    if not app.USE_PROWL and not force:
        return False
    if prowl_api is None:
        # Multiple configured keys are sent as one comma-separated value.
        prowl_api = ','.join(app.PROWL_API)
        if not prowl_api:
            return False
    if prowl_priority is None:
        prowl_priority = app.PROWL_PRIORITY
    title = app.PROWL_MESSAGE_TITLE
    log.debug(
        u'PROWL: Sending notice with details: title="{0}" event="{1}", message="{2}", priority={3}, api={4}',
        title, event, message, prowl_priority, prowl_api)
    http_handler = HTTPSConnection('api.prowlapp.com')
    data = {
        'apikey': prowl_api,
        'application': title,
        'event': event,
        'description': message.encode('utf-8'),
        'priority': prowl_priority
    }
    try:
        # Form-encoded POST to the public add endpoint.
        http_handler.request(
            'POST', '/publicapi/add',
            headers={'Content-type': 'application/x-www-form-urlencoded'},
            body=urlencode(data))
    except (SSLError, HTTPException, socket.error):
        log.error(u'Prowl notification failed.')
        return False
    response = http_handler.getresponse()
    request_status = response.status
    if request_status == 200:
        log.info(u'Prowl notifications sent.')
        return True
    elif request_status == 401:
        log.error(u'Prowl auth failed: {0}', response.reason)
        return False
    else:
        log.error(u'Prowl notification failed.')
        return False
def encode_http_params(data):
    """
    Encode HTTP parameters to URL query string

    :param data: data as text or tuple/list
    :type data: str or bytes or tuple or list
    :return: str -- encoded data
    """
    if isinstance(data, (str, bytes)):
        # NOTE(review): urlencode() expects a mapping or a sequence of
        # 2-tuples; passing raw text raises TypeError for any string
        # that is not such a sequence -- confirm intended behaviour.
        return urlencode(data)
    elif hasattr(data, '__iter__'):
        result = []
        for k, vs in to_key_val_list(data):
            if vs is not None:  # pairs with a None value are dropped
                result.append(
                    (k.encode('utf-8') if isinstance(k, str) else k,
                     vs.encode('utf-8') if isinstance(vs, str) else vs))
        return urlencode(result, doseq=True)
    else:
        raise ValueError('Invalid argument')
def _multiDictFormatter(self, param_name, values):
    """Create an encoded URL fragment for parameters that are dictionaries
    of value lists, using the name[key][] scoping convention."""
    fragments = []
    for key, entry in values.items():
        scoped_name = '%s[%s][]' % (param_name, key)
        # The dict values must be iterables of values, never bare strings.
        if isinstance(entry, basestring):
            raise ValueError("Multi-value parameters should not be strings.")
        for value in entry:
            fragments.append(urlencode({scoped_name: value}))
    return '&'.join(fragments)
def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
    """Send a notification through the Prowl public API.

    Returns True when Prowl accepts the notification, False otherwise
    (disabled, missing API key, transport error, or non-200 status).
    """
    if not sickbeard.USE_PROWL and not force:
        return False
    if prowl_api is None:
        prowl_api = sickbeard.PROWL_API
        if len(prowl_api) == 0:
            return False
    if prowl_priority is None:
        prowl_priority = sickbeard.PROWL_PRIORITY
    title = sickbeard.PROWL_MESSAGE_TITLE
    logger.log(
        u'PROWL: Sending notice with details: title="{0}" event="{1}", message="{2}", priority={3}, api={4}'.format(
            title, event, message, prowl_priority, prowl_api
        ),
        logger.DEBUG,
    )
    http_handler = HTTPSConnection("api.prowlapp.com")
    data = {
        "apikey": prowl_api,
        "application": title,
        "event": event,
        "description": message.encode("utf-8"),
        "priority": prowl_priority,
    }
    try:
        # Form-encoded POST to the public add endpoint.
        http_handler.request(
            "POST",
            "/publicapi/add",
            headers={"Content-type": "application/x-www-form-urlencoded"},
            body=urlencode(data),
        )
    except (SSLError, HTTPException, socket.error):
        logger.log(u"Prowl notification failed.", logger.ERROR)
        return False
    response = http_handler.getresponse()
    request_status = response.status
    if request_status == 200:
        logger.log(u"Prowl notifications sent.", logger.INFO)
        return True
    elif request_status == 401:
        logger.log(u"Prowl auth failed: {0}".format(response.reason), logger.ERROR)
        return False
    else:
        logger.log(u"Prowl notification failed.", logger.ERROR)
        return False
def oauth_url():
    """Returns a URL for starting the OAuth process, including a fresh state
    that it saves"""
    state = SlackOAuthState()
    state.save()
    query = urlencode({
        'client_id': CLIENT_ID,
        'scope': SCOPES,
        'redirect_uri': URL_ROOT[:-1] + url_for('slack_auth'),
        'state': state.state,
        'team': TEAM_ID,
    })
    return 'https://slack.com/oauth/authorize?' + query
def _build_query(self, query_params):
    """Serialize *query_params* into a URL-encoded query string.

    Scalar values are wrapped in a one-element list so every key maps
    to a sequence; ``None`` entries are dropped, and ``str`` keys and
    values are UTF-8 encoded before urlencoding.
    """
    encoded_pairs = []
    for key, value in to_key_val_list(query_params):
        if isinstance(value, basestring) or not hasattr(value, '__iter__'):
            value = [value]
        encoded_pairs.extend(
            (key.encode('utf-8') if isinstance(key, str) else key,
             item.encode('utf-8') if isinstance(item, str) else item)
            for item in value
            if item is not None
        )
    return urlencode(encoded_pairs, doseq=True)
def parse(self, data, mode):
    """
    Parse search results for items.

    :param data: The raw response from a search
    :param mode: The current mode used to search, e.g. RSS

    :return: A list of items found
    """
    results = []
    rows = data.get('data', {}).get('torrents', [])

    for torrent in rows:
        try:
            title = torrent.pop('name', '')
            query = urlencode({'id': torrent.pop('id', ''), 'passkey': self.passkey})
            download_url = '{0}?{1}'.format(self.urls['download'], query)
            if not all([title, download_url]):
                continue

            seeders = try_int(torrent.pop('seeders', 0))
            leechers = try_int(torrent.pop('leechers', 0))

            # Filter unseeded torrent
            if seeders < self.minseed:
                if mode != 'RSS':
                    log.debug("Discarding torrent because it doesn't meet the"
                              ' minimum seeders: {0}. Seeders: {1}',
                              title, seeders)
                continue

            size = convert_size(torrent.pop('size', -1), -1)

            result = {
                'title': title,
                'link': download_url,
                'size': size,
                'seeders': seeders,
                'leechers': leechers,
                'pubdate': None,
            }
            if mode != 'RSS':
                log.debug('Found result: {0} with {1} seeders and {2} leechers',
                          title, seeders, leechers)

            results.append(result)
        except (AttributeError, TypeError, KeyError, ValueError, IndexError):
            log.exception('Failed parsing provider.')

    return results
def _multiDictFormatter(self, param_name, values): # Creates an encoded URL fragment for parameters that are dictionaries. url_parts = [] for k in values.keys(): param_scoped_name = '%s[%s][]' % (param_name, k) #check the dict values are not strings if isinstance(values[k], str): raise ValueError("Multi-value parameters should not be strings.") for value in values[k]: value = value.encode('utf-8') url_parts.append( urlencode(({param_scoped_name: value})) ) return '&'.join(url_parts)
def parse(self, data, mode):
    """
    Parse search results from data
    :param data: response data
    :param mode: search mode
    :return: search results
    """
    results = []

    # Bail out early when the response fails the provider auth check.
    if not self._check_auth_from_data(data):
        return results

    json_items = data.get('data', '')
    if not json_items:
        sickrage.app.log.error(
            'Resulting JSON from provider is not correct, not parsing it'
        )
        return results

    for torrent in json_items.get('torrents', []):
        try:
            title = torrent.pop('name', '')
            download_url = '{0}{1}'.format(
                self.urls['download'],
                urlencode({
                    'id': torrent.pop('id', ''),
                    'passkey': self.passkey
                }))
            if not all([title, download_url]):
                continue

            seeders = try_int(torrent.pop('seeders', 0))
            leechers = try_int(torrent.pop('leechers', 0))
            info_hash = torrent.pop('info_hash', '')
            size = convert_size(torrent.pop('size', -1), -1)

            result = {
                'title': title,
                'link': download_url,
                'size': size,
                'seeders': seeders,
                'leechers': leechers,
                'hash': info_hash
            }

            if mode != 'RSS':
                sickrage.app.log.debug(
                    'Found result: {0}'.format(title))

            results.append(result)
        except Exception:
            sickrage.app.log.error("Failed parsing provider")

    return results
def execute(self, endpoint, version, path_sequence, qs_params):
    """Issue a GET request against the composed API path and return the body.

    :param endpoint: base endpoint URL
    :param version: API version path segment
    :param path_sequence: list of further path segments
    :param qs_params: dict of query-string parameters
    :return: the raw response body (``resp.content``, bytes)
    """
    path = '/'.join([endpoint, version] + path_sequence)
    querystring = urlencode(qs_params)
    request_url = "{}/?{}".format(path, querystring)
    log.info('Endpoint Request: {}'.format(request_url))
    resp = requests.get(request_url)
    # BUG FIX: the "({}s)" slot is for the elapsed seconds; the original
    # logged the status code there and dropped the elapsed value entirely
    # (it was passed but the format string only had three placeholders).
    log.info('Response ({}s): {} / {}'.format(
        resp.elapsed.total_seconds(), resp.status_code, resp.reason))
    log.debug('Response Body: {}'.format(resp.content))
    return resp.content
def list_all_entities(self, ent_name):
    """Yield every entity under *ent_name*, following pagination links.

    Expects ``self.session.get`` to return a parsed payload containing a
    ``result`` list and an optional ``page.next_page`` URL.
    """
    # A trailing slash avoids a redirect round trip on the first request.
    if not ent_name.endswith("/"):
        ent_name += "/"
    endpoint = urljoin(self.url, ent_name)

    page_url = endpoint + "?" + urlencode({"page_number": 1})
    while page_url:
        payload = self.session.get(page_url)
        for entity in payload["result"]:
            yield entity
        # Stop when the server offers no further page.
        page_url = payload.get("page", {}).get("next_page")
def req(self, url=None, method=None, params=None, body=None, headers=None):
    """Perform an HTTP request through ``self.client`` and return the body.

    :param url: target URL; defaults to ``self.url``
    :param method: HTTP method name, e.g. ``'GET'``
    :param params: query parameters (dict/sequence), a pre-encoded
        string, or None to fall back to ``self.params``
    :param body: request body, passed through as ``data``
    :param headers: extra headers merged over ``self.headers``
    :raises LoadFailed: when no URL is available or the response status
        is outside the 2xx range
    :return: ``resp.content`` when ``self.binary`` is true, else ``resp.text``
    """
    # BUG FIX: the default was a shared mutable dict ({}); use None and
    # create a fresh dict per call.
    if headers is None:
        headers = {}
    if url is None:
        url = self.url
    if url is None:
        raise LoadFailed("No URL provided")
    if params is None:
        params = getattr(self, 'params', None)
    # A string is assumed to be already URL-encoded; append it verbatim.
    if isinstance(params, str):
        url += '?' + params
        params = None
    if self.debug:
        if params:
            from requests.compat import urlencode
            debug_url = url + '?' + urlencode(params, doseq=True)
        else:
            debug_url = url
        self.debug_string = "%s: %s" % (method, debug_url)
        print(self.debug_string)
    if self.username is not None and self.password is not None:
        auth = (self.username, self.password)
    else:
        auth = None
    all_headers = self.headers.copy()
    all_headers.update(headers)
    resp = self.client.request(
        method,
        url,
        params=params,
        headers=all_headers,
        auth=auth,
        data=body,
    )
    resp.connection.close()
    if resp.status_code < 200 or resp.status_code > 299:
        raise LoadFailed(
            resp.text,
            path=url,
            code=resp.status_code,
        )
    if self.binary:
        return resp.content
    return resp.text
def dialog(self, redirect_uri, scope=(), state='', implicit=False):
    """Build the OAuth authorization dialog URL.

    ``implicit=True`` requests a token directly (implicit grant);
    otherwise an authorization code is requested. *scope* is joined
    with spaces into a single query value.
    """
    query = urlencode(dict(
        response_type='token' if implicit else 'code',
        client_id=self.client_id,
        redirect_uri=redirect_uri,
        scope=' '.join(scope),
        state=state,
    ))
    return "%s?%s" % (self.dialog_url, query)
def req(self, url=None, method=None, params=None, body=None, headers=None):
    """Perform an HTTP request with ``requests`` and return the body.

    :param url: target URL; defaults to ``self.url``
    :param method: HTTP method name, e.g. ``'GET'``
    :param params: query parameters (dict/sequence), a pre-encoded
        string, or None to fall back to ``self.params``
    :param body: request body, passed through as ``data``
    :param headers: extra headers merged over ``self.headers``
    :raises LoadFailed: when no URL is available or the response status
        is outside the 2xx range
    :return: ``resp.content`` when ``self.binary`` is true, else ``resp.text``
    """
    # BUG FIX: the default was a shared mutable dict ({}); use None and
    # create a fresh dict per call.
    if headers is None:
        headers = {}
    if url is None:
        url = self.url
    if url is None:
        raise LoadFailed("No URL provided")
    if params is None:
        params = getattr(self, 'params', None)
    # A string is assumed to be already URL-encoded; append it verbatim.
    if isinstance(params, str):
        url += '?' + params
        params = None
    if self.debug:
        if params:
            from requests.compat import urlencode
            debug_url = url + '?' + urlencode(params, doseq=True)
        else:
            debug_url = url
        self.debug_string = "%s: %s" % (method, debug_url)
        print(self.debug_string)
    if self.username is not None and self.password is not None:
        auth = (self.username, self.password)
    else:
        auth = None
    all_headers = self.headers.copy()
    all_headers.update(headers)
    resp = requests.request(
        method,
        url,
        params=params,
        headers=all_headers,
        auth=auth,
        data=body,
    )
    if resp.status_code < 200 or resp.status_code > 299:
        raise LoadFailed(
            resp.text,
            path=url,
            code=resp.status_code,
        )
    if self.binary:
        return resp.content
    return resp.text
def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
    """Push a notification through the Prowl API; return True on success.

    Falls back to the configured API key and priority when the
    arguments are None. ``force=True`` bypasses the USE_PROWL switch.
    """
    if not (sickbeard.USE_PROWL or force):
        return False

    prowl_api = sickbeard.PROWL_API if prowl_api is None else prowl_api
    if len(prowl_api) == 0:
        return False

    prowl_priority = sickbeard.PROWL_PRIORITY if prowl_priority is None else prowl_priority

    title = sickbeard.PROWL_MESSAGE_TITLE

    logger.log(u"PROWL: Sending notice with details: title=\"%s\" event=\"%s\", message=\"%s\", priority=%s, api=%s"
               % (title, event, message, prowl_priority, prowl_api), logger.DEBUG)

    http_handler = HTTPSConnection("api.prowlapp.com")

    payload = {
        'apikey': prowl_api,
        'application': title,
        'event': event,
        'description': message.encode('utf-8'),
        'priority': prowl_priority,
    }

    try:
        http_handler.request("POST", "/publicapi/add",
                             headers={'Content-type': "application/x-www-form-urlencoded"},
                             body=urlencode(payload))
    except (SSLError, HTTPException, socket.error):
        logger.log(u"Prowl notification failed.", logger.ERROR)
        return False

    response = http_handler.getresponse()
    if response.status == 200:
        logger.log(u"Prowl notifications sent.", logger.INFO)
        return True
    if response.status == 401:
        logger.log(u"Prowl auth failed: %s" % response.reason, logger.ERROR)
        return False
    logger.log(u"Prowl notification failed.", logger.ERROR)
    return False
def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
    """Push a notification to Prowl; return True when it was delivered.

    When *prowl_api* is None, the configured API keys are joined with
    commas. ``force=True`` bypasses the global USE_PROWL switch.
    """
    if not (app.USE_PROWL or force):
        return False

    if prowl_api is None:
        prowl_api = ','.join(app.PROWL_API)
    if not prowl_api:
        return False

    if prowl_priority is None:
        prowl_priority = app.PROWL_PRIORITY

    title = app.PROWL_MESSAGE_TITLE

    log.debug(u'PROWL: Sending notice with details: title="{0}" event="{1}", message="{2}", priority={3}, api={4}',
              title, event, message, prowl_priority, prowl_api)

    http_handler = HTTPSConnection('api.prowlapp.com')

    payload = {
        'apikey': prowl_api,
        'application': title,
        'event': event,
        'description': message.encode('utf-8'),
        'priority': prowl_priority,
    }

    try:
        http_handler.request('POST', '/publicapi/add',
                             headers={'Content-type': 'application/x-www-form-urlencoded'},
                             body=urlencode(payload))
    except (SSLError, HTTPException, socket.error):
        log.error(u'Prowl notification failed.')
        return False

    response = http_handler.getresponse()
    if response.status == 200:
        log.info(u'Prowl notifications sent.')
        return True
    if response.status == 401:
        log.error(u'Prowl auth failed: {0}', response.reason)
        return False
    log.error(u'Prowl notification failed.')
    return False
def _send_telegram_msg(self, title, msg, id=None, api_key=None):
    """
    Sends a Telegram notification

    :param title: The title of the notification to send
    :param msg: The message string to send
    :param id: The Telegram user/group id to send the message to
    :param api_key: Your Telegram bot API token

    :returns: A (success, message) tuple - success is True if the
        message succeeded, False otherwise
    """
    id = sickbeard.TELEGRAM_ID if id is None else id
    api_key = sickbeard.TELEGRAM_APIKEY if api_key is None else api_key

    logger.log('Telegram in use with API KEY: %s' % api_key, logger.DEBUG)

    # BUG FIX: interpolating `title.encode()`/`msg.encode()` produced
    # "b'...'" text on Python 3; format the str values directly and
    # encode the whole payload once, since urlopen needs a bytes body.
    message = '%s : %s' % (title, msg)
    payload = urlencode({'chat_id': id, 'text': message}).encode('utf-8')
    telegram_api = 'https://api.telegram.org/bot%s/%s'

    req = Request(telegram_api % (api_key, 'sendMessage'), payload)

    success = False
    try:
        urlopen(req)
        message = 'Telegram message sent successfully.'
        success = True
    except IOError as e:
        message = 'Unknown IO error: %s' % e
        # BUG FIX: the attribute name must be str, not bytes - on
        # Python 3, `hasattr(e, b'code')` raises TypeError.
        if hasattr(e, 'code'):
            error_message = {
                400: 'Missing parameter(s). Double check your settings or if the channel/user exists.',
                401: 'Authentication failed.',
                420: 'Too many messages.',
                500: 'Server error. Please retry in a few moments.',
            }
            if e.code in error_message:
                message = error_message.get(e.code)
            else:
                # BUG FIX: the original computed this fallback text but
                # never assigned it, so the generic IO error was kept.
                message = http_status_code.get(e.code, message)
    except Exception as e:
        message = 'Error while sending Telegram message: %s ' % e
    finally:
        logger.log(message, logger.INFO)
        # NOTE: returning from `finally` (as the original did) swallows
        # any exception raised above; behavior preserved deliberately.
        return success, message
def test_02_category_packages(self):
    """Filtering the package list API by application category slug."""
    filter_by_app = {'category__slug': self.app.slug}
    url_app_pkg = "%s?%s" % (reverse('apiv3:package_list'),
                             urlencode(filter_by_app))

    response_app_pkg = self.client.get(url_app_pkg)
    # check that the request was successful
    self.assertEqual(response_app_pkg.status_code, 200)

    # check that we have correct number of packages in filter
    app_pkg = json.loads(response_app_pkg.content.decode("utf-8"))
    app_pkg_count = int(app_pkg['meta']['total_count'])
    self.assertEqual(app_pkg_count, self.app.package_set.count() + 1)

    # Check that we have the filter applied correctly
    app_package_slug_list = self.app.package_set.values_list('slug', flat=True)
    self.assertIn(self.pkg1.slug, app_package_slug_list)
    self.assertIn(self.pkg2.slug, app_package_slug_list)