def setup_oauth():
    """Authorize your app via identifier.

    Code inspired by:
    http://thomassileo.com/blog/2013/01/25/using-twitter-rest-api-v1-dot-1-with-python/
    """
    # Step 1: obtain a temporary request token.
    oauth = OAuth1(CONSUMER_KEY, client_secret=CONSUMER_SECRET)
    reply = requests.post(url=REQUEST_TOKEN_URL, auth=oauth)
    creds = parse_qs(reply.content)
    owner_key = creds[b'oauth_token'][0].decode(encoding='UTF-8')
    owner_secret = creds[b'oauth_token_secret'][0].decode(encoding='UTF-8')

    # Step 2: have the user authorize the app and enter the verifier.
    authorize_url = AUTHORIZE_URL + owner_key
    print('Please go here and authorize: ' + authorize_url)
    verifier = input('Please input the verifier: ')
    oauth = OAuth1(CONSUMER_KEY,
                   client_secret=CONSUMER_SECRET,
                   resource_owner_key=owner_key,
                   resource_owner_secret=owner_secret,
                   verifier=verifier)

    # Step 3: trade the verified request token for the access token.
    reply = requests.post(url=ACCESS_TOKEN_URL, auth=oauth)
    creds = parse_qs(reply.content)
    token = creds[b'oauth_token'][0].decode(encoding='UTF-8')
    secret = creds[b'oauth_token_secret'][0].decode(encoding='UTF-8')
    return token, secret
def do_GET(self):
    """Handle GET requests: /version, single-metric lookup, and metric listing."""
    req = parse(self.path)
    # BUG FIX: the original used `req.path in '/version'`, which is a substring
    # test (paths like '/' or '/v' would match); an equality test is intended.
    if req.path == '/version':
        self.wfile.write(API.VERSION)
    # get a single metric
    elif req.path in API.METRIC_GET_URLS:
        query = parse_qs(req.query)
        key, host = None, None
        if 'key' in query:
            key = query['key'][0]
        if 'host' in query:
            host = query['host'][0]
        resp = self.sender.get_metric(key, host)
        if resp[0] is not None:
            result = '{0}\t{1}\t{2}'.format(key, resp[0], resp[1])
            if platform.PY3:
                result = bytearray(result, 'utf-8')
            self.wfile.write(result)
        else:
            self.wfile.write(API.METRIC_NOT_FOUND)
    # get list of metrics
    elif req.path in API.METRIC_LIST_URLS:
        query, host, result = parse_qs(req.query), None, ''
        if 'host' in query:
            host = query['host'][0]
        for val in self.sender.list_metrics(host):
            result += '{0}\t{1}\t{2}\n'.format(val[0], val[1][0], val[1][1])
        if platform.PY3:
            result = bytearray(result, 'utf-8')
        self.wfile.write(result)
    else:
        # unknown path
        self.wfile.write(API.UNKNOWN_VERSION)
def youtube_download(video_url):
    """Download a YouTube video (fmt=18 / MP4) into the current directory.

    :param video_url: a watch URL containing a ``v=`` query parameter
    """
    video_id = parse_qs(urlparse(video_url).query)['v'][0]
    url_data = urlopen('http://www.youtube.com/get_video_info?&video_id=' + video_id).read()
    url_info = parse_qs(unquote(url_data.decode('utf-8')))
    token_value = url_info['token'][0]
    download_url = "http://www.youtube.com/get_video?video_id={0}&t={1}&fmt=18".format(
        video_id, token_value)
    video_title = url_info['title'][0] if 'title' in url_info else ''
    # Unicode filenames are more trouble than they're worth
    filename = video_title.encode('ascii', 'ignore').decode('ascii').replace("/", "-") + '.mp4'
    print("\t Downloading '{}' to '{}'...".format(video_title, filename))
    try:
        download = urlopen(download_url).read()
        # BUG FIX: use a context manager so the file is closed even if the
        # write fails (the original leaked the handle on error).
        with open(filename, 'wb') as f:
            f.write(download)
    except Exception as e:
        # BUG FIX: corrected the "Downlad" typo in the error message.
        print("\t Download failed! {}".format(str(e)))
        print("\t Skipping...")
    else:
        print("\t Done.")
def extract_youtube_id(url):
    """Return the YouTube video id embedded in *url*.

    Credit for the URL taxonomy: http://stackoverflow.com/a/7936523
    """
    if not (url.startswith("http://") or url.startswith("https://")):
        url = "http://" + url
    parsed = up.urlparse(url)
    host = parsed.hostname
    if host == 'youtu.be':
        # http://youtu.be/YOUTUBE-ID&key_n=val_n
        return parsed.path[1:]
    if host == 'm.youtube.com':
        if parsed.query:
            # http://m.youtube.com/watch?v=YOUTUBE-ID&key_n=val_n
            return up.parse_qs(parsed.query)['v'][0]
        # http://m.youtube.com/#/watch?v=YOUTUBE-ID&key_n=val_n
        return up.parse_qs(parsed.fragment)['/watch?v'][0]
    if host in ('www.youtube.com', 'youtube.com'):
        if parsed.path == '/watch':
            # http://www.youtube.com/watch?v=YOUTUBE-ID&key_n=val_n
            return up.parse_qs(parsed.query)['v'][0]
        if parsed.path.startswith('/embed/') or parsed.path.startswith('/v/'):
            # http://www.youtube.com/embed/YOUTUBE-ID&key_n=val_n
            # http://www.youtube.com/v/YOUTUBE-ID?key_n=val_n
            return parsed.path.split('/')[2]
    return CommentFetcher.INVALID_ID
def test_flow(self):
    """End-to-end SP-initiated SAML flow: AuthnRequest redirect, IdP
    authentication, SAMLResponse redirect, and attribute assertions."""
    # SP builds the authentication request; the test client follows it.
    url = self.sp.make_auth_req()
    status, headers, _ = self.getPage(url)
    assert status == '303 See Other'
    # The redirect must carry both the SAMLRequest and the RelayState.
    url = self.get_redirect_location(headers)
    req = parse_qs(urlsplit(url).query)
    assert 'SAMLRequest' in req
    assert 'RelayState' in req
    # The fake IdP handles the request and authenticates user 'test1'.
    action, body = self.idp.handle_auth_req(req['SAMLRequest'][0],
                                            req['RelayState'][0],
                                            BINDING_HTTP_REDIRECT,
                                            'test1')
    status, headers, body = self.getPage(action, method='POST',
                                         body=urlencode(body))
    assert status == '302 Found'
    # The response redirect carries SAMLResponse + RelayState back to the SP.
    url = self.get_redirect_location(headers)
    req = parse_qs(urlsplit(url).query)
    assert 'SAMLResponse' in req
    assert 'RelayState' in req
    resp = self.sp.parse_authn_request_response(req['SAMLResponse'][0],
                                                BINDING_HTTP_REDIRECT)
    # Verify the attribute values released for the test user.
    identity = resp.ava
    assert identity["displayName"][0] == "Test1"
    assert identity["sn"][0] == "test1@valueA"
    assert identity['o'][0] == "Small university"
def __init__(self, hxsServer, ctimeout=4, parentproxy=None):
    """Client socket for an hxsocks server.

    :param hxsServer: server spec (str or ParentProxy)
    :param ctimeout: connect timeout in seconds
    :param parentproxy: optional upstream proxy (str or ParentProxy)
    """
    basesocket.__init__(self)
    if not isinstance(hxsServer, ParentProxy):
        hxsServer = ParentProxy(hxsServer, hxsServer)
    self.hxsServer = hxsServer
    self.timeout = ctimeout
    if parentproxy and not isinstance(parentproxy, ParentProxy):
        parentproxy = ParentProxy(parentproxy, parentproxy)
    self.parentproxy = parentproxy
    # IMPROVED: parse the server URL's query string once instead of three times.
    qs = urlparse.parse_qs(self.hxsServer.parse.query)
    self.PSK = qs.get('PSK', [''])[0]
    self.method = qs.get('method', [DEFAULT_METHOD])[0].lower()
    self.hash_algo = qs.get('hash', [DEFAULT_HASH])[0].upper()
    self.serverid = (self.hxsServer.username, self.hxsServer.hostname)
    self.cipher = None
    self._data_bak = None
    self.readable = 0
    self.writeable = 0
    self.pooled = 0
    # TODO: send custom headers
    # Pre-build the fake websocket-upgrade header used for HTTP obfuscation.
    self._http_obfs = self.hxsServer.query.get('obfs', [''])[0] == 'http'
    self._http_header = b'GET / HTTP/1.1\r\n'
    self._http_header += b'Host: %s\r\n' % self.hxsServer.query.get('hostname', ['www.baidu.com'])[0].encode()
    self._http_header += b'User-Agent: %s\r\n' % self.hxsServer.query.get('UA', ['curl/7.18.1'])[0].encode()
    self._http_header += b'Upgrade: websocket\r\nConnection: Upgrade\r\n'
    self._http_header += b'Sec-WebSocket-Key: ' + base64.b64encode(os.urandom(16))
    self._http_header += b'\r\n\r\n'
    self._header_sent = False
    self._header_received = False
def parse_request(self, req):
    """
    Method to parse request and set to class request attributes

    - self.path - url path "path/to/something" (leading '/' stripped)
    - self.method - http method type
    - self.headers - http headers (dict)
    - self.body - request body data parsed with parse_qs
    - self.query_data - request query data (set only when a '?' is present)

    :param req: request string
    """
    headers = {}
    lines = req.splitlines()
    in_body = False
    body = ''
    for line in lines[1:]:
        if not in_body and line.strip() == "":
            # Blank line separates headers from body.
            in_body = True
            continue
        if in_body:
            body += line
        else:
            k, v = line.split(":", 1)
            headers[k.strip()] = v.strip()
    method, path, _ = lines[0].split()
    self.path = path.lstrip("/")
    self.method = method
    self.headers = headers
    self.body = parse_qs(body)
    if '?' in self.path:
        # BUG FIX: split only on the first '?' so an extra '?' inside the
        # query string no longer raises ValueError on unpacking.
        self.path, query_string = self.path.split("?", 1)
        self.query_data = parse_qs(query_string)
def match_request(self, method, url, params=None):
    """Test if response answers request."""
    if method.lower() != self.method.lower():
        return False
    if params:
        url = str(yarl.URL(url).with_query(params))
    if self._url_parts is None:
        # No structured matcher: fall back to regular-expression matching.
        return self._url.search(url) is not None
    req = urlparse(url.lower())
    matcher = self._url_parts
    if matcher.scheme and req.scheme != matcher.scheme:
        return False
    if matcher.netloc and req.netloc != matcher.netloc:
        return False
    if (req.path or '/') != (matcher.path or '/'):
        return False
    # Every query component of the matcher must be present in the request.
    request_qs = parse_qs(req.query)
    for key, vals in parse_qs(matcher.query).items():
        for val in vals:
            if val not in request_qs.get(key, []):
                return False
            request_qs[key].remove(val)
    return True
def setup_oauth():
    """Authorize your app via identifier; return (token, secret)."""
    # Request token
    oauth = OAuth1(CONSUMER_KEY, client_secret=CONSUMER_SECRET)
    r = requests.post(url=REQUEST_TOKEN_URL, auth=oauth)
    credentials = parse_qs(r.content)
    print(credentials)
    # BUG FIX: decode the byte values directly instead of slicing the str()
    # representation of a list of bytes ("[b'tok']"[3:-2]), which is fragile.
    resource_owner_key = credentials[b'oauth_token'][0].decode('utf-8')
    resource_owner_secret = credentials[b'oauth_token_secret'][0].decode('utf-8')
    print(resource_owner_key)
    print(resource_owner_secret)

    # Authorize
    authorize_url = AUTHORIZE_URL + resource_owner_key
    print('Please go here and authorize: ' + authorize_url)
    verifier = input('Please input the verifier: ')
    oauth = OAuth1(CONSUMER_KEY,
                   client_secret=CONSUMER_SECRET,
                   resource_owner_key=resource_owner_key,
                   resource_owner_secret=resource_owner_secret,
                   verifier=verifier)

    # Finally, Obtain the Access Token
    r = requests.post(url=ACCESS_TOKEN_URL, auth=oauth)
    credentials = parse_qs(r.content)
    print(credentials)
    token = credentials[b'oauth_token'][0].decode('utf-8')
    secret = credentials[b'oauth_token_secret'][0].decode('utf-8')
    print(token)
    return token, secret
def return_informations(self, new_url):
    """
    Return:
        - Video title
        - Video qualities
        - Video types
        - Direct URLs
    """
    try:
        response = urlopen(new_url)
        data = response.read()
        # BUG FIX: decode the bytes payload; the original parse_qs(str(data))
        # produced "b'...'" and corrupted the first parsed key.
        info = parse_qs(data.decode('utf-8'))
        title = info['title'][0]
        stream_map = info['url_encoded_fmt_stream_map'][0]
        v_info = stream_map.split(",")
        quality, v_type, direct_url = [], [], []
        # BUG FIX: the per-stream loop now runs inside the try block —
        # v_info was undefined here when any step above failed.
        for video in v_info:
            item = parse_qs(video)
            quality.append(item['quality'][0])
            v_type.append(item['type'][0])
            direct_url.append(item['url'][0])
    except Exception:
        # exit_app is expected not to return; the return below is only
        # reached on success.
        self.exit_app()
    return title, quality, v_type, direct_url
def response_content(self, url, request):
    ''' Fake HTTP responses for use with HTTMock in tests. '''
    scheme, host, path, _, query, _ = urlparse(url.geturl())
    data_dirname = join(dirname(__file__), 'data')
    local_path = None
    endpoint = (host, path)
    if endpoint == ('www.carsonproperty.info', '/ArcGIS/rest/services/basemap/MapServer/1/query'):
        qs = parse_qs(query)
        body_data = parse_qs(request.body) if request.body else {}
        if qs.get('returnIdsOnly') == ['true']:
            local_path = join(data_dirname, 'us-ca-carson-ids-only.json')
        elif body_data.get('outSR') == ['4326']:
            local_path = join(data_dirname, 'us-ca-carson-0.json')
    if endpoint == ('www.carsonproperty.info', '/ArcGIS/rest/services/basemap/MapServer/1'):
        if parse_qs(query).get('f') == ['json']:
            local_path = join(data_dirname, 'us-ca-carson-metadata.json')
    if local_path:
        # Serve the canned fixture with a guessed Content-Type.
        type, _ = guess_type(local_path)
        with open(local_path, 'rb') as file:
            return response(200, file.read(), headers={'Content-Type': type})
    raise NotImplementedError(url.geturl())
def testProcessSLORequestInvalidValid(self):
    """
    Tests the process_slo method of the OneLogin_Saml2_Auth class
    Case Invalid Logout Request
    """
    settings_info = self.loadSettingsJSON()
    request_data = self.get_request()
    message = self.file_contents(join(self.data_path, 'logout_requests', 'logout_request_deflated.xml.base64'))
    request_data['get_data']['SAMLRequest'] = message
    auth = OneLogin_Saml2_Auth(request_data, old_settings=settings_info)
    # Non-strict mode: the request is processed and a SAMLResponse redirect
    # URL to the IdP SLO endpoint is produced without errors.
    target_url = auth.process_slo(True)
    parsed_query = parse_qs(urlparse(target_url)[4])  # index 4 = query string
    self.assertEqual(len(auth.get_errors()), 0)
    slo_url = settings_info['idp']['singleLogoutService']['url']
    self.assertIn(slo_url, target_url)
    self.assertIn('SAMLResponse', parsed_query)
    #self.assertNotIn('RelayState', parsed_query)
    # Strict mode: the same request must be rejected.
    auth.set_strict(True)
    auth.process_slo(True)
    # Fail due destination missmatch
    self.assertEqual(auth.get_errors(), ['invalid_logout_request'])
    # Back to non-strict: processing succeeds again.
    auth.set_strict(False)
    target_url_2 = auth.process_slo(True)
    parsed_query_2 = parse_qs(urlparse(target_url_2)[4])
    self.assertEqual(len(auth.get_errors()), 0)
    slo_url = settings_info['idp']['singleLogoutService']['url']
    self.assertIn(slo_url, target_url_2)
    self.assertIn('SAMLResponse', parsed_query_2)
def testLoginIsPassive(self):
    """
    Tests the login method of the OneLogin_Saml2_Auth class
    Case Logout with no parameters. A AuthN Request is built with IsPassive
    and redirect executed
    """
    settings_info = self.loadSettingsJSON()
    return_to = u'http://example.com/returnto'
    # No-op expression kept from the original source.
    settings_info['idp']['singleSignOnService']['url']
    # Default login(): IsPassive must NOT appear in the AuthnRequest.
    auth = OneLogin_Saml2_Auth(self.get_request(), old_settings=settings_info)
    target_url = auth.login(return_to)
    parsed_query = parse_qs(urlparse(target_url)[4])  # index 4 = query string
    sso_url = settings_info['idp']['singleSignOnService']['url']
    self.assertIn(sso_url, target_url)
    self.assertIn('SAMLRequest', parsed_query)
    request = compat.to_string(OneLogin_Saml2_Utils.decode_base64_and_inflate(parsed_query['SAMLRequest'][0]))
    self.assertNotIn('IsPassive="true"', request)
    # Explicit is_passive=False: still no IsPassive attribute.
    auth_2 = OneLogin_Saml2_Auth(self.get_request(), old_settings=settings_info)
    target_url_2 = auth_2.login(return_to, False, False)
    parsed_query_2 = parse_qs(urlparse(target_url_2)[4])
    self.assertIn(sso_url, target_url_2)
    self.assertIn('SAMLRequest', parsed_query_2)
    request_2 = compat.to_string(OneLogin_Saml2_Utils.decode_base64_and_inflate(parsed_query_2['SAMLRequest'][0]))
    self.assertNotIn('IsPassive="true"', request_2)
    # is_passive=True: IsPassive="true" must be present in the request.
    auth_3 = OneLogin_Saml2_Auth(self.get_request(), old_settings=settings_info)
    target_url_3 = auth_3.login(return_to, False, True)
    parsed_query_3 = parse_qs(urlparse(target_url_3)[4])
    self.assertIn(sso_url, target_url_3)
    self.assertIn('SAMLRequest', parsed_query_3)
    request_3 = compat.to_string(OneLogin_Saml2_Utils.decode_base64_and_inflate(parsed_query_3['SAMLRequest'][0]))
    self.assertIn('IsPassive="true"', request_3)
def response_content(self, url, request):
    """ Fake HTTP responses for use with HTTMock in tests. """
    scheme, host, path, _, query, _ = urlparse(url.geturl())
    data_dir = join(dirname(__file__), "data")
    # parse_qs is pure, so the query can be parsed once up front.
    qs = parse_qs(query)
    local_path = None
    if (host, path) == ("www.carsonproperty.info", "/ArcGIS/rest/services/basemap/MapServer/1/query"):
        posted = parse_qs(request.body) if request.body else {}
        if qs.get("returnIdsOnly") == ["true"]:
            local_path = join(data_dir, "us-ca-carson-ids-only.json")
        elif qs.get("returnCountOnly") == ["true"]:
            local_path = join(data_dir, "us-ca-carson-count-only.json")
        elif posted.get("outSR") == ["4326"]:
            local_path = join(data_dir, "us-ca-carson-0.json")
    elif (host, path) == ("www.carsonproperty.info", "/ArcGIS/rest/services/basemap/MapServer/1"):
        if qs.get("f") == ["json"]:
            local_path = join(data_dir, "us-ca-carson-metadata.json")
    if local_path:
        # Serve the canned fixture with a guessed Content-Type.
        mime, _ = guess_type(local_path)
        with open(local_path, "rb") as fp:
            return response(200, fp.read(), headers={"Content-Type": mime})
    raise NotImplementedError(url.geturl())
def decrypt_request(self, request, action="GET"):
    """
    Decrypts the request and extracts these information:

    :return path: full path without host:port (first and last / are removed)
    :return parts: list of query parts
    :return in_data: parsed query dict (GET) or JSON body (other methods)
    """
    o = urlparse.urlparse(request.get_path())
    path = o.path
    query = o.query
    # prepare query path: remove first and last '/' if exists
    # BUG FIX: use startswith/endswith guards — the original indexed
    # path[0]/path[-1] and raised IndexError on an empty path.
    if path.startswith('/'):
        path = path[1:]
    if path.endswith('/'):
        path = path[:-1]
    parts = str(path).split('/')
    in_data = None
    if action == "GET":
        in_data = urlparse.parse_qs(query, keep_blank_values=True)
    else:
        data = request.read_data()
        # BUG FIX: identity comparison with None (`is not None`, not `!=`).
        if data is not None:
            in_data = json.loads(str(data))
        else:
            in_data = urlparse.parse_qs(query, keep_blank_values=True)
    return (path, parts, in_data)
def get_filename(urllib_response):
    """
    Attempt to get the filename out of the response headers.
    Failing that, use the filename part of the URL.
    Sanitize the filename to make it safe to use locally.
    """
    cd_header = urllib_response.headers.get('Content-Disposition', '')
    _, params = cgi.parse_header(cd_header)
    split_url = urllib.parse.urlsplit(urllib_response.geturl())
    # IMPROVED: parse the query string once instead of up to four times.
    query = parse_qs(split_url.query) if split_url.query else {}
    if 'filename' in params:
        path = params['filename']
    elif 'file' in query:
        path = query['file'][0]
    elif 'filename' in query:
        path = query['filename'][0]
    else:
        path = split_url.path
    filename = posixpath.basename(path)
    filename = secure_filename(filename)
    return filename
def urlSimilarity(url1, url2):
    """Score how similar two URLs are (scheme, host, path prefix, query)."""
    first = parse.urlparse(url1)
    second = parse.urlparse(url2)
    score = 0
    if first.scheme == second.scheme:
        score += 5
    if first.netloc == second.netloc:
        score += 10
    # Count matching leading path components; stop at the first mismatch.
    for seg_a, seg_b in zip(first.path.split('/')[1:], second.path.split('/')[1:]):
        if seg_a != seg_b:
            break
        score += 1
    qs_a = parse.parse_qs(first.query)
    qs_b = parse.parse_qs(second.query)
    # One point per shared key, one more when the values match too.
    shared = qs_a.keys() & qs_b.keys()
    score += len(shared)
    score += sum(1 for key in shared if qs_a[key] == qs_b[key])
    return score
def authn(environ, session):
    """Verify username+password, set the SSO cookie, and dispatch an
    optional follow-up operation."""
    # verify the username+password
    if environ["REQUEST_METHOD"] == "POST":
        query = parse_qs(get_body(environ))
    else:
        # Assume environ["REQUEST_METHOD"] == "GET"
        query = parse_qs(environ["QUERY_STRING"])
    try:
        assert uma_as.PASSWD[query["login"][0]] == query["password"][0]
    except (KeyError, AssertionError):
        return Unauthorized(), {}
    #uid = uma_as.UID2EPPN[query["login"][0]]
    uid = query["login"][0]
    cval = {"user": uid, "authn": PASSWORD}
    headers = [CookieHandler.create_cookie("%s" % (cval,), "sso", COOKIE_NAME)]
    session["user"] = uid
    # Dispatch on the optional "operation" query parameter.
    dispatch = {
        "chose_permissions": lambda: chose_permissions(environ, session),
        "set_permission": lambda: set_permission(environ, session),
        "manage": lambda: manage(uid, headers),
    }
    try:
        op = query["operation"][0]
    except KeyError:
        pass
    else:
        handler = dispatch.get(op)
        if handler is not None:
            return handler()
    return Response(), {}
def do_GET(self):
    """Register a new player from ?username=...&ia=... and return the game
    state as JSON."""
    if self.path != "/":
        try:
            parsed = urlparse(self.path)
            # IMPROVED: parse the query string once instead of repeatedly.
            query = parse_qs(parsed.query)
            username = query['username'][0]
            ia = query['ia'][0]
            # FIX: the original print statement contained redacted
            # "******" tokens and did not compile; rebuilt from the
            # surrounding variables.
            print("Nouveau Joueur, Username= " + username + ", IA= " + ia)
            # Créer le joueur (create the player thread)
            thJoueur = PlayerThread(self.server.gameThread, username, ia)
            thJoueur.start()
        except KeyError:
            pass
    self.send_response(200)
    self.send_header('content-type', 'application/json')
    self.end_headers()
    # Send the html message
    self.server.gameThread.update()
    data = self.server.gameThread.data
    self.wfile.write(bytes(data, 'ascii'))
    return
def match(expected_str, actual_str):
    """Assert two URLs are equivalent, ignoring query-parameter order."""
    expected, actual = urlparse(expected_str), urlparse(actual_str)
    for attr in ('scheme', 'netloc', 'path'):
        assert getattr(expected, attr) == getattr(actual, attr)
    self.assertEqual(parse_qs(expected.query), parse_qs(actual.query))
def setup_oauth(self):
    """Authorize your app via identifier."""
    # Step 1: fetch a temporary request token.
    oauth = OAuth1(self.CONSUMER_KEY, client_secret=self.CONSUMER_SECRET)
    reply = requests.post(url=self.REQUEST_TOKEN_URL, auth=oauth)
    creds = parse.parse_qs(reply.content)
    owner_key = creds[b'oauth_token'][0].decode()
    owner_secret = creds[b'oauth_token_secret'][0].decode()

    # Step 2: let the user authorize the app and collect the PIN.
    authorize_url = self.AUTHORIZE_URL + owner_key
    print('Please go here and authorize: ' + authorize_url)
    verifier = input('Please input the verifier pin: ')
    oauth = OAuth1(self.CONSUMER_KEY,
                   client_secret=self.CONSUMER_SECRET,
                   resource_owner_key=owner_key,
                   resource_owner_secret=owner_secret,
                   verifier=verifier)

    # Step 3: exchange the verified request token for the access token.
    reply = requests.post(url=self.ACCESS_TOKEN_URL, auth=oauth)
    creds = parse.parse_qs(reply.content)
    return (creds[b'oauth_token'][0].decode(),
            creds[b'oauth_token_secret'][0].decode())
def do_POST(self):
    """ receives post, handles it """
    print(_('receiving POST...'))
    content_length = int(self.headers['Content-Length'])
    data_string = self.rfile.read(content_length).decode('UTF-8')
    # Acknowledge the request before processing it.
    self.send_response(200)
    message = bytes('OK', 'UTF-8')
    self.send_header("Content-type", "text")
    self.send_header("Content-length", str(len(message)))
    self.end_headers()
    self.wfile.write(message)
    print(_('connection closed'))
    # parse requested path + query string
    parsed = urlparse(self.path)
    print(_("incoming path: {}").format(parsed.path))
    # parse incoming data
    payload = parse_qs(data_string)
    print(_("payload {}").format(payload))
    self._handle_incoming(parsed.path, parse_qs(parsed.query), payload)
def serialize_raw_response(self):
    """Normalize self.raw_reponse into self._response.

    Handles three response shapes: JSON bodies, image bodies, and
    access-token query strings; anything else raises GraphAPIError.
    """
    # NOTE(review): attribute is spelled "raw_reponse" (sic) — it is set
    # elsewhere under that name, so it cannot be renamed here.
    response = self.raw_reponse
    headers = response.headers
    if 'json' in headers['content-type']:
        result = response.json()
    elif 'image/' in headers['content-type']:
        mimetype = headers['content-type']
        result = {"data": getattr(response, 'content', {}),
                  "mime-type": mimetype,
                  "url": response.url}
    elif "access_token" in parse_qs(response.text):
        query_str = parse_qs(response.text)
        if "access_token" in query_str:
            # Redundant re-check of the elif condition above; always true here.
            result = {"access_token": query_str["access_token"][0]}
            if "expires" in query_str:
                result["expires"] = query_str["expires"][0]
        else:
            # NOTE(review): unreachable — the outer elif already guaranteed
            # "access_token" is present; the pass before raise is dead code.
            pass
            raise GraphAPIError(response.json())
    else:
        raise GraphAPIError('Maintype was not text, image, or querystring')
    self._response = result
def test_correct_url_returned():
    """url_utm_ga must emit the same query parameters as the hand-built URL."""
    expected = ('www.mysite.com?utm_source=source&utm_medium=medium'
                '&utm_campaign=campaign&utm_content=content&utm_term=term')
    composed_url = url_utm_ga('www.mysite.com', 'source', 'medium',
                              'campaign', 'content', 'term')
    # Compare parsed query dicts so parameter order is irrelevant.
    assert parse_qs(urlparse(expected).query) == parse_qs(urlparse(composed_url).query)
def process_command(self):
    """Parse the request URL and dispatch to the first configured handler
    that claims the request; send BAD_REQUEST if none does."""
    self.url = urlparse.urlparse(self.path)
    self.reqpath = urlparse.unquote(self.url.path).decode('utf-8')
    # Decode query keys and values to unicode.
    # (Removed a dead assignment: self.query was set to the raw parse_qs
    # result and immediately overwritten below.)
    q = urlparse.parse_qs(self.url.query)
    self.query = {}
    for n in q:
        self.query[n.decode('utf-8')] = [y.decode('utf-8') for y in q[n]]
    self.process_ipv6_normalize()
    self.process_xff()
    self.dump_req()
    self.sql_conn = sql.UnladenSqlConn(self.server.sql_engine)
    for handler_name in self.server.config['httpd']['handlers']:
        # BUG FIX: the module cache was keyed with the string literal
        # 'handler_name', so every handler shared a single cache slot;
        # key on the actual handler name instead.
        if handler_name in self.handler_modules:
            handler_module = self.handler_modules[handler_name]
        else:
            handler_module = __import__('unladen.httpd.handlers.%s' % handler_name, fromlist=['unladen.httpd.handlers'])
            self.handler_modules[handler_name] = handler_module
        handler_claimed = False
        try:
            handler_instance = handler_module.UnladenRequestHandler(self)
            handler_claimed = handler_instance.process_request(self.reqpath)
        except Exception as e:
            self.logger.exception(e)
            self.send_error(httplib.INTERNAL_SERVER_ERROR, str(e))
            self.sql_conn.close()
            return
        if handler_claimed:
            self.sql_conn.close()
            return
    self.sql_conn.close()
    self.send_error(httplib.BAD_REQUEST)
def do_POST(self):
    """Serve a .pyhp page: collect POST (or query-string) args and render
    the requested file through parse_file."""
    global PATH
    context = {"Page": Page_class()}
    route = self.path.split('?', 1)
    if route[0] == "/":
        route[0] = "/index.pyhp"
    filename = PATH + route[0]
    print(filename)
    args = {}
    # Prefer the POST body; fall back to the URL query string.
    if 'Content-Length' in self.headers.keys():
        body = self.rfile.read(int(self.headers['Content-Length']))
        args = urlparse.parse_qs(body.decode('utf-8'))
    elif len(route) > 1:
        args = urlparse.parse_qs(route[1])
    try:
        with open(filename, "r") as fp:
            data = fp.read()
    except Exception:
        return self.handle_error(404, "file %s not found" % filename)
    self.send_response(200)
    #self.send_header("Content-type", "text/html")
    self.end_headers()
    page = context['Page']
    page.args = args
    page.client = self.client_address
    self.wfile.write(bytes(parse_file(data, context), "UTF-8"))
def setup_oauth():
    """Authorize your app via identifier; return (token, secret)."""
    # Request token
    oauth = OAuth1(CONSUMER_KEY, client_secret=CONSUMER_SECRET)
    r = requests.post(url=REQUEST_TOKEN_URL, auth=oauth)
    credentials = parse_qs(r.content)
    # BUG FIX: decode the byte values directly instead of slicing the str()
    # representation of a list of bytes (str([b'x'])[3:-2]), which is fragile
    # and silently returns garbage for unexpected shapes.
    resource_owner_key = credentials[b'oauth_token'][0].decode()
    resource_owner_secret = credentials[b'oauth_token_secret'][0].decode()

    # Authorize
    authorize_url = AUTHORIZE_URL + resource_owner_key
    print('Please go here and authorize: ' + authorize_url)
    verifier = input('Please input the verifier: ')
    oauth = OAuth1(CONSUMER_KEY,
                   client_secret=CONSUMER_SECRET,
                   resource_owner_key=resource_owner_key,
                   resource_owner_secret=resource_owner_secret,
                   verifier=verifier)

    # Finally, Obtain the Access Token
    r = requests.post(url=ACCESS_TOKEN_URL, auth=oauth)
    credentials = parse_qs(r.content)
    token = credentials[b'oauth_token'][0].decode()
    secret = credentials[b'oauth_token_secret'][0].decode()
    return token, secret
def __parse_phone(self, path):
    """
    Parse /phone requests.

    NOTE(review): the `path` parameter is never used; the original code
    reads self.path throughout, and that behavior is preserved here —
    confirm with callers before switching to the parameter.
    """
    if self.path == "/phone/reset":
        self.server.window.reset_phone()
        return self.REPLY_OK
    if self.path.startswith("/phone/add"):
        # IMPROVED: merged the two-step urlparse temp into one expression.
        params = urlparse.parse_qs(urlparse.urlparse(self.path).query)
        if "number" not in params:
            return "need-number"
        if "name" not in params:
            return "need-name"
        self.server.db.addPhoneNumber(params["number"][0], params["name"][0])
        self.server.conn.phone.reload_users()
        return self.REPLY_OK
    if self.path.startswith("/phone/del"):
        params = urlparse.parse_qs(urlparse.urlparse(self.path).query)
        if "number" not in params:
            return "need-number"
        self.server.db.removePhoneNumber(params["number"][0])
        self.server.conn.phone.reload_users()
        return self.REPLY_OK
    if self.path == "/phone/book":
        return json.dumps(self.server.db.getPhoneBook(), indent=4)
    return self.REPLY_ERR
def authenticate(self, code=None, redirect_uri=None):
    """Exchange a Facebook OAuth code for a user object.

    :return: the user on success, None on any failure (logged).
    """
    graph = ObservableGraphAPI()
    args = {
        'client_id': settings.FACEBOOK_APP_ID,
        'client_secret': settings.FACEBOOK_APP_SECRET,
        'redirect_uri': redirect_uri,
        'code': code
    }
    try:
        data = graph.get('/oauth/access_token', **args)
    except exceptions.FacebookError as e:
        message = "Facebook login failed %s" % e.message
        code_used_message = 'This authorization code has been used.'
        # A reused code is expected churn; log it at a lower level.
        if e.code == 100 and e.message == code_used_message:
            logger.info(message)
        else:
            logger.warning(message)
        return None
    except exceptions.FacepyError:
        logger.warning("Facebook login connection error")
        return None
    try:
        # IMPROVED: parse the response once instead of twice.
        parsed = urlparse.parse_qs(data)
        access_token = parsed['access_token'][-1]
        expires = parsed['expires'][-1]
    except KeyError as e:
        # Mask the secret before logging the failed exchange.
        args['client_secret'] = '*******%s' % args['client_secret'][-4:]
        logger.error(e, extra={'facebook_response': data, 'sent_args': args})
        return None
    expires_at = self._timestamp_to_datetime(expires)
    user = USER_FACTORY.get_user(access_token, expires_at)
    return user
def __init__(self, video_url):
    """Fetch stream metadata for a YouTube watch URL and build Stream objects.

    Raises RuntimeError for malformed URLs or videos it cannot handle.
    """
    infoUrl = 'https://www.youtube.com/get_video_info?video_id='
    try:
        vidid = re.search(r'v=([a-zA-Z0-9-_]*)', video_url).group(1)
    except:
        raise RuntimeError("bad video url")
    infoUrl += vidid + "&asv=3&el=detailpage&hl=en_US"
    opener = build_opener()
    # Spoof a desktop browser user agent for the info endpoint.
    ua = ("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64;"
          "Trident/5.0)")
    opener.addheaders = [('User-Agent', ua)]
    self.keywords = ""
    allinfo = parse_qs(decode_if_py3(opener.open(infoUrl).read()))
    self._setmetadata(allinfo)
    # Each comma-separated entry describes one downloadable stream.
    streamMap = allinfo['url_encoded_fmt_stream_map'][0].split(',')
    smap = [parse_qs(sm) for sm in streamMap]
    js = None
    if not smap[0].get("sig", ""):
        # vevo! — no plain signature: scrape the watch page for the player
        # config and fetch the html5 player JS needed to decode signatures.
        watchurl = "https://www.youtube.com/watch?v=" + vidid
        watchinfo = opener.open(watchurl).read().decode("UTF-8")
        match = re.search(r';ytplayer.config = ({.*?});', watchinfo)
        try:
            myjson = json.loads(match.group(1))
        except:
            raise RuntimeError('Problem handling this video')
        args = myjson['args']
        streamMap = args['url_encoded_fmt_stream_map'].split(",")
        html5player = myjson['assets']['js']
        js = opener.open(html5player).read().decode("UTF-8")
        smap = [parse_qs(sm) for sm in streamMap]
    self.streams = [Stream(sm, opener, self.title, js) for sm in smap]
def addUserIdent(self, info=False):
    """Parse *info* as a query string and delegate to gf.addUserIdent."""
    query = urlparse('url?' + info).query
    return gf.addUserIdent(parse_qs(query), HOSTclient, HOSTserv, self)
def getGame(self, info=False):
    """Parse *info* as a query string and delegate to gf.getGame."""
    query = urlparse('url?' + info).query
    return gf.getGame(parse_qs(query), self)
def endDelGame(self, info=False):
    """Forward the request cookie to gf, then delegate to gf.endDelGame."""
    gf.cookie = cherrypy.request.cookie
    query = urlparse('url?' + info).query
    return gf.endDelGame(parse_qs(query), self)
def savePassword(self, info=False):
    """Forward the request cookie to gf, then delegate to gf.savePassword."""
    gf.cookie = cherrypy.request.cookie
    query = urlparse('url?' + info).query
    return gf.savePassword(parse_qs(query), self)
def updateUser(self, info=False):
    """Forward the request cookie to gf, then delegate to gf.updateUser."""
    gf.cookie = cherrypy.request.cookie
    query = urlparse('url?' + info).query
    return gf.updateUser(parse_qs(query), self)
def confInsc(self, data=False):
    """Extract the raw text after the first '?' of the request query string
    and delegate to gf.confInsc."""
    raw = cherrypy.request.query_string
    # find() returns -1 when no '?' is present, so [-1 + 1:] keeps the
    # whole string — same behavior as the original.
    info = raw[raw.find('?') + 1:]
    param = parse_qs(urlparse('url?' + info).query)
    return gf.confInsc(param, HOSTclient, self)
def updUser(self, info=False):
    """Parse *info* as a query string and delegate to gf.updateUser."""
    query = urlparse('url?' + info).query
    return gf.updateUser(parse_qs(query), self)
def getClubParcTrous(self, info=False):
    """Parse *info* as a query string and delegate to gf.getClubParcTrous."""
    query = urlparse('url?' + info).query
    return gf.getClubParcTrous(parse_qs(query), self)
def searchResult(self, info=False):
    """Parse *info* as a query string and delegate to gf.searchResult."""
    query = urlparse('url?' + info).query
    return gf.searchResult(parse_qs(query), self)
def identUser(self, info=False, user=False, passw=False):
    """Authenticate a user, building the query string from user/passw when
    they are given.

    FIX: the original assignment contained redacted "******" tokens and did
    not compile; the query string is rebuilt from the parameters.
    """
    if user:
        info = "user=" + user + "&pass=" + passw
    cookieOut = cherrypy.response.cookie
    param = parse_qs(urlparse('url?' + info).query)
    return gf.authUser(param, self, cookieOut)
"""Chapter 1: Pythonic Thinking""" """Item 5: Write Helper Functions Instead of Complex Expressions""" from urllib.parse import parse_qs my_values = parse_qs('red=5&blue=0&green=', keep_blank_values=True) print('Red: ', my_values.get('red')) print('Green: ', my_values.get('green')) print('Opacity: ', my_values.get('opacity')) # For query string 'red=5&blue=0&green=' red = my_values.get('red', [''])[0] or 0 green = my_values.get('green', [''])[0] or 0 opacity = my_values.get('opacity', [''])[0] or 0 print(f'Red: {red!r}') print(f'Green: {green!r}') print(f'Opacity: {opacity!r}') def get_first_int(values, key, default=0): found = values.get(key, ['']) if found[0]: return int(found[0]) else: return default if __name__ == '__main__': green = get_first_int(my_values, 'green')
def setGolfGPS(self, info=False):
    """Forward the request cookie to gf, then delegate to gf.setGolfGPS."""
    gf.cookie = cherrypy.request.cookie
    query = urlparse('url?' + info).query
    return gf.setGolfGPS(parse_qs(query), self)
def get_all_query_parts(sas_uri):
    """Return the query parameters of *sas_uri* as a dict of value lists."""
    return parse.parse_qs(parse.urlsplit(sas_uri).query)
def modify_query(params):
    """Return the current request's query string with *params* merged in.

    BUG FIX: scalar values in *params* are wrapped in a single-element
    list; with doseq=True, urlencode would otherwise iterate a bare string
    and emit one pair per character (e.g. {'a': 'xy'} -> 'a=x&a=y').
    List/tuple values pass through unchanged, so existing callers keep
    their behavior.
    """
    query_params = parse_qs(flask.request.query_string.decode("utf-8"))
    for key, value in params.items():
        query_params[key] = value if isinstance(value, (list, tuple)) else [value]
    return urlencode(query_params, doseq=True)
def generate_batch_hubs(context, request):
    '''Search for the input params and return the trackhub text.

    Dispatches on the requested file (trackDb.txt / hub.txt / genomes.txt)
    from request.matchdict and builds the corresponding UCSC track hub
    payload from search results.
    '''
    results = {}
    txt = request.matchdict['txt']
    # search params arrive with ',,' as the pair separator
    param_list = parse_qs(request.matchdict['search_params'].replace(',,', '&'))

    if len(request.matchdict) == 3:
        # Should generate a HTML page for requests other than trackDb.txt
        if txt != TRACKDB_TXT:
            data_policy = '<br /><a href="http://encodeproject.org/ENCODE/terms.html">ENCODE data use policy</p>'
            return generate_html(context, request) + data_policy

        assembly = str(request.matchdict['assembly'])
        # BUG FIX: the original did `del FILE_QUERY['status']`, permanently
        # mutating the module-level dict (and raising KeyError on the second
        # such request). Work on a per-request copy instead.
        file_query = dict(FILE_QUERY)
        if 'status' in param_list:
            file_query.pop('status', None)
        params = dict(param_list, **file_query)
        results = []
        params['assembly'] = [assembly]

        # if files.file_format is a input param, keep only experiments that
        # actually carry a bigWig/bigBed file
        if 'files.file_format' in param_list:
            params['files.file_format'] = param_list['files.file_format']
            path = '/search/?%s' % urlencode(params, True)
            for result in request.embed(path, as_user=True)['@graph']:
                if 'files' in result:
                    for f in result['files']:
                        if f['file_format'] in BIGWIG_FILE_TYPES + BIGBED_FILE_TYPES:
                            results.append(result)
                            break
        else:
            path = '/search/?%s' % urlencode(params, True)
            results = request.embed(path, as_user=True)['@graph']

        # First five experiments are shown 'full', the rest 'hide'.
        trackdb = ''
        for i, experiment in enumerate(results):
            if i < 5:
                if i == 0:
                    trackdb = generate_trackDb(experiment, 'full', None)
                else:
                    trackdb = trackdb + NEWLINE + generate_trackDb(
                        experiment, 'full', None)
            else:
                trackdb = trackdb + NEWLINE + generate_trackDb(
                    experiment, 'hide', None)
        return trackdb
    elif txt == HUB_TXT:
        return NEWLINE.join(get_hub('search'))
    elif txt == GENOMES_TXT:
        path = '/search/?%s' % urlencode(param_list, True)
        results = request.embed(path, as_user=True)
        g_text = ''
        if 'assembly' in param_list:
            # explicit assemblies requested
            for assembly in param_list.get('assembly'):
                if g_text == '':
                    g_text = NEWLINE.join(get_genomes_txt(assembly))
                else:
                    g_text = g_text + 2 * NEWLINE + NEWLINE.join(
                        get_genomes_txt(assembly))
        else:
            # otherwise derive assemblies from the non-empty facet terms
            for facet in results['facets']:
                if facet['field'] == 'assembly':
                    for term in facet['terms']:
                        if term['doc_count'] != 0:
                            if g_text == '':
                                g_text = NEWLINE.join(
                                    get_genomes_txt(term['key']))
                            else:
                                g_text = g_text + 2 * NEWLINE + NEWLINE.join(
                                    get_genomes_txt(term['key']))
        return g_text
def test_can_show_notifications(
    client_request,
    logged_in_client,
    service_one,
    mock_get_notifications,
    mock_get_service_statistics,
    mock_get_service_data_retention,
    mock_has_no_jobs,
    user,
    extra_args,
    expected_update_endpoint,
    expected_limit_days,
    page_title,
    status_argument,
    expected_api_call,
    page_argument,
    expected_page_argument,
    to_argument,
    expected_to_argument,
    mocker,
    fake_uuid,
):
    """Verify the notifications page renders, links the JSON resource, and calls the API correctly.

    NOTE(review): the expected_*/page_*/to_* parameters are presumably
    supplied by a @pytest.mark.parametrize decorator not visible in this
    chunk — confirm against the full test module.
    """
    client_request.login(user(fake_uuid))
    # A 'to' search is submitted as a POST; plain views use GET.
    if expected_to_argument:
        page = client_request.post('main.view_notifications',
                                   service_id=SERVICE_ONE_ID,
                                   status=status_argument,
                                   page=page_argument,
                                   _data={'to': to_argument},
                                   _expected_status=200,
                                   **extra_args)
    else:
        page = client_request.get('main.view_notifications',
                                  service_id=SERVICE_ONE_ID,
                                  status=status_argument,
                                  page=page_argument,
                                  **extra_args)
    # First table row should show the mocked notification's details.
    text_of_first_row = page.select('tbody tr')[0].text
    assert '6502532222' in text_of_first_row
    assert ('template content' in text_of_first_row
            or 'template subject' in text_of_first_row)
    assert 'Delivered' in text_of_first_row
    assert page_title in page.h1.text.strip()
    # The page embeds the AJAX-update endpoint in a data attribute.
    path_to_json = page.find("div", {'data-key': 'notifications'})['data-resource']
    url = urlparse(path_to_json)
    assert url.path == '/services/{}/notifications{}'.format(
        SERVICE_ONE_ID,
        expected_update_endpoint,
    )
    query_dict = parse_qs(url.query)
    if status_argument:
        assert query_dict['status'] == [status_argument]
    if expected_page_argument:
        assert query_dict['page'] == [str(expected_page_argument)]
    # 'to' must never leak into the embedded resource URL.
    assert 'to' not in query_dict
    mock_get_notifications.assert_called_with(
        limit_days=expected_limit_days,
        page=expected_page_argument,
        service_id=SERVICE_ONE_ID,
        status=expected_api_call,
        template_type=list(extra_args.values()),
        to=expected_to_argument,
    )
    # The JSON endpoint must expose exactly these top-level keys.
    json_response = logged_in_client.get(
        url_for('main.get_notifications_as_json',
                service_id=service_one['id'],
                status=status_argument,
                **extra_args))
    json_content = json.loads(json_response.get_data(as_text=True))
    assert json_content.keys() == {
        'counts', 'notifications', 'service_data_retention_days'
    }
def test_it_includes_the_response_type_in_a_query_param(self, views):
    """The authorize redirect must carry response_type=code in its query string."""
    location = views.authorize().location
    parsed_query = parse_qs(urlparse(location).query)
    assert parsed_query["response_type"] == ["code"]
def parseParams(self, environ):
    """Parse the WSGI *environ* request parameters into this handler's state.

    Reads QUERY_STRING (id, page, searchType, searchTerm, alpha, news, ser)
    and REQUEST_URI/PATH_INFO, and sets the matching self.* attributes
    (id_value, type_value, slice_value, page_value, searchTerm, alpha,
    news/nl/np, ser_value, Wrapper, modulePath).
    """
    self.environ = environ
    # BUG FIX: qs previously stayed None when QUERY_STRING was absent,
    # so every later `'key' in qs` membership test raised TypeError.
    # Default to an empty dict instead.
    qs = {}
    if 'QUERY_STRING' in environ:
        qs = parse.parse_qs(environ['QUERY_STRING'])
    if 'REQUEST_URI' in environ:
        URI = environ['REQUEST_URI']
    else:
        URI = environ['PATH_INFO']
    # Choose web vs OPDS wrapper based on the URI prefix.
    if self.cfg.WEB_PREFIX in URI:
        self.Wrapper = self.webWrapper
        self.modulePath = os.path.join(self.cfg.WEB_PREFIX, self.moduleFile)
    if self.cfg.OPDS_PREFIX in URI:
        self.Wrapper = self.opdsWrapper
        self.modulePath = os.path.join(self.cfg.OPDS_PREFIX, self.moduleFile)
    self.modulePath = os.path.normpath(self.modulePath)
    if 'id' in qs:
        self.id_value = qs.get("id")[0]
    else:
        self.id_value = "0"
    # A numeric id encodes the type in its first two digits and the
    # slice in the remainder.
    if self.id_value.isdigit():
        if len(self.id_value) > 1:
            self.type_value = int(self.id_value[0:2])
        if len(self.id_value) > 2:
            self.slice_value = int(self.id_value[2:])
    if 'page' in qs:
        page = qs.get("page")[0]
        if page.isdigit():
            self.page_value = int(page)
    if 'searchType' in qs:
        searchType = qs.get("searchType")[0].strip()
        if searchType == 'books':
            self.type_value = 71
        if searchType == 'authors':
            self.type_value = 72
        if searchType == 'series':
            self.type_value = 73
    if 'searchTerm' in qs:
        self.searchTerm = qs.get("searchTerm")[0].strip()
        # 7 is the generic search type when no specific one was requested.
        if self.type_value != 71 and self.type_value != 72 and self.type_value != 73:
            self.type_value = 7
        self.slice_value = -1
        self.id_value = '%02d&searchTerm=%s' % (self.type_value, self.searchTerm)
    else:
        self.searchTerm = ''
    if 'alpha' in qs:
        salpha = qs.get("alpha")[0].strip()
        if salpha.isdigit():
            self.alpha = int(salpha)
    if 'news' in qs:
        self.news = 1
        self.nl = '&news=1'
        self.np = self.cfg.NEW_PERIOD
    if 'ser' in qs:
        ser = qs.get("ser")[0]
        if ser.isdigit():
            self.ser_value = int(ser)
def connect(self):
    """WebSocket connect handler: authenticate the scope user and join channel groups.

    Four connection roles are handled in order:
    anonymous-with-token (Digital Twin), experimenter, mediator, and a
    regular team participant. Finally, for any accepted session, previously
    logged chat messages are replayed to the connecting client.

    NOTE(review): indentation below is reconstructed from a whitespace-
    collapsed source; nesting was inferred from the control flow.
    """
    self.authenticated_user = None
    self.user = self.scope['user']
    self.userid = self.scope['user'].id
    self.username = self.scope['user'].username
    # channel-instance name -> Channel, in insertion order
    self.channels = OrderedDict([])
    st = None
    if self.user.is_anonymous:
        # Connection with auth token
        # Should only be Digital Twin right now
        params = parse_qs(self.scope['query_string'].decode('utf8'))
        auth_token = params.get('token', (None,))[0]
        if auth_token:
            user = Token.objects.get(key=auth_token).user
            if user:
                if user.profile.is_experimenter():
                    self.set_authenticated_user(user)
                    self.accept()
                    # Use simple twin channel for now
                    # TODO: expand this somehow with session and additional instance or something
                    # to allow multiple runs
                    async_to_sync(self.channel_layer.group_add)(
                        'twin',
                        self.channel_name,
                    )
    elif self.user.profile.is_experimenter():
        # Experimenter joins the per-team help channels and the session channel.
        params = parse_qs(self.scope['query_string'].decode('utf8'))
        teamId = params.get('teamId', (None,))[0]
        team = DesignTeam.objects.filter(id=teamId).first()
        if team:
            self.accept()
            help_channel = Channel.objects.filter(name="Help").first()
            st = SessionTeam.objects.filter(
                Q(session__status__in=Session.ACTIVE_STATES) & Q(team=team)).first()
            if st:
                user_profiles = Profile.objects.filter(team=team)
                for user_profile in user_profiles:
                    if not user_profile.is_experimenter():
                        team_user = user_profile.user
                        up = UserPosition.objects.filter(
                            Q(user=team_user) & Q(session=st.session)).first()
                        if up:
                            precheck = False
                            postcheck = False
                            user_checklist = UserChecklist.objects.filter(
                                session=st.session.id).filter(user=team_user).first()
                            if user_checklist:
                                precheck = user_checklist.precheck
                                postcheck = user_checklist.postcheck
                            # "@#@" is the field separator the client expects.
                            info_message = up.position.name + "@#@" + str(precheck) + "@#@" + str(postcheck)
                            channel_instance = str(help_channel.id) + "_" + str(team_user.id) + "___" + str(st.session.id)
                            self.channels[channel_instance] = help_channel
                            async_to_sync(self.channel_layer.group_add)(
                                channel_instance,
                                self.channel_name,
                            )
                            self.send(text_data=json.dumps({
                                'type': 'chat.info',
                                'message': info_message,
                                'sender': "System",
                                'channel': channel_instance
                            }))
                            # Send message over help channel that user has connected
                            async_to_sync(self.channel_layer.group_send)(
                                channel_instance, {
                                    'type': 'session.request',
                                    'message': "respond",
                                    'sender': "Experimenter",
                                    'channel': channel_instance
                                })
                # Don't include Setup channel for now
                #setup_channel = Channel.objects.filter(name="Setup").first()
                #if setup_channel:
                #    setup_instance = str(setup_channel.id) + "___" + str(st.session.id)
                #    self.channels[setup_instance] = setup_channel
                #    async_to_sync(self.channel_layer.group_add)(
                #        setup_instance,
                #        self.channel_name,
                #    )
                #    self.send(text_data=json.dumps({
                #        'type' : 'chat.info',
                #        'message' : "Setup",
                #        'sender' : "System",
                #        'channel' : setup_instance
                #    }))
                session_channel = Channel.objects.filter(name="Session").first()
                if session_channel:
                    session_instance = str(session_channel.id) + "___" + str(st.session.id)
                    self.channels[session_instance] = session_channel
                    async_to_sync(self.channel_layer.group_add)(
                        session_instance,
                        self.channel_name,
                    )
                    if st.session.status == Session.RUNNING:
                        # Report elapsed running time to the experimenter.
                        running_timer = SessionTimer.objects.filter(
                            session=st.session).filter(
                            timer_type=SessionTimer.RUNNING_START).first()
                        elapsed_seconds = 0
                        if running_timer:
                            current_time = datetime.now(timezone.utc)
                            running_timestamp = running_timer.timestamp
                            if running_timestamp:
                                time_difference = current_time - running_timestamp
                                elapsed_seconds = round(time_difference.total_seconds())
                        self.send(text_data=json.dumps({
                            'type': 'session.time',
                            'message': str(elapsed_seconds),
                            'sender': "System",
                            'channel': session_instance
                        }))
                    self.send(text_data=json.dumps({
                        'type': 'chat.info',
                        'message': "Session",
                        'sender': "System",
                        'channel': session_instance
                    }))
                    self.send(text_data=json.dumps({
                        'type': 'system.command',
                        'message': "init",
                        'sender': "System",
                        'channel': session_instance
                    }))
    #TODO: this should go away
    elif self.user.profile.is_mediator():
        # Mediator joins all structure channels of the first running session
        # of a team in their organization.
        org_team = DesignTeam.objects.filter(
            organization=self.user.profile.organization).first()
        if org_team:
            session_teams = SessionTeam.objects.filter(team=org_team)
            a_session = None
            for session_team in session_teams:
                if session_team.session.status == Session.RUNNING:
                    a_session = session_team.session
                    break
            if a_session:
                self.accept()
                channels = Channel.objects.filter(structure=a_session.structure)
                for channel in channels:
                    channel_instance = str(channel.id) + "___" + str(a_session.id)
                    self.channels[channel_instance] = channel
                if self.channels:
                    for instance in self.channels:
                        orig_channel = self.channels[instance]
                        async_to_sync(self.channel_layer.group_add)(
                            instance,
                            self.channel_name,
                        )
                        self.send(text_data=json.dumps({
                            'type': 'chat.info',
                            'message': orig_channel.name,
                            'sender': "System",
                            'channel': instance
                        }))
                session_channel = Channel.objects.filter(name="Session").first()
                if session_channel:
                    session_instance = str(session_channel.id) + "___" + str(a_session.id)
                    if session_instance:
                        self.channels[session_instance] = session_channel
                        self.send(text_data=json.dumps({
                            'type': 'system.command',
                            'message': "init",
                            'sender': "System",
                            'channel': session_instance
                        }))
    else:
        # Regular team participant.
        st = SessionTeam.objects.filter(
            Q(session__status__in=Session.ACTIVE_STATES) &
            Q(team=self.user.profile.team)).first()
        if st:
            help_channel = Channel.objects.filter(name="Help").first()
            help_instance = str(help_channel.id) + "_" + str(self.user.id) + "___" + str(st.session.id)
            # NOTE(review): up may be None here; up.position below would then
            # raise — presumably a UserPosition always exists for team members.
            up = UserPosition.objects.filter(
                Q(user=self.user) & Q(session=st.session)).first()
            if st.session.status == Session.RUNNING:
                channel_positions = ChannelPosition.objects.filter(position=up.position)
                user_channels = Channel.objects.filter(
                    id__in=channel_positions.values('channel'))
                for channel in user_channels:
                    channel_instance = str(channel.id) + "___" + str(st.session.id)
                    self.channels[channel_instance] = channel
                self.channels[help_instance] = help_channel
            elif st.session.status == Session.SETUP:
                #TODO: possibly add org level setting for including setup chat
                #setup_channel = Channel.objects.filter(name="Setup").first()
                #setup_instance = str(setup_channel.id) + "___" + str(st.session.id)
                #self.channels[setup_instance] = setup_channel
                self.channels[help_instance] = help_channel
            else:
                self.channels[help_instance] = help_channel
            session_channel = Channel.objects.filter(name="Session").first()
            if session_channel:
                session_instance = str(session_channel.id) + "___" + str(st.session.id)
                self.channels[session_instance] = session_channel
            dronebot_channel = Channel.objects.filter(name="DroneBot").first()
            show_dronebot = st.session.structure.name == "Extra"
            if dronebot_channel and show_dronebot:
                dronebot_instance = str(dronebot_channel.id) + "_" + str(self.user.id) + "___" + str(st.session.id)
                self.channels[dronebot_instance] = dronebot_channel
            self.accept()
            if up.position.role.name == "Process":
                mediator_channel = 'm_' + str(st.session.id)
                async_to_sync(self.channel_layer.group_add)(
                    mediator_channel,
                    self.channel_name,
                )
            if self.channels:
                for instance in self.channels:
                    orig_channel = self.channels[instance]
                    async_to_sync(self.channel_layer.group_add)(
                        instance,
                        self.channel_name,
                    )
                    self.send(text_data=json.dumps({
                        'type': 'chat.info',
                        'message': orig_channel.name,
                        'sender': "System",
                        'channel': instance
                    }))
                self.send(text_data=json.dumps({
                    'type': 'system.command',
                    'message': "init",
                    'sender': "System",
                    'channel': session_instance
                }))
                # Send message over help channel that user has connected
                if help_instance:
                    async_to_sync(self.channel_layer.group_send)(
                        help_instance, {
                            'type': 'session.response',
                            'message': "connected",
                            'sender': up.position.name,
                            'channel': help_instance
                        })
    if st:
        # Send out the old messages to this user
        chat_logs = DataLog.objects.filter(session=st.session).filter(
            type__startswith="chat: ").order_by('time')
        for chat_log in chat_logs:
            sender_name = ""
            senderPosition = UserPosition.objects.filter(
                session=st.session).filter(user=chat_log.user).first()
            if senderPosition:
                sender_name = senderPosition.position.name
            else:
                sender_name = "Experimenter"
            # Channel instance is encoded after the last '@' in the log type.
            channel_parse = chat_log.type.split('@')
            if len(channel_parse) > 1:
                channel_instance = channel_parse[len(channel_parse) - 1]
                self.send(text_data=json.dumps({
                    'type': 'chat.message',
                    'message': chat_log.action,
                    'sender': sender_name,
                    'channel': channel_instance
                }))
def add_youtube_playlist_to_database(url, title, thumbnail):
    """Extract the playlist id from *url*, register the playlist, then store its items."""
    parsed = parse_qs(urlparse(url).query, keep_blank_values=True)
    playlist_id = parsed["list"][0]
    database.add_playlist([(playlist_id, title, thumbnail)])
    database.add(fetch_playlist(url, playlist_id))
def test_conversation():
    """End-to-end OIDC RP conversation: WebFinger -> provider discovery ->
    client registration -> authorization -> access token -> userinfo.

    Two service contexts alternate roles to exercise the shared state store.
    """
    # Start from a clean persistent store.
    try:
        shutil.rmtree('db')
    except FileNotFoundError:
        pass
    # Alternate who's doing what
    service_context = build_service_context()
    service_context_2 = build_service_context()
    service = service_context.service
    service_2 = service_context_2.service
    # ======================== WebFinger ========================
    info = service['webfinger'].get_request_parameters(
        request_args={'resource': '*****@*****.**'})
    assert info['url'] == 'https://example.org/.well-known/webfinger?rel=http' \
                          '%3A%2F%2Fopenid.net%2Fspecs%2Fconnect%2F1.0%2Fissuer' \
                          '&resource=acct%3Afoobar%40example.org'
    webfinger_response = json.dumps({
        "subject": "acct:[email protected]",
        "links": [{
            "rel": "http://openid.net/specs/connect/1.0/issuer",
            "href": "https://example.org/op"
        }],
        "expires": "2018-02-04T11:08:41Z"
    })
    response = service['webfinger'].parse_response(webfinger_response)
    service['webfinger'].update_service_context(resp=response)
    # =================== Provider info discovery ====================
    info = service_context_2.service['provider_info'].get_request_parameters()
    assert info['url'] == 'https://example.org/op/.well-known/openid-configuration'
    # Canned OP metadata document played back as the discovery response.
    provider_info_response = json.dumps({
        "version": "3.0",
        "token_endpoint_auth_methods_supported": [
            "client_secret_post", "client_secret_basic", "client_secret_jwt",
            "private_key_jwt"
        ],
        "claims_parameter_supported": True,
        "request_parameter_supported": True,
        "request_uri_parameter_supported": True,
        "require_request_uri_registration": True,
        "grant_types_supported": [
            "authorization_code", "implicit",
            "urn:ietf:params:oauth:grant-type:jwt-bearer", "refresh_token"
        ],
        "response_types_supported": [
            "code", "id_token", "id_token token", "code id_token",
            "code token", "code id_token token"
        ],
        "response_modes_supported": ["query", "fragment", "form_post"],
        "subject_types_supported": ["public", "pairwise"],
        "claim_types_supported": ["normal", "aggregated", "distributed"],
        "claims_supported": [
            "birthdate", "address", "nickname", "picture", "website", "email",
            "gender", "sub", "phone_number_verified", "given_name", "profile",
            "phone_number", "updated_at", "middle_name", "name", "locale",
            "email_verified", "preferred_username", "zoneinfo", "family_name"
        ],
        "scopes_supported": [
            "openid", "profile", "email", "address", "phone",
            "offline_access", "openid"
        ],
        "userinfo_signing_alg_values_supported": [
            "RS256", "RS384", "RS512", "ES256", "ES384", "ES512", "HS256",
            "HS384", "HS512", "PS256", "PS384", "PS512", "none"
        ],
        "id_token_signing_alg_values_supported": [
            "RS256", "RS384", "RS512", "ES256", "ES384", "ES512", "HS256",
            "HS384", "HS512", "PS256", "PS384", "PS512", "none"
        ],
        "request_object_signing_alg_values_supported": [
            "RS256", "RS384", "RS512", "ES256", "ES384", "ES512", "HS256",
            "HS384", "HS512", "PS256", "PS384", "PS512", "none"
        ],
        "token_endpoint_auth_signing_alg_values_supported": [
            "RS256", "RS384", "RS512", "ES256", "ES384", "ES512", "HS256",
            "HS384", "HS512", "PS256", "PS384", "PS512"
        ],
        "userinfo_encryption_alg_values_supported": [
            "RSA1_5", "RSA-OAEP", "RSA-OAEP-256", "A128KW", "A192KW",
            "A256KW", "ECDH-ES", "ECDH-ES+A128KW", "ECDH-ES+A192KW",
            "ECDH-ES+A256KW"
        ],
        "id_token_encryption_alg_values_supported": [
            "RSA1_5", "RSA-OAEP", "RSA-OAEP-256", "A128KW", "A192KW",
            "A256KW", "ECDH-ES", "ECDH-ES+A128KW", "ECDH-ES+A192KW",
            "ECDH-ES+A256KW"
        ],
        "request_object_encryption_alg_values_supported": [
            "RSA1_5", "RSA-OAEP", "RSA-OAEP-256", "A128KW", "A192KW",
            "A256KW", "ECDH-ES", "ECDH-ES+A128KW", "ECDH-ES+A192KW",
            "ECDH-ES+A256KW"
        ],
        "userinfo_encryption_enc_values_supported": [
            "A128CBC-HS256", "A192CBC-HS384", "A256CBC-HS512", "A128GCM",
            "A192GCM", "A256GCM"
        ],
        "id_token_encryption_enc_values_supported": [
            "A128CBC-HS256", "A192CBC-HS384", "A256CBC-HS512", "A128GCM",
            "A192GCM", "A256GCM"
        ],
        "request_object_encryption_enc_values_supported": [
            "A128CBC-HS256", "A192CBC-HS384", "A256CBC-HS512", "A128GCM",
            "A192GCM", "A256GCM"
        ],
        "acr_values_supported": ["PASSWORD"],
        "issuer": OP_BASEURL,
        "jwks_uri": "{}/static/jwks_tE2iLbOAqXhe8bqh.json".format(OP_BASEURL),
        "authorization_endpoint": "{}/authorization".format(OP_BASEURL),
        "token_endpoint": "{}/token".format(OP_BASEURL),
        "userinfo_endpoint": "{}/userinfo".format(OP_BASEURL),
        "registration_endpoint": "{}/registration".format(OP_BASEURL),
        "end_session_endpoint": "{}/end_session".format(OP_BASEURL)
    })
    resp = service_2['provider_info'].parse_response(provider_info_response)
    # The JWKS fetch triggered by update_service_context is intercepted.
    with responses.RequestsMock() as rsps:
        _jwks_url = "{}/static/jwks_tE2iLbOAqXhe8bqh.json".format(OP_BASEURL)
        rsps.add("GET",
                 _jwks_url,
                 body=OP_KEYJAR.export_jwks_as_json(),
                 status=200,
                 adding_headers={"Content-Type": "application/json"})
        service_2['provider_info'].update_service_context(resp)
    # =================== Client registration ====================
    info = service['registration'].get_request_parameters()
    assert info['url'] == 'https://example.org/op/registration'
    _body = json.loads(info['body'])
    assert _body == {
        "application_type": "web",
        "response_types": ["code"],
        "contacts": ["*****@*****.**"],
        "jwks_uri": "https://example.com/rp/static/jwks.json",
        "redirect_uris": ["{}/authz_cb".format(RP_BASEURL)],
        'token_endpoint_auth_method': 'client_secret_basic',
        "grant_types": ["authorization_code"]
    }
    assert info['headers'] == {'Content-Type': 'application/json'}
    now = int(time.time())
    op_client_registration_response = json.dumps({
        "client_id": "zls2qhN1jO6A",
        "client_secret": "c8434f28cf9375d9a7",
        "registration_access_token": "NdGrGR7LCuzNtixvBFnDphGXv7wRcONn",
        "registration_client_uri":
            "{}/registration?client_id=zls2qhN1jO6A".format(RP_BASEURL),
        "client_secret_expires_at": now + 3600,
        "client_id_issued_at": now,
        "application_type": "web",
        "response_types": ["code"],
        "contacts": ["*****@*****.**"],
        "redirect_uris": ["{}/authz_cb".format(RP_BASEURL)],
        "token_endpoint_auth_method": "client_secret_basic",
        "grant_types": ["authorization_code"]
    })
    response = service['registration'].parse_response(
        op_client_registration_response)
    service['registration'].update_service_context(response)
    # =================== Authorization ====================
    STATE = 'Oh3w3gKlvoM2ehFqlxI3HIK5'
    NONCE = 'UvudLKz287YByZdsY3AJoPAlEXQkJ0dK'
    info = service_2['authorization'].get_request_parameters(request_args={
        'state': STATE,
        'nonce': NONCE
    })
    p = urlparse(info['url'])
    _query = parse_qs(p.query)
    op_authz_resp = {
        'state': STATE,
        'scope': 'openid',
        'code': 'Z0FBQUFBQmFkdFFjUVpFWE81SHU5N1N4N01',
        'iss': OP_BASEURL,
        'client_id': 'zls2qhN1jO6A'
    }
    _authz_rep = AuthorizationResponse(**op_authz_resp)
    _resp = service_2['authorization'].parse_response(
        _authz_rep.to_urlencoded())
    service_2['authorization'].update_service_context(_resp, key=STATE)
    # _item = service['authorization'].get_item(AuthorizationResponse,
    #                                           'auth_response', STATE)
    # =================== Access token ====================
    request_args = {
        'state': STATE,
        'redirect_uri': service_context.get('redirect_uris')[0]
    }
    info = service['accesstoken'].get_request_parameters(
        request_args=request_args)
    assert info['url'] == 'https://example.org/op/token'
    _qp = parse_qs(info['body'])
    assert _qp == {
        'grant_type': ['authorization_code'],
        'redirect_uri': ['https://example.com/rp/authz_cb'],
        'client_id': ['zls2qhN1jO6A'],
        'state': ['Oh3w3gKlvoM2ehFqlxI3HIK5'],
        'code': ['Z0FBQUFBQmFkdFFjUVpFWE81SHU5N1N4N01']
    }
    assert info['headers'] == {
        'Authorization': 'Basic '
                         'emxzMnFoTjFqTzZBOmM4NDM0ZjI4Y2Y5Mzc1ZDlhNw==',
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    # create the IdToken
    _jwt = JWT(OP_KEYJAR, OP_BASEURL, lifetime=3600, sign=True,
               sign_alg='RS256')
    payload = {
        'sub': '1b2fc9341a16ae4e30082965d537',
        'acr': 'PASSWORD',
        'auth_time': 1517736988,
        'nonce': NONCE
    }
    _jws = _jwt.pack(payload=payload, recv='zls2qhN1jO6A')
    _resp = {
        "state": "Oh3w3gKlvoM2ehFqlxI3HIK5",
        "scope": "openid",
        "access_token": "Z0FBQUFBQmFkdFF",
        "token_type": "Bearer",
        'expires_in': 600,
        "id_token": _jws
    }
    service_context.set('issuer', OP_BASEURL)
    _resp = service['accesstoken'].parse_response(json.dumps(_resp),
                                                 state=STATE)
    assert isinstance(_resp, AccessTokenResponse)
    assert set(_resp['__verified_id_token'].keys()) == {
        'iss', 'nonce', 'acr', 'auth_time', 'aud', 'iat', 'exp', 'sub'
    }
    service['accesstoken'].update_service_context(_resp, key=STATE)
    _item = service_2['authorization'].get_item(AccessTokenResponse,
                                               'token_response', STATE)
    assert set(_item.keys()) == {
        'state', 'scope', 'access_token', 'token_type', 'id_token',
        '__verified_id_token', 'expires_in', '__expires_at'
    }
    assert _item['token_type'] == 'Bearer'
    assert _item['access_token'] == 'Z0FBQUFBQmFkdFF'
    # =================== User info ====================
    info = service_2['userinfo'].get_request_parameters(state=STATE)
    assert info['url'] == 'https://example.org/op/userinfo'
    assert info['headers'] == {'Authorization': 'Bearer Z0FBQUFBQmFkdFF'}
    op_resp = {"sub": "1b2fc9341a16ae4e30082965d537"}
    _resp = service_2['userinfo'].parse_response(json.dumps(op_resp),
                                                state=STATE)
    service_2['userinfo'].update_service_context(_resp, key=STATE)
    assert isinstance(_resp, OpenIDSchema)
    assert _resp.to_dict() == {'sub': '1b2fc9341a16ae4e30082965d537'}
    _item = service['authorization'].get_item(OpenIDSchema, 'user_info', STATE)
    assert _item.to_dict() == {'sub': '1b2fc9341a16ae4e30082965d537'}
#!/usr/bin/env python3
# CGI helper for a Content-Security-Policy layout test: echoes the 'test'
# query parameter into a JS redirect toward echo-report.py.
import os
import sys
from urllib.parse import parse_qs

test = parse_qs(os.environ.get('QUERY_STRING', ''),
                keep_blank_values=True).get('test', [''])[0]

body = (
    'Content-Type: text/html\r\n\r\n'
    'if (window.testRunner) {\n'
    '    testRunner.dumpAsText();\n'
    '    testRunner.waitUntilDone();\n'
    '}\n'
    '\n'
    'window.onload = function () {\n'
    '    window.location = "/security/contentSecurityPolicy/resources/echo-report.py?test=' + test + '";\n'
    '}\n'
)
sys.stdout.write(body)
def get_query_dict(self, *args, **kwargs):
    """Parse this object's query string into a dict of lists.

    Extra positional/keyword arguments are forwarded to urllib's parse_qs.
    """
    query_string = self.query
    return parse_qs(query_string, *args, **kwargs)
def main_relay(**kwargs) -> dict:
    """Run an interactive ID-porten OAuth2/OIDC PKCE flow and return an
    Authorization header dict for the obtained access token.

    Opens a browser for user login, catches the redirect on a local HTTP
    server, exchanges the code at the token endpoint, and validates both
    the id token and the access token.
    """
    # clear BrowserRedirectHandler result
    BrowserRedirectHandler.result = None
    # disabled - idporten fails to register 127.0.0.1 and dynamic port numbers for now
    # # Bind to port 0, let the OS find an available port
    # server = HTTPServer(('127.0.0.1', 0), BrowserRedirectHandler)
    # Get the jwks from idporten (for token verification later)
    u = requests.get('https://{}/.well-known/openid-configuration'.format(
        AUTH_DOMAIN)).json()["jwks_uri"]
    jwks = requests.get(u).json()
    server = HTTPServer(('127.0.0.1', 12345), BrowserRedirectHandler)
    port = server.server_address[1]
    assert 0 < port < 65536
    client_id = '38e634d9-5682-44ae-9b60-db636efe3156'
    # Public clients need state parameter and PKCE challenge
    # https://difi.github.io/felleslosninger/oidc_auth_spa.html
    # https://tools.ietf.org/html/draft-ietf-oauth-browser-based-apps-00
    state = urlsafe_b64encode(random_bytes(16)).decode().rstrip("=")
    pkce_secret = urlsafe_b64encode(
        random_bytes(32)).decode().rstrip("=").encode()
    pkce_challenge = urlsafe_b64encode(sha256(pkce_secret).digest()).decode()
    nonce = "{}".format(int(time.time() * 1e6))
    u = 'https://{}/authorize'.format(AUTH_DOMAIN) + \
        quote(('?scope=openid'
               '&acr_values=Level3'
               '&client_id={}'
               '&redirect_uri=http://localhost:{}/token'
               '&response_type=code'
               '&state={}'
               '&nonce={}'
               '&resource={}'
               '&code_challenge={}'
               '&code_challenge_method=S256'
               '&ui_locales=nb'.format(client_id, port, state, nonce,
                                       API_AUDIENCE, pkce_challenge)),
              safe='?&=_')
    print(u)
    # Open web browser to get ID-porten authorization token
    webbrowser.open(u, new=0, autoraise=True)
    # Wait for callback from ID-porten
    while not hasattr(BrowserRedirectHandler.result, 'path'):
        server.handle_request()
    # Free the port, no more callbacks expected
    server.server_close()
    print("Authorization token received")
    # result.path is now something like
    # "/token?code=_Acl-x8H83rjhjhdefeefeef_xlbi_6TqauJV1Aiu_Q&state=oxw06LrtiyyWb7uj7umRSQ%3D%3D"
    # We must verify that state is identical to what we sent -
    # https://tools.ietf.org/html/rfc7636
    qs = parse_qs(urlparse(BrowserRedirectHandler.result.path).query)
    print("State was " + state)
    print("Query state length is " + str(len(qs['state'])))
    print("Query state is " + qs['state'][0])
    assert len(qs['state']) == 1 and qs['state'][0] == state
    # Use the authorization code to get access and id token from /token
    payload = {
        'grant_type': 'authorization_code',
        'code_verifier': pkce_secret,
        'code': qs['code'][0],
        'redirect_uri': 'http://localhost:{}/token'.format(port),
        'client_id': client_id
    }
    headers = {'Accept': 'application/json'}
    r = requests.post('https://{}/token'.format(AUTH_DOMAIN),
                      headers=headers,
                      data=payload)
    r.raise_for_status()
    js = r.json()
    assert js['token_type'] == 'Bearer'
    # Validate tokens according to https://tools.ietf.org/html/rfc7519#section-7.2
    # A list of 3rd party libraries for various languages on https://jwt.io/
    # python possibilites: pyjwt, python-jose, jwcrypto, authlib
    # We use python-jose here:
    jwt.decode(js['id_token'],
               jwks,
               algorithms=ALGORITHMS,
               issuer="https://" + AUTH_DOMAIN + "/",
               audience=client_id,
               access_token=js['access_token'])
    # Decode the id token payload manually to check the nonce round-trip.
    id_encoded = js['id_token'].split(".")[1]
    id_token = json.loads(urlsafe_b64decode(id_encoded + "==").decode())
    assert id_token['nonce'] == nonce
    # Also validate the access token separately, this is what we have to pass
    # on to our APIs
    jwt.decode(js['access_token'],
               jwks,
               algorithms=ALGORITHMS,
               issuer="https://" + AUTH_DOMAIN + "/",
               audience=API_AUDIENCE)
    at_encoded = js['access_token'].split(".", 3)[1]
    access_token = json.loads(urlsafe_b64decode(at_encoded + "==").decode())
    assert access_token['client_id'] == client_id
    assert access_token['token_type'] == "Bearer"
    assert access_token['acr'] == "Level3"
    assert access_token['aud'] == API_AUDIENCE
    print(
        "The token is good, expires in {} seconds".format(access_token['exp'] -
                                                          int(time.time())))
    print("\nBearer {}".format(js['access_token']))
    header = {'Authorization': 'Bearer ' + js['access_token']}
    return header
def verify_and_sign_url(self, url):
    """Re-sign *url*'s query args when they verify; otherwise return the error URL."""
    parsed = urlparse(url)
    query_args = parse_qs(parsed.query, encoding=self.encoding)
    if not self._verify(query_args):
        return self.error_url
    return self._sign(query_args)
def _get_parms(self, request): return parse_qs(urlparse(request.uri).query)
from urllib.parse import urlparse
from urllib.parse import parse_qs

# Set the URL you want to webscrape from
url = 'http://52.32.1.180:8080/SPLOT/SplotAnalysesServlet?action=select_model&enableSelection=false&&showModelDetails=true'

# Connect to the URL
response = requests.get(url)

# Parse HTML and save to BeautifulSoup object¶
soup = BeautifulSoup(response.text, "html.parser")

# Walk every link ('a' tag) and download each referenced model file.
for one_a_tag in soup.findAll('a'):
    if 'modelFile' not in str(one_a_tag):
        continue
    link = one_a_tag['href']
    modelName = parse_qs(urlparse(link).query)['modelFile']
    # Only fetch .xml models we do not already have, to make it sure that we
    # do not follow bizzare models and avoid redownloading already present models
    if modelName[0].endswith('.xml') and not os.path.exists(
            './downloads/' + modelName[0]):
        download_url = 'http://52.32.1.180:8080/SPLOT/models/' + modelName[0]
        print(download_url)
        urllib.request.urlretrieve(download_url, './downloads/' + modelName[0])
        # pause the code for a sec, this time seems appropiated.
        # But more testing is needed
        time.sleep(10)
def get_pagination_context(page,
                           pages_to_show=11,
                           url=None,
                           size=None,
                           justify_content=None,
                           extra=None,
                           parameter_name="page"):
    """Generate Bootstrap pagination context from a page object.

    Args:
        page: a paginator page (exposes .number and .paginator.num_pages —
            presumably a Django Page; confirm against callers).
        pages_to_show: width of the page-number window to render.
        url: base URL whose query string is merged with *extra*.
        size: "small"/"large" selects the Bootstrap size class.
        justify_content: "start"/"center"/"end" flex alignment class.
        extra: additional querystring ("a=1&b=2") merged into the URL.
        parameter_name: name of the page query parameter.

    Returns:
        dict of values consumed by the pagination template.

    Raises:
        ValueError: if pages_to_show is less than 1.
    """
    pages_to_show = int(pages_to_show)
    if pages_to_show < 1:
        raise ValueError(
            "Pagination pages_to_show should be a positive integer, you specified {pages_to_show}."
            .format(pages_to_show=pages_to_show))
    num_pages = page.paginator.num_pages
    current_page = page.number
    # Half of the window, used to center the window on the current page.
    half_page_num = int(floor(pages_to_show / 2))
    if half_page_num < 0:
        half_page_num = 0
    first_page = current_page - half_page_num
    if first_page <= 1:
        first_page = 1
    # pages_back is the "jump back" target; None when already at the start.
    if first_page > 1:
        pages_back = first_page - half_page_num
        if pages_back < 1:
            pages_back = 1
    else:
        pages_back = None
    last_page = first_page + pages_to_show - 1
    if pages_back is None:
        last_page += 1
    if last_page > num_pages:
        last_page = num_pages
    # pages_forward is the "jump ahead" target; None when the window
    # already reaches the final page (the window is then widened backwards).
    if last_page < num_pages:
        pages_forward = last_page + half_page_num
        if pages_forward > num_pages:
            pages_forward = num_pages
    else:
        pages_forward = None
        if first_page > 1:
            first_page -= 1
        if pages_back is not None and pages_back > 1:
            pages_back -= 1
        else:
            pages_back = None
    pages_shown = []
    for i in range(first_page, last_page + 1):
        pages_shown.append(i)
    # parse the url
    parts = urlparse(url or "")
    params = parse_qs(parts.query)
    # append extra querystring parameters to the url.
    if extra:
        params.update(parse_qs(extra))
    # build url again.
    url = urlunparse([
        parts.scheme, parts.netloc, parts.path, parts.params,
        urlencode(params, doseq=True), parts.fragment
    ])
    # Set CSS classes, see http://getbootstrap.com/components/#pagination
    pagination_css_classes = ["pagination"]
    if size == "small":
        pagination_css_classes.append("pagination-sm")
    elif size == "large":
        pagination_css_classes.append("pagination-lg")
    if justify_content == "start":
        pagination_css_classes.append("justify-content-start")
    elif justify_content == "center":
        pagination_css_classes.append("justify-content-center")
    elif justify_content == "end":
        pagination_css_classes.append("justify-content-end")
    return {
        "bootstrap_pagination_url": url,
        "num_pages": num_pages,
        "current_page": current_page,
        "first_page": first_page,
        "last_page": last_page,
        "pages_shown": pages_shown,
        "pages_back": pages_back,
        "pages_forward": pages_forward,
        "pagination_css_classes": " ".join(pagination_css_classes),
        "parameter_name": parameter_name,
    }
def post_login(self, user=None, token_result=None):
    """Post-login hook for the RAS IdP callback.

    Refreshes the user's GA4GH visas from RAS userinfo, stores the RAS
    refresh token, optionally runs an on-the-fly usersync from visas, and
    finally delegates to the parent class's post_login.

    Side effects: mutates ``user.ga4gh_visas_v1`` rows, commits/closes the
    shared DB session, and calls out to RAS and (conditionally) Arborist.
    """
    # TODO: I'm not convinced this code should be in post_login.
    # Just putting it in here for now, but might refactor later.
    # This saves us a call to RAS /userinfo, but will not make sense
    # when there is more than one visa issuer.

    # Clear all of user's visas, to avoid having duplicate visas
    # where only iss/exp/jti differ
    # TODO: This is not IdP-specific and will need a rethink when
    # we have multiple IdPs
    user.ga4gh_visas_v1 = []
    current_session.commit()

    encoded_visas = []

    try:
        # Visas come embedded in the userinfo response already fetched
        # during login (stored on flask.g), avoiding a second RAS call.
        encoded_visas = flask.current_app.ras_client.get_encoded_visas_v11_userinfo(
            flask.g.userinfo
        )
    except Exception as e:
        err_msg = "Could not retrieve visas"
        logger.error("{}: {}".format(e, err_msg))
        raise

    # Validate each visa independently; a bad visa is logged and skipped,
    # never fatal to the login flow.
    for encoded_visa in encoded_visas:
        try:
            # Do not move out of loop unless we can assume every visa has
            # same issuer and kid
            public_key = get_public_key_for_token(
                encoded_visa, attempt_refresh=True
            )
        except Exception as e:
            # (But don't log the visa contents!)
            logger.error(
                "Could not get public key to validate visa: {}. Discarding visa.".format(
                    e
                )
            )
            continue

        try:
            # Validate the visa per GA4GH AAI "Embedded access token" format rules.
            # pyjwt also validates signature and expiration.
            decoded_visa = validate_jwt(
                encoded_visa,
                public_key,
                # Embedded token must not contain aud claim
                aud=None,
                # Embedded token must contain scope claim, which must include openid
                scope={"openid"},
                issuers=config.get("GA4GH_VISA_ISSUER_ALLOWLIST", []),
                # Embedded token must contain iss, sub, iat, exp claims
                # options={"require": ["iss", "sub", "iat", "exp"]},
                # ^ FIXME 2021-05-13: Above needs pyjwt>=v2.0.0, which requires cryptography>=3.
                # Once we can unpin and upgrade cryptography and pyjwt, switch to above "options" arg.
                # For now, pyjwt 1.7.1 is able to require iat and exp;
                # authutils' validate_jwt (i.e. the function being called) checks issuers already (see above);
                # and we will check separately for sub below.
                options={
                    "require_iat": True,
                    "require_exp": True,
                },
            )

            # Also require 'sub' claim (see note above about pyjwt and the options arg).
            if "sub" not in decoded_visa:
                raise JWTError("Visa is missing the 'sub' claim.")
        except Exception as e:
            logger.error("Visa failed validation: {}. Discarding visa.".format(e))
            continue

        # Persist the validated visa (both the decoded ga4gh_visa_v1 fields
        # and the raw encoded token) for later authZ use.
        visa = GA4GHVisaV1(
            user=user,
            source=decoded_visa["ga4gh_visa_v1"]["source"],
            type=decoded_visa["ga4gh_visa_v1"]["type"],
            asserted=int(decoded_visa["ga4gh_visa_v1"]["asserted"]),
            expires=int(decoded_visa["exp"]),
            ga4gh_visa=encoded_visa,
        )
        current_session.add(visa)
        current_session.commit()

    # Store refresh token in db
    assert "refresh_token" in flask.g.tokens, "No refresh_token in user tokens"
    refresh_token = flask.g.tokens["refresh_token"]
    assert "id_token" in flask.g.tokens, "No id_token in user tokens"
    id_token = flask.g.tokens["id_token"]
    # NOTE(review): signature verification deliberately skipped here — the
    # id_token was just received from the token endpoint; confirm this is
    # intended before relying on its claims elsewhere.
    decoded_id = jwt.decode(id_token, verify=False)

    # Add RAS_REFRESH_EXPIRATION to iat to calculate refresh token expiration time
    issued_time = int(decoded_id.get("iat"))
    expires = config["RAS_REFRESH_EXPIRATION"]

    # User-defined RAS refresh token expiration time, passed as the
    # "upstream_expires_in" query param on the original redirect URL;
    # clamped by get_valid_expiration to the configured maximum.
    parsed_url = urlparse(flask.session.get("redirect"))
    query_params = parse_qs(parsed_url.query)
    if query_params.get("upstream_expires_in"):
        custom_refresh_expiration = query_params.get("upstream_expires_in")[0]
        expires = get_valid_expiration(
            custom_refresh_expiration,
            expires,
            expires,
        )

    flask.current_app.ras_client.store_refresh_token(
        user=user, refresh_token=refresh_token, expires=expires + issued_time
    )

    # Decide whether to parse visas into authZ now: config flag wins; when
    # the flag is unset (None), fall back to the client-supplied
    # "parse_visas" query param (default False).
    global_parse_visas_on_login = config["GLOBAL_PARSE_VISAS_ON_LOGIN"]
    usersync = config.get("USERSYNC", {})
    sync_from_visas = usersync.get("sync_from_visas", False)
    parse_visas = global_parse_visas_on_login or (
        global_parse_visas_on_login == None
        and (
            strtobool(query_params.get("parse_visas")[0])
            if query_params.get("parse_visas")
            else False
        )
    )
    # if sync_from_visas and (global_parse_visas_on_login or global_parse_visas_on_login == None):
    # Check if user has any project_access from a previous session or from
    # usersync AND if fence is configured to use visas as authZ source.
    # If not, do an on-the-fly usersync for this user to give them instant
    # access after logging in through RAS.
    # If GLOBAL_PARSE_VISAS_ON_LOGIN is true then we want to run it
    # regardless of whether or not the client sent parse_visas on request.
    if sync_from_visas and parse_visas and not user.project_access:
        # Close previous db sessions. Leaving it open causes a race
        # condition where we're viewing user.project_access while trying to
        # update it in usersync; not closing leads to partially updated
        # records.
        current_session.close()

        DB = os.environ.get("FENCE_DB") or config.get("DB")
        if DB is None:
            try:
                from fence.settings import DB
            except ImportError:
                pass

        arborist = ArboristClient(
            arborist_base_url=config["ARBORIST"],
            logger=get_logger("user_syncer.arborist_client"),
            authz_provider="user-sync",
        )
        dbGaP = os.environ.get("dbGaP") or config.get("dbGaP")
        if not isinstance(dbGaP, list):
            dbGaP = [dbGaP]

        sync = init_syncer(
            dbGaP,
            None,
            DB,
            arborist=arborist,
        )
        sync.sync_single_user_visas(user, current_session)

    super(RASCallback, self).post_login()
process_timeline() else: if g.args['eventid']: # we need to construct the url g.args['input'] = g.args[ 'portal'] + '/index.php?view=view_video&eid=' + g.args[ 'eventid'] + '&username='******'username'] + '&password='******'password'] g.out_file = g.args['eventid'] + '-analyzed.mp4' if (g.args['input'].lower().startswith(('http:', 'https:'))): parsed = urlparse.urlparse(g.args['input']) try: eid = urlparse.parse_qs(parsed.query)['eid'][0] fname = eid + '.mp4' g.args['eventid'] = eid except KeyError: fname = 'temp-analysis.mp4' if g.args['download']: utils.dim_print('Downloading video from url: {}'.format( g.args['input'])) urllib.request.urlretrieve(g.args['input'], fname) g.args['input'] = fname remove_downloaded = True g.out_file = 'analyzed-' + fname if g.args['find']: res = zmm_search.search_video(input_file=g.args['input'], out_file=g.out_file,