def do_GET(self):
    """Serve the interactive reverse-hash encoder page.

    Renders an HTML form; the 'input' query parameter is encoded word by
    word and one <select> of candidate tokens is emitted per hash value so
    the user can pick alternates.  The 'shortest' parameter switches the
    token ordering from most-frequent-first to shortest-first.
    """
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.end_headers()
    # self.path starts with "/?", so skip two characters before parsing.
    data = urlparse.parse_qs(self.path[2:]).get('input') or [""]
    shortest = urlparse.parse_qs(self.path[2:]).get('shortest')
    html = """ <html> <script> function update(){ document.getElementById('output').value = Array.prototype.slice.call( document.getElementsByTagName('select')).map(function (s) { return s.options[s.selectedIndex].value }).join(''); document.getElementById('length').textContent = document.getElementById('output').value.length } function submit() { document.getElementById('form').submit(); } </script> <body onload=update()> <tt>%s</tt> encoding from <tt>%s</tt> (%s tokens, %s unique)<hr> <form id=form>Text to encode: <input name=input type=text value="%s"><br> <small><input type=checkbox name=shortest %s onchange=submit()>Sort by length (instead of frequency)</small><hr> Select alternates: """ % (encodingSpec, corpusFilename, len(corpusTokens), len(set(corpusTokens)), data[0], 'checked' if shortest else '')
    if data != None:
        # BUG FIX: the original iterated over
        # ``changeWordSize( 8, bits ) < map( ord, data[0] )`` -- a comparison
        # of two values, not an encoding of the input.  The call form below
        # matches the other do_GET handlers in this file.
        for word in changeWordSize(map(ord, data[0]), 8, bits):
            # Sort key: token length when 'shortest' is requested, otherwise
            # the token's frequency within this hash bucket.
            keyFunc = len if shortest else model[word].count
            html += '<select onchange=update() title="%s words hash to value %s">' % (len(set(model[word])), word)
            for token in sorted(set(model[word]), key=keyFunc, reverse=(not shortest)):
                html += "<option value='%s'>%s</option>" % (token, token)
            html += "</select>"
    self.wfile.write(html + "<hr>Result (<span id=length></span> bytes):<br>" + "<textarea id=output cols=80 rows=5></textarea><br>(pipe through <tt>python -mbananaphone pipeline 'rh_decoder(\"%s\")'</tt> to decode it)" % (encodingSpec,))
# Test: async apy execution round-trip.  Mocks the RPC client and
# ResponseUnavailableViewMixing.verify, requests the exec URL with
# "&async", extracts the ``rid`` query parameter from the 301 Location to
# load the Transaction, fakes the worker's ``tout`` payload on it, then
# re-fetches the result and checks that an int duration was recorded.
# NOTE(review): ``from urllib2 import urlparse`` is Python 2 only.
def test_execute_async(self, call_rpc_client_mock): with patch.object(ResponseUnavailableViewMixing, 'verify', return_value=None) as mock_method: call_rpc_client_mock.return_value = True self.client1.login(username='******', password='******') from urllib2 import urlparse # get redirect response = self.client1.get(self.base1_apy1.get_exec_url()+"&async") self.assertEqual(301, response.status_code) queries = urlparse.urlparse(response['Location'])[4] rid = int(urlparse.parse_qs(queries)['rid'][0]) transaction = Transaction.objects.get(pk=rid) # get state (RUNNING) #response = self.client1.get(self.base1_apy1.get_exec_url()+"&rid=%s" % rid, HTTP_ACCEPT='application/xml') response = self.client1.get(self.base1_apy1.get_exec_url()+"&rid=%s" % rid) self.assertEqual(200, response.status_code) tout = {u'status': u'RUNNING', "url": "/fastapp/base/base1/exec/base1_apy1/?json=&rid="+str(rid), 'rid': rid, 'id': u'base1_apy1'} self.assertEqual(json.loads(response.content)['rid'], tout['rid']) # mock creation of response tout = {u'status': u'OK', u'exception': None, u'returned': u'{"content": "{\\"aaa\\": \\"aaa\\"}", "class": "XMLResponse", "content_type": "application/json"}', u'response_class': u'JSONResponse', 'time_ms': '74', 'rid': rid, 'id': u'base1_apy1'} transaction.tout = tout transaction.save() self.assertEqual(transaction.apy, self.base1_apy1) # get response response = self.client1.get(self.base1_apy1.get_exec_url()+"&rid=%s" % rid) self.assertEqual(200, response.status_code) # check transaction duration transaction = Transaction.objects.get(pk=rid) self.assertEqual(int, type(transaction.duration))
def get_details_from_url(url_portion):
    """Parse the query-string part of a details URL into a flat dict.

    Returns a dict mapping each parameter name to its first value.  If the
    argument carries no query string (e.g. no "details.php?" prefix) the
    original string is returned unchanged.
    """
    res = urlparse.parse_qs(url_portion.partition('?')[2])
    if not res:
        # might happen in the case of someone not submitting a URL with
        # "details.php?" at the start.  BUG FIX: return the raw input here;
        # the original fell through and crashed on str.keys().
        return url_portion
    for key in res:
        # parse_qs wraps every value in a list; keep only the first entry.
        res[key] = res[key][0]
    return res
def victimise(victim, uri):
    """Rebuild ``victim + uri`` with a re-encoded query string and fetch it.

    The query is round-tripped through parse_qs/urlencode so repeated
    parameters survive (urlencode doseq=True), then handed to the async
    http_client with the module-level ``fetch`` callback.
    """
    raw_url = victim + uri
    scheme, netloc, path, raw_query, fragment = urlparse.urlsplit(raw_url)
    query = urlparse.parse_qs(raw_query)
    url = urlparse.urlunsplit((scheme, netloc, path, urlencode(query, True), fragment))
    # print() call form works under Python 2 and 3 alike; the original
    # ``print url`` statement is a syntax error on Python 3.
    print(url)
    http_client.fetch(url, fetch, use_gzip=False)
# Resolve a waze.to short link: follow the redirect to recover the meeting
# token from the Location header's query string, fetch meeting and driver
# info JSON from the Waze mobile endpoints, then record the driver's ETA on
# success or deactivate the request otherwise.
# NOTE(review): httplib/urllib2 are Python 2 modules.
def process_waze_message(token): connection=httplib.HTTPConnection("waze.to") connection.request("GET","/"+token) response=connection.getresponse() meetingUrl=response.getheader("location") logging.debug(meetingUrl) querystring=urlparse.urlparse(meetingUrl).query token= urlparse.parse_qs(querystring)["token"][0] logging.debug(token) meetingInfoUrl="http://mobile-web.waze.com/SocialMediaServer/internal/getMeetingInfo?event_id="+ token logging.debug(meetingInfoUrl) meetingInfo=json.loads(urllib2.urlopen(meetingInfoUrl).read()) logging.debug(meetingInfo) driveUrl="http://mobile-web.waze.com/rtserver/web/PickUpGetDriverInfo?clientID=70a8b694c7&routeTimestamp=0&getUserInfo=true&token=" + token logging.debug(driveUrl) driverInfo=json.loads(urllib2.urlopen(driveUrl).read()) logging.debug(driverInfo) if driverInfo['status']=='ok': eta = driverInfo['eta'] insert_waze_request(token,eta) else: deactivate_waze_request(token)
# Fetch the next page of Facebook album photos referenced by *msg* (an
# 'awaiting' Message whose next_page holds a Graph API paging URL): read
# the 'after' cursor from that URL, download each returned photo into a
# Photo model via a NamedTemporaryFile, queue a new Message for the
# following page, and mark the current message 'done'.
# NOTE(review): ``qs.get('after')[0]`` raises TypeError if the paging URL
# has no 'after' parameter -- confirm the cursor is always present.
def fetch_photos_from_msg(self, album, msg=None): u = album.user token = get_access_token(u) graph = facebook.GraphAPI(token) if msg.status == 'awaiting': parts = urlparse.urlparse(msg.next_page) qs = urlparse.parse_qs(parts.query) after = qs.get('after')[0] photos = graph.get_object(album.fb_album_id + "/photos", fields='id,source', limit=2, after=after) new_next_page = photos.get('paging').get('next') new_msg = Message.objects.create(next_page=new_next_page, user=u, status='awaiting') for photo in photos.get('data'): img_temp = NamedTemporaryFile(delete=True) img_temp.write(urlopen(photo.get('source')).read()) img_temp.flush() photo_object = Photo.objects.create(title=photo.get('id'), description=photo.get('created_time'), album=album, file=File(img_temp)) pprint(photo_object.filename) self.stdout.write('Successfully fetched photo for source "%s"\n' % photo.get('source')) msg.status = 'done' msg.save() self.stdout.write('Finished this queue "%s"\n' % new_msg.next_page)
# Test: async apy execution round-trip (variant that also mocks the send
# client).  Triggers "&async", follows the 301's ``rid`` query parameter to
# the Transaction, polls the RUNNING state, fakes the worker's ``tout``
# payload, then re-fetches and checks an int duration was stored.
# NOTE(review): ``from urllib2 import urlparse`` is Python 2 only.
def test_execute_async(self, call_rpc_client_mock, send_client_mock): with patch.object(ResponseUnavailableViewMixing, 'verify', return_value=None) as mock_method: call_rpc_client_mock.return_value = True send_client_mock.return_value = True self.client1.login(username='******', password='******') from urllib2 import urlparse # get redirect response = self.client1.get(self.base1_apy1.get_exec_url()+"&async") self.assertEqual(301, response.status_code) queries = urlparse.urlparse(response['Location'])[4] rid = int(urlparse.parse_qs(queries)['rid'][0]) transaction = Transaction.objects.get(pk=rid) # get state (RUNNING) response = self.client1.get(self.base1_apy1.get_exec_url()+"&rid=%s" % rid, HTTP_ACCEPT='application/xml') self.assertEqual(200, response.status_code) tout = {u'status': u'RUNNING', "url": "/fastapp/base/base1/exec/base1_apy1/?json=&rid="+str(rid), 'rid': rid, 'id': u'base1_apy1'} self.assertEqual(json.loads(response.content)['rid'], tout['rid']) # mock creation of response tout = {u'status': u'OK', u'exception': None, u'returned': u'{"content": "{\\"aaa\\": \\"aaa\\"}", "class": "XMLResponse", "content_type": "application/json"}', u'response_class': u'JSONResponse', 'time_ms': '74', 'rid': rid, 'id': u'base1_apy1'} transaction.tout = tout transaction.save() self.assertEqual(transaction.apy, self.base1_apy1) # get response response = self.client1.get(self.base1_apy1.get_exec_url()+"&rid=%s" % rid) self.assertEqual(200, response.status_code) # check transaction duration transaction = Transaction.objects.get(pk=rid) self.assertEqual(int, type(transaction.duration))
# Batch person lookups against the Full Contact v2 API in chunks of 20
# (the API's per-batch maximum); each response URL's query string is parsed
# back to recover which contact datum (email/phone/twitter/facebookUsername)
# it answered, and a status log entry is built per person.
# NOTE(review): ``requests.post(...).json`` is read as an attribute, which
# only works on requests < 1.0 where .json was a property -- on modern
# requests it must be called.  ``print log`` is Python 2 syntax.
def batch_lookup(data_list, webhook=None, debug=False): """ Send requests to the Full Contact API and return the status logs of those requests. If debug is True, print the logs. Arguments: data_list -- the list of tuples of contact data in the form (type, data) webhook -- url with the callback function that handles responses from Full Contact debug -- boolean specifying whether some debug information should be printed to the console. """ # divide the data into chunks of 20 (max number for a single batch request) data_chunks = [] counter = 0 while counter < len(data_list): data_chunks.append(data_list[counter:counter+20]) counter += 20 # send the request for each chunk process_log = [] for chunk in data_chunks: request_urls = [] for data in chunk: # escape params if data: data = (quote(data[0].encode('utf-8')), quote(data[1].encode('utf-8'))) url = 'https://api.fullcontact.com/v2/person.json?%s=%s' % (data) if webhook: url += '&webhookUrl=' + webhook + '&webhookId=%s:%s' % (data) request_urls.append(url) post_data = simplejson.dumps({'requests' : request_urls}) data = requests.post( 'https://api.fullcontact.com/v2/batch.json', params={'apiKey': FULL_CONTACT_API_KEY}, headers={'content-type': 'application/json'}, data=post_data ).json for person_url, person_json in data['responses'].items(): log = {} params = urlparse.parse_qs(urlparse.urlparse(person_url).query) if params.get('email'): log['type'] = 'email' log['data'] = params['email'][0] elif params.get('phone'): log['type'] = 'phone' log['data'] = params['phone'][0] elif params.get('twitter'): log['type'] = 'twitter' log['data'] = params['twitter'][0] elif params.get('facebookUsername'): log['type'] = 'facebookUsername' log['data'] = params['facebookUsername'][0] else: log['type'] = 'Wrong data' log['data'] = person_url log['status'] = '%s - %s' % (person_json.get('status'), person_json.get('message')) process_log.append(log) if debug: print log return process_log
def get_qs(url):
    """Return the query string of *url* parsed into a dict of value lists.

    >>> get_qs("http://abc.com/a?a=123&b=%2b9")
    {'a': ['123'], 'b': ['+9']}

    Any parsing failure (e.g. a non-string argument) yields an empty dict.
    """
    try:
        return urlparse.parse_qs(urlparse.urlparse(url).query)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; the empty-dict fallback is kept.
        return {}
def fetch_and_parse(url):
    """Download *url* and extract the review text plus the Spotify item URI.

    The Spotify URI comes from the ``uri`` query parameter of the embed
    iframe's src inside the page's div.spotify element.
    """
    page = BeautifulSoup(requests.get(url).content)
    review_text = page.select('div.editorial')[0].text
    embed_iframe = page.select('div.spotify')[0].iframe
    embed_query = urlparse.urlparse(embed_iframe['src']).query
    item_uri = urlparse.parse_qs(embed_query)['uri'][0]
    return review_text, item_uri
def get_query_arg(qs, arg):
    """Return the value of *arg* from an already-split query string *qs*.

    >>> get_query_arg("a=123&b=%2b9", "b")
    '+9'

    Falls back to '' on any failure.
    """
    try:
        return safe_get(urlparse.parse_qs(qs), arg)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # still propagate.
        return ""
def format_link(link):
    """Render *link* (an object with .link and .name) as embeddable HTML.

    Facebook URLs become an fb-post div, YouTube URLs an embedded player
    iframe keyed on the ``v`` query parameter, and anything else a plain
    anchor tag.
    """
    parsed = urlparse.urlparse(link.link)
    host = parsed[1]
    if re.match(r'(www\.)?facebook\.com', host):
        return '<div class="fb-post" data-href="%s"></div>' % link.link
    if re.match(r'(www\.)?youtu(\.be|be\.com)', host):
        video_id = urlparse.parse_qs(parsed[4]).get('v', ['', ])[0]
        return '<iframe title="YouTube video player" width="480" height="390" src="http://www.youtube.com/embed/%s" frameborder="0" allowfullscreen></iframe>' % video_id
    return '<a href="%s">%s</a>' % (link.link, link.name)
# Debug view: dumps request details to stdout, reads a file path from the
# ``p`` query parameter nested inside the ``params`` GET argument, and
# serves that file (relative to plotp.path_prefix) as a JPEG response.
# NOTE(review): the open() handle is never closed and the ``print``
# statements make this Python 2 only.
def getparams(request): from urllib2 import urlparse print type(request) print dir(request) print request.get_full_path() res = urlparse.urlparse(request.GET.get('params','Not found')) query = urlparse.parse_qs(res.query) #data = plotp.picToBase64(query['p'][0]) #return HttpResponse('<img src="' + "data:image/jpg;base64," + data + '"/>') img = open(plotp.path_prefix + query['p'][0],'rb').read() return HttpResponse(img, mimetype='image/jpg')
def parseCookie(cookie):
    """Validate a signed session cookie of the form ``user_id=...&token=...``.

    Returns True only when the cookie's token equals
    encodeCookie(uid, password) for the stored user; False in every other
    case (missing fields, unknown user, token mismatch).
    """
    parsed = urlparse.parse_qs(cookie)
    # dict.has_key() was removed in Python 3; ``in`` is the equivalent
    # (and Python 2 compatible) membership test.
    if 'user_id' in parsed and 'token' in parsed:
        uid = parsed['user_id'][0]
        token = parsed['token'][0]
        userQuery = Users.get_by_id(int(uid))
        if userQuery is not None:
            pword = userQuery.pword
            if token == encodeCookie(uid, pword):
                return True
    return False
def do_GET(self):
    """Serve a minimal encoder form: one <select> of candidate tokens per
    encoded input word, most frequent token first."""
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.end_headers()
    parts = ["<html><form><input name=input type=text><br>"]
    submitted = urlparse.parse_qs(self.path[2:]).get('input')
    if submitted is not None:
        for word in changeWordSize(map(ord, submitted[0]), 8, bits):
            parts.append("<select>")
            ranked = sorted(set(model[word]), key=model[word].count, reverse=True)
            for token in ranked:
                parts.append("<option>%s</option>" % (token,))
            parts.append("</select>")
    self.wfile.write("".join(parts) + "\n")
def victimise(victim, request):
    """Replay a raw HTTP *request* (request line + body) against *victim*.

    The request line's URI is re-encoded through parse_qs/urlencode and,
    when a body is present, POSTed via the async http_client.  Errors are
    deliberately swallowed so one bad request cannot stop the replay loop.
    """
    try:
        lines = request.split('\n')
        uri = lines[0].split(' ')[1]
        body = lines[-1]
        raw_url = victim + uri
        scheme, netloc, path, raw_query, fragment = urlparse.urlsplit(raw_url)
        query = urlparse.parse_qs(raw_query)
        url = urlparse.urlunsplit((scheme, netloc, path, urlencode(query, True), fragment))
        if body:
            http_client.fetch(url, fetch, method="POST", body=body, use_gzip=False)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; the best-effort swallow of other errors is kept.
        pass
def serve(env, start_response):
    """Tiny WSGI key/value store.

    /get?key=K   -> the stored value for K
    /set?key=K&value=V -> stores V under K, returns 'ok'
    Anything else returns 'unknown error'.
    """
    path = env['PATH_INFO']
    args = urlparse.parse_qs(env['QUERY_STRING'])
    key = args['key'][0]
    start_response('200 OK', [('Content-type', 'text/plain')])
    if path == '/get':
        # BUG FIX: wrap the value in a list -- WSGI expects an iterable of
        # byte strings, and a bare string is streamed one character at a
        # time.  This also matches the list form used by /set below.
        return [data[key]]
    if path == '/set':
        value = args['value'][0]
        data[key] = value
        return ['ok']
    return ['unknown error']
def do_GET(self):
    """Respond with an HTML form plus a token <select> per encoded word,
    ordering each bucket's tokens by descending frequency."""
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.end_headers()
    page = "<html><form><input name=input type=text><br>"
    user_input = urlparse.parse_qs(self.path[2:]).get('input')
    if user_input is not None:
        for word in changeWordSize(map(ord, user_input[0]), 8, bits):
            options = sorted(set(model[word]), key=model[word].count, reverse=True)
            page += "<select>"
            page += "".join("<option>%s</option>" % (opt,) for opt in options)
            page += "</select>"
    self.wfile.write(page + "\n")
def parse_homepage(html):
    """Extract {site name: redirect-target list} from a profile homepage.

    Scans <li> entries under the first <dd class="websites"> element; each
    entry's real destination is read from the ``url`` query parameter of
    its anchor href.  Returns None when no website section (or no entries)
    is present.
    """
    soup = BeautifulSoup(html)
    sections = soup.find_all('dd', 'websites')
    if not sections:
        return None
    entries = sections[0].find_all('li')
    if not entries:
        return None
    result = {}
    for entry in entries:
        name = entry.text.strip()
        href = entry.a.get('href')
        params = urlparse.parse_qs(urlparse.urlparse(href).query)
        if 'url' in params:
            result[name] = params['url']
    return result
def get_details_from_url(url_portion):
    """
    >>> res = get_details_from_url("details.php?airport=SXF&id=102191022&flightno=4U+8129&direction=WB")
    >>> res['direction']
    'WB'
    >>> res['airport']
    'SXF'
    >>> res['id']
    '102191022'
    >>> res['flightno']
    '4U 8129'
    """
    # Note: the doctest above had an unterminated '102191022 literal; fixed.
    res = urlparse.parse_qs(url_portion.partition('?')[2])
    if not res:
        # might happen in the case of someone not submitting a URL with
        # "details.php?" at the start.  BUG FIX: return the raw input here;
        # the original fell through and crashed on str.keys().
        return url_portion
    for key in res:
        # parse_qs wraps every value in a list; keep only the first entry.
        res[key] = res[key][0]
    return res
def get_details_from_url(url_portion):
    """
    >>> res = get_details_from_url("details.php?airport=SXF&id=102191022&flightno=4U+8129&direction=WB")
    >>> res['direction']
    'WB'
    >>> res['airport']
    'SXF'
    >>> res['id']
    '102191022'
    >>> res['flightno']
    '4U 8129'
    """
    # Note: the doctest above had an unterminated '102191022 literal; fixed.
    res = urlparse.parse_qs(url_portion.partition("?")[2])
    if not res:
        # might happen in the case of someone not submitting a URL with
        # "details.php?" at the start.  BUG FIX: return the raw input here;
        # the original fell through and crashed on str.keys().
        return url_portion
    for key in res:
        # parse_qs wraps every value in a list; keep only the first entry.
        res[key] = res[key][0]
    return res
def youtube_id_by_url(url):
    """Extract the video id (the ``v`` query parameter) from a YouTube url.

    Raises ValidationError for malformed urls, non-YouTube hosts, or urls
    that carry no ``v`` parameter.
    """
    validator = URLValidator()
    try:
        validator(url)
    except ValidationError:
        msg = _("Please provide a valid url")
        raise ValidationError(msg)
    parsed = urlparse.urlparse(url)
    if 'youtube' not in parsed.netloc and \
            'y2u.be' not in parsed.netloc:
        msg = _("Only youtube videos are allowed.")
        raise ValidationError(msg)
    qs = urlparse.parse_qs(parsed.query)
    # BUG FIX: the original condition ``'v' not in qs and not qs['v']``
    # evaluated qs['v'] precisely when 'v' was absent, raising KeyError
    # instead of the intended ValidationError.  ``or`` short-circuits
    # correctly and also rejects an empty v=.
    if 'v' not in qs or not qs['v']:
        msg = _("Invalid youtube video url.")
        raise ValidationError(msg)
    id = qs['v'][0]
    return id
# Parse one QingboWX ranking page.  The request url's query carries the
# item type / province / city; the response body is JSON whose 'data'
# field holds an HTML table.  Each table row yields an item built from the
# last cell's onclick payload (item url, title, source, source detail,
# timestamp) plus abstracted read/like counts; any per-row parse failure
# (and an API-level error) yields None instead.
def _parse(self, html, url, encoding, status_code): parse_result = urlparse.urlparse(url) params = urlparse.parse_qs(parse_result.query) item_type = params['types'][0] proName = params.get('proName', '') province = proName[0] if proName else 'all' city = params['industry'][0] response_data = json.loads(html) error = int(response_data['error']) if error != 0: yield None else: show_html = response_data['data'] tree = BeautifulSoup(show_html, 'lxml') tr_list = tree.find_all('tr') for tr in tr_list: td_list = tr.find_all('td') overall_data = td_list[-1].a.get('onclick') read_num = td_list[2].get_text() read_num = self._get_abstract_num(read_num) like_num = td_list[3].get_text() like_num = self._get_abstract_num(like_num) try: tag_wxarc = QingboWX.TAG_WXARC.search( overall_data).groups()[0].split(',')[1:] item_url = tag_wxarc[0][1:-1] title = util.get_utf8_str(tag_wxarc[1][1:-1], encoding) source = tag_wxarc[2][1:-1] source_detail = util.get_utf8_str(tag_wxarc[3][1:-1], encoding) original_time = tag_wxarc[4][1:-1] #tag = QingboWX.TYPE_TAG_TABLE[item_type] tag = item_type key = "{}#{}".format(title, source) md5_key = md5(key).hexdigest() item = self._get_item(title, source, source_detail, item_url, original_time, province, city, like_num, read_num, tag, md5_key) yield item except Exception: yield None
# Resolve *url* against the local Plone site and return the referenced
# data: merges the url's query string into the current request form,
# traverses to the target (unwrapping ATBlob and File objects into data
# accessors), calls it, and on Unauthorized retries via self._data.
# Returns u'' when the url fails self.test() or cannot be traversed.
# NOTE(review): ``except Exception, err`` is Python 2 syntax.
def __call__(self, url, timeout=15): """ Get data and convert it to TSV if possible """ data = u'' if not self.test(url): return data ourl = urlparse.urlparse(url) query = ourl.query query = urlparse.parse_qs(query) site = getSite() request = getattr(site, 'REQUEST', None) if getattr(request, 'form', None) is not None: request.form.update(query) try: view = site.unrestrictedTraverse(ourl.path) except Exception as err: logger.warn("Invalid data url '%s'", url) return data if IATBlob.providedBy(view): query = {} view = view.getFile if getattr(view, 'meta_type', None) == 'File': query = {} oldView = view view = lambda: oldView.data try: try: data = view(**query) except TypeError: # got an unexpected keyword argument query = {} data = view() except Unauthorized: data = self._data(view, query) except Exception, err: logger.exception(err)
# Lazily resolve a cover image for the context: prefer an existing
# 'cover.png' child object; otherwise walk the daviz view's tabs for a
# fallback-image url (resolving embed-chart.svg references via the
# ``chart`` query parameter) and cache the first traversable hit in
# self._img.
def img(self): """ Image """ if self._img: return self._img objectIds = getattr(self.context, 'objectIds', lambda: []) if 'cover.png' in objectIds(): self._img = self.context.restrictedTraverse('cover.png') return self._img view = queryMultiAdapter((self.context, self.request), name='daviz-view.html') for tab in view.tabs: fallback = tab.get('fallback-image', None) if not fallback: continue url = urlparse.urlparse(fallback) query = urlparse.parse_qs(url.query) img = url.path.split('/')[-1] if isinstance(img, unicode): img = img.encode('utf-8') if img.startswith('embed-chart.svg'): img = query['chart'][0] + '.svg' img = self.context.restrictedTraverse(img, None) if not img: continue self._img = img break return self._img
# SSO authorization view (GET shows/verifies authorization, POST handles
# login then authorizes).  Validates the auth_callback url, manages the
# per-session 'key', logs the user out on key mismatches, and finally
# redirects back to the callback with the key appended to its query
# string (JSON url payload for AJAX callers).
def sso_authorize(request): """ Authorizes specific web sites to utilize an existing My.jobs account Required on HTTP GET: :auth_callback: GET parameter - Desired return url when authorization succeeds Required on HTTP POST: :auth_callback: POST parameter, copy of :auth_callback: GET parameter """ # Common between GET and POST, callback is required. auth_callback = request.GET.get('auth_callback') or \ request.POST.get('auth_callback') data = {'auth_callback': auth_callback} if auth_callback: auth_callback = unquote(auth_callback) auth_callback = urlparse.urlparse(auth_callback) if not auth_callback.netloc: # If the base url of the callback is not truthy, the url # must be malformed somehow raise Http404 else: raise Http404 if request.method == 'GET': # Initial view after being redirected from an external site data['auth_callback_short'] = auth_callback.netloc if not request.user.is_anonymous(): # Process logged in users first; Certain criteria may cause the # user to be logged out. good_key = request.session.get('key') test_key = request.GET.get('key') if good_key: # The current user already has a key available. if test_key: # The remote site has provided a key; the user has # potentially already authorized this site. if test_key == good_key: if request.user.authorizedclient_set.filter( site=auth_callback.netloc): # The user has authorized this site; Reset the # current session expiry, add the key to the # callback url, and redirect to it. request.session.set_expiry(None) q = urlparse.parse_qs(auth_callback.query) q.update({'key': good_key}) auth_callback = auth_callback._replace( query=urlencode(q)) return redirect(urlparse.urlunparse(auth_callback)) else: # The user at one time authorized this site but it # was revoked (potential future functionality?). # Ask for authorization again. return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) else: # The key provided does not match the user's key; Log # the user out. 
It may be a different user's key. logout(request) else: # No key was provided; Proceed to authorization normally. return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) else: # The user has no key; Create one. request.session['key'] = AuthorizedClient.create_key( request.user) if test_key: # A key was provided, but the current user did not have one # until now. Log out the user. logout(request) else: # No key was provided; Proceed to authorization. return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) # Only anonymous users can reach this point. This is not inside an else # block so that it can catch users who were logged out above. login_form = CustomAuthForm(auto_id=True) login_form.fields.pop('remember_me') data['login_form'] = login_form return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) else: # Form was posted. action = request.POST.get('action') if action == 'login': login_form = CustomAuthForm(data=request.POST, auto_id=False) login_form.fields.pop('remember_me') if login_form.is_valid(): user = authenticate( username=login_form.cleaned_data['username'], password=login_form.cleaned_data['password']) login(request, user) request.session.set_expiry(None) # User was logged in. Fall through to code common to # preauthenticated users else: if request.is_ajax(): return HttpResponse( json.dumps({'errors': login_form.errors.items()})) else: data['login_form'] = login_form data['auth_callback_short'] = auth_callback.netloc return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) # Ensure that an AuthorizedClient instance exists for the current user # and the site that is requesting authorization. request.user.authorizedclient_set.get_or_create( site=auth_callback.netloc) # Ensure that the current user has a key. 
if not request.session.get('key'): request.session['key'] = AuthorizedClient.create_key(request.user) # Add the user's key to the callback url and redirect to it. q = urlparse.parse_qs(auth_callback.query) q.update({'key': request.session.get('key')}) auth_callback = auth_callback._replace(query=urlencode(q)) auth_callback = urlparse.urlunparse(auth_callback) if request.is_ajax(): return HttpResponse(json.dumps({'url': auth_callback})) else: return redirect(auth_callback)
# Dispatch an HTTP request to the first handler whose url regex and HTTP
# method match self.path / self.command.  Handlers flagged require_auth
# must pass check_app_auth() against the request headers first; the final
# catch-all regex routes everything else to _default_handler, and a path
# that somehow escapes the table gets a 500.
# NOTE(review): the mutable default ``data={}`` is shared across calls --
# safe only while no caller mutates it; confirm before relying on it.
def _do_COMMON(self, data={}): handlers = [ { "url": r"/terminate", "action": self._do_terminate, "require_auth": False, }, { "url": r"/rest/usermanagement/1/authentication$", "action": self._auth_user, "require_auth": True, "method": "POST", }, { "url": r"/rest/usermanagement/1/session$", "action": self._get_session, "require_auth": True, "method": "POST", }, { "url": r"/rest/usermanagement/1/session/[A-Za-z0-9]{24}$", "action": self._validate_session, "require_auth": True, "method": "POST", }, { "url": r"/rest/usermanagement/1/session/[A-Za-z0-9]{24}$", "action": self._delete_session, "require_auth": True, "method": "DELETE", }, { "url": r"/rest/usermanagement/1/user/group/direct$", "action": self._get_groups, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/user/group/nested$", "action": self._get_groups, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/group/user/nested$", "action": self._get_group_users, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/user$", "action": self._get_user, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/user$", "action": self._add_user, "require_auth": True, "method": "POST", }, # Default handler for unmatched requests { "url": r".*", "action": self._default_handler, "require_auth": True, }, ] p = urlparse.urlparse(self.path) self.json_data = data self.get_params = urlparse.parse_qs(p.query) for handler in handlers: method = handler.get('method') if (re.match(handler['url'], p.path) and (not method or method == self.command)): # Authenticate application if required require_auth = handler.get('require_auth') if require_auth and not check_app_auth(self.headers): self._do_app_failed_auth() return # Run the handler's action handler['action']() return # An unhandled path was encountered. 
self.send_response(500) self.send_header("Content-type", "text/plain") self.end_headers() self.wfile.write('Oops, should not be here for {}'.format(self.path).encode('ascii'))
# SSO authorization view, variant with explicit Http404 messages.
# GET shows/verifies authorization; POST logs the user in and authorizes.
# Validates auth_callback, manages the per-session 'key', logs out on key
# mismatch, then redirects to the callback with the key in its query
# string (JSON url payload for AJAX callers).
def sso_authorize(request): """ Authorizes specific web sites to utilize an existing My.jobs account Required on HTTP GET: :auth_callback: GET parameter - Desired return url when authorization succeeds Required on HTTP POST: :auth_callback: POST parameter, copy of :auth_callback: GET parameter """ # Common between GET and POST, callback is required. auth_callback = request.GET.get('auth_callback') or \ request.POST.get('auth_callback') data = {'auth_callback': auth_callback} if auth_callback: auth_callback = unquote(auth_callback) auth_callback = urlparse.urlparse(auth_callback) if not auth_callback.netloc: # If the base url of the callback is not truthy, the url # must be malformed somehow raise Http404("mysignon.views.sso_authorize: bad callback") else: raise Http404("mysignon.views.sso_authorize: no callback") if request.method == 'GET': # Initial view after being redirected from an external site data['auth_callback_short'] = auth_callback.netloc if not request.user.is_anonymous(): # Process logged in users first; Certain criteria may cause the # user to be logged out. good_key = request.session.get('key') test_key = request.GET.get('key') if good_key: # The current user already has a key available. if test_key: # The remote site has provided a key; the user has # potentially already authorized this site. if test_key == good_key: if request.user.authorizedclient_set.filter( site=auth_callback.netloc): # The user has authorized this site; Reset the # current session expiry, add the key to the # callback url, and redirect to it. request.session.set_expiry(None) q = urlparse.parse_qs(auth_callback.query) q.update({'key': good_key}) auth_callback = auth_callback._replace( query=urlencode(q)) return redirect(urlparse.urlunparse(auth_callback)) else: # The user at one time authorized this site but it # was revoked (potential future functionality?). # Ask for authorization again. 
return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) else: # The key provided does not match the user's key; Log # the user out. It may be a different user's key. logout(request) else: # No key was provided; Proceed to authorization normally. return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) else: # The user has no key; Create one. request.session['key'] = AuthorizedClient.create_key( request.user) if test_key: # A key was provided, but the current user did not have one # until now. Log out the user. logout(request) else: # No key was provided; Proceed to authorization. return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) # Only anonymous users can reach this point. This is not inside an else # block so that it can catch users who were logged out above. login_form = CustomAuthForm(auto_id=True) login_form.fields.pop('remember_me') data['login_form'] = login_form return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) else: # Form was posted. action = request.POST.get('action') if action == 'login': login_form = CustomAuthForm(data=request.POST, auto_id=False) login_form.fields.pop('remember_me') if login_form.is_valid(): user = authenticate( username=login_form.cleaned_data['username'], password=login_form.cleaned_data['password']) login(request, user) request.session.set_expiry(None) # User was logged in. Fall through to code common to # preauthenticated users else: if request.is_ajax(): return HttpResponse(json.dumps( {'errors': login_form.errors.items()})) else: data['login_form'] = login_form data['auth_callback_short'] = auth_callback.netloc return render_to_response('mysignon/sso_auth.html', data, RequestContext(request)) # Ensure that an AuthorizedClient instance exists for the current user # and the site that is requesting authorization. 
request.user.authorizedclient_set.get_or_create(site=auth_callback.netloc) # Ensure that the current user has a key. if not request.session.get('key'): request.session['key'] = AuthorizedClient.create_key(request.user) # Add the user's key to the callback url and redirect to it. q = urlparse.parse_qs(auth_callback.query) q.update({'key': request.session.get('key')}) auth_callback = auth_callback._replace(query=urlencode(q)) auth_callback = urlparse.urlunparse(auth_callback) if request.is_ajax(): return HttpResponse(json.dumps({'url': auth_callback})) else: return redirect(auth_callback)
def prepare(self):
    """Parse the request path and cache its query parameters.

    Sets self.url (via parse_path) and self.url_params (dict of value
    lists from the url's query string) for later handlers.
    """
    self.url = parse_path(self.path)
    self.url_params = urlparse.parse_qs(self.url.query)
    # Dead trailing ``pass`` removed -- the body is already non-empty.
# HTTP helper built on urllib2: installs redirect / basic-auth openers,
# normalises headers and cookies, merges ``params`` into the url's
# existing query string (urlencode doseq=True), gunzips the response when
# the server says so, and returns raw text or parsed JSON depending on
# content_type ('' / {} and a notification on HTTPError/URLError).
def request(method, url, content_type, data=None, params=None, headers=None, cookies=None, auth=None, redirection=True, timeout=60): """Creates a new HTTP request and processes it. :param method: the type of request to be created (``GET`` or ``POST``) :type method: ``str``. :param url: the url of the request. :type url: ``str``. :param content_type: the content type to be returned (``raw`` or ``json``) :type content_type: ``str``. :param data: the data to be posted. :type data: ``any``. :param params: mapping of url parameters. :type params: :class:`dict`. :param headers: the headers of the request. :type headers: :class:`dict`. :param cookies: the cookies of the request. :type cookies: :class:`dict`. :param auth: the authentication information to be used. :type auth: :class:`dict`. :param redirection: a flag indicating whether redirection is allowed or not. :type redirection: ``boolean``. :param timeout: a timeout for the request. :type timeout: ``int``. :return: the content obtained from executing the request. :rtype: ``str`` or ``json``. 
""" openers = [] if not redirection: openers.append(NoRedirectHttpHandler()) if auth: manager = HTTPPasswordMgrWithDefaultRealm() manager.add_password(None, url, auth['username'], auth['password']) openers.append(HTTPBasicAuthHandler(manager)) opener = build_opener(*openers) install_opener(opener) headers = headers or {} if cookies: for cookie in cookies.keys(): headers['Cookie'] = "{0}={1}".format(cookie, cookies[cookie]) if 'user-agent' not in headers: headers['user-agent'] = 'Alfred-Workflow/1.17' encodings = [s.strip() for s in headers.get('accept-encoding', '').split(',')] if 'gzip' not in encodings: encodings.append('gzip') headers['accept-encoding'] = ', '.join(encodings) if method == 'POST' and not data: data = '' if data and isinstance(data, dict): data = urlencode(format_headers(data)) headers = format_headers(headers) if isinstance(url, unicode): url = url.encode('utf-8') if params: scheme, netloc, path, query, fragment = urlparse.urlsplit(url) if query: url_params = urlparse.parse_qs(query) url_params.update(params) params = url_params query = urlencode(format_headers(params), doseq=True) url = urlparse.urlunsplit((scheme, netloc, path, query, fragment)) try: response = urlopen(Request(url, data, headers), timeout=timeout) response_headers = response.info() content = response.read() if 'gzip' in response_headers.get('content-encoding', '') \ or 'gzip' in response_headers.get('transfer-encoding', ''): content = unzip(content) if content_type.lower() == 'json': return json.loads(content, 'utf-8') return content except (HTTPError, URLError): send_notification('Workflow', 'Error while calling {0}'.format(url)) if content_type.lower() == 'json': return {} return ''
# Streaming proxy handler: fetches the ``url`` query parameter with
# browser-like headers (dropping icy-metadata/range from the client) and
# relays the response.  For m3u8 playlists (apple mpegurl / x-mpegurl),
# every segment, #EXT-X-KEY encryption-key URI and #EXT-X-MEDIA URI is
# rewritten to route back through http://localhost:1704/?url=... so all
# follow-up traffic stays on this proxy.
def _handle(self, is_get): debug('#\n' * 30) q = urlparse.parse_qs(urlparse.urlparse(self.path).query) url = q["url"][0] host = urlparse.urlsplit(url) headers = { "user-agent": USER_AGENT, "accept": "*/*", "host": host.netloc, "connection": "keep-alive", "keep-alive": "timeout=5, max=1000", } for h in self.headers: if h.lower() in ["icy-metadata", "range"] or h in headers: continue headers[h] = self.headers[h] debug("HEADERS: %s\n" % headers) debug("%s\n" % url) s = cache[url] if url in cache else requests.Session() res = s.get(url, headers=headers) debug("Response length: %d\n" % len(res.content)) debug("Content Type: %s\n" % res.headers["Content-Type"]) debug("Reponse Code: %s\n" % res.status_code) body = res.content url = "%s://%s/" % (host.scheme, host.netloc) if "application/vnd.apple.mpegurl" in res.headers['Content-Type'] or \ "audio/x-mpegurl" in res.headers["Content-Type"]: # it's a playlist. Route to our proxy server. out = [] for line in body.splitlines(False): if line.startswith("http"): line = "http://localhost:1704/?url=" + urllib.quote_plus( line) elif line and not line.startswith("#EXT"): line = "http://localhost:1704/?url=" + urllib.quote_plus( url + "/" + line) elif line.startswith("#EXT-X-KEY"): # reroute the encryption key url m = uri_pat.search(line) if m: line = "%sURI=\"http://localhost:1704/?url=%s\"" % ( m.group(1), urllib.quote_plus(m.group(2))) elif line.startswith("#EXT-X-MEDIA"): m = uri_pat.search(line) if m: uri = urllib.quote_plus(url + "/" + m.group(2)) line = "%sURI=\"http://localhost:1704/?url=%s\"" % ( m.group(1), uri) out.append(line) self.send(res, '\n'.join(out), is_get) else: self.send(res, body, is_get) debug("EXIT\n")
def process_batch(sm_account_id, graph, interactions, batch_requests,
                  p_session, processed_interactions=None, cutoff=None):
    """
    Send batch requests to FB, collect the results and prepare the next
    set of batch requests for data corresponding to pagination.  Calls
    itself recursively until all posts in the given period are fetched.

    :param sm_account_id: id of the social-media account being collected
    :param graph: Graph API client exposing ``request``
    :param interactions: iterable of interaction objects to persist
    :param batch_requests: list of dicts with 'method'/'relative_url'
        (optionally 'parent_id') describing the requests to issue
    :param p_session: SQLAlchemy session used to merge interactions
    :param processed_interactions: number of interactions already processed
    :param cutoff: stop collection if processed_interactions exceeds cutoff
    :return: None
    """
    # Persist this round of interactions inside one transaction.
    with transaction.manager:
        for interaction in interactions:
            p_session.merge(interaction)

    # Stop when nothing is left to fetch or the cutoff has been reached.
    # BUGFIX: guard ``cutoff is not None`` — comparing against a None
    # cutoff raised TypeError whenever a count was passed without one.
    if not batch_requests or (processed_interactions and cutoff is not None
                              and processed_interactions >= cutoff):
        return

    # Facebook allows at most 50 sub-requests per batch call.
    MAX_BATCH_SIZE = 50
    batch_requests_p = [{
        'method': req.get('method'),
        'relative_url': req.get('relative_url')
    } for req in batch_requests]

    batch_data = []
    interactions_new = set()
    batch_requests_new = []
    for i in range(math.ceil(len(batch_requests_p) / MAX_BATCH_SIZE)):
        # TODO handle connection error: attempt retries
        try:
            # BUGFIX: the previous upper bound (i*MAX + MAX-1) is an
            # exclusive slice end, so it silently dropped the 50th
            # request of every full chunk.
            chunk = batch_requests_p[i * MAX_BATCH_SIZE:
                                     (i + 1) * MAX_BATCH_SIZE]
            batch_req = json.dumps(chunk, indent=1)
            batch_data += graph.request("", post_args={'batch': batch_req})
        except ConnectionError:
            logger.exception(
                'unable to process batch request \n:{}'.format(batch_req))

    for req, batch_response in zip(batch_requests, batch_data):
        parent_id = req.get('parent_id')
        if 'body' in batch_response:
            batch_response_data = json.loads(batch_response['body'])
            if 'error' in batch_response_data and batch_response_data[
                    'error'].get('code') == 1:
                # Request failure: 'Please reduce the amount of data you
                # are asking for, then retry your request' — retry the
                # feed request with half the previous page limit.
                error_url = req.get('relative_url')
                parse_result = urlparse(error_url)
                # BUGFIX: ``urlparse`` is used as a *function* throughout
                # this block (see the paging branch below), so
                # ``urlparse.parse_qs`` could never work; import the
                # sibling helper locally instead.
                from urllib.parse import parse_qs
                query_data = parse_qs(parse_result.query)
                old_limit = query_data.get('limit')[0]
                sm_account_id = parse_result.path.split("/")[2]
                new_limit = int(float(old_limit) / 2)
                new_req = get_feed_request(sm_account_id, limit=new_limit)
                batch_requests_new.append(new_req)
            if 'data' in batch_response_data:
                for interaction_raw in batch_response_data['data']:
                    Interactions.get_nested_interactions(
                        sm_account_id, interaction_raw, interactions_new,
                        batch_requests_new, parent_id)
            if 'paging' in batch_response_data and 'next' in batch_response_data[
                    'paging']:
                # Queue the next page as a relative batch request.
                next_url = urlparse(batch_response_data['paging']['next'])
                relative_url = (next_url.path + '?' + next_url.query +
                                '&include_headers=false')
                req = {
                    'method': 'GET',
                    'relative_url': relative_url,
                    'parent_id': parent_id
                }
                batch_requests_new.append(req)
        else:
            logger.info(
                'Exception occurred while collecting posts for {} skipping this..'
                .format(sm_account_id))

    # BUGFIX: tolerate the documented default ``processed_interactions=None``
    # (previously ``None + len(...)`` raised TypeError on the first recursion).
    process_batch(sm_account_id, graph, interactions_new, batch_requests_new,
                  p_session,
                  (processed_interactions or 0) + len(interactions), cutoff)
def query(self):
    """Return the URI's query string parsed into a dict of value lists."""
    raw_query = self.parsed_uri.query
    return urlparse.parse_qs(raw_query)
def _do_COMMON(self, data={}): handlers = [ { "url": r"/terminate", "action": self._do_terminate, "require_auth": False, }, { "url": r"/rest/usermanagement/1/authentication$", "action": self._auth_user, "require_auth": True, "method": "POST", }, { "url": r"/rest/usermanagement/1/session$", "action": self._get_session, "require_auth": True, "method": "POST", }, { "url": r"/rest/usermanagement/1/session/[A-Za-z0-9]{24}$", "action": self._validate_session, "require_auth": True, "method": "POST", }, { "url": r"/rest/usermanagement/1/session/[A-Za-z0-9]{24}$", "action": self._delete_session, "require_auth": True, "method": "DELETE", }, { "url": r"/rest/usermanagement/1/user/group/direct$", "action": self._get_groups, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/user/group/nested$", "action": self._get_groups, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/group/membership$", "action": self._get_memberships, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/group/user/nested$", "action": self._get_group_users, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/user$", "action": self._get_user, "require_auth": True, "method": "GET", }, { "url": r"/rest/usermanagement/1/user$", "action": self._add_user, "require_auth": True, "method": "POST", }, { "url": r"/rest/usermanagement/1/user/password$", "action": self._change_password, "require_auth": True, "method": "PUT", }, # Default handler for unmatched requests { "url": r".*", "action": self._default_handler, "require_auth": True, }, ] p = urlparse.urlparse(self.path) self.json_data = data self.get_params = urlparse.parse_qs(p.query) for handler in handlers: method = handler.get('method') if (re.match(handler['url'], p.path) and (not method or method == self.command)): # Authenticate application if required require_auth = handler.get('require_auth') if require_auth and not check_app_auth(self.headers): 
self._do_app_failed_auth() return # Run the handler's action handler['action']() return # An unhandled path was encountered. self.send_response(500) self.send_header("Content-type", "text/plain") self.end_headers() self.wfile.write('Oops, should not be here for {}'.format( self.path).encode('ascii'))