# Imports assumed by this excerpt; the surrounding module may already provide
# them, along with get_blaster_map(), which is defined or imported elsewhere.
import json
import urllib2
from urlparse import urljoin

from flask import abort, request, url_for
from werkzeug.exceptions import BadRequest

from opendiamond.scope import ScopeCookie


def _blaster_url(cookie):
    '''Cookie is a string, possibly representing a megacookie.  For now,
    raise an exception if there are multiple cookies pointing to different
    blasters.'''
    blaster_map = get_blaster_map(
        [ScopeCookie.parse(c) for c in ScopeCookie.split(cookie)])
    if not blaster_map:
        raise BadRequest('No JSON blaster specified in scope cookies')
    if len(blaster_map) > 1:
        raise BadRequest('Multiple JSON blasters not supported')
    # The sole remaining key is the blaster URL
    return blaster_map.keys()[0]
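
# Illustrative usage (not from the original source): given the raw contents of
# an uploaded megacookie file, _blaster_url() returns the single JSON Blaster
# endpoint named by its scope cookies, e.g.:
#
#   blaster = _blaster_url(megacookie_data)   # e.g. 'http://blaster.example.org/'
#
# It raises BadRequest if the cookies name zero blasters or more than one.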

def search():
    if request.method == 'POST':
        # Parse form
        try:
            cookie = request.files['cookie'].read()
            orientation = request.form['orientation']
        except KeyError:
            abort(400, 'Bad form submission')

        # Split megacookie into a list of cookies per JSON Blaster
        cookies = get_blaster_map(
            [ScopeCookie.parse(c) for c in ScopeCookie.split(cookie)])
        # Make sure at least one cookie specifies a Blaster
        if not cookies:
            abort(400, 'No cookies or no JSON Blaster specified')

        # Create searches
        def static_url(path):
            # Absolute URL for a bundled filter binary, so the Blaster can
            # fetch it
            return urljoin(request.url_root,
                           url_for('static', filename=path))

        searches = []
        for blaster, cookie_list in cookies.iteritems():
            # Search configuration posted to this Blaster: the scope cookies
            # plus an RGB decode filter and an orientation filter
            config = {
                'cookies': [c.encode() for c in cookie_list],
                'filters': [
                    {
                        'name': 'RGB',
                        'code': {
                            'uri': static_url('filters/fil_decode'),
                        },
                        'min_score': 1,
                    },
                    {
                        'name': 'Orientation',
                        'code': {
                            'uri': static_url('filters/fil_orientation'),
                        },
                        'arguments': [orientation],
                        'min_score': 1,
                    },
                ],
            }
            req = urllib2.Request(blaster, json.dumps(config), {
                'Content-Type': 'application/json',
                'User-Agent': 'webappfind/0.1',
            })
            try:
                response = urllib2.urlopen(req)
            except urllib2.HTTPError as e:
                abort(400, e.read() or e.reason)
            except urllib2.URLError as e:
                abort(400, e.reason)
            searches.append(json.loads(response.read()))