def validate_fingerprints(fp_secret_key, fp_salt, client_ip_fingerprint,
                          browser_fingerprint, client_ip, user_agent,
                          accept_language):
    """Validate client-supplied fingerprints against recomputed signatures.

    Recomputes the client-IP fingerprint (and, once re-enabled, the browser
    fingerprint) with a Signer built from ``fp_secret_key``/``fp_salt`` and
    compares them to the values the client sent.

    :param fp_secret_key: secret key for the fingerprint Signer
    :param fp_salt: salt for the fingerprint Signer
    :param client_ip_fingerprint: signature of the client IP sent by the client
    :param browser_fingerprint: signature of the browser data sent by the client
    :param client_ip: observed client IP address
    :param user_agent: observed User-Agent header value
    :param accept_language: observed Accept-Language header value
    :return: True if the checked fingerprints match, False otherwise
    """
    is_valid = True
    signer = Signer(fp_secret_key, fp_salt)
    logging.debug('client_ip_fingerprint: %s', client_ip_fingerprint)
    calculated_client_ip_fingerprint = signer.get_signature(client_ip)
    logging.debug('calculated_client_ip_fingerprint: %s',
                  calculated_client_ip_fingerprint)
    if calculated_client_ip_fingerprint != client_ip_fingerprint:
        # logging.warn is a deprecated alias of logging.warning.
        logging.warning('Client IP does not match fingerprint in signature')
        is_valid = False
    # TODO: Remove the early return below once atmobeta sends the right
    # fingerprint signature.  The browser fingerprint check that follows is
    # intentionally skipped for now.
    return is_valid
    browser_fingerprint_input = ''.join([user_agent, accept_language])
    logging.debug('browser_fingerprint_input: %s', browser_fingerprint_input)
    logging.debug('browser_fingerprint: %s', browser_fingerprint)
    calculated_browser_fingerprint = signer.get_signature(
        browser_fingerprint_input)
    logging.debug('calculated_browser_fingerprint: %s',
                  calculated_browser_fingerprint)
    if calculated_browser_fingerprint != browser_fingerprint:
        logging.warning(
            'Browser fingerprint does not match calculated fingerprint')
        is_valid = False
    return is_valid
def validate_fingerprints(fp_secret_key, fp_salt, client_ip_fingerprint,
                          browser_fingerprint, client_ip, user_agent,
                          accept_language):
    """Validate client-supplied fingerprints against recomputed signatures.

    Recomputes the client-IP fingerprint (and, once re-enabled, the browser
    fingerprint) with a Signer built from ``fp_secret_key``/``fp_salt`` and
    compares them to the values the client sent.

    :param fp_secret_key: secret key for the fingerprint Signer
    :param fp_salt: salt for the fingerprint Signer
    :param client_ip_fingerprint: signature of the client IP sent by the client
    :param browser_fingerprint: signature of the browser data sent by the client
    :param client_ip: observed client IP address
    :param user_agent: observed User-Agent header value
    :param accept_language: observed Accept-Language header value
    :return: True if the checked fingerprints match, False otherwise
    """
    is_valid = True
    signer = Signer(fp_secret_key, fp_salt)
    logging.debug('client_ip_fingerprint: %s', client_ip_fingerprint)
    calculated_client_ip_fingerprint = signer.get_signature(client_ip)
    logging.debug('calculated_client_ip_fingerprint: %s',
                  calculated_client_ip_fingerprint)
    if calculated_client_ip_fingerprint != client_ip_fingerprint:
        # logging.warn is a deprecated alias of logging.warning.
        logging.warning('Client IP does not match fingerprint in signature')
        is_valid = False
    # TODO: Remove the early return below once atmobeta sends the right
    # fingerprint signature.  The browser fingerprint check that follows is
    # intentionally skipped for now.
    return is_valid
    browser_fingerprint_input = ''.join([user_agent, accept_language])
    logging.debug('browser_fingerprint_input: %s', browser_fingerprint_input)
    logging.debug('browser_fingerprint: %s', browser_fingerprint)
    calculated_browser_fingerprint = signer.get_signature(
        browser_fingerprint_input)
    logging.debug('calculated_browser_fingerprint: %s',
                  calculated_browser_fingerprint)
    if calculated_browser_fingerprint != browser_fingerprint:
        logging.warning(
            'Browser fingerprint does not match calculated fingerprint')
        is_valid = False
    return is_valid
def authorize():
    """Authorization page.

    GET: returns page where the user can authorize an app to access the
    filesystem via the webdav server
    POST: set a cookie
    """
    origin = request.args.get('origin')
    if request.method == 'POST':
        response = make_response()
        debug(request.form.items())
        # Unless the user explicitly continued with the current key, rotate
        # the application secret key; this invalidates previously issued
        # cookie signatures.
        if request.form.get('continue') != 'true':
            debug('old key was: ' + app.secret_key)
            generate_key()
            debug('new key is: ' + app.secret_key)
        s = Signer(app.secret_key)
        # NOTE(review): '==' is not a constant-time comparison; consider
        # hmac.compare_digest for comparing signatures.
        if s.get_signature(origin) == request.args.get('sig'):
            # Cookie name is the base64-encoded origin; value is the
            # signature of the cookie info for that origin.
            key = base64_encode(str(origin))
            back = request.args.get('back_url')
            info = generate_cookie_info(origin=origin)
            debug('Correct origin, setting cookie with info: ' + info)
            response.set_cookie(key, value=s.get_signature(info), max_age=None,
                                expires=None, path='/', domain=None,
                                secure=True, httponly=True)
        else:
            return 'Something went wrong...'
        response.status = '301'  # moved permanently
        response.headers['Location'] = '/' if not back else back
    else:
        debug(request.args)
        # GET: render the page listing origins that already have a valid
        # cookie, so the user can authorize or revoke access.
        response = make_response(
            render_template('authorization_page.html',
                            cookie_list=[
                                base64_decode(cookey)
                                for cookey in request.cookies.keys()
                                if verify_cookie(cookey)
                            ],
                            origin=request.args.get('origin'),
                            back_url=request.args.get('back_url')))
    return response
def build_url(self, local_path, **kwargs):
    """Build a signed image URL for *local_path*.

    Long option names (``width``, ``height``, ...) are shortened to their
    first letter for the query string.  Remote URLs are moved into the ``u``
    query parameter and the path becomes ``remote``; local paths get a
    cache-busting ``v`` (mtime) parameter.  The ``path?query`` string is
    signed with the application secret and appended as ``s``.

    :raises ValueError: if a remote URL uses a non-http(s) scheme.
    """
    local_path = local_path.strip('/')
    # Shorten long option names to their single-letter query keys.
    for key in 'background mode width height quality format padding'.split():
        if key in kwargs:
            kwargs[key[0]] = kwargs.pop(key)

    # Remote URLs are encoded into the query.
    parsed = urlparse(local_path)
    if parsed.scheme or parsed.netloc:
        # Only plain http(s) remotes may be proxied; anything else
        # (file:, ftp:, etc.) would let a caller fetch arbitrary
        # resources through the image handler.
        if parsed.scheme not in ('http', 'https'):
            raise ValueError('scheme %r is not allowed' % parsed.scheme)
        kwargs['u'] = local_path
        local_path = 'remote'
    # Local ones are not.
    else:
        abs_path = self.find_img(local_path)
        if abs_path:
            # Version the URL by mtime so cached URLs bust on file change.
            kwargs['v'] = encode_int(int(os.path.getmtime(abs_path)))

    # Sign the query; keys starting with '_' are private and excluded.
    public_kwargs = ((k, v) for k, v in kwargs.iteritems()
                     if not k.startswith('_'))
    query = urlencode(sorted(public_kwargs), True)
    signer = Signer(current_app.secret_key)
    sig = signer.get_signature('%s?%s' % (local_path, query))
    return '%s/%s?%s&s=%s' % (
        current_app.config['IMAGES_URL'],
        local_path,
        query,
        sig,
    )
def verify_cookie(cookey):
    """Check the cookie stored under *cookey* (a base64-encoded origin).

    The cookie is valid when its value equals the signature of the cookie
    info regenerated for that origin (see generate_cookie_info).
    """
    debug("verify_cookie for origin: " + base64_decode(cookey))
    stored_value = request.cookies.get(cookey)
    if not stored_value:
        return False
    debug("cookie exists for this origin")
    signer = Signer(app.secret_key)
    expected = signer.get_signature(
        generate_cookie_info(base64_decode(cookey)))
    debug("verify_cookie: " + stored_value + ", " + expected)
    if expected == stored_value:
        debug('correct cookie')
        return True
    debug('incorrect cookie')
    return False
def sign_user(user_data, fields=None):
    """Return a copy of *user_data* with an ASCII ``signature`` entry added.

    The signature covers the bytes produced by
    ``_get_signature_source_bytes`` for the selected fields.
    """
    signer = Signer(current_app.config['SECRET_KEY'], salt='newdle-users')
    source = _get_signature_source_bytes(user_data, fields)
    signature = signer.get_signature(source).decode('ascii')
    signed = dict(user_data)
    signed['signature'] = signature
    return signed
def before_request():
    """
    * put in g the prepared response with status and headers that can be
      changed by some methods later
    * allow cross origin for webdav uri that are authorized and filter
      unauthorized requests!
    * prepare response to OPTIONS request on webdav
    """
    if request.path.startswith(URI_BEGINNING_PATH['webdav']):
        response = None
        headers = {}
        # CORS preamble shared by every webdav response.
        headers['Access-Control-Max-Age'] = '3600'
        headers['Access-Control-Allow-Credentials'] = 'true'
        headers['Access-Control-Allow-Headers'] = \
            'Origin, Accept, Accept-Encoding, Content-Length, ' + \
            'Content-Type, Authorization, Depth, If-Modified-Since, ' + \
            'If-None-Match'
        headers['Access-Control-Expose-Headers'] = \
            'Content-Type, Last-Modified, WWW-Authenticate'
        origin = request.headers.get('Origin')
        # Echo the caller's Origin (credentialed CORS cannot use '*').
        headers['Access-Control-Allow-Origin'] = origin
        specific_header = request.headers.get('Access-Control-Request-Headers')
        if is_authorized():
            status_code = 200
        elif request.method == 'OPTIONS' and specific_header:
            # tells the world we do CORS when authorized
            debug('OPTIONS request special header: ' + specific_header)
            headers['Access-Control-Request-Headers'] = specific_header
            headers['Access-Control-Allow-Methods'] = ', '.join(
                ALLOWED_METHODS)
            response = make_response('', 200, headers)
            return response
        else:
            # Unauthorized: advertise the authorization URL, with a
            # signature binding the login link to the requesting Origin.
            # NOTE(review): origin may be None if the Origin header is
            # absent; get_signature(None) would then fail — confirm
            # upstream guarantees the header's presence.
            s = Signer(app.secret_key)
            headers['WWW-Authenticate'] = 'Nayookie login_url=' + \
                urlparse.urljoin(request.url_root,
                                 URI_BEGINNING_PATH['authorization']) + \
                '?sig=' + s.get_signature(origin) + '{&back_url,origin}'
            response = make_response('', 401, headers)
            # do not handle the request if not authorized
            return response
        # Authorized: stash status and headers on g for later handlers.
        g.status = status_code
        debug('headers: ' + str(headers))
        g.headers = headers
def signed_url_for(user, blueprint, url_params=None, *args, **kwargs):
    """Get a URL from a blueprint, signed with the user's signing secret.

    The signature is computed over the relative URL plus its sorted query
    string and attached back as the ``token`` query parameter.
    """
    from indico.web.flask.util import url_for

    external = kwargs.pop('_external', False)
    # Sign the *relative* URL so the signature is host-independent.
    relative_url = url_for(blueprint, *args, **(url_params or {}))
    sorted_qs = url_encode(sorted(kwargs.items()))
    if sorted_qs:
        signed_payload = '{}?{}'.format(relative_url, sorted_qs)
    else:
        signed_payload = relative_url
    signer = Signer(user.signing_secret, salt='url-signing')
    token = signer.get_signature(signed_payload)
    final_qs = url_encode(dict(kwargs, token=token))
    # The final URL (possibly absolute) carries the 'token' parameter.
    final_base = url_for(blueprint, *args, _external=external,
                         **(url_params or {}))
    if final_qs:
        return '{}?{}'.format(final_base, final_qs)
    return relative_url
def build_url(self, local_path, **kwargs):
    """Build a signed image URL for *local_path* (remote URLs allowed)."""
    # Make the path relative.
    local_path = local_path.strip('/')

    # We complain when we see non-normalized paths, as it is a good
    # indicator that unsanitized data may be getting through.
    # Mutating the scheme syntax to match is a little gross, but it works
    # for today.
    norm_path = os.path.normpath(local_path)
    if local_path.replace(
            '://', ':/') != norm_path or norm_path.startswith('../'):
        raise ValueError('path is not normalized')

    # Shorten long option names to their single-letter query keys.
    for key in 'background mode width height quality format padding'.split():
        if key in kwargs:
            kwargs[key[0]] = kwargs.pop(key)

    # Remote URLs are encoded into the query.
    parsed = urlparse(local_path)
    if parsed.scheme or parsed.netloc:
        if parsed.scheme not in ALLOWED_SCHEMES:
            raise ValueError('scheme %r is not allowed' % parsed.scheme)
        kwargs['u'] = local_path
        local_path = 'remote'
    # Local ones are not.
    else:
        abs_path = self.find_img(local_path)
        if abs_path:
            # Version the URL by mtime so caches bust when the file changes.
            kwargs['v'] = encode_int(int(os.path.getmtime(abs_path)))

    # Sign the query; '_'-prefixed kwargs are private and excluded.
    # NOTE: dict.iteritems means this is Python 2 code.
    public_kwargs = ((k, v) for k, v in kwargs.iteritems()
                     if not k.startswith('_'))
    query = urlencode(sorted(public_kwargs), True)
    signer = Signer(current_app.secret_key)
    sig = signer.get_signature('%s?%s' % (local_path, query))
    return '%s/%s?%s&s=%s' % (
        current_app.config['IMAGES_URL'],
        urlquote(local_path),
        query,
        sig,
    )
def build_url(self, local_path, **kwargs):
    """Build a signed image URL for *local_path* (remote URLs allowed)."""
    # Make the path relative.
    local_path = local_path.strip('/')

    # We complain when we see non-normalized paths, as it is a good
    # indicator that unsanitized data may be getting through.
    # Mutating the scheme syntax to match is a little gross, but it works
    # for today.
    norm_path = os.path.normpath(local_path)
    if local_path.replace('://', ':/') != norm_path or norm_path.startswith('../'):
        raise ValueError('path is not normalized')

    # Shorten long option names to their single-letter query keys.
    for key in 'background mode width height quality format padding'.split():
        if key in kwargs:
            kwargs[key[0]] = kwargs.pop(key)

    # Remote URLs are encoded into the query.
    parsed = urlparse(local_path)
    if parsed.scheme or parsed.netloc:
        if parsed.scheme not in ALLOWED_SCHEMES:
            raise ValueError('scheme %r is not allowed' % parsed.scheme)
        kwargs['u'] = local_path
        local_path = 'remote'
    # Local ones are not.
    else:
        abs_path = self.find_img(local_path)
        if abs_path:
            # Version the URL by mtime so caches bust when the file changes.
            kwargs['v'] = encode_int(int(os.path.getmtime(abs_path)))

    # Sign the query; '_'-prefixed kwargs are private and excluded.
    # NOTE: dict.iteritems means this is Python 2 code.
    public_kwargs = ((k, v) for k, v in kwargs.iteritems()
                     if not k.startswith('_'))
    query = urlencode(sorted(public_kwargs), True)
    signer = Signer(current_app.secret_key)
    sig = signer.get_signature('%s?%s' % (local_path, query))
    return '%s/%s?%s&s=%s' % (
        current_app.config['IMAGES_URL'],
        urlquote(local_path),
        query,
        sig,
    )
def generate_signature(signing_secret_key, signing_salt, fp_secret_key,
                       fp_salt, client_ip, vm_ip, user_agent,
                       accept_language):
    """Generate a signed web-desktop token.

    The token is a URLSafeTimedSerializer dump of three items: the VM IP,
    a fingerprint (signature) of the client IP, and a fingerprint of the
    concatenated User-Agent and Accept-Language header values.  Both
    fingerprints are produced by a Signer keyed with
    ``fp_secret_key``/``fp_salt``; the outer token is keyed with
    ``signing_secret_key``/``signing_salt``.

    :return: the serialized, signed token string
    """
    serializer = URLSafeTimedSerializer(signing_secret_key, salt=signing_salt)
    fingerprint_signer = Signer(fp_secret_key, salt=fp_salt)

    ip_fp = fingerprint_signer.get_signature(client_ip)
    logging.debug('client_ip: %s', client_ip)
    logging.debug('client_ip_fingerprint: %s', ip_fp)

    fp_source = ''.join([user_agent, accept_language])
    logging.debug('browser_fingerprint_input: %s', fp_source)
    browser_fp = fingerprint_signer.get_signature(fp_source)
    logging.debug('browser_fingerprint: %s', browser_fp)

    return serializer.dumps([vm_ip, ip_fp, browser_fp])
def handle_request(self, path):
    """Serve the image at *path*, resizing per the signed query string."""
    # Verify the signature.
    query = dict(request.args.iteritems())
    old_sig = str(query.pop('s', None))
    if not old_sig:
        abort(404)
    signer = Signer(current_app.secret_key)
    # The signature covers 'path?sorted-query' exactly as build_url signs it.
    new_sig = signer.get_signature(
        '%s?%s' % (path, urlencode(sorted(query.iteritems()), True)))
    if not constant_time_compare(old_sig, new_sig):
        abort(404)

    # Expand kwargs.
    query = dict((SHORT_TO_LONG.get(k, k), v) for k, v in query.iteritems())

    remote_url = query.get('url')
    if remote_url:
        # This is redundant for newly built URLs, but not for those which
        # have already been generated and cached.
        parsed = urlparse(remote_url)
        if parsed.scheme not in ALLOWED_SCHEMES:
            abort(404)

        # Download the remote file.
        makedirs(current_app.config['IMAGES_CACHE'])
        # Cache file name is the md5 of the URL plus its original extension.
        path = os.path.join(
            current_app.config['IMAGES_CACHE'],
            hashlib.md5(remote_url).hexdigest() +
            os.path.splitext(parsed.path)[1]
        )
        if not os.path.exists(path):
            log.info('downloading %s' % remote_url)
            # Write to a pid-suffixed temp file, then mv into place so a
            # concurrent request never sees a partial download.
            tmp_path = path + '.tmp-' + str(os.getpid())
            fh = open(tmp_path, 'wb')
            fh.write(urlopen(remote_url).read())
            fh.close()
            call(['mv', tmp_path, path])
    else:
        path = self.find_img(path)
        if not path:
            abort(404)  # Not found.

    raw_mtime = os.path.getmtime(path)
    mtime = datetime.datetime.utcfromtimestamp(raw_mtime)
    # log.debug('last_modified: %r' % mtime)
    # log.debug('if_modified_since: %r' % request.if_modified_since)
    if request.if_modified_since and request.if_modified_since >= mtime:
        return '', 304

    # Parse the (already signature-checked) resize options.
    mode = query.get('mode')
    transform = query.get('transform')
    transform = re.split(r'[;,_/ ]', transform) if transform else None
    background = query.get('background')
    width = query.get('width')
    width = int(width) if width else None
    height = query.get('height')
    height = int(height) if height else None
    quality = query.get('quality')
    quality = int(quality) if quality else 75
    # Format falls back to the file extension, then to jpeg.
    format = (query.get('format', '') or os.path.splitext(path)[1][1:]
              or 'jpeg').lower()
    format = {'jpg': 'jpeg'}.get(format, format)
    has_version = 'version' in query
    use_cache = query.get('cache', True)

    if use_cache:
        # Cache key covers every parameter that affects the output image.
        cache_key_parts = [path, mode, width, height, quality, format,
                           background]
        if transform:
            cache_key_parts.append(transform)
        cache_key = hashlib.md5(repr(tuple(cache_key_parts))).hexdigest()
        cache_dir = os.path.join(current_app.config['IMAGES_CACHE'],
                                 cache_key[:2])
        cache_path = os.path.join(cache_dir, cache_key + '.' + format)
        cache_mtime = (os.path.getmtime(cache_path)
                       if os.path.exists(cache_path) else None)

    mimetype = 'image/%s' % format
    # Versioned URLs are immutable, so cache for a year.
    cache_timeout = (31536000 if has_version
                     else current_app.config['IMAGES_MAX_AGE'])

    if not use_cache or not cache_mtime or cache_mtime < raw_mtime:
        log.info('resizing %r for %s' % (path, query))
        img = image.open(path)
        img = self.resize(img,
                          width=width,
                          height=height,
                          mode=mode,
                          background=background,
                          transform=transform,
                          )
        if not use_cache:
            # Serve directly from memory without touching the cache dir.
            fh = StringIO()
            img.save(fh, format, quality=quality)
            return fh.getvalue(), 200, [
                ('Content-Type', mimetype),
                ('Cache-Control', cache_timeout),
            ]
        makedirs(cache_dir)
        cache_file = open(cache_path, 'wb')
        img.save(cache_file, format, quality=quality)
        cache_file.close()

    return send_file(cache_path, mimetype=mimetype,
                     cache_timeout=cache_timeout)
def build_url(self, local_path, **kwargs):
    """Build a signed image URL for *local_path*, optionally external."""
    # Make the path relative.
    local_path = local_path.strip('/')

    # We complain when we see non-normalized paths, as it is a good
    # indicator that unsanitized data may be getting through.
    # Mutating the scheme syntax to match is a little gross, but it works
    # for today.
    norm_path = os.path.normpath(local_path)
    if local_path.replace('://', ':/') != norm_path or norm_path.startswith('../'):
        raise ValueError('path is not normalized')

    external = kwargs.pop('external', None) or kwargs.pop('_external', None)
    scheme = kwargs.pop('scheme', None)
    if scheme and not external:
        raise ValueError('cannot specify scheme without external=True')
    # url_for-style arguments that make no sense for images are rejected.
    if kwargs.get('_anchor'):
        raise ValueError('images have no _anchor')
    if kwargs.get('_method'):
        raise ValueError('images have no _method')

    # Remote URLs are encoded into the query.
    parsed = urlparse(local_path)
    if parsed.scheme or parsed.netloc:
        if parsed.scheme not in ALLOWED_SCHEMES:
            raise ValueError('scheme %r is not allowed' % parsed.scheme)
        kwargs['url'] = local_path
        local_path = '_'  # Must be something.
    # Local ones are not.
    else:
        abs_path = self.find_img(local_path)
        if abs_path:
            # Version the URL by mtime so caches bust when the file changes.
            kwargs['version'] = encode_int(int(os.path.getmtime(abs_path)))

    # Prep the cache
    cache = kwargs.pop('cache', True)
    if not cache:
        kwargs['cache'] = ''

    # Prep the transform.
    transform = kwargs.get('transform')
    if transform:
        if isinstance(transform, basestring):
            transform = re.split(r'[,;:_ ]', transform)
        # This is a strange character, but we won't be using it and it
        # doesn't escape.
        kwargs['transform'] = '_'.join(map(str, transform))

    # Sign the query; '_'-prefixed kwargs are private and excluded.
    public_kwargs = (
        (LONG_TO_SHORT.get(k, k), v)
        for k, v in kwargs.iteritems()
        if not k.startswith('_')
    )
    query = urlencode(sorted(public_kwargs), True)
    signer = Signer(current_app.secret_key)
    sig = signer.get_signature('%s?%s' % (local_path, query))

    url = '%s/%s?%s&s=%s' % (
        current_app.config['IMAGES_URL'],
        urlquote(local_path),
        query,
        sig,
    )

    if external:
        # Prefix with scheme/host/script-root to make the URL absolute.
        url = '%s://%s%s/%s' % (
            scheme or request.scheme,
            request.host,
            request.script_root,
            url.lstrip('/')
        )

    return url
def handle_request(self, path):
    """Serve the image at *path*, resizing per the signed query string."""
    # Verify the signature.
    query = dict(iteritems(request.args))
    old_sig = str(query.pop('s', None))
    if not old_sig:
        abort(404)
    signer = Signer(current_app.secret_key)
    # The signature covers 'path?sorted-query' exactly as build_url signs it.
    new_sig = signer.get_signature(
        '%s?%s' % (path, urlencode(sorted(iteritems(query)), True)))
    if not constant_time_compare(str(old_sig), str(new_sig)):
        log.warning("Signature mismatch: url's {} != expected {}".format(
            old_sig, new_sig))
        abort(404)

    # Expand kwargs.
    query = dict((SHORT_TO_LONG.get(k, k), v) for k, v in iteritems(query))

    remote_url = query.get('url')
    if remote_url:
        # This is redundant for newly built URLs, but not for those which
        # have already been generated and cached.
        parsed = urlparse(remote_url)
        if parsed.scheme not in ALLOWED_SCHEMES:
            abort(404)

        # Download the remote file.
        makedirs(current_app.config['IMAGES_CACHE'])
        # Cache file name is the md5 of the URL plus its original extension.
        path = os.path.join(
            current_app.config['IMAGES_CACHE'],
            hashlib.md5(encode_str(remote_url)).hexdigest() +
            os.path.splitext(parsed.path)[1])
        if not os.path.exists(path):
            log.info('downloading %s' % remote_url)
            # Write to a pid-suffixed temp file, then mv into place so a
            # concurrent request never sees a partial download.
            tmp_path = path + '.tmp-' + str(os.getpid())
            try:
                remote_file = urlopen(remote_url).read()
            except HTTPError as e:
                # abort with remote error code (403 or 404 most times)
                # log.debug('HTTP Error: %r' % e)
                abort(e.code)
            else:
                fh = open(tmp_path, 'wb')
                fh.write(remote_file)
                fh.close()
                call(['mv', tmp_path, path])
    else:
        path = self.find_img(path)
        if not path:
            abort(404)  # Not found.

    raw_mtime = os.path.getmtime(path)
    # Drop sub-second precision to match HTTP date resolution.
    mtime = datetime.datetime.utcfromtimestamp(raw_mtime).replace(
        microsecond=0)
    # log.debug('last_modified: %r' % mtime)
    # log.debug('if_modified_since: %r' % request.if_modified_since)
    if request.if_modified_since and request.if_modified_since >= mtime:
        return '', 304

    # Parse the (already signature-checked) resize options.
    mode = query.get('mode')
    transform = query.get('transform')
    transform = re.split(r'[;,_/ ]', transform) if transform else None
    background = query.get('background')
    width = query.get('width')
    width = int(width) if width else None
    height = query.get('height')
    height = int(height) if height else None
    quality = query.get('quality')
    quality = int(quality) if quality else 75
    # Format falls back to the file extension, then to jpeg.
    format = (query.get('format', '') or os.path.splitext(path)[1][1:]
              or 'jpeg').lower()
    format = {'jpg': 'jpeg'}.get(format, format)
    has_version = 'version' in query
    use_cache = query.get('cache', True)
    enlarge = query.get('enlarge', False)
    sharpen = query.get('sharpen')
    sharpen = re.split(r'[+:;,_/ ]', sharpen) if sharpen else None

    if use_cache:
        # The parts in this initial list were parameters cached in version 1.
        # In order to avoid regenerating all images when a new feature is
        # added, we append (feature_name, value) tuples to the end.
        cache_key_parts = [
            path, mode, width, height, quality, format, background
        ]
        if transform:
            cache_key_parts.append(('transform', transform))
        if sharpen:
            cache_key_parts.append(('sharpen', sharpen))
        if enlarge:
            cache_key_parts.append(('enlarge', enlarge))

        cache_key = hashlib.md5(
            repr(tuple(cache_key_parts)).encode('utf-8')).hexdigest()
        cache_dir = os.path.join(current_app.config['IMAGES_CACHE'],
                                 cache_key[:2])
        cache_path = os.path.join(cache_dir, cache_key + '.' + format)
        cache_mtime = os.path.getmtime(cache_path) if os.path.exists(
            cache_path) else None

    mimetype = 'image/%s' % format
    # Versioned URLs are immutable, so cache for a year.
    cache_timeout = 31536000 if has_version else current_app.config[
        'IMAGES_MAX_AGE']

    if not use_cache or not cache_mtime or cache_mtime < raw_mtime:
        log.info('resizing %r for %s' % (path, query))
        image = Image.open(path)
        image = self.resize(
            image,
            background=background,
            enlarge=enlarge,
            height=height,
            mode=mode,
            transform=transform,
            width=width,
        )
        image = self.post_process(
            image,
            sharpen=sharpen,
        )
        if not use_cache:
            # Serve directly from memory without touching the cache dir.
            fh = StringIO()
            image.save(fh, format, quality=quality)
            return fh.getvalue(), 200, [
                ('Content-Type', mimetype),
                ('Cache-Control', str(cache_timeout)),
            ]
        makedirs(cache_dir)
        cache_file = open(cache_path, 'wb')
        image.save(cache_file, format, quality=quality)
        cache_file.close()

    return send_file(cache_path, mimetype=mimetype,
                     cache_timeout=cache_timeout)
def build_url(self, local_path, **kwargs):
    """Build a signed image URL for *local_path*, optionally external."""
    # Make the path relative.
    local_path = local_path.strip('/')

    # We complain when we see non-normalized paths, as it is a good
    # indicator that unsanitized data may be getting through.
    # Mutating the scheme syntax to match is a little gross, but it works
    # for today.
    norm_path = os.path.normpath(local_path)
    if local_path.replace(
            '://', ':/') != norm_path or norm_path.startswith('../'):
        raise ValueError('path is not normalized')

    external = kwargs.pop('external', None) or kwargs.pop(
        '_external', None)
    scheme = kwargs.pop('scheme', None)
    if scheme and not external:
        raise ValueError('cannot specify scheme without external=True')
    # url_for-style arguments that make no sense for images are rejected.
    if kwargs.get('_anchor'):
        raise ValueError('images have no _anchor')
    if kwargs.get('_method'):
        raise ValueError('images have no _method')

    # Remote URLs are encoded into the query.
    parsed = urlparse(local_path)
    if parsed.scheme or parsed.netloc:
        if parsed.scheme not in ALLOWED_SCHEMES:
            raise ValueError('scheme %r is not allowed' % parsed.scheme)
        kwargs['url'] = local_path
        local_path = '_'  # Must be something.
    # Local ones are not.
    else:
        abs_path = self.find_img(local_path)
        if abs_path:
            # Version the URL by mtime so caches bust when the file changes.
            kwargs['version'] = encode_int(int(os.path.getmtime(abs_path)))

    # Prep the cache flag, which defaults to True.
    cache = kwargs.pop('cache', True)
    if not cache:
        kwargs['cache'] = ''

    # Prep the enlarge flag, which defaults to False.
    enlarge = kwargs.pop('enlarge', False)
    if enlarge:
        kwargs['enlarge'] = '1'

    # Prep the transform, which is a set of delimited strings.
    transform = kwargs.get('transform')
    if transform:
        if isinstance(transform, string_types):
            transform = re.split(r'[,;:_ ]', transform)
        # We replace delimiters with underscores, and percent with p, since
        # these won't need escaping.
        kwargs['transform'] = '_'.join(
            str(x).replace('%', 'p') for x in transform)

    # Sign the query.
    # Collapse to a dict first so that if we accidentally have two of the
    # same kwarg (e.g. used `hidpi_sharpen` and `usm` which both turn into
    # `usm`).
    public_kwargs = {
        LONG_TO_SHORT.get(k, k): v
        for k, v in iteritems(kwargs)
        if v is not None and not k.startswith('_')
    }
    query = urlencode(sorted(iteritems(public_kwargs)), True)
    signer = Signer(current_app.secret_key)
    sig = signer.get_signature('%s?%s' % (local_path, query))

    url = '%s/%s?%s&s=%s' % (
        current_app.config['IMAGES_URL'],
        urlquote(local_path, "/$-_.+!*'(),"),
        query,
        sig,
    )

    if external:
        # Prefix with scheme/host/script-root to make the URL absolute.
        url = '%s://%s%s/%s' % (scheme or request.scheme, request.host,
                                request.script_root, url.lstrip('/'))

    return url
def handle_request(self, path):
    """Serve the image at *path*, resizing per the query string."""
    query = dict(request.args.iteritems())
    # NOTE(review): signature verification is skipped entirely when the
    # secret key is the literal 'nokey' — confirm this bypass is intended
    # for development only.
    if current_app.secret_key != 'nokey':
        # Verify the signature.
        old_sig = str(query.pop('s', None))
        if not old_sig:
            abort(404)
        signer = Signer(current_app.secret_key)
        # The signature covers 'path?sorted-query' as build_url signs it.
        new_sig = signer.get_signature(
            '%s?%s' % (path, urlencode(sorted(query.iteritems()), True)))
        if not constant_time_compare(old_sig, new_sig):
            abort(404)

    remote_url = query.get('u')
    if remote_url:
        # This is redundant for newly built URLs, but not for those which
        # have already been generated and cached.
        parsed = urlparse(remote_url)
        if parsed.scheme not in ALLOWED_SCHEMES:
            abort(404)

        # Download the remote file.
        makedirs(current_app.config['IMAGES_CACHE'])
        # Cache file name is the md5 of the URL plus its extension.
        path = os.path.join(
            current_app.config['IMAGES_CACHE'],
            hashlib.md5(remote_url).hexdigest() +
            os.path.splitext(remote_url)[1]
        )
        if not os.path.exists(path):
            log.info('downloading %s' % remote_url)
            # Write to a pid-suffixed temp file, then mv into place so a
            # concurrent request never sees a partial download.
            tmp_path = path + '.tmp-' + str(os.getpid())
            fh = open(tmp_path, 'wb')
            fh.write(urlopen(remote_url).read())
            fh.close()
            call(['mv', tmp_path, path])
    else:
        path = self.find_img(path)
        if not path:
            abort(404)  # Not found.

    raw_mtime = os.path.getmtime(path)
    mtime = datetime.datetime.utcfromtimestamp(raw_mtime)
    # log.debug('last_modified: %r' % mtime)
    # log.debug('if_modified_since: %r' % request.if_modified_since)
    if request.if_modified_since and request.if_modified_since >= mtime:
        return '', 304

    # Parse the short-keyed resize options.
    mode = query.get('m')
    background = query.get('b')
    width = query.get('w')
    width = int(width) if width else None
    height = query.get('h')
    height = int(height) if height else None
    quality = query.get('q')
    quality = int(quality) if quality else 75
    # Format falls back to the file extension, then to jpeg.
    format = query.get('f', '').lower() or os.path.splitext(path)[1][1:] or 'jpeg'
    format = {'jpg': 'jpeg'}.get(format, format)
    has_version = 'v' in query

    # Cache key covers every parameter that affects the output image.
    cache_key = hashlib.md5(repr((
        path, mode, width, height, quality, format, background
    ))).hexdigest()
    cache_dir = os.path.join(current_app.config['IMAGES_CACHE'], cache_key[:2])
    cache_path = os.path.join(cache_dir, cache_key + '.' + format)
    cache_mtime = os.path.getmtime(cache_path) if os.path.exists(cache_path) else None

    if not cache_mtime or cache_mtime < raw_mtime:
        log.info('resizing %r for %s' % (path, query))
        img = image.open(path)
        img = self.resize(img, width=width, height=height, mode=mode,
                          background=background)
        makedirs(cache_dir)
        cache_file = open(cache_path, 'wb')
        img.save(cache_file, format, quality=quality)
        cache_file.close()

    # Versioned URLs are immutable, so cache for a year.
    return send_file(cache_path,
        mimetype='image/%s' % format,
        cache_timeout=31536000 if has_version else current_app.config['IMAGES_MAX_AGE'],
    )
def handle_request(self, path):
    """Serve the image at *path*, resizing per the signed query string."""
    # Verify the signature.
    query = dict(request.args.iteritems())
    old_sig = str(query.pop('s', None))
    if not old_sig:
        abort(404)
    signer = Signer(current_app.secret_key)
    # The signature covers 'path?sorted-query' exactly as build_url signs it.
    new_sig = signer.get_signature(
        '%s?%s' % (path, urlencode(sorted(query.iteritems()), True)))
    if not constant_time_compare(old_sig, new_sig):
        abort(404)

    remote_url = query.get('u')
    if remote_url:
        # This is redundant for newly built URLs, but not for those which
        # have already been generated and cached.
        parsed = urlparse(remote_url)
        if parsed.scheme not in ALLOWED_SCHEMES:
            abort(404)

        # Download the remote file.
        makedirs(current_app.config['IMAGES_CACHE'])
        # Cache file name is the md5 of the URL plus its extension.
        path = os.path.join(
            current_app.config['IMAGES_CACHE'],
            hashlib.md5(remote_url).hexdigest() +
            os.path.splitext(remote_url)[1])
        if not os.path.exists(path):
            log.info('downloading %s' % remote_url)
            # Write to a pid-suffixed temp file, then mv into place so a
            # concurrent request never sees a partial download.
            tmp_path = path + '.tmp-' + str(os.getpid())
            fh = open(tmp_path, 'wb')
            fh.write(urlopen(remote_url).read())
            fh.close()
            call(['mv', tmp_path, path])
    else:
        path = self.find_img(path)
        if not path:
            abort(404)  # Not found.

    raw_mtime = os.path.getmtime(path)
    mtime = datetime.datetime.utcfromtimestamp(raw_mtime)
    # log.debug('last_modified: %r' % mtime)
    # log.debug('if_modified_since: %r' % request.if_modified_since)
    if request.if_modified_since and request.if_modified_since >= mtime:
        return '', 304

    # Parse the (already signature-checked) short-keyed resize options.
    mode = query.get('m')
    background = query.get('b')
    width = query.get('w')
    width = int(width) if width else None
    height = query.get('h')
    height = int(height) if height else None
    quality = query.get('q')
    quality = int(quality) if quality else 75
    # Format falls back to the file extension, then to jpeg.
    format = query.get(
        'f', '').lower() or os.path.splitext(path)[1][1:] or 'jpeg'
    format = {'jpg': 'jpeg'}.get(format, format)
    has_version = 'v' in query

    # Cache key covers every parameter that affects the output image.
    cache_key = hashlib.md5(
        repr((path, mode, width, height, quality, format,
              background))).hexdigest()
    cache_dir = os.path.join(current_app.config['IMAGES_CACHE'],
                             cache_key[:2])
    cache_path = os.path.join(cache_dir, cache_key + '.' + format)
    cache_mtime = os.path.getmtime(cache_path) if os.path.exists(
        cache_path) else None

    if not cache_mtime or cache_mtime < raw_mtime:
        log.info('resizing %r for %s' % (path, query))
        img = image.open(path)
        img = self.resize(img, width=width, height=height, mode=mode,
                          background=background)
        makedirs(cache_dir)
        cache_file = open(cache_path, 'wb')
        img.save(cache_file, format, quality=quality)
        cache_file.close()

    # Versioned URLs are immutable, so cache for a year.
    return send_file(
        cache_path,
        mimetype='image/%s' % format,
        cache_timeout=31536000
        if has_version else current_app.config['IMAGES_MAX_AGE'],
    )
def generate_signature(signing_secret_key, signing_salt, fp_secret_key,
                       fp_salt, client_ip, vm_ip, user_agent,
                       accept_language):
    """Build and return a signed web-desktop token string.

    Two itsdangerous primitives are combined: a Signer keyed with
    ``fp_secret_key``/``fp_salt`` produces fingerprints of the client IP
    and of the User-Agent + Accept-Language concatenation, and a
    URLSafeTimedSerializer keyed with ``signing_secret_key``/``signing_salt``
    wraps ``[vm_ip, client_ip_fingerprint, browser_fingerprint]`` into the
    final token.
    """
    token_serializer = URLSafeTimedSerializer(
        signing_secret_key, salt=signing_salt)
    fp_signer = Signer(fp_secret_key, salt=fp_salt)

    client_ip_fp = fp_signer.get_signature(client_ip)
    logging.debug('client_ip: %s', client_ip)
    logging.debug('client_ip_fingerprint: %s', client_ip_fp)

    browser_source = ''.join([user_agent, accept_language])
    logging.debug('browser_fingerprint_input: %s', browser_source)
    browser_fp = fp_signer.get_signature(browser_source)
    logging.debug('browser_fingerprint: %s', browser_fp)

    token = token_serializer.dumps([vm_ip, client_ip_fp, browser_fp])
    return token
class VSDatabase(object):
    """Abstract base for short-URL storage backends.

    Holds per-domain configuration and id -> long-URL mappings.  Concrete
    backends implement the ``_config_get`` / ``_config_set`` /
    ``_config_delete`` / ``_get`` / ``_create`` / ``_delete`` hooks at the
    bottom of this class.
    """

    # Upper bound on both generated and custom short-URL ids.
    MAX_LENGTH = 50

    def __init__(self):
        # Deletion secrets are effectively unsigned until init_app()
        # installs a real Signer from the app's SECRET_KEY.
        self._s = NullSigner()
        self._config_defaults = dict()

    def init_app(self, app):
        """
        Initializes the database with the application object.

        :param app: Flask application object
        """
        secret = app.config['SECRET_KEY']
        if secret:
            self._s = Signer(secret)
        # Default config values are looked up case-insensitively.
        self._config_defaults = dict(
            (k.lower(), v) for k, v in app.config['DEFAULTS'].items()
        )

    def initialize(self):
        """
        Function which is supposed to initialize the database only once.
        This can be used for creating SQL schemes.
        """
        pass

    def generate_id(self, alphabet=None):
        """
        Generates a new unique Id, race conditions possible!

        :param alphabet: Alphabet to generate the Id from.
        """
        if alphabet is None:
            alphabet = self.config_get('alphabet')
        length = 3
        # random.sample picks without replacement, so a generated id never
        # repeats a character; on collision the length grows, capped at
        # MAX_LENGTH.
        id = ''.join(random.sample(alphabet, length))
        while self.has_id(id):
            id = ''.join(random.sample(alphabet, length))
            length = min(length + 1, self.MAX_LENGTH)
        return id

    def config_get(self, key, domain=None):
        """
        Fetches a configuration value from the underlaying database.

        :param key: Key to lookup.
        :param domain: If domain is not set the Flask request object will
                       be used to get the current domain.
        """
        key = key.lower()
        if domain is None:
            domain = urlparse(request.url).netloc
        result = self._config_get(domain, key)
        if result is None:
            # Fall back to the application-wide defaults.
            result = self._config_defaults.get(key)
        return result

    def config_set(self, key, value, domain=None):
        """
        Sets a configuration value.

        :param key: Key to set.
        :param value: Value to set.
        :param domain: If domain is not set the Flask request object will
                       be used to get the current domain.
        """
        key = key.lower()
        if domain is None:
            domain = urlparse(request.url).netloc
        self._config_set(domain, key, value)

    def config_delete(self, domain=None):
        """
        Deletes (resets) the configuration for the domain.

        :param domain: If domain is not set the Flask request object will
                       be used to get the current domain.
        """
        if domain is None:
            domain = urlparse(request.url).netloc
        self._config_delete(domain)

    def get_domain(self, domain=None):
        """
        Returns the domain or the alias (if set), for the current domain.

        This method requires a flask application-context, if called
        without :param:`domain`.

        :param domain: use this domain instead of the current domain
        """
        if domain is None:
            domain = urlparse(request.url).netloc
        alias = self.config_get('alias', domain=domain)
        if alias is not None:
            domain = alias
        return domain

    def get(self, id):
        """
        Fetches the long Url for the given Id.

        :param id: Id to lookup.
        :raises IdNotFound: if the Id does not exist for this domain.
        """
        domain = self.get_domain()
        result = self._get(domain, id)
        if result is None:
            raise IdNotFound('Id "{0}" not found'.format(id), 404)
        return want_unicode(result)

    def has_id(self, id):
        """
        Tests if this Id is already used.

        :param id: Id to lookup.
        :return: bool
        """
        try:
            result = self.get(id)
        except IdNotFound:
            return False
        return result is not None

    def create(self, url, id=None, expiry=None):
        """
        Creates a new short URL.

        :param id: Custom Id, if `None` :method:`generate_id` will be used.
        :param expiry: Time in days after which the URL will expire
                       (`None` means it will never expire)
        :return: (id, expiry, secret)
        """
        p = urlparse(url)
        if not p.scheme or not p.netloc:
            raise InvalidUrl('Url does not contain scheme and/or netloc', 400)
        if not self.config_get('custom_ids') and id is not None:
            raise InvalidId('Custom Ids are disabled', 400)

        # use default if not set
        # infite < 0 (easier to work with)
        if expiry is None:
            expiry = self.config_get('default_expiry')
        expiry = -1 if expiry is None else expiry
        max_expiry = self.config_get('max_expiry')
        if max_expiry is not None and max_expiry > 0:
            # A positive max_expiry caps any requested expiry; an
            # "infinite" request is clamped down to the maximum.
            if expiry > 0:
                expiry = min(expiry, max_expiry)
            else:
                expiry = max_expiry
        # infinite = None (return value)
        expiry = datetime.timedelta(days=expiry) if expiry > 0 else None

        domain = self.get_domain()
        if id is None:
            while True:
                id = self.generate_id()
                try:
                    self._create(domain, id, url, expiry=expiry)
                except IdAlreadyExists:
                    # race condition, do it again
                    continue
                break
        else:
            if len(id) > self.MAX_LENGTH:
                raise InvalidId('Id is too long', 400)
            if not all(c in self.config_get('alphabet') for c in id):
                raise InvalidId('Id contains invalid characters', 400)
            # if the key already exists, not our problem
            self._create(domain, id, url, expiry=expiry)
        return (
            id,
            getattr(expiry, 'days', None),
            want_unicode(self._s.get_signature(id))
        )

    def delete(self, id, secret):
        """
        Deletes a short URL.

        :param id: Id to delete.
        :param secret: Secret returned by :method:`create` for the Id.
        """
        domain = self.get_domain()
        if self._s.verify_signature(want_bytes(id), want_bytes(secret)):
            self._delete(domain, id)
            return True
        raise InvalidDeletionSecret('Invalid secret', 400)

    # -- backend hooks, implemented by concrete subclasses -------------

    def _config_get(self, domain, key):
        raise NotImplementedError

    def _config_set(self, domain, key, value):
        raise NotImplementedError

    def _config_delete(self, domain):
        raise NotImplementedError

    def _get(self, domain, id):
        raise NotImplementedError

    def _create(self, domain, id, url, expiry=None):
        raise NotImplementedError

    def _delete(self, domain, id):
        raise NotImplementedError
def handle_request(self, path):
    """Serve a (possibly remote, possibly resized) image for *path*.

    Verifies the request's signature, fetches/caches the source image,
    applies the requested transformations, and returns a Flask response.
    Any failure path — bad signature, disallowed scheme, missing file —
    is reported uniformly as 404.
    """
    # Verify the signature before trusting anything else in the query.
    query = dict(iteritems(request.args))
    # BUG FIX: previously `old_sig = str(query.pop('s', None))` turned a
    # missing signature into the truthy string 'None', so this guard
    # could never fire.  Pop first, then test for absence.
    old_sig = query.pop('s', None)
    if not old_sig:
        abort(404)
    old_sig = str(old_sig)
    signer = Signer(current_app.secret_key)
    new_sig = signer.get_signature(
        '%s?%s' % (path, urlencode(sorted(iteritems(query)), True)))
    if not constant_time_compare(str(old_sig), str(new_sig)):
        log.warning("Signature mismatch: url's {} != expected {}".format(
            old_sig, new_sig))
        abort(404)

    # Expand kwargs.
    query = dict((SHORT_TO_LONG.get(k, k), v) for k, v in iteritems(query))

    remote_url = query.get('url')
    if remote_url:
        # This is redundant for newly built URLs, but not for those which
        # have already been generated and cached.
        parsed = urlparse(remote_url)
        if parsed.scheme not in ALLOWED_SCHEMES:
            abort(404)

        # Download the remote file into the cache, keyed by URL hash.
        makedirs(current_app.config['IMAGES_CACHE'])
        path = os.path.join(
            current_app.config['IMAGES_CACHE'],
            hashlib.md5(encode_str(remote_url)).hexdigest() +
            os.path.splitext(parsed.path)[1]
        )

        if not os.path.exists(path):
            log.info('downloading %s' % remote_url)
            # Write to a pid-suffixed temp file, then rename into place so
            # concurrent workers never see a partial download.
            tmp_path = path + '.tmp-' + str(os.getpid())
            try:
                remote_file = urlopen(remote_url).read()
            except HTTPError as e:
                # abort with remote error code (403 or 404 most times)
                abort(e.code)
            else:
                with open(tmp_path, 'wb') as fh:
                    fh.write(remote_file)
                # Same directory, so the rename is atomic; no need to
                # shell out to `mv`.
                os.rename(tmp_path, path)
    else:
        path = self.find_img(path)
        if not path:
            abort(404)  # Not found.

    raw_mtime = os.path.getmtime(path)
    # HTTP dates have one-second resolution; drop microseconds so the
    # If-Modified-Since comparison is apples-to-apples.
    mtime = datetime.datetime.utcfromtimestamp(raw_mtime).replace(
        microsecond=0)
    if request.if_modified_since and request.if_modified_since >= mtime:
        return '', 304

    # Decode the (string-valued) query parameters.
    mode = query.get('mode')
    transform = query.get('transform')
    transform = re.split(r'[;,_/ ]', transform) if transform else None
    background = query.get('background')
    width = query.get('width')
    width = int(width) if width else None
    height = query.get('height')
    height = int(height) if height else None
    quality = query.get('quality')
    quality = int(quality) if quality else 75
    format = (query.get('format', '') or os.path.splitext(path)[1][1:] or
              'jpeg').lower()
    format = {'jpg': 'jpeg'}.get(format, format)
    has_version = 'version' in query
    use_cache = query.get('cache', True)
    enlarge = query.get('enlarge', False)
    sharpen = query.get('sharpen')
    sharpen = re.split(r'[+:;,_/ ]', sharpen) if sharpen else None

    if use_cache:
        # The parts in this initial list were parameters cached in
        # version 1.  In order to avoid regenerating all images when a
        # new feature is added, we append (feature_name, value) tuples
        # to the end.
        cache_key_parts = [path, mode, width, height, quality, format,
                           background]
        if transform:
            cache_key_parts.append(('transform', transform))
        if sharpen:
            cache_key_parts.append(('sharpen', sharpen))
        if enlarge:
            cache_key_parts.append(('enlarge', enlarge))
        cache_key = hashlib.md5(
            repr(tuple(cache_key_parts)).encode('utf-8')).hexdigest()
        cache_dir = os.path.join(
            current_app.config['IMAGES_CACHE'], cache_key[:2])
        cache_path = os.path.join(cache_dir, cache_key + '.' + format)
        cache_mtime = (os.path.getmtime(cache_path)
                       if os.path.exists(cache_path) else None)

    mimetype = 'image/%s' % format
    cache_timeout = (31536000 if has_version
                     else current_app.config['IMAGES_MAX_AGE'])

    if not use_cache or not cache_mtime or cache_mtime < raw_mtime:
        log.info('resizing %r for %s' % (path, query))
        image = Image.open(path)
        image = self.resize(image,
                            background=background,
                            enlarge=enlarge,
                            height=height,
                            mode=mode,
                            transform=transform,
                            width=width,
                            )
        image = self.post_process(image,
                                  sharpen=sharpen,
                                  )

        if not use_cache:
            # NOTE(review): StringIO here looks Python-2 era; on Python 3
            # PIL needs a BytesIO for binary formats — confirm which
            # io shim this module imports before touching it.
            fh = StringIO()
            image.save(fh, format, quality=quality)
            return fh.getvalue(), 200, [
                ('Content-Type', mimetype),
                ('Cache-Control', str(cache_timeout)),
            ]

        makedirs(cache_dir)
        with open(cache_path, 'wb') as cache_file:
            image.save(cache_file, format, quality=quality)

    return send_file(cache_path, mimetype=mimetype,
                     cache_timeout=cache_timeout)
def build_url(self, local_path, **kwargs):
    """Build a signed image URL for *local_path*.

    The signature covers '<local_path>?<sorted public kwargs>' and must be
    reproducible byte-for-byte by handle_request's verification, so the
    kwarg shortening, sorting, and urlencoding below cannot be reordered.

    :param local_path: site-relative image path, or a full remote URL
        (whose scheme must be in ALLOWED_SCHEMES).
    :param kwargs: image options; 'external'/'_external' and 'scheme'
        control absolute-URL output, underscore-prefixed keys are dropped.
    :return: the signed URL string (relative unless external is set).
    :raises ValueError: on non-normalized paths, disallowed schemes, or
        unsupported Flask-style kwargs (_anchor, _method).
    """
    # Make the path relative.
    local_path = local_path.strip('/')

    # We complain when we see non-normalized paths, as it is a good
    # indicator that unsanitized data may be getting through. Mutating
    # the scheme syntax to match is a little gross, but it works for
    # today.
    norm_path = os.path.normpath(local_path)
    if local_path.replace('://', ':/') != norm_path or norm_path.startswith('../'):
        raise ValueError('path is not normalized')

    external = kwargs.pop('external', None) or kwargs.pop('_external', None)
    scheme = kwargs.pop('scheme', None)
    if scheme and not external:
        raise ValueError('cannot specify scheme without external=True')
    if kwargs.get('_anchor'):
        raise ValueError('images have no _anchor')
    if kwargs.get('_method'):
        raise ValueError('images have no _method')

    # Remote URLs are encoded into the query.
    parsed = urlparse(local_path)
    if parsed.scheme or parsed.netloc:
        if parsed.scheme not in ALLOWED_SCHEMES:
            raise ValueError('scheme %r is not allowed' % parsed.scheme)
        kwargs['url'] = local_path
        local_path = '_'  # Must be something.

    # Local ones are not.
    else:
        abs_path = self.find_img(local_path)
        if abs_path:
            # Embed the file mtime so the URL changes (busting caches)
            # whenever the source image changes.
            kwargs['version'] = encode_int(int(os.path.getmtime(abs_path)))

    # Prep the cache flag, which defaults to True.
    cache = kwargs.pop('cache', True)
    if not cache:
        kwargs['cache'] = ''

    # Prep the enlarge flag, which defaults to False.
    enlarge = kwargs.pop('enlarge', False)
    if enlarge:
        kwargs['enlarge'] = '1'

    # Prep the transform, which is a set of delimited strings.
    transform = kwargs.get('transform')
    if transform:
        if isinstance(transform, string_types):
            transform = re.split(r'[,;:_ ]', transform)
        # We replace delimiters with underscores, and percent with p,
        # since these won't need escaping.
        kwargs['transform'] = '_'.join(str(x).replace('%', 'p') for x in transform)

    # Sign the query.
    # Collapse to a dict first so that if we accidentally have two of the
    # same kwarg (e.g. used `hidpi_sharpen` and `usm` which both turn
    # into `usm`).
    public_kwargs = {
        LONG_TO_SHORT.get(k, k): v
        for k, v in iteritems(kwargs)
        if v is not None and not k.startswith('_')
    }
    query = urlencode(sorted(iteritems(public_kwargs)), True)
    signer = Signer(current_app.secret_key)
    sig = signer.get_signature('%s?%s' % (local_path, query))

    url = '%s/%s?%s&s=%s' % (
        current_app.config['IMAGES_URL'],
        urlquote(local_path, "/$-_.+!*'(),"),
        query,
        sig,
    )

    if external:
        url = '%s://%s%s/%s' % (
            scheme or request.scheme,
            request.host,
            request.script_root,
            url.lstrip('/')
        )

    return url