def finance_information(self):
    """Build a FinanceInformation snapshot from the cached user data.

    Transactions are exposed lazily as (parsed_date, amount) pairs.
    """
    data = self.user_data
    history = (
        (parse_date(entry.valid_on), entry.amount)
        for entry in data.finance_history
    )
    return FinanceInformation(
        balance=data.finance_balance,
        transactions=history,
        last_update=parse_date(data.last_finance_update),
    )
def test_parse_date_overflows(self):
    """parse_date copes with pre-epoch years and rejects impossible days."""
    cases = [
        (" Sun 02 Feb 1343 08:49:37 GMT", datetime(1343, 2, 2, 8, 49, 37)),
        ("Thu, 01 Jan 1970 00:00:00 GMT", datetime(1970, 1, 1, 0, 0)),
        ("Thu, 33 Jan 1970 00:00:00 GMT", None),
    ]
    for raw, expected in cases:
        parsed = http.parse_date(raw)
        if expected is None:
            # An out-of-range day of month yields None rather than raising.
            assert parsed is None
        else:
            assert parsed == expected
def test_parse_date(self):
    """All three HTTP date formats parse to the same instant; garbage -> None."""
    expected = datetime(1994, 11, 6, 8, 49, 37)
    # RFC 1123, RFC 850 and asctime renderings of the same moment.
    for raw in ('Sun, 06 Nov 1994 08:49:37 GMT ',
                'Sunday, 06-Nov-94 08:49:37 GMT',
                ' Sun Nov 6 08:49:37 1994'):
        assert http.parse_date(raw) == expected
    assert http.parse_date('foo') is None
def test_parse_date(self):
    """parse_date accepts every HTTP date format and returns None otherwise."""
    moment = datetime(1994, 11, 6, 8, 49, 37)
    samples = [
        "Sun, 06 Nov 1994 08:49:37 GMT ",
        "Sunday, 06-Nov-94 08:49:37 GMT",
        " Sun Nov 6 08:49:37 1994",
    ]
    for sample in samples:
        assert http.parse_date(sample) == moment
    # Unparseable input is reported as None, not an exception.
    assert http.parse_date("foo") is None
def get_info(self, request, ident, base_uri):
    """Handle an IIIF info.json request for *ident*.

    Resolves the image info, honours If-Modified-Since, and renders the
    info as JSON/JSON-LD (or JSONP when a ``callback`` query arg is given).

    Fix: the body was previously wrapped in ``try/.../finally: return r``.
    A ``return`` inside ``finally`` overrides the error responses returned
    from the ``except`` clauses (and silently swallows any unexpected
    exception), so 404/500 conditions were answered with an empty 200.
    """
    r = LorisResponse()
    r.set_acao(request, self.cors_regex)
    try:
        info, last_mod = self._get_info(ident, request, base_uri)
    except ResolverException as re:
        return NotFoundResponse(re.message)
    except ImageInfoException as ie:
        return ServerSideErrorResponse(ie.message)
    except IOError as e:  # 500
        msg = '%s \n(This is likely a permissions problem)' % (str(e), )
        return ServerSideErrorResponse(msg)

    ims_hdr = request.headers.get('If-Modified-Since')
    ims = parse_date(ims_hdr)
    last_mod = parse_date(http_date(last_mod))  # see note under get_img
    if ims and ims >= last_mod:
        logger.debug('Sent 304 for %s ' % (ident, ))
        r.status_code = 304
    else:
        if last_mod:
            r.last_modified = last_mod
        # r.automatically_set_content_length
        callback = request.args.get('callback', None)
        if callback:
            r.mimetype = 'application/javascript'
            r.data = '%s(%s);' % (callback, info.to_json())
        else:
            if request.headers.get('accept') == 'application/ld+json':
                r.content_type = 'application/ld+json'
            else:
                r.content_type = 'application/json'
            l = '<http://iiif.io/api/image/2/context.json>;rel="http://www.w3.org/ns/json-ld#context";type="application/ld+json"'
            r.headers['Link'] = '%s,%s' % (r.headers['Link'], l)
            # If interpolation is not allowed, we have to remove this
            # value from info.json - but only if exists (cached ImageInfo might miss this)
            if self.max_size_above_full <= 100:
                try:
                    info.profile[1]['supports'].remove('sizeAboveFull')
                except ValueError:
                    pass
            r.data = info.to_json()
    return r
def get_info(self, request, ident, base_uri):
    """Handle an IIIF info.json request for *ident*.

    Fix: removed the ``finally: return r`` wrapper. A ``return`` in
    ``finally`` discards the NotFound/ServerSideError responses returned by
    the ``except`` clauses and suppresses unexpected exceptions, so error
    conditions were served as empty 200 responses.
    """
    r = LorisResponse()
    r.set_acao(request, self.cors_regex)
    try:
        info, last_mod = self._get_info(ident, request, base_uri)
    except ResolverException as re:
        return NotFoundResponse(re.message)
    except ImageInfoException as ie:
        return ServerSideErrorResponse(ie.message)
    except IOError as e:  # 500
        msg = '%s \n(This is likely a permissions problem)' % (str(e),)
        return ServerSideErrorResponse(msg)

    ims_hdr = request.headers.get('If-Modified-Since')
    ims = parse_date(ims_hdr)
    last_mod = parse_date(http_date(last_mod))  # see note under get_img
    if ims and ims >= last_mod:
        logger.debug('Sent 304 for %s ' % (ident,))
        r.status_code = 304
    else:
        if last_mod:
            r.last_modified = last_mod
        # r.automatically_set_content_length
        callback = request.args.get('callback', None)
        if callback:
            r.mimetype = 'application/javascript'
            r.data = '%s(%s);' % (callback, info.to_json())
        else:
            if request.headers.get('accept') == 'application/ld+json':
                r.content_type = 'application/ld+json'
            else:
                r.content_type = 'application/json'
            l = '<http://iiif.io/api/image/2/context.json>;rel="http://www.w3.org/ns/json-ld#context";type="application/ld+json"'
            r.headers['Link'] = '%s,%s' % (r.headers['Link'], l)
            # If interpolation is not allowed, we have to remove this
            # value from info.json - but only if exists (cached ImageInfo might miss this)
            if self.max_size_above_full <= 100:
                try:
                    info.profile[1]['supports'].remove('sizeAboveFull')
                except ValueError:
                    pass
            r.data = info.to_json()
    return r
def _get_retry_after(self):
    """Return the Retry-After header as a datetime, or None when absent."""
    raw = self.headers.get('retry-after')
    if raw is None:
        return
    if raw.isdigit():
        # A bare number of seconds is relative to "now".
        return datetime.utcnow() + timedelta(seconds=int(raw))
    # Otherwise the value should be an HTTP date.
    return parse_date(raw)
def _get_retry_after(self):
    """Parse the ``Retry-After`` header (seconds-from-now or HTTP date)."""
    value = self.headers.get('retry-after')
    if value is not None:
        if value.isdigit():
            # Numeric form: offset in seconds from the current time.
            return datetime.utcnow() + timedelta(seconds=int(value))
        # Date form.
        return parse_date(value)
    # Header missing: implicitly returns None.
def test_session_expiration():
    """A permanent session sets an ``expires`` cookie attribute roughly one
    session lifetime in the future; a non-permanent session sets none.

    Fix: the pattern ended with an inline ``(?i)`` flag; global flags not at
    the start of a regex are deprecated since Python 3.6 and raise
    ``re.error`` since 3.11. The flag is now passed as ``re.IGNORECASE``.
    """
    permanent = True
    app = flask.Flask(__name__)
    app.secret_key = 'testkey'

    @app.route('/')
    def index():
        flask.session['test'] = 42
        flask.session.permanent = permanent
        return ''

    @app.route('/test')
    def test():
        return text_type(flask.session.permanent)

    client = app.test_client()
    rv = client.get('/')
    assert 'set-cookie' in rv.headers
    match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'],
                      re.IGNORECASE)
    expires = parse_date(match.group())
    expected = datetime.utcnow() + app.permanent_session_lifetime
    # Compare only the date parts to tolerate clock drift during the test.
    assert expires.year == expected.year
    assert expires.month == expected.month
    assert expires.day == expected.day
    rv = client.get('/test')
    assert rv.data == b'True'

    permanent = False
    rv = app.test_client().get('/')
    assert 'set-cookie' in rv.headers
    match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'])
    assert match is None
def test_session_expiration(self):
    """A permanent session sets an ``expires`` cookie attribute about one
    session lifetime away; a non-permanent session sets none.

    Fix: the trailing inline ``(?i)`` flag is deprecated since Python 3.6
    and a ``re.error`` since 3.11; it is now passed as ``re.IGNORECASE``.
    """
    permanent = True
    app = flask.Flask(__name__)
    app.secret_key = 'testkey'

    @app.route('/')
    def index():
        flask.session['test'] = 42
        flask.session.permanent = permanent
        return ''

    @app.route('/test')
    def test():
        return text_type(flask.session.permanent)

    client = app.test_client()
    rv = client.get('/')
    self.assert_in('set-cookie', rv.headers)
    match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'],
                      re.IGNORECASE)
    expires = parse_date(match.group())
    expected = datetime.utcnow() + app.permanent_session_lifetime
    # Compare only the date parts to tolerate clock drift during the test.
    self.assert_equal(expires.year, expected.year)
    self.assert_equal(expires.month, expected.month)
    self.assert_equal(expires.day, expected.day)
    rv = client.get('/test')
    self.assert_equal(rv.data, b'True')

    permanent = False
    rv = app.test_client().get('/')
    self.assert_in('set-cookie', rv.headers)
    match = re.search(r'\bexpires=([^;]+)', rv.headers['set-cookie'])
    self.assert_true(match is None)
def test_session_expiration(self):
    """A permanent session carries an ``expires`` cookie attribute about one
    session lifetime in the future; a non-permanent one carries none."""
    permanent = True
    app = flask.Flask(__name__)
    app.secret_key = 'testkey'

    @app.route('/')
    def index():
        flask.session['test'] = 42
        flask.session.permanent = permanent
        return ''

    @app.route('/test')
    def test():
        return unicode(flask.session.permanent)

    client = app.test_client()
    resp = client.get('/')
    self.assert_('set-cookie' in resp.headers)
    cookie_header = resp.headers['set-cookie']
    match = re.search(r'\bexpires=([^;]+)', cookie_header)
    expires = parse_date(match.group())
    expected = datetime.utcnow() + app.permanent_session_lifetime
    # Only the date parts are compared, tolerating small clock drift.
    self.assert_equal(expires.year, expected.year)
    self.assert_equal(expires.month, expected.month)
    self.assert_equal(expires.day, expected.day)
    resp = client.get('/test')
    self.assert_equal(resp.data, 'True')

    permanent = False
    resp = app.test_client().get('/')
    self.assert_('set-cookie' in resp.headers)
    match = re.search(r'\bexpires=([^;]+)', resp.headers['set-cookie'])
    self.assert_(match is None)
def test_session_expiration():
    """Permanent sessions get an ``expires`` cookie attribute roughly one
    session lifetime ahead; non-permanent sessions must not set it.

    Fix: the trailing inline ``(?i)`` flag is deprecated since Python 3.6
    and raises ``re.error`` since 3.11; use ``re.IGNORECASE`` instead.
    """
    permanent = True
    app = flask.Flask(__name__)
    app.secret_key = "testkey"

    @app.route("/")
    def index():
        flask.session["test"] = 42
        flask.session.permanent = permanent
        return ""

    @app.route("/test")
    def test():
        return text_type(flask.session.permanent)

    client = app.test_client()
    rv = client.get("/")
    assert "set-cookie" in rv.headers
    match = re.search(r"\bexpires=([^;]+)", rv.headers["set-cookie"],
                      re.IGNORECASE)
    expires = parse_date(match.group())
    expected = datetime.utcnow() + app.permanent_session_lifetime
    # Compare only the date parts to tolerate clock drift during the test.
    assert expires.year == expected.year
    assert expires.month == expected.month
    assert expires.day == expected.day
    rv = client.get("/test")
    assert rv.data == b"True"

    permanent = False
    rv = app.test_client().get("/")
    assert "set-cookie" in rv.headers
    match = re.search(r"\bexpires=([^;]+)", rv.headers["set-cookie"])
    assert match is None
def timegate(request, url):
    """Memento TimeGate view: redirect to the archived snapshot of *url*
    closest to the client's requested Accept-Datetime.

    Returns 404 when no mementos exist for the URL and 400 when the
    Accept-Datetime header cannot be parsed as an HTTP date.
    """
    # impose an arbitrary length-limit on the submitted URL, so that the headers don't become illegally large
    url = url_with_qs_and_hash(url, request.META['QUERY_STRING'])[:500]
    data = memento_data_for_url(request, url)
    if not data:
        return HttpResponseNotFound('404 page not found\n')
    accept_datetime = request.META.get('HTTP_ACCEPT_DATETIME')
    if accept_datetime:
        # parse_date returns None on failure -> reject the request.
        accept_datetime = parse_date(accept_datetime)
        if not accept_datetime:
            return HttpResponseBadRequest('Invalid value for Accept-Datetime.')
    else:
        # No header: default to "now", i.e. the most recent memento.
        accept_datetime = timezone.now()
    # The comparison in closest() needs an aware datetime; treat it as UTC.
    accept_datetime = accept_datetime.replace(tzinfo=tzutc())
    target, target_datetime = closest(map(lambda m: m.values(), data['mementos']['list']), accept_datetime)
    response = redirect(target)
    response['Vary'] = 'accept-datetime'
    # Advertise original, timegate, timemaps, first/last/selected mementos
    # in the Link header, per the Memento protocol.
    response['Link'] = str(
        LinkHeader([
            Rel(data['original_uri'], rel='original'),
            Rel(data['timegate_uri'], rel='timegate'),
            Rel(data['timemap_uri']['link_format'], rel='timemap', type='application/link-format'),
            Rel(data['timemap_uri']['json_format'], rel='timemap', type='application/json'),
            Rel(data['timemap_uri']['html_format'], rel='timemap', type='text/html'),
            Rel(data['mementos']['first']['uri'], rel='first memento', datetime=datetime_to_http_date(data['mementos']['first']['datetime'])),
            Rel(data['mementos']['last']['uri'], rel='last memento', datetime=datetime_to_http_date(data['mementos']['last']['datetime'])),
            Rel(target, rel='memento', datetime=datetime_to_http_date(target_datetime)),
        ])
    )
    return response
def get_surveys():
    """List surveys, filtered by archive state and optional update time."""
    current_app.logger.info(
        f"/surveys (get_surveys) accessed by {g.request_user.email}")
    updated_since = http.parse_date(request.args.get('updated_since', None))
    # archived filter semantics:
    #   all  -> only archived surveys
    #   none -> only non-archived surveys
    #   any  -> all surveys
    archived = request.args.get('archived', 'none')
    if archived and archived not in ['all', 'none', 'any']:
        msg = flask_babel.gettext(
            u"Unsupported archived filter : %(archived)s", archived=archived)
        return jsonify({'message': msg}), 400
    query = models.Survey.query
    if archived in ('all', 'none'):
        query = query.filter_by(is_archived=(archived == 'all'))
    if updated_since:
        query = query.filter(models.Survey.updated_at >= updated_since)
    payload = [survey.__getstate__() for survey in query.all()]
    return jsonify(payload), 200
def object_hook(obj):
    """Checks to see if the `__type`-hinting field is available in the object
    being de-serialized. If present, and the class referenced has a
    `from_json` function it will return the generated object, else a standard
    dict will be returned

    Fix: an empty dict made ``next(iter(obj.items()))`` raise StopIteration,
    which the broad except turned into a silent ``None``; multi-key dicts
    were tag-decoded on an arbitrary first key. Tag decoding now only
    applies to single-key dicts (matching the sibling implementations), and
    anything else passes through unchanged.

    Args:
        obj: Object to be deserialized

    Returns:
        Deserialized object or regular python object
    """
    try:
        if '__type' in obj:
            obj_type = obj['__type']
            cls = getattr(cloud_inquisitor.schema, obj_type)
            if hasattr(cls, 'from_json'):
                return cls.from_json(obj)
        # Tagged scalars are encoded as single-key dicts with a
        # space-prefixed tag; everything else is returned as-is.
        if len(obj) == 1:
            key, value = next(iter(obj.items()))
            if key == ' t':
                return tuple(value)
            elif key == ' u':
                return uuid.UUID(value)
            elif key == ' b':
                return b64decode(value)
            elif key == ' m':
                return Markup(value)
            elif key == ' d':
                return parse_date(value)
        return obj
    except Exception:
        # Preserved behaviour: log and fall through (returns None).
        log.exception('Error during data deserialization')
def traffic_history(self):
    """Per-entry traffic summary in KiB, keyed by weekday of the entry."""
    history = []
    for entry in self.user_data.traffic_history:
        ingress = to_kib(entry.ingress)
        egress = to_kib(entry.egress)
        history.append({
            'day': parse_date(entry.timestamp).weekday(),
            'input': ingress,
            'output': egress,
            'throughput': ingress + egress,
        })
    return history
def __inject_headers(self, response):
    """Attach rate-limit headers (limit / remaining / reset / retry-after)
    to *response* for the limit the view recorded on ``flask.g``, if any.

    Returns the (possibly modified) response object.
    """
    current_limit = getattr(g, 'view_rate_limit', None)
    if self.enabled and self._headers_enabled and current_limit:
        window_stats = self.limiter.get_window_stats(*current_limit)
        # window_stats[0] is the window reset time; pad by one second.
        reset_in = 1 + window_stats[0]
        response.headers.add(self._header_mapping[HEADERS.LIMIT], str(current_limit[0].amount))
        response.headers.add(self._header_mapping[HEADERS.REMAINING], window_stats[1])
        response.headers.add(self._header_mapping[HEADERS.RESET], reset_in)
        # response may have an existing retry after
        existing_retry_after_header = response.headers.get('Retry-After')
        if existing_retry_after_header is not None:
            # might be in http-date format
            retry_after = parse_date(existing_retry_after_header)
            # parse_date failure returns None
            if retry_after is None:
                # Fall back to the delta-seconds form of Retry-After.
                retry_after = time.time() + int(existing_retry_after_header)
            if isinstance(retry_after, datetime.datetime):
                # Normalize an http-date to a unix timestamp for comparison.
                retry_after = time.mktime(retry_after.timetuple())
            # Honour whichever retry point is later.
            reset_in = max(retry_after, reset_in)
        # set the header instead of using add
        response.headers.set(
            self._header_mapping[HEADERS.RETRY_AFTER],
            self._retry_after == 'http-date' and http_date(reset_in)
            or int(reset_in - time.time()))
    return response
def test_session_expiration(self):
    """A permanent session sets an ``expires`` cookie attribute about one
    session lifetime ahead; a non-permanent session sets none."""
    permanent = True
    app = flask.Flask(__name__)
    app.secret_key = "testkey"

    @app.route("/")
    def index():
        flask.session["test"] = 42
        flask.session.permanent = permanent
        return ""

    @app.route("/test")
    def test():
        return unicode(flask.session.permanent)

    client = app.test_client()
    resp = client.get("/")
    self.assert_("set-cookie" in resp.headers)
    cookie_header = resp.headers["set-cookie"]
    match = re.search(r"\bexpires=([^;]+)", cookie_header)
    expires = parse_date(match.group())
    expected = datetime.utcnow() + app.permanent_session_lifetime
    # Only compare the date parts, tolerating small clock drift.
    self.assert_equal(expires.year, expected.year)
    self.assert_equal(expires.month, expected.month)
    self.assert_equal(expires.day, expected.day)
    resp = client.get("/test")
    self.assert_equal(resp.data, "True")

    permanent = False
    resp = app.test_client().get("/")
    self.assert_("set-cookie" in resp.headers)
    match = re.search(r"\bexpires=([^;]+)", resp.headers["set-cookie"])
    self.assert_(match is None)
def get_info(self, request, ident, base_uri):
    """Handle an IIIF info.json request for *ident*.

    Resolves the image info, applies authorization (401/302), honours
    If-Modified-Since, and renders the info as JSON / JSON-LD, or as JSONP
    when a ``callback`` query argument is present.
    """
    try:
        info, last_mod = self._get_info(ident, request, base_uri)
    except ResolverException as re:
        return NotFoundResponse(str(re))
    except ImageInfoException as ie:
        return ServerSideErrorResponse(str(ie))
    except IOError as e:
        # Most commonly the source image is unreadable by the server user.
        msg = '%s \n(This is likely a permissions problem)' % e
        return ServerSideErrorResponse(msg)
    r = LorisResponse()
    r.set_acao(request, self.cors_regex)
    ims_hdr = request.headers.get('If-Modified-Since')
    ims = parse_date(ims_hdr)
    last_mod = parse_date(http_date(last_mod))  # see note under get_img
    if self.authorizer and self.authorizer.is_protected(info):
        authed = self.authorizer.is_authorized(info, request)
        if authed['status'] == 'deny':
            r.status_code = 401
            # trash If-Mod-Since to ensure no 304
            ims = None
        elif authed['status'] == 'redirect':
            r.status_code = 302
            r.location = authed['location']
    # Otherwise we're okay
    if ims and ims >= last_mod:
        self.logger.debug('Sent 304 for %s ', ident)
        r.status_code = 304
    else:
        if last_mod:
            r.last_modified = last_mod
        callback = request.args.get('callback', None)
        if callback:
            # JSONP response wrapping the info document.
            r.mimetype = 'application/javascript'
            r.data = '%s(%s);' % (callback, info.to_iiif_json())
        else:
            if request.headers.get('accept') == 'application/ld+json':
                r.content_type = 'application/ld+json'
            else:
                r.content_type = 'application/json'
            l = '<http://iiif.io/api/image/2/context.json>;rel="http://www.w3.org/ns/json-ld#context";type="application/ld+json"'
            r.headers['Link'] = '%s,%s' % (r.headers['Link'], l)
            r.data = info.to_iiif_json()
    return r
def send_avatar(user):
    """Serve the user's profile picture, or the default avatar when the
    user has no custom picture."""
    if user.picture_source == ProfilePictureSource.standard:
        return send_default_avatar(user.full_name)
    meta = user.picture_metadata
    one_week = 86400 * 7
    return send_file(
        'avatar.png',
        BytesIO(user.picture),
        mimetype=meta['content_type'],
        inline=True,
        conditional=True,
        last_modified=parse_date(meta['lastmod']),
        cache_timeout=one_week,
    )
def check_modified(self, file_path, environ):
    """Touch *file_path* when the client's If-Modified-Since header does not
    match the file's current mtime (truncated to whole seconds).

    Fix: the bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
    narrowed to ``OSError``, which is what ``os.utime`` raises.
    """
    if environ.get('HTTP_IF_MODIFIED_SINCE'):
        # parse_date returns None for an invalid date; None compares
        # unequal below, which simply triggers the touch.
        header_date = parse_date(environ['HTTP_IF_MODIFIED_SINCE'])
        mtime = datetime.datetime.utcfromtimestamp(
            os.path.getmtime(file_path)).replace(microsecond=0)
        if header_date != mtime:
            try:
                os.utime(file_path, None)
            except OSError:
                # Best-effort: not being allowed to touch is not fatal.
                pass
def make_response(self) -> Response:
    """Build a (possibly partial, 206) Response for the wrapped data,
    honouring the Range, If-Range and If-Unmodified-Since request headers.
    """
    use_default_range = True
    status_code = 200
    # range requests are only allowed for get
    if request.method == 'GET':
        range_header = request.headers.get('Range')
        ranges = parse_range_header(range_header, self.__size)
        if not (len(ranges) == 1 and ranges[0][0] == 0 and ranges[0][1] == self.__size - 1):
            # A genuine sub-range was requested -> partial content.
            use_default_range = False
            status_code = 206
        if range_header:
            if_range = request.headers.get('If-Range')
            if if_range and if_range != self.__etag:
                # Entity changed since the client cached it: ignore Range.
                use_default_range = True
                status_code = 200
    if use_default_range:
        ranges = [(0, self.__size - 1)]
    if len(ranges) > 1:
        abort(416)  # We don't support multipart range requests yet
    if_unmod = request.headers.get('If-Unmodified-Since')
    if if_unmod:
        if_date = parse_date(if_unmod)
        # NOTE(review): this answers 304 when the resource was modified
        # after the supplied date; RFC 7232 specifies 412 Precondition
        # Failed for a failed If-Unmodified-Since - confirm intent.
        if if_date and if_date < self.__last_modified:
            status_code = 304
    # TODO If-None-Match support
    if status_code != 304:
        resp = Response(self.__generate(ranges, self.__data))
    else:
        resp = Response()
    if not use_default_range:
        # Derive a range-specific ETag so partial responses do not collide
        # with the full-entity tag.
        etag = self.make_etag(
            BytesIO((self.__etag + str(ranges)).encode('utf-8')))
    else:
        etag = self.__etag
    resp.headers['Content-Length'] = ranges[0][1] - ranges[0][0] + 1
    resp.headers['Accept-Ranges'] = 'bytes'
    resp.headers['ETag'] = etag
    resp.headers['Last-Modified'] = http_date(self.__last_modified)
    if status_code == 206:
        resp.headers['Content-Range'] = \
            'bytes {}-{}/{}'.format(ranges[0][0], ranges[0][1], self.__size)
    resp.status_code = status_code
    return resp
def get_ff_cache(profile_dir, store_body=False):
    """Recover items from a (legacy) Firefox disk cache under *profile_dir*.

    Uses an external Perl script to dump the cache, keeps only entries whose
    freshness lifetime is at least DELTA_MONTH, and returns a list of dicts
    (headers, expiry delta, optional body and body hash). Python 2 code.
    """
    cache_dir = os.path.join(profile_dir, "Cache")
    if not os.path.isdir(cache_dir):
        return []  # Firefox updated the cache dir structure since our study
    cache_map = os.path.join(cache_dir, "_CACHE_MAP_")
    cache_dump = os.path.join(BASE_TMP_DIR, append_timestamp("cache") + rand_str())
    create_dir(cache_dump)
    # Dump the binary cache into cache_dump via the recovery script.
    subprocess.call(
        [PERL_PATH, CACHE_PERL_SCRIPT, cache_map, "--recover=" + cache_dump])
    cache_items = []
    db_items = ("Etag", "Request String", "Expires", "Cache-Control")
    for fname in glob(os.path.join(cache_dump, "*_metadata")):
        item = {}
        try:
            with open(fname) as f:
                metadata = f.read()
            item = parse_metadata(metadata)
            # Make sure every expected header key exists, even if empty.
            for db_item in db_items:
                if db_item not in item:
                    item[db_item] = ""
            # If a response includes both an Expires header and a max-age
            # directive, the max-age directive overrides the Expires header
            # (http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html)
            expiry_delta_sec = 0
            if "Expires" in item:
                # parse expiry date
                expiry = parse_date(item["Expires"])
                if expiry:
                    expiry_delta = expiry - datetime.now()
                    expiry_delta_sec = expiry_delta.total_seconds()
            # NOTE(review): the membership test uses "Cache-Control:" (with
            # a colon) while the lookup uses "Cache-Control" - confirm
            # which key format parse_metadata actually produces.
            if "Cache-Control:" in item:
                # parse max-age directive
                cache_directives =\
                    parse_cache_control_header(item["Cache-Control"],
                                               cls=ResponseCacheControl)
                if "max-age" in cache_directives:
                    expiry_delta_sec = cache_directives["max-age"]
            # Skip short-lived entries.
            if expiry_delta_sec < DELTA_MONTH:
                continue
            item["Expiry-Delta"] = expiry_delta_sec
            # The body file has the same name minus the "_metadata" suffix.
            with open(fname[:-9]) as f:
                data = f.read()
            item["Body"] = data if store_body else ""  # store as BLOB
            item["Hash"] = hash_text(base64.b64encode(data))
        except IOError as exc:
            print "Error processing cache: %s: %s" % (exc, traceback.format_exc())
        cache_items.append(item)
    if os.path.isdir(cache_dump):
        shutil.rmtree(cache_dump)
    return cache_items
def get_deleted():
    """Report ids and deletion timestamps of deleted rows, per model."""
    current_app.logger.info(f"/deleted accessed {g.request_user.email}")
    deleted_since = http.parse_date(request.args.get('deleted_since', None))
    result = {
        "surveys": [],
        "responses": [],
        "case_definitions": [],
        "cases": []
    }
    tracked = [
        ('surveys', models.Survey),
        ('responses', models.SurveyResponse),
        ('case_definitions', models.CaseDefinition),
        ('cases', models.Case),
        ('activities', models.Activity),
        ('activity_definitions', models.ActivityDefinition),
    ]

    def describe(record):
        # Each versioned row carries the transaction that deleted it.
        return {
            'id': record.id,
            'deleted_at': record.transaction.issued_at.isoformat()
        }

    for name, model in tracked:
        versions = version_class(model)
        rows = app.db.session.query(versions).filter_by(
            operation_type=Operation.DELETE).all()
        if deleted_since:
            rows = [r for r in rows
                    if r.transaction.issued_at >= deleted_since]
        result[name] = [describe(r) for r in rows]
    return jsonify(result), 200
def __init__(self):
    """Register the (check, tag, untag, key) conversions used to round-trip
    non-JSON-native values through a tagged-JSON representation.

    Order matters: the first conversion whose ``check`` matches wins, so
    the disambiguation wrapper must come before the container handlers.
    """
    self.conversions = [{
        # A dict that itself uses one of our tag keys: wrap to disambiguate.
        'check': lambda value: self._is_dict_with_used_key(value),
        'tag': lambda value: self._tag_dict_used_with_key(value),
        'untag': lambda value: self._untag_dict_used_with_key(value),
        'key': ' di',
    }, {
        # Tuples (JSON only has lists): recurse, restore as tuple.
        'check': lambda value: isinstance(value, tuple),
        'tag': lambda value: [self._tag(x) for x in value],
        'untag': lambda value: tuple(value),
        'key': ' t',
    }, {
        # UUIDs as hex strings.
        'check': lambda value: isinstance(value, uuid.UUID),
        'tag': lambda value: value.hex,
        'untag': lambda value: uuid.UUID(value),
        'key': ' u',
    }, {
        # Raw bytes as base64 text.
        'check': lambda value: isinstance(value, bytes),
        'tag': lambda value: b64encode(value).decode('ascii'),
        'untag': lambda value: b64decode(value),
        'key': ' b',
    }, {
        # Objects exposing __html__ (e.g. Markup) as their rendered text.
        'check': lambda value: callable(getattr(value, '__html__', None)),
        'tag': lambda value: text_type(value.__html__()),
        'untag': lambda value: Markup(value),
        'key': ' m',
    }, {
        # Lists: recurse into elements; containers carry no tag key.
        'check': lambda value: isinstance(value, list),
        'tag': lambda value: [self._tag(x) for x in value],
    }, {
        # Datetimes as HTTP date strings.
        'check': lambda value: isinstance(value, datetime),
        'tag': lambda value: http_date(value),
        'untag': lambda value: parse_date(value),
        'key': ' d',
    }, {
        # Plain dicts: recurse into values.
        'check': lambda value: isinstance(value, dict),
        'tag': lambda value: dict((k, self._tag(v)) for k, v in iteritems(value)),
    }, {
        # Strings delegated to _tag_string (presumably escapes tag-like
        # prefixes - confirm against _tag_string's implementation).
        'check': lambda value: isinstance(value, str),
        'tag': lambda value: self._tag_string(value),
    }]
def get_ff_cache(profile_dir, store_body=False):
    """Recover items from a (legacy) Firefox disk cache under *profile_dir*.

    Dumps the cache with an external Perl script, keeps entries whose
    freshness lifetime is at least DELTA_MONTH, and returns a list of dicts
    with headers, expiry delta, optional body, and body hash. Python 2 code.
    """
    cache_dir = os.path.join(profile_dir, "Cache")
    if not os.path.isdir(cache_dir):
        return []  # Firefox updated the cache dir structure since our study
    cache_map = os.path.join(cache_dir, "_CACHE_MAP_")
    cache_dump = os.path.join(BASE_TMP_DIR, append_timestamp("cache") + rand_str())
    create_dir(cache_dump)
    # Dump the binary cache into cache_dump via the recovery script.
    subprocess.call([PERL_PATH, CACHE_PERL_SCRIPT, cache_map,
                     "--recover=" + cache_dump])
    cache_items = []
    db_items = ("Etag", "Request String", "Expires", "Cache-Control")
    for fname in glob(os.path.join(cache_dump, "*_metadata")):
        item = {}
        try:
            with open(fname) as f:
                metadata = f.read()
            item = parse_metadata(metadata)
            # Ensure each expected header key exists, even if empty.
            for db_item in db_items:
                if db_item not in item:
                    item[db_item] = ""
            # If a response includes both an Expires header and a max-age
            # directive, the max-age directive overrides the Expires header
            # (http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html)
            expiry_delta_sec = 0
            if "Expires" in item:
                # parse expiry date
                expiry = parse_date(item["Expires"])
                if expiry:
                    expiry_delta = expiry - datetime.now()
                    expiry_delta_sec = expiry_delta.total_seconds()
            # NOTE(review): membership test uses "Cache-Control:" (with a
            # colon) but the lookup uses "Cache-Control" - confirm which
            # key format parse_metadata produces.
            if "Cache-Control:" in item:
                # parse max-age directive
                cache_directives =\
                    parse_cache_control_header(item["Cache-Control"],
                                               cls=ResponseCacheControl)
                if "max-age" in cache_directives:
                    expiry_delta_sec = cache_directives["max-age"]
            # Skip short-lived entries.
            if expiry_delta_sec < DELTA_MONTH:
                continue
            item["Expiry-Delta"] = expiry_delta_sec
            # The body file shares the name minus the "_metadata" suffix.
            with open(fname[:-9]) as f:
                data = f.read()
            item["Body"] = data if store_body else ""  # store as BLOB
            item["Hash"] = hash_text(base64.b64encode(data))
        except IOError as exc:
            print "Error processing cache: %s: %s" % (exc, traceback.format_exc())
        cache_items.append(item)
    if os.path.isdir(cache_dump):
        shutil.rmtree(cache_dump)
    return cache_items
def object_hook(obj):
    """Decode single-key tag dicts (' t' tuple, ' m' Markup, ' d' date);
    anything else passes through unchanged. Python 2 code."""
    if len(obj) != 1:
        return obj
    tag, payload = obj.iteritems().next()
    if tag == ' t':
        return tuple(payload)
    if tag == ' m':
        return Markup(payload)
    if tag == ' d':
        return parse_date(payload)
    return obj
def test_session_permanent(client):
    """A permanent session must set an expires cookie attribute roughly one
    session lifetime in the future."""
    client.application.config.secret_key = "foobar"
    response = client.get("/session?permanent=1")
    assert 'set-cookie' in response.headers
    cookie = response.headers['set-cookie']
    match = re.search(r'\bexpires=([^;]+)', cookie)
    expires = parse_date(match.group())
    lifetime = client.application.config.permanent_session_lifetime
    expected = datetime.datetime.utcnow() + lifetime
    # Only the date parts are compared, tolerating small clock drift.
    assert expires.year == expected.year
    assert expires.month == expected.month
    assert expires.day == expected.day
def get_info(self, request, ident, base_uri):
    """Handle an IIIF info.json request for *ident*.

    Fix: the body was wrapped in ``try/.../finally: return r``; a return in
    ``finally`` overrides the error responses returned by the ``except``
    clauses and swallows unexpected exceptions, so 404/500 conditions were
    served as empty 200 responses.
    """
    r = LorisResponse()
    try:
        info, last_mod = self._get_info(ident, request, base_uri)
    except ResolverException as re:
        return NotFoundResponse(re.message)
    except ImageInfoException as ie:
        return ServerSideErrorResponse(ie.message)
    except IOError as e:  # 500
        msg = '%s \n(This is likely a permissions problem)' % (str(e), )
        return ServerSideErrorResponse(msg)

    ims_hdr = request.headers.get('If-Modified-Since')
    ims = parse_date(ims_hdr)
    last_mod = parse_date(http_date(last_mod))  # see note under get_img
    if ims and ims >= last_mod:
        logger.debug('Sent 304 for %s ' % (ident, ))
        r.status_code = 304
    else:
        if last_mod:
            r.last_modified = last_mod
        # r.automatically_set_content_length
        callback = request.args.get('callback', None)
        if callback:
            r.mimetype = 'application/javascript'
            r.data = '%s(%s);' % (callback, info.to_json())
        else:
            if request.headers.get('accept') == 'application/ld+json':
                r.content_type = 'application/ld+json'
            else:
                r.content_type = 'application/json'
            l = '<http://iiif.io/api/image/2/context.json>;rel="http://www.w3.org/ns/json-ld#context";type="application/ld+json"'
            r.headers['Link'] = '%s,%s' % (r.headers['Link'], l)
            r.data = info.to_json()
    return r
def __call__(self, environ, start_response):
    """Respond to a request when called in the usual WSGI way."""
    # Static file server: only safe methods are supported.
    if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
        headers = [('Allow', 'GET, HEAD')]
        return self.method_not_allowed(environ, start_response, headers)
    path_info = environ.get('PATH_INFO', '')
    full_path = self._full_path(path_info)
    # Refuse path traversal outside the document root.
    if not self._is_under_root(full_path):
        return self.not_found(environ, start_response)
    if path.isdir(full_path):
        if full_path[-1] != '/' or full_path == self.root:
            # Canonicalize directory URLs to a trailing slash via 301.
            location = util.request_uri(environ, include_query=False) + '/'
            if environ.get('QUERY_STRING'):
                location += '?' + environ.get('QUERY_STRING')
            headers = [('Location', location)]
            return self.moved_permanently(environ, start_response, headers)
        else:
            # Serve the directory's index file instead.
            full_path = self._full_path(path_info + self.index_file)
    content_type = self._guess_type(full_path)
    try:
        etag, last_modified = self._conditions(full_path, environ)
        headers = [('Date', http_date(time.time())),
                   ('Last-Modified', last_modified),
                   ('ETag', etag)]
        # Conditional GET support: If-Modified-Since, then If-None-Match.
        if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
        if if_modified and (parse_date(if_modified) >= parse_date(last_modified)):
            return self.not_modified(environ, start_response, headers)
        if_none = environ.get('HTTP_IF_NONE_MATCH')
        if if_none and (if_none == '*' or etag in if_none):
            return self.not_modified(environ, start_response, headers)
        file_like = self._file_like(full_path)
        headers.append(('Content-Type', content_type))
        start_response("200 OK", headers)
        if environ['REQUEST_METHOD'] == 'GET':
            return self._body(full_path, environ, file_like)
        else:
            # HEAD: headers only.
            # NOTE(review): returns a str body; Python 3 WSGI expects
            # bytes - confirm the targeted Python version.
            return ['']
    except (IOError, OSError) as e:
        print(e)
        return self.not_found(environ, start_response)
def test_session_permanent(client):
    """A permanent session must set an expires cookie attribute roughly
    one session lifetime away."""
    client.application.config.secret_key = "foobar"
    resp = client.get("/session?permanent=1")
    assert 'set-cookie' in resp.headers
    cookie_match = re.search(r'\bexpires=([^;]+)', resp.headers['set-cookie'])
    expires = parse_date(cookie_match.group())
    lifetime = client.application.config.permanent_session_lifetime
    expected = datetime.datetime.utcnow() + lifetime
    # Compare only the date components to tolerate clock drift.
    assert expires.year == expected.year
    assert expires.month == expected.month
    assert expires.day == expected.day
def get_info(self, request, ident, base_uri):
    """Handle an IIIF info.json request for *ident*.

    Fix: removed the ``finally: return r`` wrapper - a ``return`` inside
    ``finally`` discards the error responses returned by the ``except``
    clauses and suppresses unexpected exceptions, so failures were answered
    with an empty 200 response.
    """
    r = LorisResponse()
    try:
        info, last_mod = self._get_info(ident, request, base_uri)
    except ResolverException as re:
        return NotFoundResponse(re.message)
    except ImageInfoException as ie:
        return ServerSideErrorResponse(ie.message)
    except IOError as e:  # 500
        msg = '%s \n(This is likely a permissions problem)' % (str(e),)
        return ServerSideErrorResponse(msg)

    ims_hdr = request.headers.get('If-Modified-Since')
    ims = parse_date(ims_hdr)
    last_mod = parse_date(http_date(last_mod))  # see note under get_img
    if ims and ims >= last_mod:
        logger.debug('Sent 304 for %s ' % (ident,))
        r.status_code = 304
    else:
        if last_mod:
            r.last_modified = last_mod
        # r.automatically_set_content_length
        callback = request.args.get('callback', None)
        if callback:
            r.mimetype = 'application/javascript'
            r.data = '%s(%s);' % (callback, info.to_json())
        else:
            if request.headers.get('accept') == 'application/ld+json':
                r.content_type = 'application/ld+json'
            else:
                r.content_type = 'application/json'
            l = '<http://iiif.io/api/image/2/context.json>;rel="http://www.w3.org/ns/json-ld#context";type="application/ld+json"'
            r.headers['Link'] = '%s,%s' % (r.headers['Link'], l)
            r.data = info.to_json()
    return r
def __inject_headers(self, response):
    """Attach rate-limit headers to *response*; on storage failure either
    switch to the in-memory fallback and retry once, swallow the error, or
    re-raise, depending on configuration. Returns the response."""
    self.__check_conditional_deductions(response)
    current_limit = getattr(g, 'view_rate_limit', None)
    if self.enabled and self._headers_enabled and current_limit:
        try:
            window_stats = self.limiter.get_window_stats(*current_limit)
            # window_stats[0] is the window reset time; pad by one second.
            reset_in = 1 + window_stats[0]
            response.headers.add(self._header_mapping[HEADERS.LIMIT],
                                 str(current_limit[0].amount))
            response.headers.add(self._header_mapping[HEADERS.REMAINING],
                                 window_stats[1])
            response.headers.add(self._header_mapping[HEADERS.RESET],
                                 reset_in)
            # response may have an existing retry after
            existing_retry_after_header = response.headers.get(
                'Retry-After')
            if existing_retry_after_header is not None:
                # might be in http-date format
                retry_after = parse_date(existing_retry_after_header)
                # parse_date failure returns None
                if retry_after is None:
                    # Fall back to the delta-seconds form.
                    retry_after = time.time() + int(
                        existing_retry_after_header)
                if isinstance(retry_after, datetime.datetime):
                    # Normalize an http-date to a unix timestamp.
                    retry_after = time.mktime(retry_after.timetuple())
                # Honour whichever retry point is later.
                reset_in = max(retry_after, reset_in)
            # set the header instead of using add
            response.headers.set(
                self._header_mapping[HEADERS.RETRY_AFTER],
                self._retry_after == 'http-date' and http_date(reset_in)
                or int(reset_in - time.time()))
        except:  # noqa: E722
            # Storage failure: optionally fall back to in-memory storage
            # and retry header injection exactly once.
            if self._in_memory_fallback_enabled and not self._storage_dead:
                self.logger.warning(
                    "Rate limit storage unreachable - falling back to"
                    " in-memory storage")
                self._storage_dead = True
                response = self.__inject_headers(response)
            else:
                if self._swallow_errors:
                    self.logger.exception(
                        "Failed to update rate limit headers. "
                        "Swallowing error")
                else:
                    six.reraise(*sys.exc_info())
    return response
def get_info(self, request, ident):
    """Handle an info.json request for *ident*, with optional CORS headers.

    Fix: ``return r`` previously lived in a ``finally`` clause, which
    silently suppressed any unexpected exception raised while building the
    response body; the return now happens through normal control flow so
    only the explicitly handled exceptions become error responses.
    """
    r = LorisResponse()
    if self.enable_cors:
        # "*" whitelists everyone; otherwise echo back whitelisted origins.
        if self.cors_whitelist[0] == "*":
            r.headers['Access-Control-Allow-Origin'] = "*"
        elif request.headers.get("origin") in self.cors_whitelist:
            r.headers['Access-Control-Allow-Origin'] = request.headers.get('origin')
    try:
        info, last_mod = self._get_info(ident, request)
    except (ImageInfoException, resolver.ResolverException) as e:
        r.response = e
        r.status_code = e.http_status
        r.mimetype = 'text/plain'
    else:
        ims_hdr = request.headers.get('If-Modified-Since')
        ims = parse_date(ims_hdr)
        last_mod = parse_date(http_date(last_mod))  # see note under get_img
        if ims and ims >= last_mod:
            logger.debug('Sent 304 for %s ' % (ident,))
            r.status_code = 304
        else:
            if last_mod:
                r.last_modified = last_mod
            # Bare attribute access (no call) - a no-op kept from the
            # original; likely intended as a property toggle.
            r.automatically_set_content_length
            # r.headers['Cache-control'] = 'public'
            callback = request.args.get('callback', None)
            if callback:
                r.mimetype = 'application/javascript'
                r.data = '%s(%s);' % (callback, info.to_json())
            else:
                r.content_type = 'application/json'
                r.data = info.to_json()
    return r
def object_hook(obj):
    """Decode single-key tag dicts back into Python values (' t' tuple,
    ' u' UUID, ' m' Markup, ' d' date); other dicts pass through."""
    if len(obj) != 1:
        return obj
    tag, payload = next(iteritems(obj))
    decoders = {
        ' t': tuple,
        ' u': uuid.UUID,
        ' m': Markup,
        ' d': parse_date,
    }
    decode = decoders.get(tag)
    return decode(payload) if decode is not None else obj
def retry_after(self) -> t.Optional[datetime]:
    """The Retry-After response-header field can be used with a 503
    (Service Unavailable) response to indicate how long the service is
    expected to be unavailable to the requesting client.

    Time in seconds until expiration or date.
    """
    raw = self.headers.get("retry-after")
    if raw is None:
        return None
    if raw.isdigit():
        # Delta-seconds form: offset from the current time.
        return datetime.utcnow() + timedelta(seconds=int(raw))
    # HTTP-date form.
    return parse_date(raw)
def test_setting_headers(self):
    """
    Making a request for pixel.gif should set the Set-Cookie, P3P,
    X-Uri-Query, and X-User-Agent headers
    """
    response = self.client.get('/pixel.gif',
                               environ_base={'HTTP_USER_AGENT': 'User Agent'})
    # parse_cookie discards the expires portion, which we need to check;
    # split the cookie manually
    aguid_cookie = self.get_cookie(response, 'aguid').split('; ')
    # assumes the cookie attribute order is value, Domain, expires, Path
    # — TODO confirm this matches how the app builds Set-Cookie.
    aguid, domain, expires, path = aguid_cookie
    # aguid portion should be in the form "aguid=uuid";
    # Split on '=' and parse the second part as a uuid
    # to check if it is valid
    uuid.UUID(aguid.split('=')[1])
    # domain is "Domain=.localhost", hence the '=' in the suffix check
    self.assertTrue(domain.endswith('=.localhost'))
    # These two datetimes are not guaranteed to be exactly equal;
    # if we check the date portion (year, month, day), we can be
    # reasonably certain that the expiration is set correctly
    expiration = parse_date(expires.split('=')[1])
    expected_expiration = datetime.utcnow() + timedelta(days=365)
    self.assertEqual(expiration.date(), expected_expiration.date())
    # Ensure header exists
    self.assertEqual(response.headers['P3P'],
                     'CP="ALL DSP COR CURa IND PHY UNR"')
    self.assertEqual(response.headers['X-User-Agent'], 'User+Agent')
    self.assertTrue(aguid in response.headers['X-Uri-Query'])
    # If myguid is not provided or is invalid, we should not include
    # it in the header
    for myguid in ['', 'invalid_myguid']:
        self.client.set_cookie('localhost', 'myguid', myguid)
        response = self.client.get('/pixel.gif')
        self.assertFalse('myguid=' in response.headers['X-Uri-Query'])
    # A well-formed 32-hex-char myguid should be echoed into the header.
    myguid = '1234567890abcdef' * 2
    self.client.set_cookie('localhost', 'myguid', myguid)
    response = self.client.get('/pixel.gif')
    self.assertTrue('='.join(['myguid',myguid]) in response.headers['X-Uri-Query'])
def object_hook(obj):
    """JSON object hook (Python 2): a single-entry dict tagged 't', 'u',
    'b', 'm' or 'd' is revived and returned as the str() form of a tuple,
    UUID, base64-decoded payload, Markup or parsed date; other mappings
    pass through unchanged."""
    if len(obj) != 1:
        return obj
    tag, raw = next(obj.iteritems())
    # Lambdas defer evaluation so only the matching converter runs.
    converters = {
        't': lambda v: str(tuple(v)),
        'u': lambda v: str(uuid.UUID(v)),
        'b': lambda v: str(b64decode(v)),
        'm': lambda v: str(Markup(v)),
        'd': lambda v: str(parse_date(v)),
    }
    convert = converters.get(tag)
    return convert(raw) if convert is not None else obj
def wait_retry_after_header(retry_state):
    """Tenacity-style wait callback honouring a ``Retry-After`` header.

    Returns the seconds to wait before the next attempt: the header's
    integer value, or the seconds until its HTTP-date value; 0 when there
    is no exception, no response, or no usable header.
    """
    error = retry_state.outcome.exception()
    response = getattr(error, "response", None) if error is not None else None
    if not response:
        return 0
    header = response.headers.get("retry-after")
    if header is None:
        return 0
    if header.isdigit():
        # Delta-seconds form.
        return int(header)
    # HTTP-date form: wait until that instant (never negative).
    when = parse_date(header)
    if when is None:
        return 0
    remaining = (when - datetime.datetime.utcnow()).total_seconds()
    return max(0, remaining)
def sync_tweets(developer):
    """Finds new tweets for the given developer"""
    logger.info('Checking tweets of @%s', developer.twitter_name)
    for tweet in get_tweets(developer.twitter_name):
        tweet_id = tweet['id_str']
        existing = Message.query.filter_by(
            source='twitter', reference_id=tweet_id).first()
        if existing is not None:
            # Already imported — skip.
            continue
        logger.info('Found new tweet #%s' % tweet_id)
        # Replies are stored but flagged as hidden.
        is_reply = bool(tweet.get('in_reply_to_user_id'))
        message = Message(developer, tweet['text'], 'twitter',
                          parse_date(tweet['created_at']), tweet_id, is_reply)
        db.session.add(message)
def object_hook(obj):
    """JSON object hook reversing the tagged encoding: a one-entry dict
    keyed " t", " u", " b", " m" or " d" becomes a tuple, UUID, decoded
    bytes, Markup or parsed date; everything else passes through."""
    if len(obj) != 1:
        return obj
    tag, payload = next(iteritems(obj))
    decoders = {
        " t": tuple,
        " u": uuid.UUID,
        " b": b64decode,
        " m": Markup,
        " d": parse_date,
    }
    decoder = decoders.get(tag)
    return decoder(payload) if decoder is not None else obj
def __call__(self, retry_state: tenacity.RetryCallState) -> float:
    """Tenacity wait strategy honouring a ``Retry-After`` response header.

    Returns the number of seconds to wait before the next attempt: the
    header's integer value, or the time until its HTTP-date value.
    Returns 0 when there is no outcome, no HTTPStatusError, or no usable
    header.
    """
    if retry_state.outcome is None:
        return 0
    exc = retry_state.outcome.exception()
    if exc is None or not isinstance(exc, HTTPStatusError):
        return 0
    value = exc.response.headers.get("retry-after")
    if value is None:
        return 0
    elif value.isdigit():
        # Delta-seconds form: wait exactly that many seconds.
        return int(value)
    d = parse_date(value)
    if d is None:
        return 0
    # BUG FIX: the original used date.utcnow(), but datetime.date has no
    # utcnow() (it is defined on datetime.datetime only), so this branch
    # always raised AttributeError.  Use a "now" whose tz-awareness
    # matches the parsed value: werkzeug's parse_date returns aware
    # datetimes in 2.x and naive ones in older releases.
    from datetime import datetime, timezone
    now = datetime.now(timezone.utc) if d.tzinfo is not None else datetime.utcnow()
    return max(0, (d - now).total_seconds())
def hook(obj):
    """JSON object hook that revives tagged single-entry dicts: ' t' ->
    tuple, ' u' -> UUID, ' b' -> base64-decoded bytes, ' m' -> Markup,
    ' d' -> parsed date. Any other mapping is returned unchanged."""
    if len(obj) != 1:
        return obj
    # Exactly one item: unpack it directly.
    (tag, payload), = obj.items()
    if tag == ' t':
        return tuple(payload)
    if tag == ' u':
        return UUID(payload)
    if tag == ' b':
        return b64decode(payload)
    if tag == ' m':
        return Markup(payload)
    if tag == ' d':
        return parse_date(payload)
    return obj
def get_range_and_status_code(cls, dl_size, etag, last_modified):
    """Pick the byte range (start, end) and HTTP status code for serving a
    download of ``dl_size`` bytes, based on the current flask ``request``'s
    Range and conditional headers. Aborts with 416 on multipart ranges and
    412 on failed preconditions.
    """
    use_default_range = True
    status_code = 200
    range_header = request.headers.get("Range")
    # range requests are only allowed for get
    if request.method == "GET":
        ranges = parse_range_header(range_header, dl_size)
        # A single range covering the whole file is treated as a full
        # download (200); anything narrower becomes a 206 partial response.
        if not (len(ranges) == 1 and ranges[0][0] == 0
                and ranges[0][1] == dl_size - 1):
            use_default_range = False
            status_code = 206
        if range_header:
            if_range = request.headers.get("If-Range")
            # If-Range mismatch: ignore the range and send the whole file.
            if if_range and if_range != etag:
                use_default_range = True
                status_code = 200
    if use_default_range:
        ranges = [(0, dl_size - 1)]
    if len(ranges) > 1:
        abort(416)  # We don't support multipart range requests yet
    range_ = ranges[0]
    # NOTE(review): reading an "ETag" *request* header is unusual — the
    # standard precondition headers are If-Match/If-None-Match; confirm
    # this matches the client's behaviour.
    etag_header = request.headers.get("ETag")
    if etag_header is not None and etag_header != etag:
        abort(412)
    if_unmod = request.headers.get("If-Unmodified-Since")
    if if_unmod:
        if_date = parse_date(if_unmod)
        if if_date and not if_date.tzinfo:
            if_date = if_date.replace(
                tzinfo=timezone.utc)  # Compatible with Flask < 2.0.0
        if if_date and if_date > last_modified:
            abort(412)
    # NOTE(review): 304 when there is neither If-Unmodified-Since nor a
    # Range header — presumably relies on earlier conditional handling;
    # verify against the caller.
    elif range_header is None:
        status_code = 304
    return range_, status_code
def test_record_mementos(app, db):
    """Test resolution of record mementos.

    Builds a record with two revisions, then requests it with
    Accept-Datetime values before, at, and after each revision and checks
    the title served matches the revision valid at that instant.
    """
    # (accept_datetime, expected_title) pairs, oldest first.
    modifications = [(datetime.now() + timedelta(days=-1), 'test')]
    pid, record = create_record({'title': 'test'})
    db.session.commit()
    assert len(record.revisions) == 1
    modifications.append((record.model.updated, 'test'))
    # Ensure the second revision gets a distinct `updated` timestamp.
    sleep(1)
    record.update({'title': 'test1'})
    record.commit()
    db.session.commit()
    assert len(record.revisions) == 2
    modifications.append((record.model.updated, 'test1'))
    modifications.append((datetime.now() + timedelta(days=1), 'test1'))
    headers = [('Accept', 'application/json')]
    with app.test_client() as client:
        # Normal request
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid.pid_value),
            headers=headers)
        assert res.status_code == 200
        assert 'Memento-Datetime' not in res.headers
        for i, (accept_datetime, title) in enumerate(modifications):
            res = client.get(
                url_for('invenio_records_rest.recid_item',
                        pid_value=pid.pid_value),
                headers=headers + [
                    ('Accept-Datetime', http_date(accept_datetime))
                ],
            )
            assert res.status_code == 200, i
            # Memento responses must carry a parseable Memento-Datetime.
            assert parse_date(res.headers['Memento-Datetime']), i
            assert title == json.loads(
                res.data
            )['metadata']['title'], i
def timegate(self, uri_r):
    """Handle timegate high-level logic.

    Fetch the Memento for the requested URI at the requested date time.
    Returns a HTTP 302 response if it exists.
    If the resource handler allows batch requests, then the result may be
    cached.

    :return: The body of the HTTP response.
    """
    # Requested instant: the Accept-Datetime header if present, else now.
    # Both are normalised to UTC-aware datetimes before comparison.
    if 'Accept-Datetime' in request.headers:
        accept_datetime = parse_date(
            request.headers['Accept-Datetime']
        ).replace(tzinfo=tzutc())
    else:
        accept_datetime = datetime.utcnow().replace(tzinfo=tzutc())
    # Runs the handler's API request for the Memento
    mementos = first = last = None
    HAS_TIMEMAP = hasattr(self.handler, 'get_all_mementos')
    if HAS_TIMEMAP and self.config['USE_TIMEMAPS']:
        logging.debug('Using multiple-request mode.')
        mementos = self.get_all_mementos(uri_r)
    if mementos:
        first = mementos[0]
        last = mementos[-1]
        # If the handler returned several Mementos, take the closest
        memento = best(mementos, accept_datetime,
                       self.config['RESOURCE_TYPE'])
    else:
        logging.debug('Using single-request mode.')
        memento = self.get_memento(uri_r, accept_datetime)
    return memento_response(
        memento,
        uri_r,
        first,
        last,
        has_timemap=HAS_TIMEMAP and self.config['USE_TIMEMAPS'],
    )
def object_hook(obj):
    """JSON object hook reviving tagged single-entry dicts: " t" tuple,
    " u" UUID, " b" base64 bytes, " m" Markup, " d" parsed date, plus the
    session-specific " ch" (CheckerSessionObject) and " mt" (MeansTest)
    containers, which are rebuilt via dict.update()."""
    if len(obj) != 1:
        return obj
    tag, payload = next(iteritems(obj))
    if tag == " t":
        return tuple(payload)
    if tag == " u":
        return uuid.UUID(payload)
    if tag == " b":
        return b64decode(payload)
    if tag == " m":
        return Markup(payload)
    if tag == " d":
        return parse_date(payload)
    if tag == " ch":
        checker = CheckerSessionObject()
        checker.update(payload)
        return checker
    if tag == " mt":
        means_test = MeansTest()
        means_test.update(payload)
        return means_test
    return obj
def get_record(cls, *args, **kwargs):
    """Return record object and enhance response.

    For GET/HEAD requests carrying an Accept-Datetime header, returns the
    memento (historic revision) closest to that instant instead of the
    current record, and registers a post-request hook that adds the
    Memento-Datetime, Vary and Link headers to the response.
    """
    record = super(MementoRecord, cls).get_record(*args, **kwargs)
    if request.method in ('GET', 'HEAD'):
        if 'Accept-Datetime' in request.headers:
            memento = get_memento(record, parse_date(
                request.headers['Accept-Datetime']
            ))

            @after_this_request
            def memento_datetime(response):
                """Add Memento-Datetime header."""
                response.headers['Memento-Datetime'] = http_date(
                    memento.updated
                )
                response.headers['Vary'] = 'accept-datetime, accept'
                response.headers['Link'] = LinkHeader([
                    Link(request.url, rel='original timegate')
                ])
                return response
            return memento
    return record
def to_python(self, value):
    """Convert the raw string *value* into a datetime by parsing it as an
    HTTP date (returns None on parse failure — presumably werkzeug's
    parse_date semantics; confirm against the import)."""
    parsed = parse_date(value)
    return parsed
def __call__(self, url, cache_info={}):
    """Validate an ISP JSON file at *url*, yielding progress/HTML messages.

    Generator (Python 2): fetches the URL (honouring cached ETag /
    Last-Modified from *cache_info*), checks HTTP caching headers, size,
    charset, JSON syntax, schema conformance and GeoJSON areas; on success
    stores the parsed dict on self and fires done_cb().
    """
    esc = self.escape
    yield self.m('Starting the validation process...')
    r = None
    try:
        yield self.m('* Attempting to retreive %s' % self.bold(url))
        headers = {'User-Agent': 'FFDN DB validator'}
        # Conditional request when we already hold cache validators.
        if cache_info.get('etag'):
            headers['If-None-Match'] = cache_info['etag']
        if cache_info.get('last-modified'):
            headers['If-Modified-Since'] = cache_info['last-modified']
        r = requests.get(url, verify='/etc/ssl/certs/ca-certificates.crt',
                         headers=headers, stream=True, timeout=10)
    except requests.exceptions.SSLError as e:
        yield self.err('Unable to connect, SSL Error: ' +
                       self.color('#dd1144', esc(e)))
    except requests.exceptions.ConnectionError as e:
        yield self.err('Unable to connect: ' + self.color('#dd1144', esc(e)))
    except requests.exceptions.Timeout as e:
        yield self.err('Connection timeout')
    except requests.exceptions.TooManyRedirects as e:
        yield self.err('Too many redirects')
    except requests.exceptions.RequestException as e:
        yield self.err('Internal request exception')
    except Exception as e:
        # Unexpected exception: abort the validation, then re-raise it
        # so that it's logged.
        tb = sys.exc_info()[2]
        yield self.abort('Unexpected request exception')
        raise e, None, tb  # Python 2 three-expression raise
    if r is None:
        yield self.abort('Connection could not be established, aborting')
        return
    yield self.info('Connection established')
    yield self.info('Response code: ' +
                    self.bold(str(r.status_code) + ' ' + esc(r.reason)))
    try:
        r.raise_for_status()
    except requests.exceptions.HTTPError as e:
        yield self.err('Response code indicates an error')
        yield self.abort('Invalid response code')
        return
    # --- Cache-Control / Expires analysis --------------------------------
    _cachecontrol = r.headers.get('cache-control')
    cachecontrol = self.parse_cache_control(_cachecontrol) if _cachecontrol else None
    max_age = None
    if cachecontrol:
        try:
            _maxage = cachecontrol.get('max-age')
            _maxage = cachecontrol.get('s-maxage', _maxage)  # s-maxage takes precedence
            max_age = int(_maxage)
        except ValueError:
            yield self.warn('Invalid max-age ' + esc(_maxage))
        yield self.info('Cache control: ' + self.bold(esc(
            ', '.join([k + '=' + v if type(v) != bool else k
                       for k, v in cachecontrol.iteritems()]))))
    _expires = r.headers.get('expires')
    expires = parse_date(_expires)
    if expires:
        _now = r.headers.get('date')
        if _now:  # use server date when possible
            now = parse_date(_now)
        else:
            now = datetime.utcnow()
        if expires > now:
            # Turn the absolute expiry into a relative seconds value.
            expires = (expires - now).total_seconds()
            yield self.info('Expires: ' + self.bold(esc(_expires)))
        else:
            yield self.warn('Invalid Expires header. Expiry date must be in the future.')
            expires = None
    elif _expires and not expires:
        yield self.warn('Invalid Expires header %r' % esc(_expires))
    if not max_age and not expires:
        yield self.warn('No valid expiration time provided ! Please provide it either '
                        'with a Cache-Control or Expires header.')
        max_age = self.config('DEFAULT_CACHE_TIME')
        yield self.info('Using default expiration time of %d seconds' % (max_age))
    # Clamp the refresh interval into the configured [MIN, MAX] window.
    self.jdict_max_age = max_age if max_age else expires
    self.jdict_max_age = min(
        self.config('MAX_CACHE_TIME'),
        max(self.config('MIN_CACHE_TIME'), self.jdict_max_age)
    )
    yield self.info('Next update will be in %s' % (timedelta(seconds=self.jdict_max_age)))
    # --- Cache validators for the next conditional request ---------------
    etag = r.headers.get('etag')
    last_modified = r.headers.get('last-modified')
    if not etag and not last_modified:
        yield self.warn('Please, provide at an ETag or Last-Modified header for '
                        'conditional requests')
    self.cache_info = {}
    if etag:
        self.cache_info['etag'] = etag
    if last_modified:
        self.cache_info['last-modified'] = last_modified
    if cache_info and r.status_code == 304:  # not modified
        self.m('== ' + self.color('forestgreen', 'Response not modified. All good !'))
        self.modified = False
        self.success = True
        self.done_cb()
        return
    # --- Content-Type / charset / size checks ----------------------------
    yield self.info('Content type: ' +
                    self.bold(esc(r.headers.get('content-type', 'not defined'))))
    if not r.headers.get('content-type'):
        yield self.err('Content-type ' + self.bold('MUST') + ' be defined')
        yield self.abort('The file must have a proper content-type to continue')
        return
    elif r.headers.get('content-type').lower() != 'application/json':
        yield self.warn('Content-type ' + self.italics('SHOULD') + ' be application/json')
    encoding = get_encoding(r.headers.get('content-type'))
    if not encoding:
        yield self.warn('Encoding not set. Assuming it\'s unicode, as per RFC4627 section 3')
    yield self.info('Content length: %s' %
                    (self.bold(esc(r.headers.get('content-length', 'not set')))))
    cl = r.headers.get('content-length')
    if not cl:
        yield self.warn('No content-length. Note that we will not process a file whose size exceed %s' %
                        (filesize_fmt(self.MAX_JSON_SIZE)))
    elif int(cl) > self.MAX_JSON_SIZE:
        yield self.abort('File too big ! File size must be less then %s' %
                         (filesize_fmt(self.MAX_JSON_SIZE)))
        return
    # Stream the body into memory, enforcing the size cap as we go.
    yield self.info('Reading response into memory...')
    b = io.BytesIO()
    for d in r.iter_content(requests.models.CONTENT_CHUNK_SIZE):
        b.write(d)
        if b.tell() > self.MAX_JSON_SIZE:
            yield self.abort('File too big ! File size must be less then %s' %
                             (filesize_fmt(self.MAX_JSON_SIZE)))
            return
    r._content = b.getvalue()
    del b
    yield self.info('Successfully read %d bytes' % len(r.content))
    # --- Decode and parse JSON -------------------------------------------
    yield self.nl() + self.m('* Parsing the JSON file')
    if not encoding:
        charset = requests.utils.guess_json_utf(r.content)
        if not charset:
            yield self.err('Unable to guess unicode charset')
            yield self.abort('The file MUST be unicode-encoded when no explicit charset is in the content-type')
            return
        yield self.info('Guessed charset: ' + self.bold(charset))
    try:
        txt = r.content.decode(encoding or charset)
        yield self.info('Successfully decoded file as %s' % esc(encoding or charset))
    except LookupError as e:
        yield self.err('Invalid/unknown charset: %s' % esc(e))
        yield self.abort('Charset error, Cannot continue')
        return
    except UnicodeDecodeError as e:
        yield self.err('Unicode decode error: %s' % e)
        yield self.abort('Charset error, cannot continue')
        return
    except Exception:
        yield self.abort('Unexpected charset error')
        return
    jdict = None
    try:
        jdict = json.loads(txt)
    except ValueError as e:
        yield self.err('Error while parsing JSON: %s' % esc(e))
    except Exception as e:
        yield self.err('Unexpected error while parsing JSON: %s' % esc(e))
    if not jdict:
        yield self.abort('Could not parse JSON')
        return
    yield self.info('JSON parsed successfully')
    # --- Schema and GeoJSON validation -----------------------------------
    yield self.nl() + self.m('* Validating the JSON against the schema')
    v = list(validate_isp(jdict))
    if v:
        yield self.err('Validation errors:') + self.format_validation_errors(v)
        yield self.abort('Your JSON file does not follow the schema, please fix it')
        return
    else:
        yield self.info('Done. No errors encountered \o')
    for ca in jdict.get('coveredAreas', []):
        if not 'area' in ca:
            continue
        if not check_geojson_spatialite(ca['area']):
            yield self.err('GeoJSON data for covered area "%s" cannot '
                           'be handled by our database' % esc(ca['name']))
            yield self.abort('Please fix your GeoJSON')
            return
    ret = self.pre_done_cb(jdict)
    if ret:
        yield ret
        return
    yield (self.nl() +
           self.m('== ' + self.color('forestgreen', 'All good ! You can click on Confirm now')) +
           self.m(json.dumps({'passed': 1}), 'control'))
    self.jdict = jdict
    self.success = True
    self.done_cb()
def if_unmodified_since(self):
    """The ``If-Unmodified-Since`` request header parsed into a datetime
    object (``None`` when the header is absent or unparseable)."""
    raw_header = self.environ.get('HTTP_IF_UNMODIFIED_SINCE')
    return parse_date(raw_header)
# Python 2 script: mirror new air-quality tweets from a Twitter timeline
# into the local Feed table, then cross-post unsynced feeds to Weibo.
# (Note the `print` statement and the pre-1.0 requests API:
# `requests.session(hooks=...)` — presumably requests 0.x; confirm.)
T_CO_RE = re.compile(r'http://t.co/[^ ]*')
BIT_LY_RE = re.compile(r'http://bit.ly/[^ ]*')
# Only fetch tweets newer than the last one we stored.
last_feed = Feed.query.order_by(Feed.id.desc()).first()
if last_feed:
    ZJA_TIMELINE_URL += '&since_id=' + str(last_feed.tid)
r = requests.get(ZJA_TIMELINE_URL)
if r.status_code == 200:
    # Oldest first, so Feed ids follow tweet chronology.
    for tweet in reversed(json.loads(r.content)):
        status = tweet['text']
        # Keep only tweets containing all three air-quality keywords
        # ("air quality", "concentration", "grade").
        if not (u"空气质量" in status and u"浓度" in status and u"等级" in status):
            continue
        f = Feed(tweet['id'], status, parse_date(tweet['created_at']))
        db.session.add(f)
    db.session.commit()
# OAuth-signed session for posting to Weibo.
bind = Bind.query.filter_by(to='weibo').first()
oauth_hook = OAuthHook(bind.token, bind.secret, header_auth=True)
client = requests.session(hooks={'pre_request': oauth_hook})
expander = requests.session()
for feed in Feed.query.filter_by(synced=False).order_by(Feed.id.asc()).all():
    status = feed.text
    # Expand t.co / bit.ly short links via their redirect Location header.
    status = T_CO_RE.sub(lambda match: expander.get(match.group(), allow_redirects=False).headers['location'], status)
    status = BIT_LY_RE.sub(lambda match: expander.get(match.group(), allow_redirects=False).headers['location'], status)
    r = client.post(STATUSES_UPDATE_URL, data={"status": status})
    if r.status_code != 200:
        print feed.tid, r.content
def get_img(self, request, ident, region, size, rotation, quality, target_fmt=None):
    '''Get an Image.

    Resolves the identifier, optionally 301-redirects for content
    negotiation or canonical URIs, serves a cached derivative (honouring
    If-Modified-Since), or renders a new one.

    Args:
        request (Request):
            Forwarded by dispatch_request
        ident (str):
            The identifier portion of the IIIF URI syntax
    '''
    r = LorisResponse()
    if target_fmt is None:  # use identity check, not == None
        target_fmt = self._format_from_request(request)
        logger.debug('target_fmt: %s' % (target_fmt,))
        if self.redirect_conneg:
            logger.debug(ident)
            image_request = img.ImageRequest(ident, region, size, rotation,
                                             quality, target_fmt)
            logger.debug('Attempting redirect to %s' % (image_request.request_path,))
            r.headers['Location'] = image_request.request_path
            r.status_code = 301
            return r
    # no reason for an else because the spec dictates that we should use a
    # default format, which _format_from_request will return if one can't be
    # discerned from the Accept header.
    # ImageRequest's Parameter attributes, i.e. RegionParameter etc. are
    # decorated with @property and not constructed until they are first
    # accessed, which mean we don't have to catch any exceptions here.
    image_request = img.ImageRequest(ident, region, size, rotation,
                                     quality, target_fmt)
    logger.debug(image_request.request_path)
    if self.enable_caching:
        in_cache = image_request in self.img_cache
    else:
        in_cache = False
    if in_cache:
        fp = self.img_cache[image_request]
        ims_hdr = request.headers.get('If-Modified-Since')
        img_last_mod = datetime.utcfromtimestamp(path.getmtime(fp))
        # The stamp from the FS needs to be rounded using the same precision
        # as when went sent it, so for an accurate comparison turn it into
        # an http date and then parse it again :-( :
        img_last_mod = parse_date(http_date(img_last_mod))
        logger.debug("Time from FS (native, rounded): " + str(img_last_mod))
        logger.debug("Time from IMS Header (parsed): " + str(parse_date(ims_hdr)))
        # ims_hdr = parse_date(ims_hdr) # catch parsing errors?
        if ims_hdr and parse_date(ims_hdr) >= img_last_mod:
            logger.debug('Sent 304 for %s ' % (fp,))
            r.status_code = 304
            return r
        else:
            r.content_type = constants.FORMATS_BY_EXTENSION[target_fmt]
            r.status_code = 200
            r.last_modified = img_last_mod
            r.headers['Content-Length'] = path.getsize(fp)
            r.response = file(fp)  # Python 2 builtin; open() in Python 3
            return r
    else:
        try:
            # 1. resolve the identifier
            src_fp, src_format = self.resolver.resolve(ident)
            # 2 hand the Image object its info
            info = self._get_info(ident, request, src_fp, src_format)[0]
            image_request.info = info
            # 3. Redirect if appropriate
            if self.redirect_cannonical_image_request:
                if not image_request.is_cannonical:
                    logger.debug('Attempting redirect to %s' %
                                 (image_request.c14n_request_path,))
                    r.headers['Location'] = image_request.c14n_request_path
                    r.status_code = 301
                    return r
            # 4. Make an image
            fp = self._make_image(image_request, src_fp, src_format)
        except (resolver.ResolverException, ImageInfoException,
                img.ImageException, RegionSyntaxException,
                RegionRequestException, SizeSyntaxException,
                SizeRequestException, RotationSyntaxException) as e:
            r.response = e
            r.status_code = e.http_status
            r.mimetype = 'text/plain'
            return r
        r.content_type = constants.FORMATS_BY_EXTENSION[target_fmt]
        r.status_code = 200
        r.last_modified = datetime.utcfromtimestamp(path.getctime(fp))
        r.headers['Content-Length'] = path.getsize(fp)
        r.response = file(fp)
        if not self.enable_caching:
            # BUG FIX: the original passed unlink(fp) — which deleted the
            # file immediately, before the response body was sent, and
            # registered None (not a callable) as the close hook.
            # call_on_close expects a callable to run after the response
            # is finished; defer the unlink with a lambda.
            r.call_on_close(lambda: unlink(fp))
        return r