def post(self):
    c.request_timer.intermediate("action")

    # if the action raised an HTTPException (i.e. it aborted) then pylons
    # will have replaced response with the exception itself.
    c.is_exception_response = getattr(response, "_exception", False)

    if c.response_wrapper and not c.is_exception_response:
        content = flatten_response(response.content)
        wrapped_content = c.response_wrapper(content)
        response.content = wrapped_content

    if c.user_is_loggedin and not c.allow_loggedin_cache:
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Pragma'] = 'no-cache'

    if c.deny_frames:
        response.headers["X-Frame-Options"] = "DENY"

    #set content cache
    if (g.page_cache_time
        and request.method.upper() == 'GET'
        and (not c.user_is_loggedin or c.allow_loggedin_cache)
        and not c.used_cache
        and response.status_int != 429
        and not response.status.startswith("5")
        and not c.is_exception_response):
        try:
            g.pagecache.set(self.request_key(),
                            (response._current_obj(), c.cookies),
                            g.page_cache_time)
        except MemcachedError as e:
            # this codepath will actually never be hit as long as
            # the pagecache memcached client is in no_reply mode.
            g.log.warning("Ignored exception (%r) on pagecache "
                          "write for %r", e, request.path)

    # send cookies
    for k, v in c.cookies.iteritems():
        if v.dirty:
            response.set_cookie(key=k,
                                value=quote(v.value),
                                domain=v.domain,
                                expires=v.expires,
                                secure=getattr(v, 'secure', False),
                                httponly=getattr(v, 'httponly', False))

    if self.should_update_last_visit():
        c.user.update_last_visit(c.start_time)

    hooks.get_hook("reddit.request.end").call()

    # this thread is probably going to be reused, but it could be
    # a while before it is. So we might as well dump the cache in
    # the mean time so that we don't have dead objects hanging
    # around taking up memory
    g.reset_caches()

    c.request_timer.intermediate("post")

    # push data to statsd
    c.request_timer.stop()
    g.stats.flush()

def post(self):
    c.request_timer.intermediate("action")

    if c.response_wrapper:
        content = "".join(_force_utf8(x)
                          for x in tup(response.content) if x)
        wrapped_content = c.response_wrapper(content)
        response.content = wrapped_content

    if c.user_is_loggedin and not c.allow_loggedin_cache:
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Pragma'] = 'no-cache'

    if c.deny_frames:
        response.headers["X-Frame-Options"] = "DENY"

    #set content cache
    if (g.page_cache_time
        and request.method.upper() == 'GET'
        and (not c.user_is_loggedin or c.allow_loggedin_cache)
        and not c.used_cache
        and response.status_code not in (429, 503)):
        try:
            g.pagecache.set(self.request_key(),
                            (response._current_obj(), c.cookies),
                            g.page_cache_time)
        except MemcachedError as e:
            # this codepath will actually never be hit as long as
            # the pagecache memcached client is in no_reply mode.
            g.log.warning("Ignored exception (%r) on pagecache "
                          "write for %r", e, request.path)

    # send cookies
    for k, v in c.cookies.iteritems():
        if v.dirty:
            response.set_cookie(key=k,
                                value=quote(v.value),
                                domain=v.domain,
                                expires=v.expires,
                                secure=getattr(v, 'secure', False),
                                httponly=getattr(v, 'httponly', False))

    end_time = datetime.now(g.tz)

    # update last_visit
    if (c.user_is_loggedin and not g.disallow_db_writes
        and request.method.upper() != "POST"
        and not c.dont_update_last_visit
        and request.path != '/validuser'):
        c.user.update_last_visit(c.start_time)

    check_request(end_time)

    # this thread is probably going to be reused, but it could be
    # a while before it is. So we might as well dump the cache in
    # the mean time so that we don't have dead objects hanging
    # around taking up memory
    g.reset_caches()

    # push data to statsd
    c.request_timer.stop()
    g.stats.flush()

def post(self):
    c.request_timer.intermediate("action")

    # if the action raised an HTTPException (i.e. it aborted) then pylons
    # will have replaced response with the exception itself.
    c.is_exception_response = getattr(response, "_exception", False)

    if c.response_wrapper and not c.is_exception_response:
        content = flatten_response(response.content)
        wrapped_content = c.response_wrapper(content)
        response.content = wrapped_content

    if c.user_is_loggedin and not c.allow_loggedin_cache:
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Pragma'] = 'no-cache'

    if c.deny_frames:
        response.headers["X-Frame-Options"] = "DENY"

    # save the result of this page to the pagecache if possible.  we
    # mustn't cache things that rely on state not tracked by request_key
    # such as If-Modified-Since headers for 304s or requesting IP for 429s.
    if (g.page_cache_time
        and request.method.upper() == 'GET'
        and c.can_use_pagecache
        and not c.used_cache
        and response.status_int not in (304, 429)
        and not response.status.startswith("5")
        and not c.is_exception_response):
        try:
            g.pagecache.set(self.request_key(),
                            (response._current_obj(), c.cookies),
                            g.page_cache_time)
        except MemcachedError as e:
            # this codepath will actually never be hit as long as
            # the pagecache memcached client is in no_reply mode.
            g.log.warning("Ignored exception (%r) on pagecache "
                          "write for %r", e, request.path)

    pragmas = [p.strip() for p in
               request.headers.get("Pragma", "").split(",")]
    if g.debug or "x-reddit-pagecache" in pragmas:
        if c.can_use_pagecache:
            pagecache_state = "hit" if c.used_cache else "miss"
        else:
            pagecache_state = "disallowed"
        response.headers["X-Reddit-Pagecache"] = pagecache_state

    # send cookies
    for k, v in c.cookies.iteritems():
        if v.dirty:
            response.set_cookie(key=k,
                                value=quote(v.value),
                                domain=v.domain,
                                expires=v.expires,
                                secure=getattr(v, 'secure', False),
                                httponly=getattr(v, 'httponly', False))

    if self.should_update_last_visit():
        c.user.update_last_visit(c.start_time)

    hooks.get_hook("reddit.request.end").call()

    # this thread is probably going to be reused, but it could be
    # a while before it is. So we might as well dump the cache in
    # the mean time so that we don't have dead objects hanging
    # around taking up memory
    g.reset_caches()

    c.request_timer.intermediate("post")

    # push data to statsd
    c.request_timer.stop()
    g.stats.flush()

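# The newer post() variants above call two helpers that are defined elsewhere
# in the controller module and are not shown here. The sketches below are
# assumptions reconstructed from the older variant of post() above, which still
# inlines the equivalent logic (the _force_utf8/tup join and the last-visit
# condition); they illustrate the intent and are not the canonical definitions.

def flatten_response(content):
    """Flatten a response content iterable into a single UTF-8 string."""
    # Pylons may hand back the body as a list of chunks; skip empty chunks
    # and coerce each one to UTF-8 before joining, as the inlined version did.
    return "".join(_force_utf8(x) for x in tup(content) if x)

def should_update_last_visit(self):
    # Assumed controller method mirroring the condition the older variant
    # checked inline before calling c.user.update_last_visit().
    return (c.user_is_loggedin
            and not g.disallow_db_writes
            and request.method.upper() != "POST"
            and not c.dont_update_last_visit
            and request.path != '/validuser')
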
def _handleOAIRequest(self, format='html'):
    t_req = request._current_obj()
    t_res = response._current_obj()

    enable_flow_control = False
    fc_id_limit = None
    fc_doc_limit = None
    service_id = None
    serviceDoc = h.getServiceDocument(appConfig['lr.oaipmh.docid'])
    if serviceDoc is not None:
        if 'service_id' in serviceDoc:
            service_id = serviceDoc['service_id']

        if 'service_data' in serviceDoc:
            serviceData = serviceDoc['service_data']
            if 'flow_control' in serviceData:
                enable_flow_control = serviceData['flow_control']

            if enable_flow_control and 'id_limit' in serviceData:
                fc_id_limit = serviceData['id_limit']
            elif enable_flow_control:
                fc_id_limit = 100

            if enable_flow_control and 'doc_limit' in serviceData:
                fc_doc_limit = serviceData['doc_limit']
            elif enable_flow_control:
                fc_doc_limit = 100

    o = oaipmh()

    def GetRecord(params):
        try:
            from lr.mustache.oaipmh import GetRecord as must_GetRecord
            identifier = params["identifier"]
            if params["by_doc_ID"] == True:
                resolver = OAIPMHDocumentResolver()
                single_doc = o.get_record(params["identifier"])
                if single_doc is not None:
                    docList = [resolver.process({"doc": single_doc})]
                else:
                    docList = []
            else:
                docList = o.get_records_by_resource(params["identifier"])

            doc_idx = 0
            valid_docs = 0
            mustache = must_GetRecord()
            for doc in docList:
                if doc is not None:
                    doc_idx += 1
                    if ("payload_schema" in doc
                        and params["metadataPrefix"] in map(lambda x: o_mod.getMetadataPrefix(x), doc["payload_schema"])
                        and OAIPMHDocumentResolver.PAYLOAD_ERROR not in doc):
                        valid_docs += 1
                        if valid_docs == 1:
                            part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                            yield h.fixUtf8(self._returnResponse(part, res=t_res))
                        part = mustache.doc(doc)
                        yield h.fixUtf8(self._returnResponse(part, res=t_res))

            if doc_idx == 0:
                raise IdDoesNotExistError(params['verb'], req=t_req)
            elif valid_docs == 0:
                raise CannotDisseminateFormatError(params['verb'], req=t_req)
            else:
                yield h.fixUtf8(self._returnResponse(mustache.suffix(), res=t_res))

        except oaipmherrors.Error as e:
            from lr.mustache.oaipmh import Error as err_stache
            err = err_stache()
            yield h.fixUtf8(self._returnResponse(err.xml(e), res=t_res))

    def ListGeneric(params, showDocs=False, record_limit=None):
        if not showDocs:
            from lr.mustache.oaipmh import ListIdentifiers as must_ListID
            mustache = must_ListID()
        else:
            from lr.mustache.oaipmh import ListRecords as must_ListRec
            mustache = must_ListRec()

        try:
            doc_index = 0
            err_count = 0
            metadataPrefix = params["metadataPrefix"]
            from_date = params["from"]
            until_date = params["until"]
            doc_err = None
            rendered_init = False
            resumptionToken = None if "resumptionToken" not in params else params['resumptionToken']
            records = o.list_identifiers_or_records(metadataPrefix,
                                                    from_date=from_date,
                                                    until_date=until_date,
                                                    rt=resumptionToken,
                                                    fc_limit=record_limit,
                                                    include_docs=showDocs)
            for ident in records:
                doc_index += 1
                doc_err = False
                if OAIPMHDocumentResolver.PAYLOAD_ERROR in ident:
                    err_count += 1
                    doc_err = True
                    log.debug("Payload Error detected, doc_index: {0}, err_count: {1}".format(doc_index, err_count))

                if doc_index - err_count == 1:
                    rendered_init = True
                    part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                    yield h.fixUtf8(self._returnResponse(part, res=t_res))

                if doc_err is False and (record_limit is None or doc_index <= record_limit):
                    part = mustache.doc(ident)
                    yield h.fixUtf8(part)
                elif enable_flow_control:
                    from lr.lib import resumption_token
                    if doc_index - err_count > 0 and doc_index > record_limit:
                        opts = o.list_opts(metadataPrefix,
                                           h.convertToISO8601UTC(ident["node_timestamp"]),
                                           until_date)
                        opts["startkey_docid"] = ident["doc_ID"]
                        token = resumption_token.get_token(serviceid=service_id,
                                                           from_date=from_date,
                                                           until_date=until_date,
                                                           **opts)
                        part = mustache.resumptionToken(token)
                        yield h.fixUtf8(part)
                        break
                    elif doc_index - err_count == 0 and doc_index > record_limit:
                        opts = o.list_opts(metadataPrefix,
                                           h.convertToISO8601UTC(ident["node_timestamp"]),
                                           until_date)
                        opts["startkey_docid"] = ident["doc_ID"]
                        payload = resumption_token.get_payload(from_date=from_date,
                                                               until_date=until_date,
                                                               **opts)
                        records = o.list_identifiers_or_records(metadataPrefix,
                                                                from_date=from_date,
                                                                until_date=until_date,
                                                                rt=payload,
                                                                fc_limit=record_limit,
                                                                include_docs=showDocs)
                        doc_index = 0
                        err_count = 0

            if doc_index == 0 and err_count == 0:
                raise NoRecordsMatchError(params['verb'], req=t_req)
            elif (doc_index - err_count) == 0:
                raise CannotDisseminateFormatError(params['verb'], req=t_req)
            else:
                if enable_flow_control and doc_index <= record_limit:
                    yield h.fixUtf8(mustache.resumptionToken())
                yield h.fixUtf8(mustache.suffix())

        except oaipmherrors.Error as e:
            if not rendered_init:
                from lr.mustache.oaipmh import Error as err_stache
                err = err_stache()
                yield h.fixUtf8(self._returnResponse(err.xml(e), res=t_res))
            else:
                from lr.mustache.oaipmh import ErrorOnly as err_stache
                err = err_stache()
                yield h.fixUtf8(self._returnResponse(err.xml(e) + mustache.suffix(), res=t_res))
        except:
            log.exception("Unknown Error Occurred")

    def ListIdentifiers(params):
        return ListGeneric(params, False, fc_id_limit)

    def ListRecords(params):
        return ListGeneric(params, True, fc_doc_limit)

    # def ListRecords(params):
    #     try:
    #         from lr.mustache.oaipmh import ListRecords as must_ListRec
    #
    #         doc_index = 0
    #         mustache = must_ListRec()
    #         for record in o.list_records(params["metadataPrefix"], from_date=params["from"], until_date=params["until"]):
    #             doc_index += 1
    #             log.debug(json.dumps(record))
    #             if doc_index == 1:
    #                 part = mustache.prefix(**self._initMustache(args=params, req=t_req))
    #                 yield self._returnResponse(part, res=t_res)
    #
    #             part = mustache.doc(record)
    #             yield self._returnResponse(part, res=t_res)
    #
    #         if doc_index == 0:
    #             raise NoRecordsMatchError(params['verb'], req=t_req)
    #         else:
    #             yield mustache.suffix()
    #
    #     except oaipmherrors.Error as e:
    #         from lr.mustache.oaipmh import Error as err_stache
    #         err = err_stache()
    #         yield self._returnResponse(err.xml(e), res=t_res)
    #     except:
    #         log.exception("Unable to render template")

    def Identify(params=None):
        body = ""
        try:
            self._initRender(params, ctx=c, req=t_req)
            c.identify = o.identify()
            body = render("/oaipmh-Identify.mako")
        except Exception as e:
            raise BadVerbError()
        return self._returnResponse(body, res=t_res)

    def ListMetadataFormats(params):
        body = ""
        try:
            self._initRender(params, ctx=c, req=t_req)
            fmts = o.list_metadata_formats(identity=params["identifier"],
                                           by_doc_ID=params["by_doc_ID"])
            if len(fmts) == 0:
                raise NoMetadataFormats(params["verb"])
            c.formats = fmts
            body = render("/oaipmh-ListMetadataFormats.mako")
            return self._returnResponse(body, res=t_res)
        except Error as e:
            raise e

    def ListSets(params=None):
        raise NoSetHierarchyError(verb)

    def NotYetSupported(params=None):
        raise BadVerbError()

    switch = {
        'GetRecord': GetRecord,
        'ListRecords': ListRecords,
        'ListIdentifiers': ListIdentifiers,
        'Identify': Identify,
        'ListMetadataFormats': ListMetadataFormats,
        'ListSets': ListSets,
    }

    try:
        params = self._parseParams(flow_control=enable_flow_control, serviceid=service_id)

        # If this is a special case where we are actually using OAI interface to serve basic harvest
        if params.has_key("metadataPrefix") and params["metadataPrefix"] == "LR_JSON_0.10.0":
            if params.has_key("identifier") == True:
                params[self.REQUESTID] = params["identifier"]
            if params.has_key("from") and isinstance(params["from"], datetime):
                params["from"] = h.convertToISO8601Zformat(params["from"])
            if params.has_key("until") and isinstance(params["until"], datetime):
                params["until"] = h.convertToISO8601Zformat(params["until"])
            return HarvestController.harvest(self, params, request.body, params['verb'].lower())

        verb = params['verb']
        response.headers['Content-Type'] = "text/xml; charset=utf-8"
        return switch[verb](params)
    except Error as e:
        from lr.mustache.oaipmh import Error as err_stache
        err = err_stache()
        return self._returnResponse(err.xml(e), res=t_res)

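# A minimal client-side sketch of the flow control implemented by ListGeneric
# above: keep issuing ListIdentifiers requests and follow the resumptionToken
# until the node stops returning one. The base_url and metadata_prefix values
# are assumptions for illustration (the real prefixes come from
# ListMetadataFormats); this is a generic OAI-PMH consumer, not part of the
# handler itself.
import urllib
import urllib2
import xml.etree.ElementTree as ET

OAI_NS = "{http://www.openarchives.org/OAI/2.0/}"

def harvest_identifiers(base_url, metadata_prefix):
    params = {"verb": "ListIdentifiers", "metadataPrefix": metadata_prefix}
    while True:
        xml = urllib2.urlopen(base_url + "?" + urllib.urlencode(params)).read()
        tree = ET.fromstring(xml)
        for header in tree.iter(OAI_NS + "header"):
            yield header.findtext(OAI_NS + "identifier")
        token = tree.findtext(".//" + OAI_NS + "resumptionToken")
        if not token or not token.strip():
            break
        # after the first page, only the verb and the token are sent
        params = {"verb": "ListIdentifiers", "resumptionToken": token.strip()}
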
def _handleOAIRequest(self, format='html'):
    t_req = request._current_obj()
    t_res = response._current_obj()

    o = oaipmh()

    def GetRecord(params):
        try:
            from lr.mustache.oaipmh import GetRecord as must_GetRecord
            identifier = params["identifier"]
            if params["by_doc_ID"] == True:
                docList = [o.get_record(params["identifier"])]
            else:
                docList = o.get_records_by_resource(params["identifier"])

            doc_idx = 0
            valid_docs = 0
            mustache = must_GetRecord()
            for doc in docList:
                if doc is not None:
                    doc_idx += 1
                    if "payload_schema" in doc and params["metadataPrefix"] in doc["payload_schema"]:
                        valid_docs += 1
                        if valid_docs == 1:
                            part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                            yield self._returnResponse(part, res=t_res)
                        part = mustache.doc(doc)
                        yield self._returnResponse(part, res=t_res)

            if doc_idx == 0:
                raise IdDoesNotExistError(params['verb'], req=t_req)
            elif valid_docs == 0:
                raise CannotDisseminateFormatError(params['verb'], req=t_req)
            else:
                yield self._returnResponse(mustache.suffix(), res=t_res)

        except oaipmherrors.Error as e:
            from lr.mustache.oaipmh import Error as err_stache
            err = err_stache()
            yield self._returnResponse(err.xml(e), res=t_res)

    def ListIdentifiers(params):
        try:
            from lr.mustache.oaipmh import ListIdentifiers as must_ListID

            doc_index = 0
            mustache = must_ListID()
            for ident in o.list_identifiers(params["metadataPrefix"],
                                            from_date=params["from"],
                                            until_date=params["until"]):
                doc_index += 1
                log.debug(json.dumps(ident))
                if doc_index == 1:
                    # self._initRender(params, c, t_req)
                    # part = t_render("/oaipmh-ListIdentifiers-prefix.mako")
                    part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                    yield self._returnResponse(part, res=t_res)

                part = mustache.doc(ident)
                yield part

            if doc_index == 0:
                raise NoRecordsMatchError(params['verb'], req=t_req)
            else:
                yield mustache.suffix()

        except oaipmherrors.Error as e:
            from lr.mustache.oaipmh import Error as err_stache
            err = err_stache()
            yield self._returnResponse(err.xml(e), res=t_res)
        except:
            log.exception("Unable to render template")

    def ListRecords(params):
        try:
            from lr.mustache.oaipmh import ListRecords as must_ListRec

            doc_index = 0
            mustache = must_ListRec()
            for record in o.list_records(params["metadataPrefix"],
                                         from_date=params["from"],
                                         until_date=params["until"]):
                doc_index += 1
                log.debug(json.dumps(record))
                if doc_index == 1:
                    part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                    yield self._returnResponse(part, res=t_res)

                part = mustache.doc(record)
                yield self._returnResponse(part, res=t_res)

            if doc_index == 0:
                raise NoRecordsMatchError(params['verb'], req=t_req)
            else:
                yield mustache.suffix()

        except oaipmherrors.Error as e:
            from lr.mustache.oaipmh import Error as err_stache
            err = err_stache()
            yield self._returnResponse(err.xml(e), res=t_res)
        except:
            log.exception("Unable to render template")

    def Identify(params=None):
        body = ""
        try:
            self._initRender(params)
            c.identify = o.identify()
            body = render("/oaipmh-Identify.mako")
        except Exception as e:
            raise BadVerbError()
        return self._returnResponse(body)

    def ListMetadataFormats(params):
        body = ""
        try:
            self._initRender(params)
            fmts = o.list_metadata_formats(identity=params["identifier"],
                                           by_doc_ID=params["by_doc_ID"])
            if len(fmts) == 0:
                raise NoMetadataFormats(params["verb"])
            c.formats = fmts
            body = render("/oaipmh-ListMetadataFormats.mako")
            return self._returnResponse(body)
        except Error as e:
            raise e
        # except Exception as e:
        #     raise NoMetadataFormats(params["verb"])

    def ListSets(params=None):
        raise NoSetHierarchyError(verb)

    def NotYetSupported(params=None):
        raise BadVerbError()

    switch = {
        'GetRecord': GetRecord,
        'ListRecords': ListRecords,
        'ListIdentifiers': ListIdentifiers,
        'Identify': Identify,
        'ListMetadataFormats': ListMetadataFormats,
        'ListSets': ListSets,
    }

    try:
        params = self._parseParams()

        # If this is a special case where we are actually using OAI interface to serve basic harvest
        if params.has_key("metadataPrefix") and params["metadataPrefix"] == "LR_JSON_0.10.0":
            if params.has_key("identifier") == True:
                params["request_id"] = params["identifier"]
            return HarvestController.harvest(self, params, request.body, params['verb'].lower())

        verb = params['verb']
        return switch[verb](params)
    except Error as e:
        from lr.mustache.oaipmh import Error as err_stache
        err = err_stache()
        return self._returnResponse(err.xml(e), res=t_res)