def get_payload(startkey=None, endkey=None, startkey_docid=None, from_date=None,
                until_date=None, key=None, keys=None, any_tags=None, identity=None):
    """Build the query payload embedded in a harvest resumption token.

    ``startkey``/``endkey`` are always present in the result; the remaining
    arguments are included only when truthy.  ``from_date``/``until_date``
    are serialized to ISO-8601 "Z" strings when they are datetime instances.

    Returns:
        dict suitable for JSON serialization.
    """
    payload = {}
    payload["startkey"] = startkey
    # Fix: the original default `endkey={}` shared one mutable dict across
    # every call; use a None sentinel and create a fresh dict per call.
    payload["endkey"] = {} if endkey is None else endkey
    if key:
        payload["key"] = key
    if keys:
        payload["keys"] = keys
    if startkey_docid:
        payload["startkey_docid"] = startkey_docid
    if any_tags:
        payload['any_tags'] = any_tags
    if identity:
        payload['identity'] = identity
    if from_date and isinstance(from_date, datetime.datetime):
        # Imported lazily, mirroring the original (presumably to avoid a
        # circular import at module load time).
        from lr.lib import helpers as h
        payload["from_date"] = h.convertToISO8601Zformat(from_date)
    if until_date and isinstance(until_date, datetime.datetime):
        # Fix: the original imported `h` only in the from_date branch, so
        # passing until_date without from_date raised NameError.
        from lr.lib import helpers as h
        payload["until_date"] = h.convertToISO8601Zformat(until_date)
    return payload
def list_identifiers(self, h, body, params, verb='GET'):
    """Stream a harvest ``listidentifiers`` response as JSON text chunks.

    The empty base response is serialized up front and split on the first
    '[' (the empty ``listidentifiers`` array); identifier headers are then
    streamed into that gap so the full result set is never held in memory.
    When flow control is enabled and the page fills, a resumption token is
    spliced into the closing half of the JSON.

    Yields: str fragments that concatenate to one JSON document.
    """
    data = self.get_base_response(verb, body)
    if params.has_key('from'):
        data['request']['from'] = params['from']
    if params.has_key('until'):
        data['request']['until'] = params['until']
    from_date, until_date = self._test_time_params(params)
    data['listidentifiers'] = []
    base_response = json.dumps(data).split('[')
    self._getServiceDocment(False)
    resumption_token = None
    if self.enable_flow_control and params.has_key('resumption_token'):
        resumption_token = rt.parse_token(self.service_id, params['resumption_token'])
    yield base_response[0] + '['
    first = True
    count = 0
    lastID = None   # doc id of the last row emitted (start of the next page)
    lastKey = None  # view key of the last row emitted
    for d in h.list_identifiers(from_date, until_date, resumption_token=resumption_token, limit=self.limit):
        count += 1
        lastID = d['id']
        lastKey = d['key']
        if not first:
            yield ',\n'
        first = False
        return_value = {"header": {'identifier': d['id'],
                                   'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                                   'status': 'active'}}
        yield json.dumps(return_value)
    if self.enable_flow_control and self.limit <= count:
        # Page filled: splice a resumption_token field just before the
        # closing brace of the serialized tail.
        token = rt.get_token(serviceid=self.service_id, startkey=lastKey,
                             endkey=helpers.convertToISO8601Zformat(until_date),
                             startkey_docid=lastID,
                             from_date=helpers.convertToISO8601Zformat(from_date),
                             until_date=helpers.convertToISO8601Zformat(until_date))
        resp = base_response[1]
        yield resp[:-1] + (',"resumption_token":"%s"' % token) + resp[-1:]
    else:
        yield base_response[1]
def getrecord():
    """Resolve a harvest "getrecord" request into a JSON response string.

    Accepts the request id under self.REQUESTID or its lowercase form,
    looks the record up by document id or by resource locator (the two
    modes are mutually exclusive), and wraps each match in a
    header/resource_data envelope.
    """
    data = self.get_base_response(verb, body)
    by_doc_ID = self._check_bool_param(params, 'by_doc_ID')
    by_resource_ID = self._check_bool_param(params, 'by_resource_ID')

    def _fail(code):
        # Common error-response shape.
        data['OK'] = False
        data['error'] = code
        return json.dumps(data)

    has_upper = params.has_key(self.REQUESTID)
    has_lower = params.has_key(self.REQUESTID.lower())
    if not has_upper and not has_lower:
        return _fail('badArgument')
    if by_doc_ID and by_resource_ID:
        # The two lookup modes cannot be combined.
        return _fail('badArgument')
    request_id = params[self.REQUESTID] if has_upper else params[self.REQUESTID.lower()]

    def _envelope(doc):
        return {"header": {'identifier': doc['_id'],
                           'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                           'status': 'active'},
                'resource_data': doc}

    if by_doc_ID:
        document = h.get_record(request_id)
        records = [] if document is None else [_envelope(document)]
    else:
        request_id = unquote_plus(request_id)
        records = [_envelope(doc) for doc in h.get_records_by_resource(request_id)]

    if not records:
        return _fail('idDoesNotExist')
    data['getrecord'] = {'record': records}
    data['request']['identifier'] = request_id
    data['request']['by_doc_ID'] = by_doc_ID
    data['request']['by_resource_ID'] = by_resource_ID
    return json.dumps(data)
def getrecord():
    """Resolve a harvest "getrecord" request into a JSON response string.

    Reads ``by_doc_ID``/``by_resource_ID`` flags (parsed from their string
    form with ast.literal_eval) and ``request_id`` from the surrounding
    request params; the two lookup modes are mutually exclusive.
    """
    data = self.get_base_response(verb, body)
    by_doc_ID = params.has_key('by_doc_ID') and ast.literal_eval(
        str(params['by_doc_ID']))
    by_resource_ID = params.has_key('by_resource_ID') and ast.literal_eval(
        str(params['by_resource_ID']))
    if not params.has_key('request_id'):
        data['OK'] = False
        data['error'] = 'badArgument'
        return json.dumps(data)
    if by_doc_ID and by_resource_ID:
        data['OK'] = False
        data['error'] = 'badArgument'
        return json.dumps(data)
    request_id = params['request_id']

    def _envelope(doc):
        # presumably doc supports attribute access (couchdb Document) —
        # TODO confirm against h.get_record's return type.
        return {'record': {"header": {'identifier': doc.id,
                                      'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                                      'status': 'active'}},
                'resource_data': doc}

    if by_doc_ID:
        document = h.get_record(request_id)
        # Fix: h.get_record may return None (the sibling implementations
        # guard this); mapping over [None] crashed on doc.id.
        source_docs = [document] if document is not None else []
    else:
        source_docs = h.get_records_by_resource(request_id)
    records = map(_envelope, source_docs)
    data['getrecord'] = {'record': records}
    return json.dumps(data)
def list_identifiers(self, h, body, params, verb='GET'):
    """Stream a ``listidentifiers`` JSON response one fragment at a time.

    The base response is serialized with an empty list, split at that
    list's '[', and identifier headers are streamed into the gap.
    """
    data = self.get_base_response(verb, body)
    for name in ('from', 'until'):
        if params.has_key(name):
            data['request'][name] = params[name]
    from_date, until_date = self._test_time_params(params)
    data['listidentifiers'] = []
    pieces = json.dumps(data).split('[')
    yield pieces[0] + '['
    emitted = 0
    for ident in h.list_identifiers(from_date, until_date):
        if emitted:
            yield ','
        emitted += 1
        header = {'identifier': ident,
                  'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                  'status': 'active'}
        yield json.dumps({"header": header})
    yield pieces[1]
def get_payload(startkey=None, endkey=None, startkey_docid=None, from_date=None,
                until_date=None):
    """Build the query payload embedded in a harvest resumption token.

    ``startkey``/``endkey`` are always present; ``startkey_docid`` is
    included when truthy, and ``from_date``/``until_date`` are serialized
    to ISO-8601 "Z" strings when they are datetime instances.

    Returns:
        dict suitable for JSON serialization.
    """
    payload = {}
    payload["startkey"] = startkey
    # Fix: the original default `endkey={}` shared one mutable dict across
    # every call; use a None sentinel and create a fresh dict per call.
    payload["endkey"] = {} if endkey is None else endkey
    if startkey_docid:
        payload["startkey_docid"] = startkey_docid
    if from_date and isinstance(from_date, datetime.datetime):
        # Imported lazily, mirroring the original (presumably to avoid a
        # circular import at module load time).
        from lr.lib import helpers as h
        payload["from_date"] = h.convertToISO8601Zformat(from_date)
    if until_date and isinstance(until_date, datetime.datetime):
        # Fix: the original imported `h` only in the from_date branch, so
        # passing until_date without from_date raised NameError.
        from lr.lib import helpers as h
        payload["until_date"] = h.convertToISO8601Zformat(until_date)
    return payload
def get_base_response(self, verb, body):
    """Return the skeleton response dict shared by all harvest verbs.

    Contains the OK/error flags, the response timestamp, and an echo of
    the request verb and raw HTTP body.
    """
    base = {}
    base['OK'] = True
    base['error'] = ''
    base['responseDate'] = helpers.convertToISO8601Zformat(datetime.today())
    base['request'] = {'verb': verb, 'HTTP_request': body}
    return base
def get_base_response(self, verb, body):
    """Build the common response envelope echoed back for every verb."""
    request_info = dict(verb=verb, HTTP_request=body)
    return dict(OK=True,
                error='',
                responseDate=helpers.convertToISO8601Zformat(datetime.today()),
                request=request_info)
def debug_map(doc):
    """Wrap a document in the harvest 'record' envelope (header + data)."""
    header = {
        'identifier': doc.id,
        'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
        'status': 'active',
    }
    return {'record': {"header": header, 'resource_data': doc}}
def list_identifiers(self, h, body, params, verb='GET'):
    """Yield the chunks of a ``listidentifiers`` JSON response.

    The serialized base response is split on its empty list's '[' and
    identifier headers are streamed between the two halves.
    """
    data = self.get_base_response(verb, body)
    if params.has_key('from'):
        data['request']['from'] = params['from']
    if params.has_key('until'):
        data['request']['until'] = params['until']
    from_date, until_date = self._test_time_params(params)
    data['listidentifiers'] = []
    fragments = json.dumps(data).split('[')
    yield fragments[0] + '['
    need_separator = False
    for identifier in h.list_identifiers(from_date, until_date):
        if need_separator:
            yield ','
        need_separator = True
        yield json.dumps(
            {"header": {'identifier': identifier,
                        'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                        'status': 'active'}})
    yield fragments[1]
def list_records(self, h, body, params, verb='GET'):
    """Stream a harvest ``listrecords`` response as JSON text chunks.

    The empty base response is serialized and split on the first '[' (the
    empty ``listrecords`` array); full records are streamed into the gap.
    With flow control on, a resumption token is spliced into the tail when
    the page fills.

    Yields: str fragments that concatenate to one JSON document.
    """
    data = self.get_base_response(verb, body)
    if params.has_key('from'):
        data['request']['from'] = params['from']
    if params.has_key('until'):
        data['request']['until'] = params['until']
    from_date, until_date = self._test_time_params(params)
    data['listrecords'] = []
    self._getServiceDocment(False)
    resumption_token = None
    count = 0
    lastID = None   # doc id of the last record emitted (next page start)
    lastKey = None  # view key of the last record emitted
    if self.enable_flow_control and params.has_key('resumption_token'):
        resumption_token = rt.parse_token(self.service_id, params['resumption_token'])
    base_response = json.dumps(data).split('[')
    yield base_response[0] + '['

    def debug_map(doc):
        # Wrap a raw document in the harvest 'record' envelope.
        data = {'record': {"header": {'identifier': doc['_id'],
                                      'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                                      'status': 'active'},
                           'resource_data': doc}}
        return data

    if from_date > until_date:
        # NOTE(review): these flags are set after the opening half of the
        # JSON was already yielded, so they never reach the serialized
        # output — confirm intent.
        data['OK'] = False
        data['error'] = 'badArgument'
    else:
        first = True
        for data in h.list_records(from_date, until_date, resumption_token=resumption_token, limit=self.limit):
            lastID = data['id']
            lastKey = data['key']
            doc = data['doc']
            count += 1
            if not first:
                yield ',\n'
            first = False
            yield json.dumps(debug_map(doc))
    if self.enable_flow_control and self.limit <= count:
        # Page filled: splice a resumption_token field just before the
        # closing brace of the serialized tail.
        token = rt.get_token(serviceid=self.service_id, startkey=lastKey,
                             endkey=helpers.convertToISO8601Zformat(until_date),
                             startkey_docid=lastID,
                             from_date=helpers.convertToISO8601Zformat(from_date),
                             until_date=helpers.convertToISO8601Zformat(until_date))
        resp = base_response[1]
        yield resp[:-1] + (',"resumption_token":"%s"' % token) + resp[-1:]
    else:
        yield base_response[1]
def getrecord():
    """Resolve a harvest "getrecord" request; aborts with HTTP 500 when
    no matching record exists.

    Looks up by document id or by resource locator (mutually exclusive)
    and wraps each match in a header/resource_data envelope.
    """
    data = self.get_base_response(verb, body)
    by_doc_ID = self._check_bool_param(params, 'by_doc_ID')
    by_resource_ID = self._check_bool_param(params, 'by_resource_ID')

    def _bad_argument():
        data['OK'] = False
        data['error'] = 'badArgument'
        return json.dumps(data)

    if not params.has_key(self.REQUESTID):
        return _bad_argument()
    if by_doc_ID and by_resource_ID:
        # The two lookup modes cannot be combined.
        return _bad_argument()
    request_id = params[self.REQUESTID]

    def _envelope(doc):
        return {"header": {'identifier': doc['_id'],
                           'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                           'status': 'active'},
                'resource_data': doc}

    if by_doc_ID:
        document = h.get_record(request_id)
        records = [_envelope(document)] if document is not None else []
    else:
        records = [_envelope(doc) for doc in h.get_records_by_resource(request_id)]

    if not records:
        abort(500, 'idDoesNotExist')
    data['getrecord'] = {'record': records}
    data['request']['identifier'] = request_id
    data['request']['by_doc_ID'] = by_doc_ID
    data['request']['by_resource_ID'] = by_resource_ID
    return json.dumps(data)
def debug_map(doc):
    """Wrap a raw document dict in the harvest 'record' envelope."""
    header = {'identifier': doc['_id'],
              'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
              'status': 'active'}
    return {'record': {"header": header, 'resource_data': doc}}
def _handleOAIRequest(self, format='html'):
    """Dispatch an OAI-PMH request to its verb handler and return/stream XML.

    Reads flow-control settings from the node's OAI-PMH service document,
    defines one nested handler per OAI verb, and dispatches on the parsed
    ``verb`` parameter.  Requests with metadataPrefix ``LR_JSON_0.10.0``
    are short-circuited to the plain Harvest interface.
    """
    t_req = request._current_obj()
    t_res = response._current_obj()
    # Flow-control defaults; overridden from the service document below.
    enable_flow_control = False
    fc_id_limit = None   # page size for ListIdentifiers
    fc_doc_limit = None  # page size for ListRecords
    service_id = None
    serviceDoc = h.getServiceDocument(appConfig['lr.oaipmh.docid'])
    if serviceDoc != None:
        if 'service_id' in serviceDoc:
            service_id = serviceDoc['service_id']
        if 'service_data' in serviceDoc:
            serviceData = serviceDoc['service_data']
            if 'flow_control' in serviceData:
                enable_flow_control = serviceData['flow_control']
            # 100 is the fallback page size when flow control is enabled
            # but no explicit limit is configured.
            if enable_flow_control and 'id_limit' in serviceData:
                fc_id_limit = serviceData['id_limit']
            elif enable_flow_control:
                fc_id_limit = 100
            if enable_flow_control and 'doc_limit' in serviceData:
                fc_doc_limit = serviceData['doc_limit']
            elif enable_flow_control:
                fc_doc_limit = 100
    o = oaipmh()

    def GetRecord(params):
        # Generator: streams the GetRecord XML response piecewise.
        try:
            from lr.mustache.oaipmh import GetRecord as must_GetRecord
            identifier = params["identifier"]
            if params["by_doc_ID"] == True:
                resolver = OAIPMHDocumentResolver()
                single_doc = o.get_record(params["identifier"])
                if single_doc is not None:
                    docList = [resolver.process({"doc": single_doc})]
                else:
                    docList = []
            else:
                docList = o.get_records_by_resource(params["identifier"])
            doc_idx = 0     # documents seen
            valid_docs = 0  # documents matching the requested metadataPrefix
            mustache = must_GetRecord()
            for doc in docList:
                if doc is not None:
                    doc_idx += 1
                    if "payload_schema" in doc and params["metadataPrefix"] in map(lambda x: o_mod.getMetadataPrefix(x), doc["payload_schema"]) and OAIPMHDocumentResolver.PAYLOAD_ERROR not in doc:
                        valid_docs += 1
                        if valid_docs == 1:
                            # Emit the envelope prefix before the first record.
                            part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                            yield h.fixUtf8(self._returnResponse(part, res=t_res))
                        part = mustache.doc(doc)
                        yield h.fixUtf8(self._returnResponse(part, res=t_res))
            if doc_idx == 0:
                raise IdDoesNotExistError(params['verb'], req=t_req)
            elif valid_docs == 0:
                raise CannotDisseminateFormatError(params['verb'], req=t_req)
            else:
                yield h.fixUtf8(self._returnResponse(mustache.suffix(), res=t_res))
        except oaipmherrors.Error as e:
            from lr.mustache.oaipmh import Error as err_stache
            err = err_stache()
            yield h.fixUtf8(self._returnResponse(err.xml(e), res=t_res))

    def ListGeneric(params, showDocs=False, record_limit=None):
        # Shared generator behind ListIdentifiers/ListRecords; when
        # record_limit is reached a resumptionToken element is emitted
        # instead of further records.
        if not showDocs:
            from lr.mustache.oaipmh import ListIdentifiers as must_ListID
            mustache = must_ListID()
        else:
            from lr.mustache.oaipmh import ListRecords as must_ListRec
            mustache = must_ListRec()
        try:
            doc_index = 0  # records seen (including errored ones)
            err_count = 0  # records skipped due to payload errors
            metadataPrefix = params["metadataPrefix"]
            from_date = params["from"]
            until_date = params["until"]
            doc_err = None
            rendered_init = False  # whether the XML prefix was yielded yet
            resumptionToken = None if "resumptionToken" not in params else params['resumptionToken']
            records = o.list_identifiers_or_records(metadataPrefix,
                                                    from_date=from_date, until_date=until_date,
                                                    rt=resumptionToken, fc_limit=record_limit,
                                                    include_docs=showDocs)
            for ident in records:
                doc_index += 1
                doc_err = False
                if OAIPMHDocumentResolver.PAYLOAD_ERROR in ident:
                    err_count += 1
                    doc_err = True
                    log.debug("Payload Error detected, doc_index: {0}, err_count: {1}".format(doc_index, err_count))
                if doc_index - err_count == 1:
                    # First good record: emit the envelope prefix.
                    rendered_init = True
                    part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                    yield h.fixUtf8(self._returnResponse(part, res=t_res))
                if doc_err is False and (record_limit is None or doc_index <= record_limit):
                    part = mustache.doc(ident)
                    yield h.fixUtf8(part)
                elif enable_flow_control:
                    from lr.lib import resumption_token
                    if doc_index - err_count > 0 and doc_index > record_limit:
                        # Page boundary with at least one good record: hand
                        # out a token pointing at this record and stop.
                        opts = o.list_opts(metadataPrefix, h.convertToISO8601UTC(ident["node_timestamp"]), until_date)
                        opts["startkey_docid"] = ident["doc_ID"]
                        token = resumption_token.get_token(serviceid=service_id, from_date=from_date, until_date=until_date, **opts)
                        part = mustache.resumptionToken(token)
                        yield h.fixUtf8(part)
                        break
                    elif doc_index - err_count == 0 and doc_index > record_limit:
                        # An entire page of errored records: restart the
                        # query from this position instead of emitting a token.
                        opts = o.list_opts(metadataPrefix, h.convertToISO8601UTC(ident["node_timestamp"]), until_date)
                        opts["startkey_docid"] = ident["doc_ID"]
                        payload = resumption_token.get_payload(from_date=from_date, until_date=until_date, **opts)
                        records = o.list_identifiers_or_records(metadataPrefix,
                                                                from_date=from_date, until_date=until_date,
                                                                rt=payload, fc_limit=record_limit,
                                                                include_docs=showDocs)
                        doc_index = 0
                        err_count = 0
            if doc_index == 0 and err_count == 0:
                raise NoRecordsMatchError(params['verb'], req=t_req)
            elif (doc_index - err_count) == 0:
                raise CannotDisseminateFormatError(params['verb'], req=t_req)
            else:
                if enable_flow_control and doc_index <= record_limit:
                    # Result fit in one page: emit an empty resumptionToken
                    # to signal completion.
                    yield h.fixUtf8(mustache.resumptionToken())
                yield h.fixUtf8(mustache.suffix())
        except oaipmherrors.Error as e:
            if not rendered_init:
                from lr.mustache.oaipmh import Error as err_stache
                err = err_stache()
                yield h.fixUtf8(self._returnResponse(err.xml(e), res=t_res))
            else:
                # Prefix already sent: append the error plus closing tags.
                from lr.mustache.oaipmh import ErrorOnly as err_stache
                err = err_stache()
                yield h.fixUtf8(self._returnResponse(err.xml(e) + mustache.suffix(), res=t_res))
        except:
            log.exception("Unknown Error Occurred")

    def ListIdentifiers(params):
        return ListGeneric(params, False, fc_id_limit)

    def ListRecords(params):
        return ListGeneric(params, True, fc_doc_limit)

    # NOTE: a superseded stand-alone ListRecords implementation was kept
    # here as commented-out code; see version control history.

    def Identify(params=None):
        # Render the Identify verb via its Mako template.
        body = ""
        try:
            self._initRender(params, ctx=c, req=t_req)
            c.identify = o.identify()
            body = render("/oaipmh-Identify.mako")
        except Exception as e:
            raise BadVerbError()
        return self._returnResponse(body, res=t_res)

    def ListMetadataFormats(params):
        body = ""
        try:
            self._initRender(params, ctx=c, req=t_req)
            fmts = o.list_metadata_formats(identity=params["identifier"], by_doc_ID=params["by_doc_ID"])
            if len(fmts) == 0:
                raise NoMetadataFormats(params["verb"])
            c.formats = fmts
            body = render("/oaipmh-ListMetadataFormats.mako")
            return self._returnResponse(body, res=t_res)
        except Error as e:
            raise e

    def ListSets(params=None):
        # Sets are not supported by this node.
        raise NoSetHierarchyError(verb)

    def NotYetSupported(params=None):
        raise BadVerbError()

    switch = {
        'GetRecord': GetRecord,
        'ListRecords': ListRecords,
        'ListIdentifiers': ListIdentifiers,
        'Identify': Identify,
        'ListMetadataFormats': ListMetadataFormats,
        'ListSets': ListSets
    }
    try:
        params = self._parseParams(flow_control=enable_flow_control, serviceid=service_id)
        # Special case: the OAI interface is being used to serve a basic
        # harvest request in the native LR JSON format.
        if params.has_key("metadataPrefix") and params["metadataPrefix"] == "LR_JSON_0.10.0":
            if params.has_key("identifier") == True:
                params[self.REQUESTID] = params["identifier"]
            if params.has_key("from") and isinstance(params["from"], datetime):
                params["from"] = h.convertToISO8601Zformat(params["from"])
            if params.has_key("until") and isinstance(params["until"], datetime):
                params["until"] = h.convertToISO8601Zformat(params["until"])
            return HarvestController.harvest(self, params, request.body, params['verb'].lower())
        verb = params['verb']
        response.headers['Content-Type'] = "text/xml; charset=utf-8"
        return switch[verb](params)
    except Error as e:
        from lr.mustache.oaipmh import Error as err_stache
        err = err_stache()
        return self._returnResponse(err.xml(e), res=t_res)
def listGeneral(self, h, body, params, includeDocs, verb='GET'):
    """Stream a harvest list response (records or identifiers) as JSON chunks.

    ``includeDocs`` selects between ``listrecords`` (full documents) and
    ``listidentifiers`` (headers only).  The empty base response is
    serialized and split on its first '['; rows are streamed into the gap
    and, under flow control, a resumption token is spliced into the tail.

    Yields: str fragments that concatenate to one JSON document.
    """
    data = self.get_base_response(verb, body)
    try:
        from_date, until_date = self._test_time_params(params)
    except Exception as ex:
        log.error(ex)
        data['OK'] = False
        data['error'] = 'badArgument'
        yield json.dumps(data)
        return
    data['request']['from'] = from_date
    data['request']['until'] = until_date
    if from_date > until_date:
        data['OK'] = False
        data['error'] = 'badArgument'
        yield json.dumps(data)
    else:
        self._getServiceDocment(includeDocs)
        resumption_token = None
        count = 0
        lastID = None   # doc id of the last row emitted (next page start)
        lastKey = None  # view key of the last row emitted
        if self.enable_flow_control and params.has_key('resumption_token'):
            resumption_token = rt.parse_token(self.service_id, params['resumption_token'])
        if includeDocs:
            data['listrecords'] = []
            viewResults = h.list_records(from_date, until_date, resumption_token=resumption_token, limit=self.limit)
            # Full 'record' envelope; datestamp comes from the view key.
            debug_map = lambda doc: {'record': {"header": {'identifier': doc['id'], 'datestamp': doc['key'] + "Z", 'status': 'active'}, 'resource_data': doc['doc']}}
        else:
            data['listidentifiers'] = []
            viewResults = h.list_identifiers(from_date, until_date, resumption_token=resumption_token, limit=self.limit)
            # Header-only envelope for identifier listings.
            debug_map = lambda doc: {"header": {'identifier': doc['id'], 'datestamp': doc['key'] + "Z", 'status': 'active'}}
        base_response = json.dumps(data).split('[')
        yield base_response[0] + '['
        first = True
        for data in viewResults:
            lastID = data['id']
            lastKey = data['key']
            count += 1
            if not first:
                yield ',\n'
            first = False
            yield json.dumps(debug_map(data))
        if self.enable_flow_control and self.limit <= count:
            # Page filled: splice a real resumption token into the tail.
            token = rt.get_token(serviceid=self.service_id, startkey=lastKey,
                                 endkey=helpers.convertToISO8601Zformat(until_date),
                                 startkey_docid=lastID,
                                 from_date=helpers.convertToISO8601Zformat(from_date),
                                 until_date=helpers.convertToISO8601Zformat(until_date))
            resp = base_response[1]
            yield resp[:-1] + (',"resumption_token":"%s"' % token) + resp[-1:]
        elif self.limit > count:
            # Short page: emit a literal "null" token to signal completion.
            resp = base_response[1]
            yield resp[:-1] + (',"resumption_token":"%s"' % 'null') + resp[-1:]
        else:
            yield base_response[1]
def _handleOAIRequest(self, format='html'):
    """Dispatch an OAI-PMH request to its verb handler and return/stream XML.

    Reads flow-control settings from the node's OAI-PMH service document,
    defines one nested handler per OAI verb, and dispatches on the parsed
    ``verb`` parameter.  Requests with metadataPrefix ``LR_JSON_0.10.0``
    are short-circuited to the plain Harvest interface.
    """
    t_req = request._current_obj()
    t_res = response._current_obj()
    # Flow-control defaults; overridden from the service document below.
    enable_flow_control = False
    fc_id_limit = None   # page size for ListIdentifiers
    fc_doc_limit = None  # page size for ListRecords
    service_id = None
    serviceDoc = h.getServiceDocument(appConfig['lr.oaipmh.docid'])
    if serviceDoc != None:
        if 'service_id' in serviceDoc:
            service_id = serviceDoc['service_id']
        if 'service_data' in serviceDoc:
            serviceData = serviceDoc['service_data']
            if 'flow_control' in serviceData:
                enable_flow_control = serviceData['flow_control']
            # 100 is the fallback page size when flow control is enabled
            # but no explicit limit is configured.
            if enable_flow_control and 'id_limit' in serviceData:
                fc_id_limit = serviceData['id_limit']
            elif enable_flow_control:
                fc_id_limit = 100
            if enable_flow_control and 'doc_limit' in serviceData:
                fc_doc_limit = serviceData['doc_limit']
            elif enable_flow_control:
                fc_doc_limit = 100
    o = oaipmh()

    def GetRecord(params):
        # Generator: streams the GetRecord XML response piecewise.
        try:
            from lr.mustache.oaipmh import GetRecord as must_GetRecord
            identifier = params["identifier"]
            if params["by_doc_ID"] == True:
                resolver = OAIPMHDocumentResolver()
                single_doc = o.get_record(params["identifier"])
                if single_doc is not None:
                    docList = [resolver.process({"doc": single_doc})]
                else:
                    docList = []
            else:
                docList = o.get_records_by_resource(params["identifier"])
            doc_idx = 0     # documents seen
            valid_docs = 0  # documents matching the requested metadataPrefix
            mustache = must_GetRecord()
            for doc in docList:
                if doc is not None:
                    doc_idx += 1
                    if "payload_schema" in doc and params["metadataPrefix"] in map(lambda x: o_mod.getMetadataPrefix(x), doc["payload_schema"]) and OAIPMHDocumentResolver.PAYLOAD_ERROR not in doc:
                        valid_docs += 1
                        if valid_docs == 1:
                            # Emit the envelope prefix before the first record.
                            part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                            yield h.fixUtf8(self._returnResponse(part, res=t_res))
                        part = mustache.doc(doc)
                        yield h.fixUtf8(self._returnResponse(part, res=t_res))
            if doc_idx == 0:
                raise IdDoesNotExistError(params['verb'], req=t_req)
            elif valid_docs == 0:
                raise CannotDisseminateFormatError(params['verb'], req=t_req)
            else:
                yield h.fixUtf8(self._returnResponse(mustache.suffix(), res=t_res))
        except oaipmherrors.Error as e:
            from lr.mustache.oaipmh import Error as err_stache
            err = err_stache()
            yield h.fixUtf8(self._returnResponse(err.xml(e), res=t_res))

    def ListGeneric(params, showDocs=False, record_limit=None):
        # Shared generator behind ListIdentifiers/ListRecords; when
        # record_limit is reached a resumptionToken element is emitted
        # instead of further records.
        if not showDocs:
            from lr.mustache.oaipmh import ListIdentifiers as must_ListID
            mustache = must_ListID()
        else:
            from lr.mustache.oaipmh import ListRecords as must_ListRec
            mustache = must_ListRec()
        try:
            doc_index = 0  # records seen (including errored ones)
            err_count = 0  # records skipped due to payload errors
            metadataPrefix = params["metadataPrefix"]
            from_date = params["from"]
            until_date = params["until"]
            doc_err = None
            rendered_init = False  # whether the XML prefix was yielded yet
            resumptionToken = None if "resumptionToken" not in params else params['resumptionToken']
            records = o.list_identifiers_or_records(metadataPrefix,
                                                    from_date=from_date, until_date=until_date,
                                                    rt=resumptionToken, fc_limit=record_limit,
                                                    include_docs=showDocs)
            for ident in records:
                doc_index += 1
                doc_err = False
                if OAIPMHDocumentResolver.PAYLOAD_ERROR in ident:
                    err_count += 1
                    doc_err = True
                    log.debug("Payload Error detected, doc_index: {0}, err_count: {1}".format(doc_index, err_count))
                if doc_index - err_count == 1:
                    # First good record: emit the envelope prefix.
                    rendered_init = True
                    part = mustache.prefix(**self._initMustache(args=params, req=t_req))
                    yield h.fixUtf8(self._returnResponse(part, res=t_res))
                if doc_err is False and (record_limit is None or doc_index <= record_limit):
                    part = mustache.doc(ident)
                    yield h.fixUtf8(part)
                elif enable_flow_control:
                    from lr.lib import resumption_token
                    if doc_index - err_count > 0 and doc_index > record_limit:
                        # Page boundary with at least one good record: hand
                        # out a token pointing at this record and stop.
                        opts = o.list_opts(metadataPrefix, h.convertToISO8601UTC(ident["node_timestamp"]), until_date)
                        opts["startkey_docid"] = ident["doc_ID"]
                        token = resumption_token.get_token(serviceid=service_id, from_date=from_date, until_date=until_date, **opts)
                        part = mustache.resumptionToken(token)
                        yield h.fixUtf8(part)
                        break
                    elif doc_index - err_count == 0 and doc_index > record_limit:
                        # An entire page of errored records: restart the
                        # query from this position instead of emitting a token.
                        opts = o.list_opts(metadataPrefix, h.convertToISO8601UTC(ident["node_timestamp"]), until_date)
                        opts["startkey_docid"] = ident["doc_ID"]
                        payload = resumption_token.get_payload(from_date=from_date, until_date=until_date, **opts)
                        records = o.list_identifiers_or_records(metadataPrefix,
                                                                from_date=from_date, until_date=until_date,
                                                                rt=payload, fc_limit=record_limit,
                                                                include_docs=showDocs)
                        doc_index = 0
                        err_count = 0
            if doc_index == 0 and err_count == 0:
                raise NoRecordsMatchError(params['verb'], req=t_req)
            elif (doc_index - err_count) == 0:
                raise CannotDisseminateFormatError(params['verb'], req=t_req)
            else:
                if enable_flow_control and doc_index <= record_limit:
                    # Result fit in one page: emit an empty resumptionToken
                    # to signal completion.
                    yield h.fixUtf8(mustache.resumptionToken())
                yield h.fixUtf8(mustache.suffix())
        except oaipmherrors.Error as e:
            if not rendered_init:
                from lr.mustache.oaipmh import Error as err_stache
                err = err_stache()
                yield h.fixUtf8(self._returnResponse(err.xml(e), res=t_res))
            else:
                # Prefix already sent: append the error plus closing tags.
                from lr.mustache.oaipmh import ErrorOnly as err_stache
                err = err_stache()
                yield h.fixUtf8(self._returnResponse(err.xml(e) + mustache.suffix(), res=t_res))
        except:
            log.exception("Unknown Error Occurred")

    def ListIdentifiers(params):
        return ListGeneric(params, False, fc_id_limit)

    def ListRecords(params):
        return ListGeneric(params, True, fc_doc_limit)

    # NOTE: a superseded stand-alone ListRecords implementation was kept
    # here as commented-out code; see version control history.

    def Identify(params=None):
        # Render the Identify verb via its Mako template.
        body = ""
        try:
            self._initRender(params, ctx=c, req=t_req)
            c.identify = o.identify()
            body = render("/oaipmh-Identify.mako")
        except Exception as e:
            raise BadVerbError()
        return self._returnResponse(body, res=t_res)

    def ListMetadataFormats(params):
        body = ""
        try:
            self._initRender(params, ctx=c, req=t_req)
            fmts = o.list_metadata_formats(identity=params["identifier"], by_doc_ID=params["by_doc_ID"])
            if len(fmts) == 0:
                raise NoMetadataFormats(params["verb"])
            c.formats = fmts
            body = render("/oaipmh-ListMetadataFormats.mako")
            return self._returnResponse(body, res=t_res)
        except Error as e:
            raise e

    def ListSets(params=None):
        # Sets are not supported by this node.
        raise NoSetHierarchyError(verb)

    def NotYetSupported(params=None):
        raise BadVerbError()

    switch = {
        'GetRecord': GetRecord,
        'ListRecords': ListRecords,
        'ListIdentifiers': ListIdentifiers,
        'Identify': Identify,
        'ListMetadataFormats': ListMetadataFormats,
        'ListSets': ListSets
    }
    try:
        params = self._parseParams(flow_control=enable_flow_control, serviceid=service_id)
        # Special case: the OAI interface is being used to serve a basic
        # harvest request in the native LR JSON format.
        if params.has_key("metadataPrefix") and params["metadataPrefix"] == "LR_JSON_0.10.0":
            if params.has_key("identifier") == True:
                params[self.REQUESTID] = params["identifier"]
            if params.has_key("from") and isinstance(params["from"], datetime):
                params["from"] = h.convertToISO8601Zformat(params["from"])
            if params.has_key("until") and isinstance(params["until"], datetime):
                params["until"] = h.convertToISO8601Zformat(params["until"])
            return HarvestController.harvest(self, params, request.body, params['verb'].lower())
        verb = params['verb']
        response.headers['Content-Type'] = "text/xml; charset=utf-8"
        return switch[verb](params)
    except Error as e:
        from lr.mustache.oaipmh import Error as err_stache
        err = err_stache()
        return self._returnResponse(err.xml(e), res=t_res)
def getrecord():
    """Resolve a harvest "getrecord" request into a JSON response string.

    Reads ``by_doc_ID``/``by_resource_ID`` flags (parsed from their string
    form with ast.literal_eval) and ``request_id`` from the surrounding
    request params; the two lookup modes are mutually exclusive.
    """
    data = self.get_base_response(verb, body)
    by_doc_ID = params.has_key('by_doc_ID') and ast.literal_eval(str(params['by_doc_ID']))
    by_resource_ID = params.has_key('by_resource_ID') and ast.literal_eval(str(params['by_resource_ID']))
    if not params.has_key('request_id'):
        data['OK'] = False
        data['error'] = 'badArgument'
        return json.dumps(data)
    if by_doc_ID and by_resource_ID:
        data['OK'] = False
        data['error'] = 'badArgument'
        return json.dumps(data)
    request_id = params['request_id']

    def _envelope(doc):
        # presumably doc supports attribute access (couchdb Document) —
        # TODO confirm against h.get_record's return type.
        return {'record': {"header": {'identifier': doc.id,
                                      'datestamp': helpers.convertToISO8601Zformat(datetime.today()),
                                      'status': 'active'}},
                'resource_data': doc}

    if by_doc_ID:
        document = h.get_record(request_id)
        # Fix: h.get_record may return None (the sibling implementations
        # guard this); mapping over [None] crashed on doc.id.
        source_docs = [document] if document is not None else []
    else:
        source_docs = h.get_records_by_resource(request_id)
    records = map(_envelope, source_docs)
    data['getrecord'] = {'record': records}
    return json.dumps(data)
def listGeneral(self, h, body, params, includeDocs, verb='GET'):
    """Stream a harvest list response (records or identifiers) as JSON chunks.

    ``includeDocs`` selects between ``listrecords`` (full documents) and
    ``listidentifiers`` (headers only).  The empty base response is
    serialized and split on its first '['; rows are streamed into the gap
    and, under flow control, a resumption token is spliced into the tail.

    Yields: str fragments that concatenate to one JSON document.
    """
    data = self.get_base_response(verb, body)
    try:
        from_date, until_date = self._test_time_params(params)
    except:
        # Any parse failure is reported to the client as badArgument.
        data['OK'] = False
        data['error'] = 'badArgument'
        yield json.dumps(data)
        return
    data['request']['from'] = from_date
    data['request']['until'] = until_date
    if from_date > until_date:
        data['OK'] = False
        data['error'] = 'badArgument'
        yield json.dumps(data)
    else:
        self._getServiceDocment(includeDocs)
        resumption_token = None
        count = 0
        lastID = None   # doc id of the last row emitted (next page start)
        lastKey = None  # view key of the last row emitted
        if self.enable_flow_control and params.has_key('resumption_token'):
            resumption_token = rt.parse_token(self.service_id,
                                              params['resumption_token'])
        if includeDocs:
            data['listrecords'] = []
            viewResults = h.list_records(from_date,
                                         until_date,
                                         resumption_token=resumption_token,
                                         limit=self.limit)
            # Full 'record' envelope; datestamp comes from the view key.
            debug_map = lambda doc: {
                'record': {
                    "header": {
                        'identifier': doc['id'],
                        'datestamp': doc['key'] + "Z",
                        'status': 'active'
                    },
                    'resource_data': doc['doc']
                }
            }
        else:
            data['listidentifiers'] = []
            viewResults = h.list_identifiers(
                from_date,
                until_date,
                resumption_token=resumption_token,
                limit=self.limit)
            # Header-only envelope for identifier listings.
            debug_map = lambda doc: {
                "header": {
                    'identifier': doc['id'],
                    'datestamp': doc['key'] + "Z",
                    'status': 'active'
                }
            }
        base_response = json.dumps(data).split('[')
        yield base_response[0] + '['
        first = True
        for data in viewResults:
            lastID = data['id']
            lastKey = data['key']
            count += 1
            if not first:
                yield ',\n'
            first = False
            yield json.dumps(debug_map(data))
        if self.enable_flow_control and self.limit <= count:
            # Page filled: splice a real resumption token into the tail.
            token = rt.get_token(
                serviceid=self.service_id,
                startkey=lastKey,
                endkey=helpers.convertToISO8601Zformat(until_date),
                startkey_docid=lastID,
                from_date=helpers.convertToISO8601Zformat(from_date),
                until_date=helpers.convertToISO8601Zformat(until_date))
            resp = base_response[1]
            yield resp[:-1] + (',"resumption_token":"%s"' % token) + resp[-1:]
        elif self.limit > count:
            # Short page: emit a literal "null" token to signal completion.
            resp = base_response[1]
            yield resp[:-1] + (',"resumption_token":"%s"' % 'null') + resp[-1:]
        else:
            yield base_response[1]