def setUp(self):
    """Build a DNA with three FilterMessages branches feeding differently configured observers."""
    SeecrTestCase.setUp(self)

    def traceObserver(name, value):
        # A CallTrace that answers 'function', 'gen' and 'noop', and exposes
        # 'message' as an empty generator method.
        return CallTrace(name, emptyGeneratorMethods=['message'], returnValues={
            'function': value,
            'gen': (i for i in [value]),
            'noop': None,
        })

    self.observer1 = traceObserver('observer1', 41)
    self.observer2 = object()  # deliberately responds to no messages at all
    self.observer3 = traceObserver('observer3', 42)

    interceptedMessages = ['message', 'function', 'gen', 'noop']
    self.dna = be(
        (Observable(),
            # observer1 only receives messages OUTSIDE the intercepted set.
            (FilterMessages(disallowed=interceptedMessages),
                (self.observer1,),
            ),
            # observer2 and observer3 only receive the intercepted set.
            (FilterMessages(allowed=interceptedMessages),
                (self.observer2,),
            ),
            (FilterMessages(allowed=interceptedMessages),
                (self.observer3,),
            ),
        )
    )
def uploadHelix(lucene, termNumerator, storageComponent, drilldownFields, fieldRegistry):
    """Build the upload side of the DNA: SRU record updates pass through a
    record transaction scope and Venturi, and are routed to the Lucene index
    and the storage component.

    NOTE(review): `drilldownFields` is accepted but never used in this helix —
    TODO confirm whether it can be dropped at the call sites.
    """
    # Shared tail: turns field events into a Lucene document for the index.
    indexHelix = (Fields2LuceneDoc('record', fieldRegistry=fieldRegistry),
        (termNumerator, ),
        (lucene, )
    )
    return \
    (SruRecordUpdate(),
        (TransactionScope('record'),
            (Venturi(should=[{'partname': 'record', 'xpath': '.'}], namespaces={'doc': 'http://meresco.org/namespace/example'}),
                # Deletes go to both the index and the storage.
                (FilterMessages(allowed=['delete']),
                    (lucene,),
                    (storageComponent,)
                ),
                (FilterMessages(allowed=['add']),
                    (Xml2Fields(),
                        # Strip the first dotted prefix from every field name.
                        (RenameField(lambda name: name.split('.', 1)[-1]),
                            # Everything except 'fieldHier' fields is indexed as-is.
                            (FilterField(lambda name: 'fieldHier' not in name),
                                indexHelix,
                            ),
                            # 'intfield1' is additionally indexed with a sorted prefix.
                            (FilterField(lambda name: name == 'intfield1'),
                                (RenameField(lambda name: SORTED_PREFIX + name),
                                    indexHelix,
                                )
                            ),
                            # 'field2' and 'field3' are additionally indexed untokenized.
                            (FilterField(lambda name: name in ['field2', 'field3']),
                                (RenameField(lambda name: UNTOKENIZED_PREFIX + name),
                                    indexHelix,
                                )
                            ),
                        )
                    ),
                    # Hierarchical fields take a separate route to the index.
                    (FieldHier(),
                        indexHelix,
                    )
                ),
                # The record itself is serialized and stored.
                (XmlPrintLxml(fromKwarg='lxmlNode', toKwarg='data'),
                    (storageComponent,)
                )
            )
        )
    )
def testWriteLogMustNotFail(self):
    """A failing writeLog must be reported on stderr, not propagated to the caller."""
    logwriter = CallTrace('logwriter', emptyGeneratorMethods=['someMessage'])
    # Make every writeLog call raise a ValueError.
    logwriter.exceptions['writeLog'] = ValueError
    top = be((Observable(),
        (LogCollector('default'),
            (logwriter, ),
            (FilterMessages(allowed=['someMessage']),
                (LogKeyValue(dict(name='A')),
                )
            )
        )
    ))
    with stderr_replaced() as err:
        try:
            consume(top.all.someMessage())
        except ValueError:
            self.fail("Should not raise an error; Only print it")
        # The swallowed exception must still appear on stderr.
        self.assertTrue('ValueError' in err.getvalue(), err.getvalue())
def testScope(self):
    """Log entries are grouped per LogCollectorScope; nested scopes become nested dicts."""
    logwriter = CallTrace('logwriter', emptyGeneratorMethods=['someMessage'])
    top = be(
        (Observable(),
            (LogCollector('default'),
                (logwriter, ),
                (FilterMessages(allowed=['someMessage']),
                    (LogKeyValue(dict(name='A')),
                        (LogKeyValue(dict(name='B')), ),
                        # First nested scope: its values must not leak into the default scope.
                        (LogCollectorScope('scope_one'),
                            (LogKeyValue(dict(name='C')), ),
                        ),
                        (LogKeyValue(dict(name='D')), ),
                        # Second nested scope, itself containing another scope.
                        (LogCollectorScope('scope_two'),
                            (LogKeyValue(dict(name='E')),
                                (LogCollectorScope('scope_two_one'),
                                    (LogKeyValue(dict(name='F')), ),
                                ),
                            ),
                            (LogKeyValue(dict(name='G')), ),
                        ),
                    )
                )
            )
        )
    )
    consume(top.all.someMessage())
    self.assertEqual(['someMessage', 'writeLog'], logwriter.calledMethodNames())
    # The collected log mirrors the scope tree above.
    self.assertEqual(
        {
            'name': ['A', 'B', 'D'],
            'scope_one': {
                'name': ['C'],
            },
            'scope_two': {
                'name': ['E', 'G'],
                'scope_two_one': {
                    'name': ['F']
                }
            }
        }, logwriter.calledMethods[-1].kwargs['collectedLog'])
def createDownloadHelix(reactor, periodicDownload, oaiDownload, dbStorageComponent):
    """Build the harvesting helix: periodically download OAI records and resolve
    'add' updates into the database storage component.

    NOTE(review): `reactor` is accepted but not referenced in this helix — TODO confirm.
    """
    return \
    (periodicDownload, # Scheduled connection to a remote (response / request)...
        (XmlParseLxml(fromKwarg="data", toKwarg="lxmlNode", parseOptions=dict(huge_tree=True, remove_blank_text=True)), # Convert from plain text to lxml-object.
            (oaiDownload, # Implementation/Protocol of a PeriodicDownload...
                (UpdateAdapterFromOaiDownloadProcessor(), # Turns an SRU update/delete message (lxmlNode) into the relevant message: 'delete' or 'add'.
                    # (FilterMessages(['delete']), # Filters delete messages
                    # # (LogComponent("Delete msg:"),),
                    # # Write a 'deleted' part to the storage, that holds the (Record)uploadId.
                    # # (WriteTombstone(),
                    # # (storageComponent,),
                    # # )
                    # ),
                    (FilterMessages(allowed=['add']),
                        # (LogComponent("AddToNBNRES"),),
                        (Resolver(ro=False, nsMap=NAMESPACEMAP),
                            (dbStorageComponent,),
                        ),
                        # (XmlXPath(['//document:document/document:part[@name="normdoc"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                        # (XmlParseLxml(fromKwarg='data', toKwarg='lxmlNode'),
                        # (LogComponent("NORMDOC"),), #TODO: get urn:nbn and location from document.
                        # # (RewritePartname(NL_DIDL_NORMALISED_PREFIX), # Renames partname from 'record' to "metadata".
                        # # (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                        # # (storageComponent,) # Writes oai:metadata (= original) to storage.
                        # # )
                        # # )
                        # )
                        # )
                    )
                )
            )
        )
    )
def createDownloadHelix(reactor, periodicDownload, oaiDownload, storageComponent, oaiJazz):
    """Build the harvesting helix: periodically download OAI records; deletes are
    removed from storage and the OAI repository (leaving a tombstone), adds are
    split into their parts, normalised/combined, stored, and registered for OAI-PMH.

    NOTE(review): `reactor` is accepted but not referenced in this helix — TODO confirm.
    """
    return \
    (periodicDownload, # Scheduled connection to a remote (response / request)...
        (XmlParseLxml(fromKwarg="data", toKwarg="lxmlNode", parseOptions=dict(huge_tree=True, remove_blank_text=True)), # Convert from plain text to lxml-object.
            (oaiDownload, # Implementation/Protocol of a PeriodicDownload...
                (UpdateAdapterFromOaiDownloadProcessor(), # Turns an SRU update/delete message (lxmlNode) into the relevant message: 'delete' or 'add'.
                    (FilterMessages(['delete']), # Filters delete messages
                        # (LogComponent("Delete Update"),),
                        (storageComponent,), # Delete from storage
                        (oaiJazz,), # Delete from OAI-pmh repo
                        # Write a 'deleted' part to the storage, that holds the (Record)uploadId.
                        (WriteTombstone(),
                            (storageComponent,),
                        )
                    ),
                    (FilterMessages(allowed=['add']),
                        # (LogComponent("ADD"),),
                        (XmlXPath(['//document:document/document:part[@name="normdoc"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            # (LogComponent("NORMDOC"),),
                            (XmlParseLxml(fromKwarg='data', toKwarg='lxmlNode'),
                                (RewritePartname(NL_DIDL_NORMALISED_PREFIX), # Renames partname from 'record' to "metadata".
                                    (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                        (storageComponent,) # Writes oai:metadata (= original) to storage.
                                    )
                                )
                            )
                        ),
                        (XmlXPath(['//document:document/document:part[@name="record"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            (XmlParseLxml(fromKwarg='data', toKwarg='lxmlNode'),
                                # TODO: Check: if conversions fail, the meta and header parts should not go to storage either: preferably no part at all...
                                # Write 'header' partname to storage:
                                (XmlXPath(['/oai:record/oai:header'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP),
                                    (RewritePartname("header"),
                                        (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                            (storageComponent,) # Writes OAI header to storage.
                                        )
                                    )
                                ),
                                # Write 'metadata' partname to storage:
                                # On gharvester21 this goes wrong: there the root element of the 'metadata' part is <metadata> instead of <DIDL>.
                                # A child::node() would be preferable here, but that syntax fails in combination with the XmlXPath component??
                                (XmlXPath(['/oai:record/oai:metadata/didl:DIDL'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP),
                                    # (LogComponent("METADATA_PART"),),
                                    (RewritePartname("metadata"),
                                        (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                            (storageComponent,) # Writes metadata to storage.
                                        )
                                    )
                                )
                            )
                        ),
                        (XmlXPath(['//document:document/document:part[@name="record"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            (XmlParseLxml(fromKwarg='data', toKwarg='lxmlNode'),
                                (NlDidlCombined(nsMap=NAMESPACEMAP, fromKwarg='lxmlNode'), # Create combined format from stored metadataPart and normalized part.
                                    (XmlPrintLxml(fromKwarg='lxmlNode', toKwarg='data'), # Convert it to plaintext
                                        (RewritePartname(NL_DIDL_COMBINED_PREFIX), # Rename combined partName
                                            (storageComponent,) # Write combined partName to storage
                                        )
                                    )
                                )
                            )
                        ),
                        (XmlXPath(['//document:document/document:part[@name="meta"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            (RewritePartname("meta"),
                                (storageComponent,) # Writes harvester 'meta' data to storage.
                            )
                        ),
                        (OaiAddRecord(metadataPrefixes=[('metadata', 'http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-21_schema_files/did/didmodel.xsd', 'urn:mpeg:mpeg21:2002:02-DIDL-NS'),
                            (NL_DIDL_NORMALISED_PREFIX, '', NAMESPACEMAP.gmhnorm),
                            (NL_DIDL_COMBINED_PREFIX, '', NAMESPACEMAP.gmhcombined)]), #[(partname, schema, namespace)]
                            # (LogComponent("OaiAddRecord:"),),
                            (storageComponent,),
                            (oaiJazz,) # Assert partNames header and meta are available from storage!
                        ),
                        (ResurrectTombstone(),
                            (storageComponent,),
                        ),
                    ),
                    # (FilterMessages(allowed=['add']),
                    # # (LogComponent("UnDelete"),),
                    # (ResurrectTombstone(),
                    # (storageComponent,),
                    # )
                    # )
                )
            )
        )
    )
def main(reactor, port, statePath, **ignored):
    """Assemble the gateway server DNA: an OAI-PMH endpoint under /oaix
    (localhost only) and an SRU update endpoint under /update that normalises
    incoming records and registers them with OAI.

    :param reactor: event reactor passed to the HTTP server and OaiInfo.
    :param port: TCP port for the ObservableHttpServer.
    :param statePath: directory holding 'oai' and 'store' state.
    """
    oaiSuspendRegister = SuspendRegister()
    oaiJazz = be((OaiJazz(join(statePath, 'oai')),
        (oaiSuspendRegister, )
    ))
    # WST:
    # strategie = HashDistributeStrategy() # filename (=partname) is also hashed: difficult to read by human eye...
    strategie = Md5HashDistributeStrategy()
    storeComponent = StorageComponent(
        join(statePath, 'store'),
        strategy=strategie,
        partsRemovedOnDelete=[NORMALISED_DOC_NAME])
    return \
    (Observable(),
        # (scheduledCommitPeriodicCall,),
        # (DebugPrompt(reactor=reactor, port=port+1, globals=locals()),),
        (ObservableHttpServer(reactor=reactor, port=port),
            (BasicHttpHandler(),
                # OAI-PMH endpoint is restricted to localhost.
                (IpFilter(allowedIps=['127.0.0.1']),
                    (PathFilter('/oaix', excluding=['/oaix/info']),
                        (OaiPmh(repositoryName='Gateway',
                            adminEmail='*****@*****.**',
                            supportXWait=True,
                            batchSize=2000 # Override default batch size of 200.
                            ),
                            (oaiJazz,),
                            (oaiSuspendRegister,),
                            (StorageAdapter(),
                                (storeComponent,),
                            ),
                        )
                    ),
                    (PathFilter('/oaix/info'),
                        (OaiInfo(reactor=reactor, oaiPath='/oai'),
                            (oaiJazz,),
                        )
                    ),
                ),
                (PathFilter('/update'),
                    (SruRecordUpdate(sendRecordData=False, logErrors=True,),
                        (FilterMessages(allowed=['delete']),
                            (storeComponent,),
                            (oaiJazz,),
                        ),
                        (FilterMessages(allowed=['add']),
                            # Does not work? See comments in component...
                            # (AddMetadataFormat(fromKwarg="lxmlNode", name='md_format'),
                            # (LogComponent("AddMetadataFormat"),),
                            # ),
                            (XmlXPath(['srw:recordData/*'], fromKwarg='lxmlNode'), # Sends EVERY matching node on in a new message.
                                # (LogComponent("TO LONG CONVERTER:"),),
                                (AddMetadataNamespace(dateformat="%Y-%m-%dT%H:%M:%SZ", fromKwarg='lxmlNode'), # Adds metadataNamespace to meta part in the message.
                                    (NormaliseOaiRecord(fromKwarg='lxmlNode'), # Normalises record to: long & original parts. Raises ValidationException if no 'known' metadataformat
                                        (XmlPrintLxml(fromKwarg='lxmlNode', toKwarg='data', pretty_print=False),
                                            (RewritePartname(NORMALISED_DOC_NAME), # Rename converted part.
                                                (storeComponent,), # Store converted/renamed part.
                                            )
                                        )
                                    ),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[NORMALISED_DOC_NAME]),
                                        (oaiJazz,),
                                    )
                                )
                            )
                        )
                    )
                )
            )
        )
    )
def main(reactor, port, statePath, lucenePort, gatewayPort, quickCommit=False, **ignored):
    """Assemble the API server DNA: harvest records from the gateway, index them
    in Lucene, store all derived parts, and serve OAI-PMH (/oai), OpenAIRE CERIF
    (/cerif), SRU (/sru) and RSS (/rss) endpoints.

    :param reactor: event reactor for HTTP server and downloads.
    :param port: public HTTP port.
    :param statePath: directory holding 'store', 'oai' and 'oai_cerif' state.
    :param lucenePort: port of the Lucene backend.
    :param gatewayPort: port of the gateway to harvest from.
    :param quickCommit: shorten the download schedule (used by integration tests).
    """
    ######## START Lucene Integration ###############################################################
    defaultLuceneSettings = LuceneSettings(
        commitTimeout=30,
        readonly=True,
    )
    http11Request = be((
        HttpRequest1_1(),
        (SocketPool(reactor=reactor, unusedTimeout=5, limits=dict(totalSize=100, destinationSize=10)), ),
    ))
    luceneIndex = luceneAndReaderConfig(
        defaultLuceneSettings.clone(readonly=True), http11Request, lucenePort)
    # Read-only query helix: converts query expressions and sends them to MultiLucene.
    luceneRoHelix = be(
        (AdapterToLuceneQuery(defaultCore=DEFAULT_CORE, coreConverters={
            DEFAULT_CORE: QueryExpressionToLuceneQueryDict(
                UNQUALIFIED_TERM_FIELDS, luceneSettings=luceneIndex.settings),
        }),
            (
                MultiLucene(host='localhost', port=lucenePort, defaultCore=DEFAULT_CORE),
                (luceneIndex, ),
                (http11Request, ),
            )))
    ######## END Lucene Integration ###############################################################
    fieldnameRewrites = {
        # UNTOKENIZED_PREFIX+'genre': UNTOKENIZED_PREFIX+'dc:genre',
    }

    def fieldnameRewrite(name):
        # Maps a fieldname through the (currently empty) rewrite table.
        return fieldnameRewrites.get(name, name)

    def drilldownFieldnamesTranslate(fieldname):
        # Prefer the untokenized variant of a drilldown fieldname, when known.
        untokenizedName = untokenizedFieldname(fieldname)
        if untokenizedName in untokenizedFieldnames:
            fieldname = untokenizedName
        return fieldnameRewrite(fieldname)

    convertToComposedQuery = ConvertToComposedQuery(
        resultsFrom=DEFAULT_CORE, matches=[], drilldownFieldnamesTranslate=drilldownFieldnamesTranslate)
    strategie = Md5HashDistributeStrategy()
    storage = StorageComponent(join(statePath, 'store'), strategy=strategie, partsRemovedOnDelete=[
        HEADER_PARTNAME, META_PARTNAME, METADATA_PARTNAME, OAI_DC_PARTNAME,
        LONG_PARTNAME, SHORT_PARTNAME, OPENAIRE_PARTNAME
    ])
    oaiJazz = OaiJazz(join(statePath, 'oai'))
    oaiJazz.updateMetadataFormat(
        OAI_DC_PARTNAME, "http://www.openarchives.org/OAI/2.0/oai_dc.xsd", "http://purl.org/dc/elements/1.1/")
    # Separate OAI repository for the OpenAIRE CERIF endpoint.
    oai_oa_cerifJazz = OaiJazz(join(statePath, 'oai_cerif'))
    oai_oa_cerifJazz.updateMetadataFormat(
        OPENAIRE_PARTNAME,
        "https://www.openaire.eu/schema/cris/current/openaire-cerif-profile.xsd",
        "https://www.openaire.eu/cerif-profile/1.1/")
    # All of the following OAI-PMH sets shall be recognized by the CRIS, even if not all of them are populated.
    oai_oa_cerifJazz.updateSet("openaire_cris_projects", "OpenAIRE_CRIS_projects")
    oai_oa_cerifJazz.updateSet("openaire_cris_orgunits", "OpenAIRE_CRIS_orgunits")
    oai_oa_cerifJazz.updateSet("openaire_cris_persons", "OpenAIRE_CRIS_persons")
    oai_oa_cerifJazz.updateSet("openaire_cris_patents", "OpenAIRE_CRIS_patents")
    oai_oa_cerifJazz.updateSet("openaire_cris_products", "OpenAIRE_CRIS_products")
    oai_oa_cerifJazz.updateSet("openaire_cris_publications", "OpenAIRE_CRIS_publications")
    oai_oa_cerifJazz.updateSet("openaire_cris_funding", "OpenAIRE_CRIS_funding")
    oai_oa_cerifJazz.updateSet("openaire_cris_events", "OpenAIRE_CRIS_events")
    oai_oa_cerifJazz.updateSet("openaire_cris_equipments", "OpenAIRE_CRIS_equipments")
    cqlClauseConverters = [
        RenameFieldForExact(
            untokenizedFields=untokenizedFieldnames,
            untokenizedPrefix=UNTOKENIZED_PREFIX,
        ).filterAndModifier(),
        SearchTermFilterAndModifier(
            shouldModifyFieldValue=lambda *args: True,
            fieldnameModifier=fieldnameRewrite).filterAndModifier(),
    ]
    periodicGateWayDownload = PeriodicDownload(
        reactor,
        host='localhost',
        port=gatewayPort,
        schedule=Schedule(
            period=1 if quickCommit else 10
        ), # WST: Interval in seconds before sending a new request to the GATEWAY in case of an error while processing batch records.(default=1). IntegrationTests need 1 second! Otherwise tests will fail!
        name='api',
        autoStart=True)
    oaiDownload = OaiDownloadProcessor(path='/oaix',
        metadataPrefix=NORMALISED_DOC_NAME,
        workingDirectory=join(statePath, 'harvesterstate', 'gateway'),
        userAgentAddition='ApiServer',
        xWait=True,
        name='api',
        autoCommit=False)
    # Reusable query branch shared by the SRU and RSS endpoints.
    executeQueryHelix = \
        (FilterMessages(allowed=['executeQuery']),
            (CqlMultiSearchClauseConversion(cqlClauseConverters, fromKwarg='query'),
                (DrilldownQueries(),
                    (convertToComposedQuery,
                        (luceneRoHelix,),
                    )
                )
            )
        )
    return \
    (Observable(),
        createDownloadHelix(reactor, periodicGateWayDownload, oaiDownload, storage, oaiJazz, oai_oa_cerifJazz),
        (ObservableHttpServer(reactor, port, compressResponse=True),
            (BasicHttpHandler(),
                (PathFilter(["/oai"]),
                    (OaiPmh(repositoryName="NARCIS OAI-pmh", adminEmail="*****@*****.**", externalUrl="http://oai.narcis.nl"),
                        (oaiJazz,),
                        (StorageAdapter(),
                            (storage,)
                        ),
                        (OaiBranding(
                            url="http://www.narcis.nl/images/logos/logo-knaw-house.gif",
                            link="http://oai.narcis.nl",
                            title="Narcis - The gateway to scholarly information in The Netherlands"),
                        ),
                        (OaiProvenance(
                            nsMap=NAMESPACEMAP,
                            baseURL=('meta', '//meta:repository/meta:baseurl/text()'),
                            harvestDate=('meta', '//meta:record/meta:harvestdate/text()'),
                            metadataNamespace=('meta', '//meta:record/meta:metadataNamespace/text()'),
                            identifier=('header','//oai:identifier/text()'),
                            datestamp=('header', '//oai:datestamp/text()')
                        ),
                            (storage,)
                        )
                    )
                ),
                (PathFilter(["/cerif"]),
                    (OaiPmhDans(repositoryName="OpenAIRE CERIF", adminEmail="*****@*****.**", repositoryIdentifier="services.nod.dans.knaw.nl", externalUrl="http://services.nod.dans.knaw.nl"), #TODO: pathFilter should resemble proxy path
                        (oai_oa_cerifJazz,),
                        (StorageAdapter(),
                            (storage,)
                        ),
                        (OaiOpenAIREDescription(
                            serviceid='organisation:ORG1242054',
                            acronym='services.nod.dans.knaw.nl',
                            name='NARCIS',
                            description='Compliant with the OpenAIRE Guidelines for CRIS Managers v.1.1.',
                            website='https://www.narcis.nl',
                            baseurl='http://services.nod.dans.knaw.nl/oa-cerif',
                            subjectheading='',
                            orgunitid='organisation:ORG1242054',
                            owneracronym='DANS'),
                        ),
                        # (OaiBranding(
                        # url="http://www.narcis.nl/images/logos/logo-knaw-house.gif",
                        # link="http://oai.narcis.nl",
                        # title="Narcis - The gateway to scholarly information in The Netherlands"),
                        # ),
                        (OaiProvenance(
                            nsMap=NAMESPACEMAP,
                            baseURL=('meta', '//meta:repository/meta:baseurl/text()'),
                            harvestDate=('meta', '//meta:record/meta:harvestdate/text()'),
                            metadataNamespace=('meta', '//meta:record/meta:metadataNamespace/text()'),
                            identifier=('header','//oai:identifier/text()'),
                            datestamp=('header', '//oai:datestamp/text()')
                        ),
                            (storage,)
                        )
                    )
                ),
                (PathFilter(['/sru']),
                    (SruParser(
                        host='sru.narcis.nl',
                        port=80,
                        defaultRecordSchema='knaw_short',
                        defaultRecordPacking='xml'),
                        (SruLimitStartRecord(limitBeyond=4000),
                            (SruHandler(
                                includeQueryTimes=False,
                                extraXParameters=[],
                                enableCollectLog=False),
                                (SruTermDrilldown(),),
                                executeQueryHelix,
                                (StorageAdapter(),
                                    (storage,)
                                )
                            )
                        )
                    )
                ),
                (PathFilter('/rss'),
                    (Rss(
                        supportedLanguages = ['nl','en'], # defaults to first, if requested language is not available or supplied.
                        title = {'nl':'NARCIS', 'en':'NARCIS'},
                        description = {'nl':'NARCIS: De toegang tot de Nederlandse wetenschapsinformatie', 'en':'NARCIS: The gateway to Dutch scientific information'},
                        link = {'nl':'http://www.narcis.nl/?Language=nl', 'en':'http://www.narcis.nl/?Language=en'},
                        maximumRecords = 20),
                        executeQueryHelix,
                        (RssItem(
                            nsMap=NAMESPACEMAP,
                            title = ('knaw_short', {'nl':'//short:metadata/short:titleInfo[not (@xml:lang)]/short:title/text()', 'en':'//short:metadata/short:titleInfo[@xml:lang="en"]/short:title/text()'}),
                            description = ('knaw_short', {'nl':'//short:abstract[not (@xml:lang)]/text()', 'en':'//short:abstract[@xml:lang="en"]/text()'}),
                            pubdate = ('knaw_short', '//short:dateIssued/short:parsed/text()'),
                            linkTemplate = 'http://www.narcis.nl/%(wcpcollection)s/RecordID/%(oai_identifier)s/Language/%(language)s',
                            wcpcollection = ('meta', '//*[local-name() = "collection"]/text()'),
                            oai_identifier = ('meta', '//meta:record/meta:id/text()'),
                            language = ('Dummy: Language is auto provided by the calling RSS component, but needs to be present to serve the linkTemplate.')
                        ),
                            (StorageAdapter(),
                                (storage,)
                            )
                        )
                    )
                )
            )
        )
    )
def createDownloadHelix(reactor, periodicDownload, oaiDownload, storageComponent, oaiJazz, oai_oa_cerifJazz):
    """Build the harvesting helix for the API server: deletes are removed from
    storage and both OAI repositories (leaving a tombstone); adds are converted
    per collection to CERIF via XSLT, stored as original/long/short/oai_dc parts,
    and registered in the regular and CERIF OAI repositories.

    NOTE(review): `reactor` is accepted but not referenced in this helix — TODO confirm.
    NOTE(review): the set-spec XmlXPath components below pass `namespaceMap=` while
    the part-extraction ones pass `namespaces=` — verify both kwargs are supported.
    """
    return \
    (periodicDownload, # Scheduled connection to a remote (response / request)...
        (XmlParseLxml(fromKwarg="data", toKwarg="lxmlNode", parseOptions=dict(huge_tree=True, remove_blank_text=True)), # Convert from plain text to lxml-object.
            (oaiDownload, # Implementation/Protocol of a PeriodicDownload...
                (UpdateAdapterFromOaiDownloadProcessor(), # Turns an SRU update/delete message (lxmlNode) into the relevant message: 'delete' or 'add'.
                    (FilterMessages(['delete']), # Filters delete messages
                        # (LogComponent("Delete Update"),),
                        (storageComponent,), # Delete from storage
                        (oaiJazz,), # Delete from OAI-pmh repo
                        (oai_oa_cerifJazz,),
                        # Write a 'deleted' part to the storage, that holds the (Record)uploadId.
                        (WriteTombstone(),
                            (storageComponent,),
                        )
                    ),
                    (FilterMessages(allowed=['add']),
                        (XmlXPath(['/oai:record/oai:metadata/document:document/document:part[@name="record"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            (XmlParseLxml(fromKwarg='data', toKwarg='lxmlNode'),
                                (FilterWcpCollection(allowed=['research']),
                                    (XmlXPath(['/oai:record/oai:metadata/norm:md_original/child::*'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Original 'metadata' format
                                        (XsltCrosswalk([join(dirname(abspath(__file__)), '..', '..', 'xslt', 'cerif-project.xsl')], fromKwarg="lxmlNode"),
                                            (RewritePartname(OPENAIRE_PARTNAME),
                                                (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                                    (storageComponent,)
                                                )
                                            )
                                        )
                                    )
                                ),
                                (FilterWcpCollection(allowed=['person']),
                                    (XmlXPath(['/oai:record/oai:metadata/norm:md_original/child::*'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Original 'metadata' format
                                        (XsltCrosswalk([join(dirname(abspath(__file__)), '..', '..', 'xslt', 'cerif-person.xsl')], fromKwarg="lxmlNode"),
                                            (RewritePartname(OPENAIRE_PARTNAME),
                                                (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                                    (storageComponent,)
                                                )
                                            )
                                        )
                                    )
                                ),
                                (FilterWcpCollection(allowed=['organisation']),
                                    (XmlXPath(['/oai:record/oai:metadata/norm:md_original/child::*'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Original 'metadata' format
                                        (XsltCrosswalk([join(dirname(abspath(__file__)), '..', '..', 'xslt', 'cerif-orgunit.xsl')], fromKwarg="lxmlNode"),
                                            (RewritePartname(OPENAIRE_PARTNAME),
                                                (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                                    (storageComponent,)
                                                )
                                            )
                                        )
                                    )
                                ),
                                (FilterWcpCollection(allowed=['dataset']), # START CERIF CONVERSION FOR DATASET COLLECTION: cerif-dataset / cerif-software.
                                    (XmlXPath(['/oai:record/oai:metadata/norm:normalized/long:knaw_long'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Normalised 'long' format.
                                        (XsltCrosswalk([join(dirname(abspath(__file__)), '..', '..', 'xslt', 'cerif-product.xsl')], fromKwarg="lxmlNode"),
                                            (RewritePartname(OPENAIRE_PARTNAME),
                                                (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                                    (storageComponent,)
                                                )
                                            )
                                        )
                                    )
                                ),
                                (FilterWcpCollection(allowed=['publication']), # START CERIF CONVERSION FOR PUBLICATIONS COLLECTION
                                    (XmlXPath(['/oai:record/oai:metadata/norm:normalized/long:knaw_long'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Normalised 'long' format.
                                        (FilterKnawLongGenre(allowed=['patent']), # START PATENTS CONVERSION
                                            (XsltCrosswalk([join(dirname(abspath(__file__)), '..', '..', 'xslt', 'cerif-patent.xsl')], fromKwarg="lxmlNode"),
                                                (RewritePartname(OPENAIRE_PARTNAME),
                                                    (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                                        (storageComponent,)
                                                    )
                                                )
                                            )
                                        ),
                                        (FilterKnawLongGenre(disallowed=['patent']), # START Publication CONVERSION
                                            (XsltCrosswalk([join(dirname(abspath(__file__)), '..', '..', 'xslt', 'cerif-publication.xsl')], fromKwarg="lxmlNode"),
                                                (RewritePartname(OPENAIRE_PARTNAME),
                                                    (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                                        (storageComponent,)
                                                    )
                                                )
                                            )
                                        )
                                    )
                                ),
                                (XmlXPath(['/oai:record/oai:metadata/norm:md_original/child::*'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Original 'metadata' format
                                    (RewritePartname("metadata"), # Renames partname from 'record' to "metadata".
                                        (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                            (storageComponent,) # Writes oai:metadata (= original) to storage.
                                        )
                                    )
                                ),
                                (XmlXPath(['/oai:record/oai:metadata/norm:normalized/long:knaw_long'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Normalised 'long' format.
                                    (RewritePartname("knaw_long"), # Renames partname from 'record' to "knaw_long".
                                        (FilterWcpCollection(disallowed=['person', 'research', 'organisation']),
                                            (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                                (storageComponent,), # Writes 'long' (= norm:normdoc) to storage.
                                            )
                                        ),
                                        (ShortConverter(fromKwarg='lxmlNode'), # Create the 'knaw_short' subset format.
                                            (RewritePartname("knaw_short"),
                                                (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                                    (storageComponent,) # Writes 'short' to storage.
                                                )
                                            )
                                        ),
                                        (FilterWcpCollection(disallowed=['person', 'research', 'organisation']),
                                            (DcConverter(fromKwarg='lxmlNode'), # Rename partname from 'record' to "oai_dc".
                                                (RewritePartname("oai_dc"),
                                                    (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                                        (storageComponent,) # Writes 'oai_dc' to storage.
                                                    )
                                                )
                                            )
                                        )
                                    )
                                ),
                                # TODO: Check: if conversions fail, the meta and header parts should not go to storage either: preferably no part at all...
                                # Write 'header' partname to storage:
                                (XmlXPath(['/oai:record/oai:header'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP),
                                    (RewritePartname("header"),
                                        (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                            (storageComponent,) # Writes OAI header to storage.
                                        )
                                    )
                                ),
                                (FilterWcpCollection(allowed=['publication']),
                                    # (LogComponent("PUBLICATION"),),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['publication']),
                                        (oaiJazz,),
                                    ),
                                    (XmlXPath(["//long:knaw_long[long:accessRights ='openAccess']"], fromKwarg='lxmlNode', namespaceMap=NAMESPACEMAP),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['oa_publication', 'openaire']),
                                            (oaiJazz,),
                                        )
                                    ),
                                    (XmlXPath(["//long:knaw_long/long:metadata[long:genre ='doctoralthesis']"], fromKwarg='lxmlNode', namespaceMap=NAMESPACEMAP),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['thesis']),
                                            (oaiJazz,),
                                        )
                                    ),
                                    (XmlXPath(['//long:knaw_long/long:metadata/long:grantAgreements/long:grantAgreement[long:code[contains(.,"greement/EC/") or contains(.,"greement/ec/")]][1]'], fromKwarg='lxmlNode',
                                        namespaceMap=NAMESPACEMAP),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['ec_fundedresources', 'openaire']),
                                            (oaiJazz,),
                                        )
                                    ),
                                    (XmlXPath(["//long:knaw_long/long:metadata[long:genre ='patent']"], fromKwarg='lxmlNode', namespaceMap=NAMESPACEMAP),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[OPENAIRE_PARTNAME], setSpecs=["openaire_cris_patents"]),
                                            (oai_oa_cerifJazz,),
                                        )
                                    ),
                                    (XmlXPath(["//long:knaw_long/long:metadata[long:genre !='patent']"], fromKwarg='lxmlNode', namespaceMap=NAMESPACEMAP),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[OPENAIRE_PARTNAME], setSpecs=["openaire_cris_publications"]),
                                            (oai_oa_cerifJazz,),
                                        )
                                    ),
                                ),
                                (FilterWcpCollection(allowed=['dataset']),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['dataset']),
                                        (oaiJazz,),
                                    ),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[OPENAIRE_PARTNAME], setSpecs=["openaire_cris_products"]),
                                        (oai_oa_cerifJazz,),
                                    )
                                ),
                                # Add NOD OpenAIRE Cerif to OpenAIRE-PMH repo.
                                (FilterWcpCollection(allowed=['research']),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[OPENAIRE_PARTNAME], setSpecs=["openaire_cris_projects"]),
                                        (oai_oa_cerifJazz,),
                                    )
                                ),
                                (FilterWcpCollection(allowed=['person']),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[OPENAIRE_PARTNAME], setSpecs=['openaire_cris_persons']),
                                        (oai_oa_cerifJazz,),
                                    )
                                ),
                                (FilterWcpCollection(allowed=['organisation']),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[OPENAIRE_PARTNAME], setSpecs=['openaire_cris_orgunits']),
                                        (oai_oa_cerifJazz,),
                                    )
                                )
                            )
                        ),
                        # Write 'meta' partname to storage:
                        (XmlXPath(['/oai:record/oai:metadata/document:document/document:part[@name="meta"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            (RewritePartname("meta"),
                                (storageComponent,) # Writes harvester 'meta' data to storage.
                            )
                        )
                    ),
                    (FilterMessages(allowed=['add']),
                        # (LogComponent("UnDelete"),),
                        (ResurrectTombstone(),
                            (storageComponent,),
                        )
                    )
                )
            )
        )
    )
def createUploadHelix(storageComponent, oaiJazz, loggerComponent):
    """Build the upload helix: Venturi splits incoming documents into header,
    meta and metadata parts; deletes are removed from storage and OAI, adds are
    validated, normalised (DIDL + MODS), combined, stored and added to OAI.

    :param storageComponent: receives all stored parts.
    :param oaiJazz: OAI repository for add/delete administration.
    :param loggerComponent: receives normalisation log events.
    """
    return \
    (TransactionScope('batch'),
        (TransactionScope('record'),
            (Venturi(
                should=[ # Order DOES matter: First part goes first!
                    {'partname':'header', 'xpath':'/document:document/document:part[@name="header"]/text()', 'asString':False},
                    {'partname':'meta', 'xpath':'/document:document/document:part[@name="meta"]/text()', 'asString':False},
                    {'partname':'metadata', 'xpath':'/document:document/document:part[@name="metadata"]/text()', 'asString':False}
                ],
                namespaceMap=namespacesMap),
                # Remove all delete msgs from storage and OAI:
                (FilterMessages(allowed=['delete']),
                    #(DNADebug(enabled=False, prefix='DELETE'),
                    (storageComponent,),
                    (oaiJazz,)
                    #)
                ),
                (FilterMessages(allowed=['add']),
                    ## Write harvestdate (=now()) to meta part (OAI provenance)
                    (FilterPartByName(included=['meta']),
                        (AddHarvestDateToMetaPart(verbose=False),)
                    ),
                    # Store ALL (original)parts retrieved by Venturi (required ('should') and optional ('could') parts).
                    # Write all uploadParts to storage (header, meta & metadata)
                    (XmlPrintLxml(fromKwarg='lxmlNode', toKwarg='data'),
                        (storageComponent,)
                    ),
                    (FilterPartByName(included=['metadata']), # Normalize 'metadata' part:
                        #(DNADebug(enabled=False, prefix='add metadata'),
                        # Validate DIDL and MODS part against their xsd-schema:
                        (Validate([('DIDL container','//didl:DIDL', 'didl.xsd'), ('MODS metadata', '//mods:mods', 'mods-3-6.xsd')], nsMap=namespacesMap),
                            (Normalize_nl_DIDL(nsMap=namespacesMap), # Normalize DIDL in metadataPart
                                (loggerComponent,),
                                (Normalize_nl_MODS(nsMap=namespacesMap), # Normalize MODS in metadataPart.
                                    (loggerComponent,),
                                    (XmlPrintLxml(fromKwarg='lxmlNode', toKwarg='data'), # Convert it from etree.ElementTree to plaintext
                                        (RewritePartname(NL_DIDL_NORMALISED_PREFIX), # Rename normalized partName from 'metadata' to 'nl_didl_norm'
                                            #(DNADebug(enabled=False, prefix='to storage'),
                                            (storageComponent,) # Write normalized partName to storage
                                            #)
                                        )
                                    ),
                                    # Create and store Combined format:
                                    (NL_DIDL_combined(nsMap=namespacesMap), # Create combined format from stored metadataPart and normalized part.
                                        (XmlPrintLxml(fromKwarg='lxmlNode', toKwarg='data'), # Convert it to plaintext
                                            (RewritePartname(NL_DIDL_COMBINED_PREFIX), # Rename combined partName
                                                (storageComponent,) # Write combined partName to storage
                                            )
                                        )
                                    ),
                                    # Add parts to OAI repository/index
                                    #(DNADebug(enabled=False, prefix='ADD2OAI'),
                                    (OaiAddRecordWithDefaults(metadataFormats=[('metadata', 'http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-21_schema_files/did/didmodel.xsd', 'urn:mpeg:mpeg21:2002:02-DIDL-NS'),
                                        (NL_DIDL_NORMALISED_PREFIX, '', 'http://gh.kb-dans.nl/normalised/v0.9/'),
                                        (NL_DIDL_COMBINED_PREFIX, '', 'http://gh.kb-dans.nl/combined/v0.9/')]),
                                        (storageComponent,),
                                        (oaiJazz,) # Assert partNames header and meta are available from storage!
                                    ) #! OaiAddRecord
                                    #) #!Debug
                                )
                            )
                        )
                        #) #Debug
                    ) #!FilterPartNames(allowed=['metadata']
                ) # !FilterMessages(allowed=['add']
            ) # !venturi
        ) # !record
    ) # !batch
def createDownloadHelix(reactor, periodicDownload, oaiDownload, storageComponent, oaiJazz):
    """Build the NARCIS-portal harvesting helix: deletes are removed from storage
    and OAI (leaving a tombstone); adds are split into original/long/short/oai_dc
    parts, stored, and registered in the OAI repository with per-collection set specs.

    NOTE(review): `reactor` is accepted but not referenced in this helix — TODO confirm.
    """
    return \
    (periodicDownload, # Scheduled connection to a remote (response / request)...
        (XmlParseLxml(fromKwarg="data", toKwarg="lxmlNode", parseOptions=dict(huge_tree=True, remove_blank_text=True)), # Convert from plain text to lxml-object.
            (oaiDownload, # Implementation/Protocol of a PeriodicDownload...
                (UpdateAdapterFromOaiDownloadProcessor(), # Turns an SRU update/delete message (lxmlNode) into the relevant message: 'delete' or 'add'.
                    (FilterMessages(['delete']), # Filters delete messages
                        # (LogComponent("Delete Update"),),
                        (storageComponent,), # Delete from storage
                        (oaiJazz,), # Delete from OAI-pmh repo
                        # Write a 'deleted' part to the storage, that holds the (Record)uploadId.
                        (WriteTombstone(),
                            (storageComponent,),
                        )
                    ),
                    (FilterMessages(allowed=['add']),
                        # TODO: the toKwarg='data' below can be removed. Then the next line too :-)
                        (XmlXPath(['/oai:record/oai:metadata/document:document/document:part[@name="record"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            (XmlParseLxml(fromKwarg='data', toKwarg='lxmlNode'),
                                (XmlXPath(['/oai:record/oai:metadata/norm:md_original/child::*'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Original 'metadata' format
                                    (RewritePartname("metadata"), # Renames partname from 'record' to "metadata".
                                        (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                            (storageComponent,) # Writes oai:metadata (= original) to storage.
                                        )
                                    )
                                ),
                                (XmlXPath(['/oai:record/oai:metadata/norm:normalized/long:knaw_long'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP), # Normalised 'long' format.
                                    (RewritePartname("knaw_long"), # Renames partname from 'record' to "knaw_long".
                                        (FilterWcpCollection(disallowed=['person', 'research', "organisation"]),
                                            (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                                (storageComponent,), # Writes 'long' (= norm:normdoc) to storage.
                                            )
                                        ),
                                        (ShortConverter(fromKwarg='lxmlNode'), # Create the 'knaw_short' subset format.
                                            (RewritePartname("knaw_short"),
                                                (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                                    (storageComponent,) # Writes 'short' to storage.
                                                )
                                            )
                                        ),
                                        (FilterWcpCollection(disallowed=['person', 'research', "organisation"]),
                                            (DcConverter(fromKwarg='lxmlNode'), # Rename partname from 'record' to "oai_dc".
                                                (RewritePartname("oai_dc"),
                                                    (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=True),
                                                        (storageComponent,) # Writes 'oai_dc' to storage.
                                                    )
                                                )
                                            )
                                        )
                                    )
                                ),
                                # TODO: Check: if conversions fail, the meta and header parts should not go to storage either: preferably no part at all...
                                # Write 'header' partname to storage:
                                (XmlXPath(['/oai:record/oai:header'], fromKwarg='lxmlNode', namespaces=NAMESPACEMAP),
                                    (RewritePartname("header"),
                                        (XmlPrintLxml(fromKwarg="lxmlNode", toKwarg="data", pretty_print=False),
                                            (storageComponent,) # Writes OAI header to storage.
                                        )
                                    )
                                ),
                                (FilterWcpCollection(allowed=['publication']),
                                    # (LogComponent("PUBLICATION"),),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['publication'], name='NARCISPORTAL'), #TODO: Skip name='NARCISPORTAL'
                                        (oaiJazz,),
                                    ),
                                    (XmlXPath(["//long:knaw_long[long:accessRights ='openAccess']"], fromKwarg='lxmlNode', namespaceMap=NAMESPACEMAP),
                                        # (LogComponent("OPENACCESS"),),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['oa_publication', 'openaire'], name='NARCISPORTAL'),
                                            (oaiJazz,),
                                        )
                                    ),
                                    (XmlXPath(["//long:knaw_long/long:metadata[long:genre ='doctoralthesis']"], fromKwarg='lxmlNode', namespaceMap=NAMESPACEMAP),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['thesis'], name='NARCISPORTAL'),
                                            (oaiJazz,),
                                        )
                                    ),
                                    (XmlXPath(['//long:knaw_long/long:metadata/long:grantAgreements/long:grantAgreement[long:code[contains(.,"greement/EC/") or contains(.,"greement/ec/")]][1]'], fromKwarg='lxmlNode',
                                        namespaceMap=NAMESPACEMAP),
                                        (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['ec_fundedresources', 'openaire'], name='NARCISPORTAL'),
                                            (oaiJazz,),
                                        )
                                    )
                                ),
                                (FilterWcpCollection(allowed=['dataset']),
                                    # (LogComponent("DATASET"),),
                                    (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=["oai_dc"], setSpecs=['dataset'], name='NARCISPORTAL'),
                                        (oaiJazz,),
                                    )
                                )
                            )
                        ),
                        # Write 'meta' partname to storage:
                        (XmlXPath(['/oai:record/oai:metadata/document:document/document:part[@name="meta"]/text()'], fromKwarg='lxmlNode', toKwarg='data', namespaces=NAMESPACEMAP),
                            (RewritePartname("meta"),
                                (storageComponent,) # Writes harvester 'meta' data to storage.
                            )
                        )
                    ),
                    (FilterMessages(allowed=['add']), # TODO: Remove this line.
                        # (LogComponent("UnDelete"),),
                        (ResurrectTombstone(),
                            (storageComponent,),
                        )
                    )
                )
            )
        )
    )
def main(reactor, port, statePath, **ignored):
    """Build the gateway server DNA.

    Serves two HTTP endpoints on `port`:
      * /oaix   — OAI-PMH provider (localhost only) backed by `oaiJazz` + storage.
      * /update — SRU record update: validates incoming DIDL/MODS, normalises it
                  into a 'normdoc' part, stores it and registers it for OAI harvest.

    :param reactor: event-loop reactor driving all observable components.
    :param port: TCP port for the ObservableHttpServer.
    :param statePath: directory for persistent state ('oai' index, 'store', logs).
    :param ignored: unused extra configuration kwargs.
    :return: the top-level DNA tuple (to be fed to `be(...)` by the caller).
    """
    # Suspend/resume register enabling x-wait (long-poll) OAI harvesting.
    oaiSuspendRegister = SuspendRegister()
    oaiJazz = be((OaiJazz(join(statePath, 'oai'), alwaysDeleteInPrefixes=[NORMALISED_DOC_NAME]),
        (oaiSuspendRegister, )
    ))
    normLogger = Logger(join(statePath, 'normlogger'))
    # strategie = HashDistributeStrategy() # filename (=partname) is also hashed: difficult to read by human eye...
    strategie = Md5HashDistributeStrategy()
    storeComponent = StorageComponent(
        join(statePath, 'store'),
        strategy=strategie,
        partsRemovedOnDelete=[NORMALISED_DOC_NAME])
    return \
    (Observable(),
        # (scheduledCommitPeriodicCall,),
        # (DebugPrompt(reactor=reactor, port=port+1, globals=locals()),),
        (ObservableHttpServer(reactor=reactor, port=port),
            (BasicHttpHandler(),
                # OAI endpoint is restricted to local access only.
                (IpFilter(allowedIps=['127.0.0.1']),
                    (PathFilter('/oaix', excluding=['/oaix/info']),
                        (OaiPmh(repositoryName='Gateway',
                                adminEmail='*****@*****.**',
                                supportXWait=True,
                                batchSize=2000  # Override default batch size of 200.
                            ),
                            (oaiJazz,),
                            (oaiSuspendRegister,),
                            (StorageAdapter(),
                                (storeComponent,),
                            ),
                        )
                    ),
                    (PathFilter('/oaix/info'),
                        (OaiInfo(reactor=reactor, oaiPath='/oai'),
                            (oaiJazz,),
                        )
                    ),
                ),
                # SRU update endpoint: deletes go straight to store + oai index;
                # adds are validated, normalised and then stored/registered.
                (PathFilter('/update'),
                    (SruRecordUpdate(sendRecordData=False, logErrors=True,),
                        (FilterMessages(allowed=['delete']),
                            (storeComponent,),
                            (oaiJazz,),
                        ),
                        (FilterMessages(allowed=['add']),
                            # (LogComponent("LXML:"),),
                            (Validate([('DIDL container','//didl:DIDL', 'didl.xsd'), ('MODS metadata', '//mods:mods', 'mods-3-6.xsd')]),
                                # (LogComponent("VALIDATED:"),),
                                (AddMetadataDocumentPart(partName='normdoc', fromKwarg='lxmlNode'),
                                    (NormaliseDIDL(nsMap=namespacesMap, fromKwarg='lxmlNode'),  # Normalise DIDL in partname=normdoc metadata
                                        (normLogger,),
                                        (NormaliseMODS(nsMap=namespacesMap, fromKwarg='lxmlNode'),  # Normalise MODS in partname=normdoc metadata
                                            (normLogger,),
                                            (XmlPrintLxml(fromKwarg='lxmlNode', toKwarg='data'),
                                                (RewritePartname(NORMALISED_DOC_NAME),  # Rename converted part.
                                                    (storeComponent,),  # Store converted/renamed part.
                                                )
                                            ),
                                            (OaiAddDeleteRecordWithPrefixesAndSetSpecs(metadataPrefixes=[NORMALISED_DOC_NAME]),
                                                (oaiJazz,),
                                            )
                                        )
                                    )
                                )
                            )
                        )
                    )
                )
            )
        )
    )
def testEitherAllowedOrDisallowed(self):
    """FilterMessages must reject being given both 'allowed' and 'disallowed'."""
    self.assertRaises(
        ValueError,
        FilterMessages, allowed=['either'], disallowed=['or'])
def writerMain(writerReactor, statePath, luceneserverPort, gatewayPort, quickCommit=False):
    """Build the index-writer server DNA.

    Periodically harvests normalised documents from the gateway (OAI, x-wait),
    converts them to flat field lists and writes them as Lucene documents; a
    scheduled periodic call posts 'commit' to the Lucene server.

    :param writerReactor: event-loop reactor for all periodic/download components.
    :param statePath: directory for persistent harvester state.
    :param luceneserverPort: port of the Lucene server to write/commit to.
    :param gatewayPort: port of the gateway server to harvest from.
    :param quickCommit: if True use 1-second schedules (needed by integration tests).
    :return: the writer server DNA tuple (to be fed to `be(...)` by the caller).
    """
    http11Request = be(
        (HttpRequest1_1(),
            (SocketPool(reactor=writerReactor, unusedTimeout=5, limits=dict(totalSize=100, destinationSize=10)),),
        )
    )
    indexCommitTimeout = 30
    defaultLuceneSettings = LuceneSettings(
        commitTimeout=indexCommitTimeout,
        readonly=False,
    )
    luceneWriter = luceneAndReaderConfig(defaultLuceneSettings, http11Request, luceneserverPort)
    periodicDownload = PeriodicDownload(
        writerReactor,
        host='localhost',
        port=gatewayPort,
        # WST: Interval in seconds before sending a new request to the GATEWAY in case of an
        # error while processing batch records (default=1). IntegrationTests need 1 second!
        # Otherwise tests will fail!
        schedule=Schedule(period=1 if quickCommit else 10),
        name='index',
        autoStart=True)
    oaiDownload = OaiDownloadProcessor(
        path='/oaix',
        metadataPrefix=NORMALISED_DOC_NAME,
        workingDirectory=join(statePath, 'harvesterstate', 'gateway'),
        userAgentAddition='idx-server',
        xWait=True,
        name='index',
        autoCommit=False)
    # Post a commit to the Lucene server on a schedule:
    scheduledCommitPeriodicCall = be(
        # WST: Flushes data from memory to disk. IntegrationTests need 1 second!
        # Otherwise tests will fail! (API).
        (PeriodicCall(writerReactor, message='commit', name='Scheduled commit', schedule=Schedule(period=1 if quickCommit else 300), initialSchedule=Schedule(period=1)),
            (AllToDo(),  # broadcast message to all components, despite of what kind of message...
                # (periodicDownload,), # WST: periodicDownload does not do anything with a 'commit' message? So why send it to it???
                (LuceneCommit(host='localhost', port=luceneserverPort,),  # 'commit' message results in http post to /commit/ to Lucene server.
                    # (LogComponent("PERIODIC"),),
                    # Example: [PERIODIC] httprequest1_1(*(), **{'body': None, 'host': 'localhost', 'request': '/commit/', 'port': 52501, 'method': 'POST'})
                    (http11Request,),
                    # ),
                )
            )
        )
    )
    writerServer = \
    (Observable(),
        (scheduledCommitPeriodicCall,),  # Periodically sends a 'commit' to the Lucene server...
        # (DebugPrompt(reactor=writerReactor, port=readerPort-1, globals=locals()),),
        (periodicDownload,  # Periodically connects to the Gateway server...
            (XmlParseLxml(fromKwarg="data", toKwarg="lxmlNode", parseOptions=dict(huge_tree=True, remove_blank_text=True)),
                (oaiDownload,  # Harvests OAI records from the Gateway...
                    # Turns an SRU update/delete message (lxmlNode) into the relevant
                    # message: a 'delete' or an 'add' message.
                    (UpdateAdapterFromOaiDownloadProcessor(),
                        # (LogComponent("SRU harvest van GATEWAY"),),
                        # (Example 'add' log output elided; see VCS history for the full dump.)
                        (FilterMessages(allowed=['add']),
                            (XmlXPath(['/oai:record/oai:metadata/document:document'], fromKwarg='lxmlNode'),
                                # (LogComponent("NormdocToFieldsList"),),
                                (NormdocToFieldsList(),  # Flat list of field names and values...
                                    (RecordPidToAuthNid(),),
                                    # (LogComponent("NormdocToFieldsList"),),
                                    # (Example [DcToFieldsList]/[NormdocToFieldsList] log dumps elided;
                                    #  see VCS history.)
                                    # Produces the addDocument message and creates the facet/drilldown
                                    # fields, whose values are truncated to at most 256 chars.
                                    (FieldsListToLuceneDocument(
                                        fieldRegistry=luceneWriter.settings.fieldRegistry,  # drilldown-field definitions, among others
                                        untokenizedFieldnames=untokenizedFieldnames,  # untokenized fields
                                        indexFieldFactory=DcFields,  # Creates an "__all__" field, the named field and optionally "untokenized.<name>"...
                                        # rewriteIdentifier=(lambda idee: idee.split(':', 1)[-1]) # meresco:record:1' => 'record:1'
                                        ),
                                        # (LogComponent("FieldsListToLuceneDocument"),),
                                        # (Example [LUCENE_WRITER] addDocument log dumps elided; see VCS history.)
                                        (luceneWriter,),
                                        # ),
                                    )
                                )
                            # )
                            # )
                            )
                        ),
                        (FilterMessages(allowed=['delete']),
                            (luceneWriter,),
                        )
                    )
                )
            )
        )
    )
    return writerServer
def main(reactor, port, statePath, lucenePort, **ignored):
    """Build the read-only search server DNA (SRU and RSS endpoints).

    Queries are converted (CQL clause conversion, drilldown, composed query) and
    executed against a read-only Lucene index; record data is read from storage.

    :param reactor: event-loop reactor driving all observable components.
    :param port: TCP port for the HTTP server.
    :param statePath: directory holding the record store.
    :param lucenePort: port of the (read-only) Lucene server.
    :param ignored: unused extra configuration kwargs.
    :return: the top-level DNA tuple (to be fed to `be(...)` by the caller).
    """
    ######## START Lucene Integration ###############################################################
    defaultLuceneSettings = LuceneSettings(
        commitTimeout=30,
        readonly=True,)
    http11Request = be(
        (HttpRequest1_1(),
            (SocketPool(reactor=reactor, unusedTimeout=5, limits=dict(totalSize=100, destinationSize=10)),),
        )
    )
    luceneIndex = luceneAndReaderConfig(defaultLuceneSettings.clone(readonly=True), http11Request, lucenePort)
    # Read-only query helix: adapts incoming queries to Lucene query dicts.
    luceneRoHelix = be(
        (AdapterToLuceneQuery(
            defaultCore=DEFAULT_CORE,
            coreConverters={
                DEFAULT_CORE: QueryExpressionToLuceneQueryDict(UNQUALIFIED_TERM_FIELDS, luceneSettings=luceneIndex.settings),
            }
        ),
            (MultiLucene(host='127.0.0.1', port=lucenePort, defaultCore=DEFAULT_CORE),
                (luceneIndex,),
                (http11Request,),
            )
        )
    )
    ######## END Lucene Integration ###############################################################
    fieldnameRewrites = {}

    def fieldnameRewrite(name):
        # Map a fieldname through the (currently empty) rewrite table; identity by default.
        return fieldnameRewrites.get(name, name)

    def drilldownFieldnamesTranslate(fieldname):
        # Prefer the untokenized variant of a fieldname when it is a known drilldown field.
        untokenizedName = untokenizedFieldname(fieldname)
        if untokenizedName in untokenizedFieldnames:
            fieldname = untokenizedName
        return fieldnameRewrite(fieldname)

    convertToComposedQuery = ConvertToComposedQuery(
        resultsFrom=DEFAULT_CORE,
        matches=[],
        drilldownFieldnamesTranslate=drilldownFieldnamesTranslate
    )
    strategie = Md5HashDistributeStrategy()
    # NOTE(review): original comment asked "Wat doet dit?" ("What does this do?") —
    # parts listed here are removed from storage when a record is deleted; verify the list.
    storage = StorageComponent(join(statePath, 'store'), strategy=strategie, partsRemovedOnDelete=[HEADER_PARTNAME, META_PARTNAME, METADATA_PARTNAME, OAI_DC_PARTNAME, LONG_PARTNAME, SHORT_PARTNAME])
    cqlClauseConverters = [
        RenameFieldForExact(
            untokenizedFields=untokenizedFieldnames,
            untokenizedPrefix=UNTOKENIZED_PREFIX,
        ).filterAndModifier(),
        SearchTermFilterAndModifier(
            shouldModifyFieldValue=lambda *args: True,
            fieldnameModifier=fieldnameRewrite
        ).filterAndModifier(),
    ]
    # Shared query-execution sub-tree, reused by both the SRU and RSS endpoints.
    executeQueryHelix = \
    (FilterMessages(allowed=['executeQuery']),
        (CqlMultiSearchClauseConversion(cqlClauseConverters, fromKwarg='query'),
            (DrilldownQueries(),
                (convertToComposedQuery,
                    (luceneRoHelix,),
                )
            )
        )
    )
    return \
    (Observable(),
        (ObservableHttpServer(reactor, port, compressResponse=True),
            (BasicHttpHandler(),
                (PathFilter(['/sru']),
                    (SruParser(
                        host='sru.narcis.nl',
                        port=80,
                        defaultRecordSchema='knaw_short',
                        defaultRecordPacking='xml'),
                        (SruLimitStartRecord(limitBeyond=4000),
                            # Example access-log output when enableCollectLog=True:
                            # 2017-03-24T12:00:33Z 127.0.0.1 3.5K 0.019s - /sru
                            # 2017-03-24T11:58:53Z 127.0.0.1 2.3K 0.004s 1hits /sru maximumRecords=10&operation=searchRetrieve&query=untokenized.dd_year+exact+%221993%22&recordPacking=xml&recordSchema=knaw_short&startRecord=1&version=1.2
                            (SruHandler(
                                includeQueryTimes=False,
                                extraXParameters=[],
                                enableCollectLog=False),
                                (SruTermDrilldown(),),
                                executeQueryHelix,
                                (StorageAdapter(),
                                    (storage,)
                                )
                            )
                        )
                    )
                ),
                (PathFilter('/rss'),
                    (Rss(
                        supportedLanguages = ['nl','en'],  # defaults to first, if requested language is not available or supplied.
                        title = {'nl':'NARCIS', 'en':'NARCIS'},
                        description = {'nl':'NARCIS: De toegang tot de Nederlandse wetenschapsinformatie', 'en':'NARCIS: The gateway to Dutch scientific information'},
                        link = {'nl':'http://www.narcis.nl/?Language=nl', 'en':'http://www.narcis.nl/?Language=en'},
                        maximumRecords = 20),
                        executeQueryHelix,
                        (RssItem(
                            nsMap=NAMESPACEMAP,
                            title = ('knaw_short', {'nl':'//short:metadata/short:titleInfo[not (@xml:lang)]/short:title/text()', 'en':'//short:metadata/short:titleInfo[@xml:lang="en"]/short:title/text()'}),
                            description = ('knaw_short', {'nl':'//short:abstract[not (@xml:lang)]/text()', 'en':'//short:abstract[@xml:lang="en"]/text()'}),
                            pubdate = ('knaw_short', '//short:dateIssued/short:parsed/text()'),
                            linkTemplate = 'http://www.narcis.nl/%(wcpcollection)s/RecordID/%(oai_identifier)s/Language/%(language)s',
                            wcpcollection = ('meta', '//*[local-name() = "collection"]/text()'),
                            oai_identifier = ('meta', '//meta:record/meta:id/text()'),
                            language = ('Dummy: Language is auto provided by the calling RSS component, but needs to be present to serve the linkTemplate.')
                        ),
                            (StorageAdapter(),
                                (storage,)
                            )
                        )
                    )
                )
            )
        )
    )