def testFieldsFromSummaryAnnotation(self):
    """The summarizing annotation is converted into the expected (name, value) fields list."""
    self._createAnnotationToFieldsList()
    consume(self.annotationTofieldslist.add(lxmlNode=XML(ANNOTATION_SUMMARIZING)))
    # Fixed 'self. observer' (stray space) and deprecated assertEquals
    # (alias removed in Python 3.12).
    fields = self.observer.calledMethods[0].kwargs['fieldslist']
    self.assertEqual([
            ('oa:annotatedBy.uri', "http://data.bibliotheek.nl/id/bnl"),
            ('oa:motivatedBy.uri', "http://data.bibliotheek.nl/ns/nbc/oa#summarizing"),
            ('oa:hasTarget.uri', "http://data.bibliotheek.nl/ggc/ppn/78240829X"),
            ('rdf:type.uri', "http://dbpedia.org/ontology/Book"),
            ('dcterms:type.uri', "http://dbpedia.org/ontology/Book"),
            ('dcterms:title', 'De Båèrkểnhuizen, Anno 1349'),
            ('dcterms:identifier.uri', 'http://data.bibliotheek.nl/ggc/ppn/78240829X'),
            ('dcterms:creator', 'Nieuwkerk Kramer, H G'),
            ('dcterms:creator.uri', 'http://data.bibliotheek.nl/ggc/ppn/987'),
            ('dcterms:creator.rdfs:label', 'Some Author'),
            ('dcterms:date', '1966'),
            ('dcterms:language.uri', 'urn:iso:std:iso:639:-2:dut'),
            ('dcterms:language.rdfs:label', 'Nederlands'),
            ('dcterms:extent', '15 p'),
            ('dcterms:isFormatOf.uri', "urn:a:work:123"),
            ('skos:note', 'BQM_14'),
            ('dcterms:spatial.uri', 'http://data.bibliotheek.nl/uitburo/location/8e71243e-abb0-407b-83a1-303db1f676e0'),
            ('dcterms:spatial.rdfs:label', 'Museum Boerhaave'),
            ('dcterms:spatial.geo:lat', '52.1613636'),
            ('dcterms:spatial.geo:long', '4.4891784'),
            ('dcterms:spatial.vcard:region', 'Leiden')
        ], fields)
# Example #2
# 0
 def testAddWithoutIdentifier(self):
     # Adding a document without an identifier still results in exactly one update POST.
     fieldRegistry = FieldRegistry()
     documentFields = [fieldRegistry.createField("id", "id1")]
     consume(self._lucene.addDocument(fields=documentFields))
     self.assertEqual(1, len(self.post))
     postedRequest = self.post[0]
     self.assertEqual('/lucene/update/?', postedRequest['path'])
     self.assertEqual('[{"type": "TextField", "name": "id", "value": "id1"}]', postedRequest['data'])
# Example #3
# 0
    def testAddTypeAndMissingValueToSortField(self):
        # Sort keys sent to the server are enriched with 'type' and 'missingValue'.
        self.response = JsonDict({"total": 887, "queryTime": 6, "hits": [{"id": "record:1", "score": 0.1234}]}).dumps()

        composedQuery = ComposedQuery("coreA")
        luceneQuery = QueryExpressionToLuceneQueryDict([], LuceneSettings()).convert(cqlToExpression("field=value"))
        composedQuery.setCoreQuery("coreB", luceneQuery)
        composedQuery.sortKeys = [dict(sortBy="sortField", core="coreA", sortDescending=True)]
        composedQuery.addMatch(dict(core="coreA", uniqueKey="A"), dict(core="coreB", key="B"))
        consume(self._multiLucene.executeComposedQuery(composedQuery))
        expected = {
            "_sortKeys": [{"core": "coreA", "sortBy": "sortField", "sortDescending": True, "type": "String", "missingValue": "STRING_FIRST"}],
            "resultsFrom": "coreA",
            "_matches": {"coreA->coreB": [{"core": "coreA", "uniqueKey": "A"}, {"core": "coreB", "key": "B"}]},
            "_facets": {},
            "_otherCoreFacetFilters": {},
            "_rankQueries": {},
            "_drilldownQueries": {},
            "_unites": [],
            "_queries": {"coreB": {"term": {"field": "field", "value": "value"}, "type": "TermQuery"}},
            "cores": ["coreB", "coreA"],
            "_filterQueries": {},
        }
        self.assertEqual(expected, loads(self.post[0]["data"]))
    def testCollectLog(self):
        """searchRetrieve collects timing, count and argument data into the log collector."""
        handler = SruHandler(enableCollectLog=True)
        observer = CallTrace('observer', emptyGeneratorMethods=['echoedExtraRequestData', 'extraResponseData'])
        __callstack_var_logCollector__ = dict()
        times = [1, 2.5, 3.5]
        def timeNow():
            # Deterministic clock: each call pops the next scripted timestamp.
            return times.pop(0)
        handler._timeNow = timeNow

        def executeQuery(**kwargs):
            response = Response(total=0, hits=[])
            response.queryTime = 5
            # 'return <value>' sets StopIteration.value in a generator; an explicit
            # 'raise StopIteration' becomes RuntimeError under PEP 479 (Python 3.7+).
            return response
            yield
        observer.methods['executeQuery'] = executeQuery
        handler.addObserver(observer)
        arguments = dict(startRecord=11, maximumRecords=15, query='query', recordPacking='string', recordSchema='schema')
        consume(handler.searchRetrieve(sruArguments=arguments, **arguments))

        # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
        self.assertEqual({
            'sru': {
                'handlingTime': [Decimal('2.500')],
                'queryTime': [Decimal('1.500')],
                'indexTime': [Decimal('0.005')],
                'numberOfRecords': [0],
                'arguments': [{
                    'startRecord': 11,
                    'query': 'query',
                    'recordPacking': 'string',
                    'maximumRecords': 15,
                    'recordSchema': 'schema',
                }],
            }
        }, __callstack_var_logCollector__)
 def testUpdatableConfig(self):
     # updateConfig makes values reachable through get() (with default) and item access.
     updatable = UpdatableConfig()
     consume(updatable.updateConfig(services={'dont':'care'}, config=CONFIG))
     self.assertEqual(8000, updatable.get('port'))
     self.assertEqual(9000, updatable.get('otherPort', 9000))
     self.assertEqual('localhost', updatable['hostname'])
     self.assertRaises(KeyError, lambda: updatable['doesnotexist'])
# Example #6
# 0
 def testLuceneReadonly(self):
     # A readonly Lucene connector refuses every mutating operation with RuntimeError.
     self.setUpLucene(readonly=True)
     self._lucene.observer_init()
     self.assertEqual([], self.post)
     for mutatingCall in (
             lambda: consume(self._lucene.setSettings()),
             lambda: consume(self._lucene.addDocument(fields=[])),
             lambda: consume(self._lucene.delete('identifier'))):
         self.assertRaises(RuntimeError, mutatingCall)
# Example #7
# 0
    def assertMaxAndSort(self, maximumRecords, sortKey, sortDirection, rssArgs, sruArgs):
        """Assert that an Rss request yields the given maximumRecords and sortKeys.

        rssArgs are passed to the Rss constructor, sruArgs become the request's
        query string; sortKey may be None to assert no sorting was requested.
        """
        rss = Rss(
            title = 'Test title',
            description = 'Test description',
            link = 'http://www.example.org',
            **rssArgs
        )
        recordIds = []
        def getRecord(identifier):
            recordIds.append(identifier)
            return '<item/>'

        def executeQuery(start, stop, *args, **kwargs):
            # 'return <value>' sets StopIteration.value in a generator; an explicit
            # 'raise StopIteration' becomes RuntimeError under PEP 479 (Python 3.7+).
            return Response(total=50, hits=[Hit(i) for i in range(start, stop)])
            yield
        observer = CallTrace(
            methods={
                'executeQuery': executeQuery,
                'getRecord': getRecord,
            },
            ignoredAttributes=['extraResponseData', 'echoedExtraRequestData'])
        rss.addObserver(observer)

        consume(rss.handleRequest(RequestURI='/?query=aQuery&' + urlencode(sruArgs)))

        # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
        method = observer.calledMethods[0]
        self.assertEqual('executeQuery', method.name)
        if sortKey is not None:
            self.assertEqual([{'sortBy': sortKey, 'sortDescending': sortDirection}], method.kwargs['sortKeys'])
        else:
            self.assertEqual(None, method.kwargs['sortKeys'])
        self.assertEqual(maximumRecords, len(recordIds))
    def testRewrite(self):
        # MessageRewrite forwards 'this_message' as 'to_message' for all four
        # invocation styles (all / any / call / do), kwargs unchanged.
        rewrite = MessageRewrite(fromMessage='this_message', toMessage='to_message')
        observer = CallTrace(emptyGeneratorMethods=['to_message'])
        tree = be((Observable(),
            (rewrite,
                (observer,),
            )
        ))

        def assertRewritten():
            self.assertEqual(['to_message'], observer.calledMethodNames())
            self.assertEqual(dict(aap='noot'), observer.calledMethods[0].kwargs)

        consume(tree.all.this_message(aap='noot'))
        assertRewritten()

        observer.calledMethods.reset()
        consume(tree.any.this_message(aap='noot'))
        assertRewritten()

        del observer.emptyGeneratorMethods[:]
        observer.calledMethods.reset()
        tree.call.this_message(aap='noot')
        assertRewritten()

        observer.calledMethods.reset()
        tree.do.this_message(aap='noot')
        assertRewritten()
# Example #9
# 0
    def testUpdateRecordWithDifferentFragments(self):
        uri = "uri:someuri"
        rdfDescription = """<rdf:Description rdf:about="%s" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
    <dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">title</dc:title>
</rdf:Description>""" % uri

        lxmlNode = parse(StringIO("""<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
        %s
</rdf:RDF>""" % rdfDescription))
        consume(self.dna.all.add(identifier="identifier", partname="ignored", lxmlNode=lxmlNode))

        record = self.oaiJazz.getRecord(uri)
        data = self.storage.getData(identifier=record.identifier, name='rdf')
        self.assertTrue('<dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">title</dc:title>' in data, data)

        # now add with new title
        rdfDescription = """<rdf:Description rdf:about="%s" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
    <dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">new title</dc:title>
</rdf:Description>""" % uri

        lxmlNode = parse(StringIO("""<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
        %s
</rdf:RDF>""" % rdfDescription))
        consume(self.dna.all.add(identifier="identifier", partname="ignored", lxmlNode=lxmlNode))

        record = self.oaiJazz.getRecord(uri)
        data = self.storage.getData(identifier=record.identifier, name='rdf')
        self.assertFalse('<dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">title</dc:title>' in data, data)
        self.assertTrue('<dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">new title</dc:title>' in data, data)
# Example #10
# 0
 def testListRecordsUsesFetchedRecords(self):
     """Both oaiRecord calls receive the records fetched by one getMultipleData batch."""
     self._addRecords(['id:0&0', 'id:1'])
     self.observer.methods['getMultipleData'] = lambda name, identifiers, ignoreMissing=False: [('id:0&0', 'data1'), ('id:1', 'data2'), ('id:2', 'data3')]
     consume(self.oaiList.listRecords(arguments={'verb':['ListRecords'], 'metadataPrefix': ['oai_dc']}, **self.httpkwargs))
     self.assertEqual(['getAllPrefixes', 'oaiSelect', 'oaiWatermark', 'getMultipleData', 'oaiRecord', 'oaiRecord'], self.observer.calledMethodNames())
     expectedRecords = {'id:0&0': 'data1', 'id:1': 'data2', 'id:2': 'data3'}
     # Check BOTH oaiRecord calls (indices 4 and 5); the original asserted index 4 twice.
     self.assertEqual(expectedRecords, self.observer.calledMethods[4].kwargs['fetchedRecords'])
     self.assertEqual(expectedRecords, self.observer.calledMethods[5].kwargs['fetchedRecords'])
# Example #11
# 0
 def testDeleteUnseenRecord(self):
     """Deleting a never-added identifier is silently ignored (it used to raise)."""
     try:
         consume(self.dna.all.delete(identifier="identifier"))
     except Exception as e:
         # 'except Exception' instead of a bare 'except:', which would also
         # swallow KeyboardInterrupt/SystemExit; include the error in the failure.
         self.fail("delete of unseen record should not raise, got: %s" % e)
# Example #12
# 0
    def testUpdateRecordThatOrphansUriCausesUriDelete(self):
        uri1 = "uri:someuri1"
        rdfDescription = """<rdf:Description rdf:about="%s" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
    <dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">title</dc:title>
</rdf:Description>""" % uri1

        lxmlNode = parse(StringIO("""<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
        %s
</rdf:RDF>""" % rdfDescription))
        consume(self.dna.all.add(identifier="identifier", partname="ignored", lxmlNode=lxmlNode))
        record1 = self.oaiJazz.getRecord(uri1)
        self.assertFalse(record1.isDeleted)

        # now add with different uri
        uri2 = "uri:someuri2"
        rdfDescription = """<rdf:Description rdf:about="%s" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
    <dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">new title</dc:title>
</rdf:Description>""" % uri2

        lxmlNode = parse(StringIO("""<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
        %s
</rdf:RDF>""" % rdfDescription))
        consume(self.dna.all.add(identifier="identifier", partname="ignored", lxmlNode=lxmlNode))

        record1 = self.oaiJazz.getRecord(uri1)
        self.assertTrue(record1.isDeleted)
# Example #13
# 0
    def testAddInitialRecord(self):
        uri = "some:uri"

        rdfDescription = """<rdf:Description rdf:about="%s" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.openarchives.org/OAI/2.0/">
    <dc:title xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en">title</dc:title>
    <prov:wasDerivedFrom xmlns:prov="http://www.w3.org/ns/prov#">
        <prov:Entity>
            <dcterms:source rdf:resource="http://first.example.org"/>
        </prov:Entity>
    </prov:wasDerivedFrom>
</rdf:Description>""" % uri

        lxmlNode = parse(StringIO("""<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
        %s
</rdf:RDF>""" % rdfDescription))

        consume(self.dna.all.add(identifier="identifier", lxmlNode=lxmlNode))

        record = self.oaiJazz.getRecord(identifier=uri)
        expected = XML(lxmltostring(xpathFirst(lxmlNode, '//rdf:RDF')))
        cleanup_namespaces(expected)
        self.assertXmlEquals(expected, self.storage.getData(identifier=record.identifier, name='rdf'))

        self.assertEquals(set(['rdf']), record.prefixes)
        self.assertEquals(set(), record.sets)

        self.plein.close()
        plein2 = self._newPlein()
        self.assertEquals(['some:uri'], [fragment.uri for fragment in plein2._fragmentsForRecord('identifier')])
# Example #14
# 0
    def testUpdateSettings(self):
        """getSettings reads /settings/; setSettings POSTs only the non-None values."""
        self.response = JsonDict(numberOfConcurrentTasks=6, similarity="BM25(k1=1.2,b=0.75)", clustering=JsonDict(clusterMoreRecords=100, clusteringEps=0.4, clusteringMinPoints=1))
        settings = retval(self._lucene.getSettings())
        self.assertEqual(['/settings/'], self.read)
        # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
        self.assertEqual({'numberOfConcurrentTasks': 6, 'similarity': u'BM25(k1=1.2,b=0.75)', 'clustering': {'clusterMoreRecords': 100, 'clusteringEps': 0.4, 'clusteringMinPoints': 1}}, settings)

        clusterFields = [
            {"filterValue": None, "fieldname": "untokenized.dcterms:isFormatOf.uri", "weight": 0}
        ]
        self.response = ""
        consume(self._lucene.setSettings(similarity=dict(name="bm25", k1=1.0, b=2.0), numberOfConcurrentTasks=10, clustering=dict(clusterMoreRecords=200, clusteringEps=1.0, clusteringMinPoints=2, fields=clusterFields)))
        self.assertEqual(1, len(self.post))
        self.assertEqual('/lucene/settings/', self.post[0]['path'])
        self.assertEqual({
                "numberOfConcurrentTasks": 10,
                "similarity": dict(type="BM25Similarity", k1=1.0, b=2.0),
                "clustering": {
                    "clusterMoreRecords": 200,
                    "clusteringEps": 1.0,
                    "clusteringMinPoints": 2,
                    "fields": [
                        {"filterValue": None, "fieldname": "untokenized.dcterms:isFormatOf.uri", "weight": 0}
                    ]
                }
            }, loads(self.post[0]['data']))

        # None-valued settings are omitted from the POSTed body entirely.
        consume(self._lucene.setSettings(numberOfConcurrentTasks=5, similarity=None, clustering=None))
        self.assertEqual(2, len(self.post))
        self.assertEqual('/lucene/settings/', self.post[1]['path'])
        self.assertEqual({
                "numberOfConcurrentTasks": 5,
            }, loads(self.post[1]['data']))
    def testAdd(self):
        """Fields are converted per kind: text, drilldown (FacetField) and __key__ (KeyField)."""
        class Factory():
            def __init__(self, observable, untokenizedFieldnames):
                self.observable = observable
                self.untokenizedFieldnames = untokenizedFieldnames

            def fieldsFor(self, fieldname, value):
                # 'return <value>' sets StopIteration.value in a generator; an explicit
                # 'raise StopIteration' becomes RuntimeError under PEP 479 (Python 3.7+).
                return [(fieldname, value)]
                yield
        fieldFactory = Factory

        fieldRegistry = FieldRegistry(drilldownFields=[DrilldownField('drilldown.field')])
        index = FieldsListToLuceneDocument(fieldRegistry, untokenizedFieldnames=[], indexFieldFactory=fieldFactory)
        observer = CallTrace(emptyGeneratorMethods=['addDocument'])
        index.addObserver(observer)
        fields = [
            ("field1", "value1"),
            ("field2", "value2"),
            ("drilldown.field", "a drilldown value"),
            ("__key__.field", "a key value"),
            ("__key__.field1", 2),
        ]
        consume(index.add(identifier="", fieldslist=fields))
        # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
        self.assertEqual(['addDocument'], observer.calledMethodNames())
        fields = observer.calledMethods[0].kwargs['fields']
        self.assertEqual([
                {'name': 'field1', 'type': 'TextField', 'value': 'value1'},
                {'name': 'field2', 'type': 'TextField', 'value': 'value2'},
                {'name': 'drilldown.field', 'type': 'FacetField', 'path': ['a drilldown value']},
                {'name': '__key__.field', 'type': 'KeyField', 'value': 'a key value'},
                {'name': '__key__.field1', 'type': 'KeyField', 'value': 2},
            ], fields)
# Example #16
# 0
    def testDeproxyUpdateIps(self):
        """updateIps replaces the set of addresses allowed to deproxy X-Forwarded headers."""
        # Expose updateIps from IpFilter
        self.createTree(deproxyForIps=['127.7.7.7'])

        # White box: assertEquals alias is deprecated and removed in Python 3.12.
        allowDeproxying = lambda ip: self.deproxy._ipfilter.filterIpAddress(ip)
        self.assertEqual(True, allowDeproxying('127.7.7.7'))
        self.assertEqual(False, allowDeproxying('127.0.0.1'))
        self.assertEqual(False, allowDeproxying('10.0.0.1'))

        self.deproxy.updateIps(ipAddresses=['192.168.96.96'], ipRanges=[('10.0.0.0', '10.0.0.2')])
        self.assertEqual(True, allowDeproxying('192.168.96.96'))
        self.assertEqual(True, allowDeproxying('10.0.0.1'))
        # The originally configured address is replaced, not merged.
        self.assertEqual(False, allowDeproxying('127.7.7.7'))
        self.assertEqual(False, allowDeproxying('127.0.0.1'))

        # Black box
        consume(self.top.all.handleRequest(
            Client=("192.168.96.96", 12345),
            Headers={
                "X-Forwarded-For": "2.2.2.2",
                "X-Forwarded-Host": "example.org"}))

        self.assertEqual(1, len(self.observer.calledMethods))
        handleRequestCallKwargs = self.observer.calledMethods[0].kwargs
        self.assertEqual("2.2.2.2", handleRequestCallKwargs['Client'][0])
        self.assertEqual('192.168.96.96', handleRequestCallKwargs['OriginalClient'][0])
        self.assertEqual({"X-Forwarded-For": "2.2.2.2", "X-Forwarded-Host": "example.org", "Host": "example.org"}, handleRequestCallKwargs['Headers'])
# Example #17
# 0
    def testComposedQuery(self):
        # A plain single-core composed query is POSTed to /query/ with default bookkeeping.
        self.response = JsonDict({
                "total": 887,
                "queryTime": 6,
                "hits": [{"id": "record:1", "score": 0.1234}]
            }).dumps()

        composed = ComposedQuery('coreA')
        luceneQuery = QueryExpressionToLuceneQueryDict([], LuceneSettings()).convert(cqlToExpression("field=value"))
        composed.setCoreQuery("coreA", luceneQuery)

        consume(self._multiLucene.executeComposedQuery(composed))
        self.assertEqual(1, len(self.post))
        self.assertEqual("/query/", self.post[0]['path'])
        expected = {
            "_sortKeys": [],
            "resultsFrom": "coreA",
            "_matches": {},
            "_facets": {},
            "_otherCoreFacetFilters": {},
            "_rankQueries": {},
            "_drilldownQueries": {},
            "_unites": [],
            "_queries": {"coreA": {"term": {"field": "field", "value": "value"}, "type": "TermQuery"}},
            "cores": ["coreA"],
            "_filterQueries": {}
        }
        self.assertEqual(expected, loads(self.post[0]['data']))
    def testCollectLogWhenIndexRaisesError(self):
        """When executeQuery fails, only the sru arguments end up in the log collector."""
        handler = SruHandler(enableCollectLog=True)
        observer = CallTrace('observer', emptyGeneratorMethods=['echoedExtraRequestData', 'extraResponseData', 'additionalDiagnosticDetails'])
        __callstack_var_logCollector__ = dict()
        times = [1]
        def timeNow():
            return times.pop(0)
        handler._timeNow = timeNow

        def executeQuery(**kwargs):
            # Deliberately fails before yielding anything to exercise the error path.
            raise Exception('Sorry')
            yield
        observer.methods['executeQuery'] = executeQuery
        handler.addObserver(observer)
        arguments = dict(startRecord=11, maximumRecords=15, query='query', recordPacking='string', recordSchema='schema')
        consume(handler.searchRetrieve(sruArguments=arguments, **arguments))

        # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
        self.assertEqual({
            'sru': {
                'arguments': [{
                    'startRecord': 11,
                    'query': 'query',
                    'recordPacking': 'string',
                    'maximumRecords': 15,
                    'recordSchema': 'schema',
                }],
            }
        }, __callstack_var_logCollector__)
# Example #19
# 0
 def testServiceConfigListen(self):
     """Every configured ipAddress(es) entry yields its own 'listen <ip>:<port>;' line."""
     c = ServiceConfig(type='api', minVersion="4.2", untilVersion="5.0", path='/main', port=443)
     configServices = CONFIG_SERVICES()
     configServices['config']['api.frontend']['ipAddress'] = '10.0.0.1'
     configServices['config']['api.frontend']['ipAddresses'] = ['10.0.0.2', '10.0.0.3']
     consume(c.updateConfig(**configServices))
     # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
     self.assertEqual('    listen 10.0.0.1:443;\n    listen 10.0.0.2:443;\n    listen 10.0.0.3:443;\n', asString(c.listenLines()))
# Example #20
# 0
    def testAddTypeAndMissingValueToSortField(self):
        # sortKeys POSTed to the server are enriched with 'type' and 'missingValue'.
        self.response = JsonDict({
                "total": 887,
                "queryTime": 6,
                "hits": [{"id": "record:1", "score": 0.1234}]
            }).dumps()

        composed = ComposedQuery('coreA')
        converted = QueryExpressionToLuceneQueryDict([], LuceneSettings()).convert(cqlToExpression("field=value"))
        composed.setCoreQuery('coreB', converted)
        composed.sortKeys = [dict(sortBy='sortField', core='coreA', sortDescending=True)]
        composed.addMatch(dict(core='coreA', uniqueKey='A'), dict(core='coreB', key='B'))
        consume(self._multiLucene.executeComposedQuery(composed))
        expected = {
            "_sortKeys": [{'core': 'coreA', 'sortBy': 'sortField', 'sortDescending': True, 'type': 'String', 'missingValue': 'STRING_FIRST'}],
            "resultsFrom": "coreA",
            '_matches': {'coreA->coreB': [{'core': 'coreA', 'uniqueKey': 'A'}, {'core': 'coreB', 'key': 'B'}]},
            "_facets": {},
            "_otherCoreFacetFilters": {},
            "_rankQueries": {},
            "_drilldownQueries": {},
            "_unites": [],
            '_queries': {'coreB': {'term': {'field': 'field', 'value': 'value'}, 'type': 'TermQuery'}},
            "cores": ["coreB", "coreA"],
            "_filterQueries": {}
        }
        self.assertEqual(expected, loads(self.post[0]['data']))
# Example #21
# 0
 def testServiceConfig(self):
     """Default service config: servernames, empty zones, proxy location and 0.0.0.0 listen."""
     c = ServiceConfig(type='api', minVersion="4.2", untilVersion="5.0")
     consume(c.updateConfig(**CONFIG_SERVICES()))
     # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
     self.assertEqual(['api.front.example.org', 'alias1', 'alias2'], asList(c.servernames()))
     self.assertEqual('', asString(c.zones()))
     self.assertEqual('    location / {\n        proxy_pass http://__var_af9b2fd9f1c7f17413223dc3c26aeee4_api;\n    }', asString(c.locations()))
     self.assertEqual('    listen 0.0.0.0:80;\n', asString(c.listenLines()))
    def testExecuteQueryWithtriplestoreHostPortFromObserver(self):
        """The host/port for the HTTP request is taken from the triplestoreServer observer."""
        self.request = TriplestoreRequest()
        self.request.addObserver(self.observer)
        self.observer.returnValues['triplestoreServer'] = ('this.server.nl', 1234)
        self.responseData = RESULT_JSON
        consume(self.request.executeQuery("select ?x where {}"))
        # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
        self.assertEqual(['triplestoreServer', 'httprequest', 'handleQueryTimes'], self.observer.calledMethodNames())

        httprequestKwargs = self.observer.calledMethods[1].kwargs
        request = httprequestKwargs.pop('request')
        self.assertEqual({
                'headers': None,
                'method': 'GET',
                'host': 'this.server.nl',
                'port': 1234,
            }, httprequestKwargs)
        parsed = urlparse(request)
        self.assertEqual('/query', parsed.path)
        self.assertEqual({'query': ['''select ?x where {}''']}, parse_qs(parsed.query))

        handleQueryTimesKwargs = self.observer.calledMethods[2].kwargs
        # Compare as sets: dict.keys() is a view object in Python 3 and never
        # compares equal to a list.
        self.assertEqual({'index', 'queryTime'}, set(handleQueryTimesKwargs.keys()))
        self.assertEqual(Decimal('0.042'), handleQueryTimesKwargs['index'])
        qt = float(handleQueryTimesKwargs['queryTime'])
        self.assertTrue(0.0 <= qt < 0.1, qt)
# Example #23
# 0
 def testServiceConfigThrottling(self):
     """Throttling config produces limit_conn_zone lines and per-location limit_conn lines."""
     c = ServiceConfig(type='api', minVersion="4.2", untilVersion="5.0", path='/main')
     configServices = CONFIG_SERVICES()
     configServices['config']['api.frontend']['throttling'] = {
         '/path': {'max_connections_per_ip' : 10, 'max_connections': 100},
         '/other': {'max_connections_per_ip' : 30, 'max_connections': 150}
     }
     consume(c.updateConfig(**configServices))
     # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
     self.assertEqual([
         'limit_conn_zone $binary_remote_addr zone=api-other-byip:10m;',
         'limit_conn_zone $server_name zone=api-other-total:10m;',
         'limit_conn_zone $binary_remote_addr zone=api-path-byip:10m;',
         'limit_conn_zone $server_name zone=api-path-total:10m;'
         ], asString(c.zones()).split('\n'))
     self.assertEqual([
         '    location /main {',
         '        proxy_pass http://__var_af9b2fd9f1c7f17413223dc3c26aeee4_api;',
         '    }',
         '    location /other {',
         '        proxy_pass http://__var_af9b2fd9f1c7f17413223dc3c26aeee4_api;',
         '        limit_conn api-other-byip 30;',
         '        limit_conn api-other-total 150;',
         '    }',
         '    location /path {',
         '        proxy_pass http://__var_af9b2fd9f1c7f17413223dc3c26aeee4_api;',
         '        limit_conn api-path-byip 10;',
         '        limit_conn api-path-total 100;',
         '    }',
         ], asString(c.locations()).split('\n'))
 def testMaxFiles(self):
     """PersistLog keeps at most maxFiles log files (>= 3), gzipping rotated ones."""
     self.assertRaises(ValueError, lambda: PersistLog(self.tempdir, maxSize=10, maxFiles=2))
     persist = PersistLog(self.tempdir, maxSize=10, maxFiles=3)
     def dataDict(nr):
         d = LOGDATADICT.copy()
         d['arguments'] += '&line=%s' % nr
         return d
     consume(persist.logData(dataDict=dataDict(1)))
     # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
     self.assertEqual(1, len(listdir(self.tempdir)))
     consume(persist.logData(dataDict=dataDict(2)))
     persist._thread.join()
     self.assertEqual(2, len(listdir(self.tempdir)))
     consume(persist.logData(dataDict=dataDict(3)))
     persist._thread.join()
     self.assertEqual(3, len(listdir(self.tempdir)))
     consume(persist.logData(dataDict=dataDict(4)))
     persist._thread.join()
     self.assertEqual(3, len(listdir(self.tempdir)))
     consume(persist.logData(dataDict=dataDict(5)))
     persist._thread.join()
     self.assertEqual(3, len(listdir(self.tempdir)))
     persist.close()
     zipped, notzipped, current = sorted(listdir(self.tempdir))
     self.assertEqual('current', current)
     self.assertTrue(zipped.endswith('.gz'))
     # Context managers close the handles deterministically (the originals leaked them).
     with open(join(self.tempdir, current)) as f:
         self.assertTrue('line=5' in f.read())
     with open(join(self.tempdir, notzipped)) as f:
         self.assertTrue('line=4' in f.read())
     # gzip.open defaults to binary mode and yields bytes on Python 3;
     # 'str in bytes' raises TypeError, so compare against a bytes literal.
     with gzip.open(join(self.tempdir, zipped)) as f:
         self.assertTrue(b'line=3' in f.read())
 def testDeleteAlreadAdded(self):
     """Deleting an existing (not-yet-deleted) record triggers getRecord then delete."""
     self.observer.returnValues['getRecord'].isDeleted = False
     consume(self.top.all.delete(identifier='identifier'))
     # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
     self.assertEqual(['getRecord', 'delete'], self.observer.calledMethodNames())
     getRecordCall, deleteCall = self.observer.calledMethods
     self.assertEqual(dict(identifier='identifier'), getRecordCall.kwargs)
     self.assertEqual(dict(identifier='identifier'), deleteCall.kwargs)
 def testLogData(self):
     """A logged dataDict is formatted by dictToLine and written to the 'current' file."""
     persist = PersistLog(join(self.tempdir, 'store'), dictToLine=dictToLogline)
     consume(persist.logData(dataDict=LOGDATADICT))
     persist.close()
     # Context manager closes the file handle (the original leaked it);
     # assertEqual: the assertEquals alias is deprecated and removed in Python 3.12.
     with open(join(self.tempdir, 'store', 'current')) as f:
         self.assertEqual(
                 '2009-11-02T11:25:36Z 11.12.13.14 4.0K 12.340s 0hits /path key=value\n',
                 f.read())
def startServer(port, stateDir, luceneserverPort, gatewayPort, quickCommit=False, **ignored):
    """Boot the index-writer server in the current thread and block in its reactor loop.

    NOTE(review): Python 2 syntax (print statements). `port` is only used in
    the startup message here; `**ignored` absorbs surplus configuration
    kwargs passed by the caller.
    """
    setSignalHandlers()
    print 'Firing up Index Server.'

    statePath = abspath(stateDir)
    writerReactor = Reactor()

    # Compose the writer component tree from the given configuration.
    writer = writerMain(
            writerReactor=writerReactor,
            statePath=statePath,
            luceneserverPort=luceneserverPort,
            gatewayPort=gatewayPort,
            quickCommit=quickCommit,
        )

    writerServer = be(writer)

    # Start writer in main (this) thread:
    consume(writerServer.once.observer_init())

    # Best-effort shutdown hook (shutdownMustSucceed=False).
    registerShutdownHandler(statePath=statePath, server=writerServer, reactor=writerReactor, shutdownMustSucceed=False)


    print "Ready to rumble at port %s" % port
    stdout.flush()

    writerReactor.loop()
    
 def testExcludingPaths(self):
     """Requests under an excluded prefix are dropped; other matching paths pass through."""
     pathFilter = PathFilter('/path', excluding=['/path/not/this'])
     pathFilter.addObserver(self.interceptor)
     # Excluded subtree: observer must not be called.
     consume(pathFilter.handleRequest(path='/path/not/this/path'))
     self.assertEquals(0, len(self.interceptor.calledMethods))
     # Non-excluded path under the prefix: observer is called once.
     consume(pathFilter.handleRequest(path='/path/other'))
     self.assertEquals(1, len(self.interceptor.calledMethods))
 def testSimplePath(self):
     """A matching path is forwarded as handleRequest with all kwargs intact."""
     pathFilter = PathFilter('/path')
     pathFilter.addObserver(self.interceptor)
     consume(pathFilter.handleRequest(path='/path', otherArgument='value'))
     intercepted = self.interceptor.calledMethods
     self.assertEquals(1, len(intercepted))
     self.assertEquals('handleRequest', intercepted[0].name)
     self.assertEquals(dict(path='/path', otherArgument='value'), intercepted[0].kwargs)
 def testShouldNotChangeCallsWithStartRecordLowerThanOrEqualTo1000(self):
     # startRecord=1 is within the allowed window, so searchRetrieve must be
     # passed through unchanged -- only the limitBeyond marker is injected.
     sruArguments = dict(
         version="1.1", recordSchema="schema", recordPacking="xml", startRecord=1, maximumRecords=10, query="query"
     )
     # The same values are sent both as the sruArguments dict and spread as
     # individual top-level kwargs.
     consume(self.dna.all.searchRetrieve(sruArguments=sruArguments, otherKwarg="otherKwarg", **sruArguments))
     self.assertEquals(["searchRetrieve"], self.observer.calledMethodNames())
     self.assertDictEquals(
         {
             "sruArguments": {
                 "recordSchema": "schema",
                 "version": "1.1",
                 "recordPacking": "xml",
                 "maximumRecords": 10,
                 "startRecord": 1,
                 "query": "query",
             },
             "otherKwarg": "otherKwarg",
             "limitBeyond": 1000,
             "recordSchema": "schema",
             "version": "1.1",
             "recordPacking": "xml",
             "maximumRecords": 10,
             "startRecord": 1,
             "query": "query",
         },
         self.observer.calledMethods[0].kwargs,
     )
    def testAddDocument(self):
        """Committing a transaction with one field yields one addDocument call
        carrying the transaction's identifier."""
        observer = CallTrace()
        fields2LuceneDoc = Fields2LuceneDoc('tsname', fieldRegistry=FieldRegistry())
        fields2LuceneDoc.addObserver(observer)
        transaction = Transaction('tsname')
        transaction.locals['id'] = 'identifier'
        fields2LuceneDoc.ctx.tx = transaction
        fields2LuceneDoc.addField('field', 'value')
        consume(fields2LuceneDoc.commit('unused'))

        self.assertEquals(['addDocument'], observer.calledMethodNames())
        addDocumentCall = observer.calledMethods[0]
        self.assertEquals('identifier', addDocumentCall.kwargs['identifier'])
 def testUpdateOnce(self):
     # Point the client at the local listener, then report once and verify
     # exactly one report arrives with the original payload.
     consume(
         self.client.updateConfig(
             config={'gustos': {
                 'host': 'localhost',
                 'port': self.port
             }},
             services='ignored'))
     self.client.report(values=DATA)
     sleep(0.1)  # give the background sender time to deliver
     log = self.listen.log()
     self.assertEqual(1, len(log))
     self.assertEqual(DATA, loads(log[0])['data'])
Example #33
0
    def testWriteLogMustNotFail(self):
        # A failing log writer must not propagate its exception into the
        # observable call chain; the error is only printed to stderr.
        logwriter = CallTrace('logwriter',
                              emptyGeneratorMethods=['someMessage'])
        logwriter.exceptions['writeLog'] = ValueError

        top = be((Observable(), (LogCollector('default'), (logwriter, ),
                                 (FilterMessages(allowed=['someMessage']),
                                  (LogKeyValue(dict(name='A')), )))))
        with stderr_replaced() as err:
            try:
                consume(top.all.someMessage())
            except ValueError:
                self.fail("Should not raise an error; Only print it")
        # The swallowed exception is still reported on stderr.
        self.assertTrue('ValueError' in err.getvalue(), err.getvalue())
    def testCreateFacet(self):
        # Fields registered as drilldown fields must end up as FacetFields on
        # the Lucene document; all others remain regular search fields.
        fields = {
            'field1': ['value1'],
            'sorted.field3': ['value3'],
            'untokenized.field4': ['value4'],
            'untokenized.field5': ['value5', 'value6'],
            'untokenized.field6': ['value5/value6'],
            'untokenized.field7': ['valuex'],
            # hierarchical drilldown: each value is a path of components
            'untokenized.field8': [['grandparent', 'parent', 'child'],
                                   ['parent2', 'child']]
        }
        fields2LuceneDoc = Fields2LuceneDoc(
            'tsname',
            fieldRegistry=FieldRegistry(drilldownFields=[
                DrilldownField('untokenized.field4'),
                DrilldownField('untokenized.field5'),
                DrilldownField('untokenized.field6'),
                DrilldownField('untokenized.field8', hierarchical=True),
            ]))
        observer = CallTrace()
        fields2LuceneDoc.addObserver(observer)
        fields2LuceneDoc.ctx.tx = Transaction('tsname')
        fields2LuceneDoc.ctx.tx.locals['id'] = 'identifier'
        for field, values in fields.items():
            for value in values:
                fields2LuceneDoc.addField(field, value)

        consume(fields2LuceneDoc.commit('unused'))

        document = observer.calledMethods[0].kwargs['document']
        # untokenized.field7 was never registered as drilldown, so it stays
        # a plain search field alongside field1 and sorted.field3.
        searchFields = [
            f for f in document.getFields() if not FacetField.instance_(f)
        ]
        self.assertEquals(['field1', 'sorted.field3', 'untokenized.field7'],
                          [f.name() for f in searchFields])

        facetsFields = [
            FacetField.cast_(f) for f in document.getFields()
            if FacetField.instance_(f)
        ]
        self.assertEquals(6, len(facetsFields))
        self.assertEquals([
            ('untokenized.field8', ['grandparent', 'parent', 'child']),
            ('untokenized.field8', ['parent2', 'child']),
            ('untokenized.field6', ['value5/value6']),
            ('untokenized.field4', ['value4']),
            ('untokenized.field5', ['value5']),
            ('untokenized.field5', ['value6']),
        ], [(f.dim, list(f.path))
            for f in facetsFields])  # Note: a FacetField doesn't have a name
 def testOnlyOneSortValueAllowed(self):
     """For a 'sorted.' field only the first added value survives in the document."""
     observer = CallTrace()
     fields2LuceneDoc = Fields2LuceneDoc('tsname', fieldRegistry=FieldRegistry())
     fields2LuceneDoc.addObserver(observer)
     fields2LuceneDoc.ctx.tx = Transaction('tsname')
     fields2LuceneDoc.ctx.tx.locals['id'] = 'identifier'
     for value in ['value1', 'value2']:
         fields2LuceneDoc.addField('sorted.field', value)
     consume(fields2LuceneDoc.commit('unused'))
     fields = observer.calledMethods[0].kwargs['fields']
     self.assertEquals(1, len(fields))
     expected = {'sort': True, 'type': 'StringField', 'name': 'sorted.field', 'value': 'value1'}
     self.assertEqual(expected, fields[0])
Example #36
0
    def _deferredHandleShutdown(self, signum, frame):
        """Signal handler: shut the server down, then chain to any previous handler.

        Raises KeyboardInterrupt for SIGINT (the normal Python convention);
        exits the process for any other signal.
        """
        assert isfile(self._runningMarkerFile)
        consume(self._server.once.handleShutdown())
        # Remove the marker only after a successful shutdown.
        remove(self._runningMarkerFile)
        print('Shutdown completed.')
        sys.stdout.flush()

        # Delegate to whatever handler was installed before ours, unless it
        # is one of the default/ignore sentinels.
        previousHandler = self._previouslyRegisteredHandlers[signum]
        if previousHandler not in [SIG_DFL, SIG_IGN, None]:
            previousHandler(signum, frame)
        if signum == SIGINT:
            raise KeyboardInterrupt()
        else:
            exit(0)
    def testMaxFileSize(self):
        # Pre-seed 'current' so the very first logData pushes it over maxSize
        # and triggers rotation (old line rotated away, new line in 'current').
        with open(join(self.tempdir, 'current'), 'w') as f:
            f.write(
                '2008-11-02T11:25:36Z 11.12.13.14 4.0K 12.340s 0hits /path key=value\n'
            )
        persist = PersistLog(self.tempdir,
                             maxSize=10,
                             dictToLine=dictToLogline)
        try:
            consume(persist.logData(dataDict=LOGDATADICT))
            persist.close()

            with open(join(self.tempdir, 'current')) as fp:
                self.assertEqual(
                    '2009-11-02T11:25:36Z 11.12.13.14 4.0K 12.340s 0hits /path key=value\n',
                    fp.read())
            self.assertEqual(2, len(listdir(self.tempdir)))
            # Each further over-size write rotates again; join the background
            # rotation thread before counting files.
            consume(persist.logData(dataDict=LOGDATADICT))
            consume(persist.logData(dataDict=LOGDATADICT))
            persist._thread.join()
            consume(persist.logData(dataDict=LOGDATADICT))
            persist._thread.join()
            self.assertEqual(5, len(listdir(self.tempdir)))
            self.assertTrue('current' in listdir(self.tempdir))
            # Rotated files end up gzip-compressed.
            self.assertEqual(
                3,
                len([l for l in listdir(self.tempdir) if l.endswith('.gz')]))
        finally:
            persist.close()
Example #38
0
 def testAddNew(self):
     """Adding a record that does not exist yet: getRecord miss, then add with full kwargs."""
     self.observer.returnValues['getRecord'] = None
     self.observer.returnValues['getData'] = None
     consume(self.top.all.add(identifier='identifier',
                              partname='partname',
                              data="data"))
     self.assertEquals(['getRecord', 'add'], self.observer.calledMethodNames())
     getRecordCall, addCall = self.observer.calledMethods
     self.assertEquals(dict(identifier='identifier'), getRecordCall.kwargs)
     expectedAddKwargs = dict(identifier='identifier', partname='partname', data='data')
     self.assertEquals(expectedAddKwargs, addCall.kwargs)
Example #39
0
    def testSignalHarvestingDone(self):
        """A ListRecords response without a resumptionToken closes the batch and
        emits signalHarvestingDone after the records are added."""
        observer = CallTrace(emptyGeneratorMethods=['add'])
        processor = OaiDownloadProcessor(path='/p',
                                         metadataPrefix='p',
                                         workingDirectory=self.tempdir,
                                         incrementalHarvestSchedule=None)
        processor.addObserver(observer)

        response = parse(StringIO(LISTRECORDS_RESPONSE % ''))
        consume(processor.handle(response))
        self.assertEqual(
            ['startOaiBatch', 'add', 'stopOaiBatch', 'signalHarvestingDone'],
            observer.calledMethodNames())
 def testRetrieveCookie(self):
     # A second request presenting the Set-Cookie value from the first
     # request must resolve to the very same session object.
     sessions = []
     class MyObserver(Observable):
         def handleRequest(self, *args, **kwargs):
             # Capture the session the handler exposes via its context.
             session = self.ctx.session
             sessions.append(session)
             yield  utils.okHtml + '<html/>'
     self.handler.addObserver(MyObserver())
     # First request: no cookie sent, so a new session + Set-Cookie header.
     headers = asString(self.handler.handleRequest(RequestURI='/path', Client=('127.0.0.1', 12345), Headers={})).split(CRLF*2,1)[0]
     headers = parseHeaders(headers)
     self.assertTrue('Set-Cookie' in headers, headers)
     cookie = findCookies(headers, self.cookiestore.cookieName(), 'Set-Cookie')[0]
     # Second request replays the cookie: same session must be retrieved.
     consume(self.handler.handleRequest(RequestURI='/path', Client=('127.0.0.1', 12345), Headers={'Cookie': '{0}={1}'.format(self.cookiestore.cookieName(), cookie)}))
     self.assertEquals(sessions[0], sessions[1])
     self.assertEquals(id(sessions[0]),id(sessions[1]))
Example #41
0
    def testWebQueryUsage(self):
        """A plain web query yields one executeQuery call with default paging/sort.

        Fix: the stubbed executeQuery used ``raise StopIteration(value)`` to
        return a value from a generator; under PEP 479 (Python 3.7+) that
        surfaces as RuntimeError instead of delivering the value. ``return
        value`` sets StopIteration.value the supported way -- consistent with
        testWebQueryUsesFilters in this same file.
        """
        observer = CallTrace(
            ignoredAttributes=['unknown', 'extraResponseData', 'echoedExtraRequestData'])
        def executeQuery(**kwargs):
            # 'return' before 'yield' makes this an immediately-exhausted
            # generator whose StopIteration carries the Response.
            return Response(total=0, hits=[])
            yield
        observer.methods['executeQuery'] = executeQuery
        rss = Rss(title='Title', description='Description', link='Link')
        rss.addObserver(observer)

        consume(rss.handleRequest(RequestURI='/?query=one+two'))
        self.assertEquals(['executeQuery'], [m.name for m in observer.calledMethods])
        self.assertEquals(None, observer.calledMethods[0].kwargs['sortKeys'])
        self.assertEquals(0, observer.calledMethods[0].kwargs['start'])
        self.assertEquals(10, observer.calledMethods[0].kwargs['stop'])
Example #42
0
 def testDeleteData(self):
     """deleteData without a part name leaves parts intact; naming the part removes it."""
     identifier, partName = "id_0", "partName"
     contents = b"The contents of the part"
     consume(self.storageComponent.add(identifier, partName, contents))
     self.assertEqual(contents,
                      self.storageComponent.getData(identifier=identifier, name=partName))
     # Delete without naming a part: the part's data must survive.
     self.storageComponent.deleteData(identifier=identifier)
     self.assertEqual(contents,
                      self.storageComponent.getData(identifier=identifier, name=partName))
     # Delete the named part: subsequent reads raise KeyError.
     self.storageComponent.deleteData(identifier=identifier, name=partName)
     self.assertRaises(
         KeyError,
         lambda: self.storageComponent.getData(identifier, partName))
    def testLoginEnrichesAUserByAnObserver(self):
        # After a successful login the user object in the session must carry
        # whatever an enrichUser observer attached to it.
        # NOTE(review): the '******' literals look like scrubbed credentials
        # from the original source -- do not change them.
        observer = CallTrace(returnValues={'hasUser': True, 'validateUser': True})
        def enrichUser(user):
            user.title = lambda: "Full username"
            user.additionalInfo = "more info"
        observer.methods['enrichUser'] = enrichUser
        self.form.addObserver(observer)
        Body = urlencode(dict(username='******', password='******'))
        session = {}

        consume(self.form.handleRequest(path='/login', Client=('127.0.0.1', 3451), Method='POST', Body=Body, session=session))

        self.assertEquals("more info", session['user'].additionalInfo)
        self.assertEquals('Full username', session['user'].title())
        self.assertEquals(['validateUser', 'hasUser', 'enrichUser'], observer.calledMethodNames())
 def testOne(self):
     # End-to-end check: a normalized document is converted into the exact
     # (fieldname, value) pairs forwarded to the downstream 'add'.
     # Empty-string field names appear to be intentional catch-all fields --
     # TODO(review): confirm against NormdocToFieldsList's mapping.
     normdocToFieldsList = NormdocToFieldsList()
     observer = CallTrace(emptyGeneratorMethods=['add'])
     normdocToFieldsList.addObserver(observer)
     consume(
         normdocToFieldsList.add(identifier='record:4',
                                 lxmlNode=parse(open("data/normdoc.xml"))))
     self.assertEqual(['add'], observer.calledMethodNames())
     # print "FIELDLIST:", observer.calledMethods[0].kwargs
     self.assertEqual(
         {
             'fieldslist':
             [('meta_repositoryid', 'knaw'), ('oai_id', 'record:4'),
              ('meta_repositorygroupid', 'knaw'), ('dare_id', 'record:4'),
              ('meta_collection', 'publication'),
              ('humanstartpage', 'http://repository.tue.nl/711504'),
              ('persistentid', 'URN:NBN:NL:UI:25-711504'),
              ('access', 'closedAccess'),
              ('',
               'Copyright (c) Aiki, T (Toyohiko); Copyright (c) Hilhorst, D; Copyright (c) Mimura, M; Copyright (c) Muntean, A (Adrian)'
               ), ('genre', 'article'),
              ('', 'Discrete and Continuous Dynamical Systems - Series S'),
              ('', 'Discrete and Continuous Dynamical Systems - Series S'),
              ('', 'Veenendaal'), ('', 'Springer'), ('', 'text'),
              ('fundingid', 'info:eu-repo/grantAgreement/EC/FP5/654321'),
              ('',
               'EERA Design Tools for Offshore Wind Farm Cluster                    (EERA-DTOC)'
               ),
              ('',
               'The European Energy Research Alliance (EERA)                    together with some high-impact industry partners addresses                    the call proposing an integrated and validated design tool                    combining the state-of-the-art wake, yield and electrical                    models available in the consortium, as a plug-in                    architecture with possibility for third party                    models.'
               ), ('', 'Funder, $ (Adrian)'),
              ('fundingid', 'info:eu-repo/grantAgreement/EC/FP7/282797'),
              ('authors', 'Aiki, T (Toyohiko)'), ('authors', 'Hilhorst, D'),
              ('authors', 'Mimura, M'), ('names', 'Aiki, T (Toyohiko)'),
              ('names', 'Hilhorst, D'), ('names', 'Mimura, M'),
              ('names', 'Funder, $ (Adrian)'),
              ('publicationid', 'info:doi/10.3934/dcdss.2012.5.1i'),
              ('publicationid', '1937-1632-REL'),
              ('pidref', 'http://repository.tue.nl/711504'),
              ('title_en',
               'Preface to special issue (Fast reaction - slow diffusion scenarios: PDE approximations and free boundaries)'
               ),
              ('title',
               'Preface to special issue (Fast reaction - slow diffusion scenarios: PDE approximations and free boundaries)'
               ), ('dd_year', '2014'), ('dateissued', '2014')],
             'identifier':
             'record:4'
         }, observer.calledMethods[0].kwargs)
 def testAddFacetField(self):
     """addFacetField produces exactly one facet ('path') entry among the committed fields."""
     observer = CallTrace()
     registry = FieldRegistry(drilldownFields=[DrilldownField('untokenized.field')])
     fields2LuceneDoc = Fields2LuceneDoc('tsname', fieldRegistry=registry)
     fields2LuceneDoc.addObserver(observer)
     fields2LuceneDoc.ctx.tx = Transaction('tsname')
     fields2LuceneDoc.ctx.tx.locals['id'] = 'identifier'
     fields2LuceneDoc.addField('field', 'value')
     fields2LuceneDoc.addFacetField('untokenized.field', 'untokenized value')
     consume(fields2LuceneDoc.commit('unused'))
     fields = observer.calledMethods[0].kwargs['fields']
     facetFields = [field for field in fields if "path" in field]
     self.assertEquals(1, len(facetFields))
Example #46
0
 def testShouldRenameSortKeysFieldname(self):
     """The sortBy field name in sortKeys is rewritten before forwarding the query."""
     sortKeys = [{'sortBy': 'sortKeys', 'sortDescending': True}]
     consume(self.sortKeysRename.executeQuery(query='AbstractSyntaxTree',
                                              sortKeys=sortKeys))
     self.assertEqual(1, len(self.observer.calledMethods))
     expectedKwargs = {
         'query': 'AbstractSyntaxTree',
         'sortKeys': [{'sortBy': 'new.sortKeys', 'sortDescending': True}],
     }
     self.assertEqual(expectedKwargs, self.observer.calledMethods[0].kwargs)
Example #47
0
 def testIncrementalHarvestScheduleNone(self):
     """Without an incremental-harvest schedule no earliest-next-request time
     is set, while the 'from' date is still taken from the response."""
     observer = CallTrace(emptyGeneratorMethods=['add'])
     processor = OaiDownloadProcessor(
         path="/oai",
         metadataPrefix="oai_dc",
         workingDirectory=self.tempdir,
         xWait=False,
         err=StringIO(),
         incrementalHarvestSchedule=None)
     processor.addObserver(observer)
     consume(processor.handle(parse(StringIO(LISTRECORDS_RESPONSE % ''))))
     self.assertEqual(None, processor._resumptionToken)
     self.assertEqual('2002-06-01T19:20:30Z', processor._from)
     self.assertEqual(None, processor._earliestNextRequestTime)
Example #48
0
    def testWebQueryUsesFilters(self):
        # 'filter' request arguments must be ANDed into the CQL expression
        # as 'field exact value' clauses alongside the free-text query.
        observer = CallTrace(
            ignoredAttributes=['unknown', 'extraResponseData', 'echoedExtraRequestData'])
        def executeQuery(**kwargs):
            # 'return' before 'yield': immediately-exhausted generator whose
            # StopIteration carries the Response (PEP 479 safe).
            return Response(total=0, hits=[])
            yield
        observer.methods['executeQuery'] = executeQuery
        rss = Rss(title = 'Title', description = 'Description', link = 'Link')
        rss.addObserver(observer)

        consume(rss.handleRequest(RequestURI='/?query=one+two&filter=field1:value1&filter=field2:value2'))
        self.assertEqual(['executeQuery'], [m.name for m in observer.calledMethods])
        self.assertEqual(None, observer.calledMethods[0].kwargs['sortKeys'])
        self.assertEqual(0, observer.calledMethods[0].kwargs['start'])
        self.assertEqual(10, observer.calledMethods[0].kwargs['stop'])
        self.assertEqual(cqlToExpression("(one AND two) AND field1 exact value1 AND field2 exact value2"), observer.calledMethods[0].kwargs['query'])
Example #49
0
 def testSetIncrementalHarvestSchedule(self):
     """A schedule installed after construction drives the earliest next request time."""
     processor = OaiDownloadProcessor(
         path="/oai",
         metadataPrefix="oai_dc",
         workingDirectory=self.tempdir,
         xWait=False,
         err=StringIO(),
         incrementalHarvestSchedule=None)
     processor._time = lambda: 10  # fixed fake clock
     processor.setIncrementalHarvestSchedule(schedule=Schedule(period=3))
     self.assertEqual(0, processor._earliestNextRequestTime)
     consume(processor.handle(parse(StringIO(LISTRECORDS_RESPONSE % ''))))
     # now (10) + period (3) = 13
     self.assertEqual(13, processor._earliestNextRequestTime)
Example #50
0
 def testIgnoreOtherMethodsWithQueryArgument(self):
     # Conversion must only apply when the 'query' kwarg is already a CQL
     # expression object; a plain string is passed through untouched.
     def canModify(expression):
         return expression.term == 'term'
     def modify(expression):
         expression.term = 'changed'
     observer = CallTrace(emptyGeneratorMethods=['method'])
     top = be((Observable(),
         (CqlMultiSearchClauseConversion([(canModify, modify)], fromKwarg='query'),
             (observer,)
         )
     ))
     # Plain string query: untouched.
     consume(top.any.method(query='index = term'))
     self.assertEqual({'query': 'index = term'}, observer.calledMethods[0].kwargs)
     observer.calledMethods.reset()
     # Expression query matching canModify: rewritten by modify.
     consume(top.any.method(query=cqlToExpression('index = term')))
     self.assertEqual({'query': cqlToExpression('index = changed')}, observer.calledMethods[0].kwargs)
Example #51
0
 def testMatchesOptional(self):
     # A single-core setup without 'matches' configuration must still build
     # a valid ComposedQuery, with x-filter folded into the core's queries.
     self.tree = be(
         (Observable(),
             (ConvertToComposedQuery(resultsFrom='defaultCore'),
                 (self.observer,)
             )
         )
     )
     consume(self.tree.any.executeQuery(cqlAbstractSyntaxTree=parseCQL('*'), extraArguments={'x-filter': ['prefix:field=value']}, facets=[], start=1))
     self.assertEquals(['executeComposedQuery'], self.observer.calledMethodNames())
     cq = self.observer.calledMethods[0].kwargs['query']
     cq.validate()
     self.assertEquals(1, cq.start)
     self.assertEquals(set(['defaultCore']), cq.cores)
     # No match definitions configured, so keyName lookup must fail.
     self.assertRaises(KeyError, lambda: cq.keyName('defaultCore', 'otherCore'))
     self.assertEquals([cqlToExpression("*"), cqlToExpression("prefix:field=value")], cq.queriesFor('defaultCore'))
 def testRewriteFields(self):
     # A rewriteFields hook may inspect/extend the collected fields dict
     # before the document is built.
     def rewriteFields(fields):
         # Adds a 'keys' entry listing all collected field names; the list
         # value is flattened into one field per element on commit.
         fields['keys'] = list(sorted(fields.keys()))
         return fields
     fields2LuceneDoc = Fields2LuceneDoc('tsname', rewriteFields=rewriteFields, fieldRegistry=FieldRegistry())
     observer = CallTrace()
     fields2LuceneDoc.addObserver(observer)
     fields2LuceneDoc.ctx.tx = Transaction('tsname')
     fields2LuceneDoc.ctx.tx.locals['id'] = 'identifier'
     fields2LuceneDoc.addField('field1', 'value1')
     fields2LuceneDoc.addField('field2', 'value2')
     consume(fields2LuceneDoc.commit('unused'))
     self.assertEquals(['addDocument'], observer.calledMethodNames())
     fields = observer.calledMethods[0].kwargs['fields']
     self.assertEquals(set(['field1', 'field2', 'keys']), set([f['name'] for f in fields]))
     self.assertEquals(['field1', 'field2'], [f['value'] for f in fields if f['name'] == 'keys'])
Example #53
0
 def testIncrementalHarvestWithFromWithDefaultScheduleMidnight(self):
     # With the default (midnight) schedule, a harvest finishing at 01:00
     # schedules the next incremental run at the following midnight,
     # i.e. 24 * 60 * 60 on this fake clock.
     observer = CallTrace(emptyGeneratorMethods=['add'])
     oaiDownloadProcessor = OaiDownloadProcessor(
         path="/oai",
         metadataPrefix="oai_dc",
         workingDirectory=self.tempdir,
         xWait=False,
         err=StringIO())
     # Fake clock at 01:00 (0o1 is just octal for 1 -- one hour in seconds).
     oaiDownloadProcessor._time = oaiDownloadProcessor._incrementalHarvestSchedule._time = lambda: 0o1 * 60 * 60
     oaiDownloadProcessor._incrementalHarvestSchedule._utcnow = lambda: datetime.strptime(
         "01:00", "%H:%M")
     oaiDownloadProcessor.addObserver(observer)
     consume(
         oaiDownloadProcessor.handle(parse(StringIO(LISTRECORDS_RESPONSE))))
     self.assertEqual(None, oaiDownloadProcessor._resumptionToken)
     self.assertEqual(24 * 60 * 60.0,
                      oaiDownloadProcessor._earliestNextRequestTime)
Example #54
0
 def testShouldPassthroughHandleRequestIfUnconfigured(self):
     """Unconfigured component passes handleRequest through, adding only the
     port and OriginalClient defaults."""
     self.createTree()
     consume(self.top.all.handleRequest(Client=("9.1.8.2", 99),
                                        Headers={'H': 'eaders'},
                                        other='item'))
     self.assertEquals(['handleRequest'], self.observer.calledMethodNames())
     handleRequest, = self.observer.calledMethods
     self.assertEquals(tuple(), handleRequest.args)
     expectedKwargs = dict(
         Client=("9.1.8.2", 99),
         Headers={'H': 'eaders'},
         port=80,
         other='item',
         OriginalClient=None,
     )
     self.assertEquals(expectedKwargs, handleRequest.kwargs)
Example #55
0
    def testEmptyQueryWithAntiUnaryClauseIsPassedToWebQuery(self):
        """An empty query falls back to the configured antiUnaryClause.

        Fix: the stubbed executeQuery used ``raise StopIteration(value)``
        inside a generator; under PEP 479 (Python 3.7+) this surfaces as
        RuntimeError instead of delivering the value. ``return value`` is
        the supported way to set StopIteration.value -- same pattern as
        testWebQueryUsesFilters in this file.
        """
        observer = CallTrace(
            ignoredAttributes=['unknown', 'extraResponseData', 'echoedExtraRequestData'])
        def executeQuery(**kwargs):
            # 'return' before 'yield': immediately-exhausted generator whose
            # StopIteration carries the Response.
            return Response(total=0, hits=[])
            yield
        observer.methods['executeQuery'] = executeQuery
        rss = Rss(title='Title', description='Description', link='Link', antiUnaryClause='antiunary')
        rss.addObserver(observer)

        consume(rss.handleRequest(RequestURI='/?query='))

        self.assertEquals(['executeQuery'], [m.name for m in observer.calledMethods])
        self.assertEquals(None, observer.calledMethods[0].kwargs['sortKeys'])
        self.assertEquals(0, observer.calledMethods[0].kwargs['start'])
        self.assertEquals(10, observer.calledMethods[0].kwargs['stop'])
        self.assertEquals(cqlToExpression("antiunary"), observer.calledMethods[0].kwargs['query'])
Example #56
0
    def testMetadataPrefixesFromRootTag(self):
        # The prefix, namespace and schema location must be derived from the
        # record's root tag: the schemaLocation pair matching the root
        # namespace (http://oai_dc) is selected, not the 'http://other' pair.
        consume(
            self.subject.add(
                '456', 'oai_dc',
                parseLxml('''<oai_dc:dc
        xmlns:oai_dc="http://oai_dc"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="http://other
                            http://other.com/file.xsd
                            http://oai_dc
                            http://oai_dc/dc.xsd">
</oai_dc:dc>''')))
        self.assertEqual(
            {
                'prefix': 'oai_dc',
                'schema': 'http://oai_dc/dc.xsd',
                'namespace': 'http://oai_dc'
            }, self.observer.calledMethods[0].kwargs)
Example #57
0
 def testRestartAfterFinish(self):
     """With restartAfterFinish a completed harvest leaves no resumptionToken
     and the next request starts a fresh ListRecords."""
     observer = CallTrace(emptyGeneratorMethods=['add'])
     processor = OaiDownloadProcessor(
         path="/oai",
         metadataPrefix="oai_dc",
         workingDirectory=self.tempdir,
         xWait=False,
         err=StringIO(),
         restartAfterFinish=True)
     processor.addObserver(observer)
     consume(processor.handle(parse(StringIO(LISTRECORDS_RESPONSE))))
     self.assertEqual(None, processor._resumptionToken)
     request = processor.buildRequest()
     expectedPrefix = 'GET /oai?verb=ListRecords&metadataPrefix=oai_dc HTTP/1.0\r\nX-Meresco-Oai-Client-Identifier: '
     self.assertTrue(request.startswith(expectedPrefix), request)
Example #58
0
 def testShopClosedButNotAdded(self):
     # With numberOfTimeslots=0 the posted shopclosed_* form fields must be
     # ignored: the stored repository ends up with an empty shopclosed list.
     data = {
         'redirectUri': 'http://example.org',
         "repositoryGroupId": "ignored",
         "identifier": "repository",
         "domainId": "domain",
         "numberOfTimeslots": "0",
         'shopclosedWeek_0': '*',
         'shopclosedWeekDay_0': '*',
         'shopclosedBegin_0': '7',
         'shopclosedEnd_0': '9',
     }
     consume(
         self.hda.handleRequest(Method='POST',
                                path='/somewhere/updateRepository',
                                Body=urlencode(data, doseq=True)))
     repository = self.hd.getRepository('repository', 'domain')
     self.assertEquals([], repository['shopclosed'])
Example #59
0
    def testScope(self):
        # Log entries collected under a LogCollectorScope must be nested in
        # the collectedLog under that scope's key; nested scopes nest again.
        logwriter = CallTrace('logwriter',
                              emptyGeneratorMethods=['someMessage'])

        # Tree: A, B, D log at top level; C under scope_one; E, G under
        # scope_two; F under scope_two/scope_two_one.
        top = be(
            (Observable(), (LogCollector('default'), (logwriter, ),
                            (FilterMessages(allowed=['someMessage']), (
                                LogKeyValue(dict(name='A')),
                                (LogKeyValue(dict(name='B')), ),
                                (
                                    LogCollectorScope('scope_one'),
                                    (LogKeyValue(dict(name='C')), ),
                                ),
                                (LogKeyValue(dict(name='D')), ),
                                (
                                    LogCollectorScope('scope_two'),
                                    (
                                        LogKeyValue(dict(name='E')),
                                        (
                                            LogCollectorScope('scope_two_one'),
                                            (LogKeyValue(dict(name='F')), ),
                                        ),
                                    ),
                                    (LogKeyValue(dict(name='G')), ),
                                ),
                            )))))

        consume(top.all.someMessage())

        self.assertEqual(['someMessage', 'writeLog'],
                         logwriter.calledMethodNames())
        self.assertEqual(
            {
                'name': ['A', 'B', 'D'],
                'scope_one': {
                    'name': ['C'],
                },
                'scope_two': {
                    'name': ['E', 'G'],
                    'scope_two_one': {
                        'name': ['F']
                    }
                }
            }, logwriter.calledMethods[-1].kwargs['collectedLog'])
    def testShouldReportToClientOnHandle(self):
        # handle() must print a human-readable report of services with errors
        # and inactive services, and push aggregate counts back via report().
        self.client.returnValues['listServices'] = {
            'joe-joe-aiyedee': {
                'ipAddress': "127.0.0.1",
                'fqdn': "name.example.org",
                'number': 42,
                'lastseen': 1234567890.123456,
                'active': True,
                'type': "typeName",
                'port': 4321,
                'data': {'errors': 1, 'warnings': 1}},
            'other': {
                'ipAddress': "127.0.0.2",
                'fqdn': "other.example.org",
                'number': 2,
                'lastseen': 1.1,
                'active': False,  # False active
                'type': "typeName",
                'port': 4321},
        }

        with stdout_replaced() as out:
            consume(self.dna.all.handle())
            self.assertEqual('''\
AdminServicesReport: Error Services:
    type: typeName, identifier: joe-joe-aiyedee
AdminServicesReport: Inactive Services:
    type: typeName, identifier: other
''', out.getvalue())

        # One active service with errors/warnings, one inactive service.
        self.assertEqual(['listServices', 'report'], self.client.calledMethodNames())
        self.assertEqual({'Admin Information':
            {'Services':
                {'active':
                    {'count': 1},
                 'inactive':
                    {'count': 1}
                 }
            ,'Errors':
                {'errors': {'count': 1},
                'warnings': {'count': 1}
                }
            }
         }, self.client.calledMethods[-1].kwargs['values'])