# Example 1

class TestSwordController(TestController):
    def __init__(self, *args, **kwargs):
        TestController.__init__(self, *args, **kwargs)
        self.controllerName = "swordservice"

    def test_index(self):
        response = self.app.get(url(controller='swordservice', action='index'),
                                headers={
                                    'content-type': 'application/json',
                                    'user-agent': 'python',
                                    'X-On-Behalf-Of': 'wegrata',
                                    'X-Verbose': 'False'
                                })
        # Test response...

    @decorators.ModifiedServiceDoc(config["app_conf"]['lr.sword.docid'],
                                   decorators.update_authz())
    def test_create(self):
        with codecs.open(file_path, 'r', 'utf-8-sig') as f:
            pub_data = json.load(f)
        response = self.app.post(url(controller='swordservice',
                                     action='create'),
                                 params=json.dumps(pub_data),
                                 headers={
                                     'Content-Type': 'application/json',
                                     'user-agent': 'python',
                                     'X-On-Behalf-Of': 'wegrata',
                                     'X-Verbose': 'False'
                                 })


class TestObtainController(TestController):
    def __init__(self, *args, **kwargs):
        TestController.__init__(self, *args, **kwargs)
        self.controllerName = "obtain"

    @classmethod
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def setupClass(cls):
        cls.setup = True

        with open("lr/tests/data/nsdl_dc/data-000000000.json", 'r') as f:
            data = json.load(f)
        if hasattr(cls, "attr"):
            app = cls.app
        else:
            controller = TestObtainController(methodName="test_empty")
            app = controller.app

        cls.server = couchdb.Server(config['couchdb.url.dbadmin'])
        cls.db = cls.server[config['couchdb.db.resourcedata']]

        view = cls.db.view("_all_docs")
        for row in view.rows:
            if not row.id.startswith("_design/"):
                del cls.db[row.id]

        result = app.post('/publish', params=json.dumps(data), headers=headers)
        result = json.loads(result.body)
        cls.ids = []
        cls.ids.extend(
            map(lambda doc: doc['doc_ID'], result['document_results']))
        result = app.post('/publish', params=json.dumps(data), headers=headers)
        result = json.loads(result.body)
        cls.ids.extend(
            map(lambda doc: doc['doc_ID'], result['document_results']))
        cls.resourceLocators = map(lambda doc: doc['resource_locator'],
                                   data['documents'])
        done = False
        distributableIds = map(lambda id: id + '-distributable', cls.ids)
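        # Wait until a '-distributable' copy of every published document shows up in the view before running the tests.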
        while not done:
            view = cls.db.view('_all_docs', keys=distributableIds)
            done = len(distributableIds) == len(view.rows)
            time.sleep(0.5)

    @classmethod
    def tearDownClass(cls):
        for doc in cls.ids:
            try:
                cls.db.delete(cls.db[doc])
            except:
                pass
            try:
                cls.db.delete(cls.db[doc + '-distributable'])
            except:
                pass
        cls.db.commit()

    def _getInitialPostData(self):
        data = {"by_doc_ID": False, "by_resource_ID": True, "ids_only": False}
        return data

    def _validateResponse(self, resp, requestData, testids):
        data = json.loads(resp.body)
        requestData = json.loads(requestData)
        assert 'documents' in data
        assert len(data['documents']) > 0, json.dumps(data['documents'])
        byDocId = requestData.has_key('by_doc_ID') and requestData['by_doc_ID']
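        # By doc_ID each returned doc_ID must be one of the requested ids; otherwise the test expects doc_ID to carry the resource locator, so values are compared URL-unquoted.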
        for d in data['documents']:
            if byDocId:
                assert d['doc_ID'] in testids
            else:
                testids = [urllib.unquote_plus(x) for x in testids]
                assert urllib.unquote_plus(d['doc_ID']) in testids
            if not requestData.has_key('ids_only') or (
                    requestData.has_key('ids_only')
                    and not requestData['ids_only']):
                for doc in d['document']:
                    if requestData.has_key(
                            'by_doc_ID') and requestData['by_doc_ID']:
                        assert doc['doc_ID'] == d['doc_ID']
                    else:
                        assert urllib.unquote_plus(
                            doc['resource_locator']) == urllib.unquote_plus(
                                d['doc_ID'])
        return data

    def _validateError(self, error):
        data = json.loads(error)
        assert (data["OK"] == False)

    @ForceCouchDBIndexing()
    def test_create(self):
        params = self._getInitialPostData()
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(
            response, params,
            map(
                lambda doc: doc['key'],
                TestObtainController.db.view(
                    '_design/learningregistry-resource-location/_view/docs').
                rows))
        # Test response...

    @ForceCouchDBIndexing()
    def test_create_ids_only(self):
        params = self._getInitialPostData()
        params['ids_only'] = True
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(
            response, params,
            map(
                lambda doc: doc['key'],
                TestObtainController.db.view(
                    '_design/learningregistry-resource-location/_view/docs').
                rows))

    @SetFlowControl(True, config["lr.obtain.docid"])
    @ForceCouchDBIndexing()
    def test_flow_control_enabled(self):
        params = self._getInitialPostData()
        params['ids_only'] = True
        params['by_doc_ID'] = True
        params['by_resource_ID'] = False
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        result = json.loads(response.body)
        assert result.has_key('resumption_token')
        assert len(result['documents']) == 100

    @SetFlowControl(False, config["lr.obtain.docid"])
    @ForceCouchDBIndexing()
    def test_flow_control_disabled(self):
        params = self._getInitialPostData()
        params['ids_only'] = True
        params['by_doc_ID'] = True
        params['by_resource_ID'] = False
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        result = json.loads(response.body)
        assert not result.has_key('resumption_token')

    @ForceCouchDBIndexing()
    def test_create_by_doc_id(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        params['by_resource_ID'] = False
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(response, params, TestObtainController.db)

    @ForceCouchDBIndexing()
    def test_create_by_resource_id(self):
        params = self._getInitialPostData()
        del params['by_doc_ID']
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(
            response, params,
            map(
                lambda doc: doc['key'],
                TestObtainController.db.view(
                    '_design/learningregistry-resource-location/_view/docs').
                rows))

    @ForceCouchDBIndexing()
    def test_create_by_doc_id_and_by_resource_id(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        params['by_resource_ID'] = False
        params['request_IDs'] = self.ids
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(response, params, TestObtainController.db)

    @ForceCouchDBIndexing()
    def test_create_by_doc_id_and_by_resource_id_fail(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = False
        params['by_resource_ID'] = True
        params['request_IDs'] = self.resourceLocators
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(response, params,
                               TestObtainController.resourceLocators)

    @ForceCouchDBIndexing()
    def test_create_by_doc_id_subset_of_ids(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        params['by_resource_ID'] = False
        params['ids_only'] = True
        params['request_IDs'] = self.ids[0:2]
        print params
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(response, params, self.ids[0:2])

    @ForceCouchDBIndexing()
    def test_create_by_doc_id_and_by_resource_id_both_true(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        params['by_resource_ID'] = True
        params['request_IDs'] = self.ids[0:1]
        params = json.dumps(params)
        try:
            response = self.app.post(url(controller='obtain'),
                                     params=params,
                                     headers=headers)
        except AppError as ex:
            self._validateError(ex.message[ex.message.rfind('{'):])

    @ForceCouchDBIndexing()
    def test_create_by_doc_id_and_by_resource_id_fail_both_false(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = False
        params['by_resource_ID'] = False
        params['request_IDs'] = self.resourceLocators[0:1]
        params = json.dumps(params)
        try:
            response = self.app.post(url(controller='obtain'),
                                     params=params,
                                     headers=headers)
        except AppError as ex:
            self._validateError(ex.message[ex.message.rfind('{'):])
            # expected error

    @ForceCouchDBIndexing()
    def test_create_by_doc_id_and_by_resource_id_empty(self):
        params = self._getInitialPostData()
        del params['by_doc_ID']
        del params['by_resource_ID']
        params['request_IDs'] = TestObtainController.resourceLocators
        params = json.dumps(params)
        response = self.app.post(url(controller='obtain'),
                                 params=params,
                                 headers=headers)
        self._validateResponse(response, params,
                               TestObtainController.resourceLocators)

    @ForceCouchDBIndexing()
    def test_request_id_with_uri_escaped_characters(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = False
        params['by_resource_ID'] = True
        testId = urllib.quote_plus(TestObtainController.resourceLocators[0])
        params['request_ID'] = testId
        response = self.app.get(url(controller='obtain', **params))
        self._validateResponse(response, json.dumps(params), [testId])

    @ForceCouchDBIndexing()
    def test_request_ID_doc_get(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        del params['by_resource_ID']
        params['request_ID'] = TestObtainController.ids[0]
        response = self.app.get(url(controller='obtain', **params))
        self._validateResponse(response, json.dumps(params),
                               [TestObtainController.ids[0]])

    @ForceCouchDBIndexing()
    def test_request_id_doc_get(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        del params['by_resource_ID']
        params['request_id'] = TestObtainController.ids[0]
        response = self.app.get(url(controller='obtain', **params))
        self._validateResponse(response, json.dumps(params),
                               [TestObtainController.ids[0]])

    @ForceCouchDBIndexing()
    def test_request_ID_resource_get(self):
        params = self._getInitialPostData()
        params['request_ID'] = TestObtainController.resourceLocators[0]
        response = self.app.get(url(controller='obtain', **params))
        self._validateResponse(response, json.dumps(params),
                               [TestObtainController.resourceLocators[0]])

    @ForceCouchDBIndexing()
    def test_request_id_resource_get(self):
        params = self._getInitialPostData()
        params['request_id'] = TestObtainController.resourceLocators[0]
        response = self.app.get(url(controller='obtain', **params))
        self._validateResponse(response, json.dumps(params),
                               [TestObtainController.resourceLocators[0]])

    @SetFlowControl(True, config["lr.obtain.docid"])
    @ForceCouchDBIndexing()
    def test_request_ID_resource_and_token_get(self):
        params = self._getInitialPostData()
        params['request_id'] = TestObtainController.resourceLocators[0]
        firstResponse = json.loads(
            self.app.get(url(controller='obtain', **params)).body)
        params['resumption_token'] = firstResponse['resumption_token']
        response = json.loads(
            self.app.get(url(controller='obtain', **params), status=500).body)
        assert response["OK"] == False

    @SetFlowControl(True, config["lr.obtain.docid"])
    @ForceCouchDBIndexing()
    def test_request_ID_resource_and_token_get_complete(self):
        params = self._getInitialPostData()
        testKey = TestObtainController.resourceLocators[0]
        params['request_id'] = testKey
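        # Page through the results by following resumption_token until the service stops returning one.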
        while True:
            path = url(controller='obtain', **params)
            response = self.app.get(path)
            data = json.loads(response.body)
            self._validateResponse(response, json.dumps(params), [testKey])
            if "resumption_token" not in data or data[
                    'resumption_token'] is None:
                break
            params = {'resumption_token': data['resumption_token']}

    @SetFlowControl(True, config["lr.obtain.docid"], doc_limit=37, id_limit=37)
    @ForceCouchDBIndexing()
    def test_request_ID_resource_and_token_get_complete_no_key(self):
        params = self._getInitialPostData()
        while True:
            path = url(controller='obtain', **params)
            response = self.app.get(path)
            data = json.loads(response.body)
            self._validateResponse(response, json.dumps(params),
                                   TestObtainController.resourceLocators)
            if "resumption_token" not in data or data[
                    'resumption_token'] is None:
                break
            params = {'resumption_token': data['resumption_token']}

    @ForceCouchDBIndexing()
    def test_get_fail_both_false(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = False
        params['by_resource_ID'] = False
        params['request_ID'] = TestObtainController.resourceLocators[0:1]
        try:
            response = self.app.get(url(controller='obtain', **params),
                                    headers=headers)
        except AppError as ex:
            self._validateError(ex.message[ex.message.rfind('{'):])

    @ForceCouchDBIndexing()
    def test_obtain_empty_resource_locator(self):
        import uuid
        params = self._getInitialPostData()
        params['by_doc_ID'] = False
        params['by_resource_ID'] = True
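        # Re-save a copy of each published doc with an empty resource_locator and a fresh doc_ID so the view gains rows keyed by "".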
        for id in TestObtainController.ids:
            doc = TestObtainController.db[id]
            del doc['_id']
            del doc['_rev']
            doc['resource_locator'] = ""
            doc['doc_ID'] = uuid.uuid1().hex
            TestObtainController.db.save(doc)
        results = TestObtainController.db.view(
            '_design/learningregistry-resource-location/_view/docs')
        response = self.app.get(url(controller='obtain', **params),
                                headers=headers)
        resourceLocators = [row.key for row in results]
        self._validateResponse(response, json.dumps(params), resourceLocators)
        items_to_delete = (r for r in results if r.key == "")
        for i in items_to_delete:
            del self.db[i.id]

    @ForceCouchDBIndexing()
    def test_get_fail_both_true(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        params['by_resource_ID'] = True
        params['request_ID'] = TestObtainController.resourceLocators[0:1]
        try:
            response = self.app.get(url(controller='obtain', **params),
                                    headers=headers)
        except AppError as ex:
            self._validateError(ex.message[ex.message.rfind('{'):])

    @ForceCouchDBIndexing()
    def test_bad_doc_id(self):
        params = self._getInitialPostData()
        params['by_doc_ID'] = True
        params['by_resource_ID'] = False
        params['request_ID'] = 'xxxx'
        response = self.app.get(url(controller='obtain', **params),
                                headers=headers)

    def test_empty(self):
        pass


def DataCleaner(testName, type="Basic"):

    #write a document for each combination of test key and test identity (currently 3X3), multiplied
    #by the data multiplier. Returns the response from posting this array of docs to the publish
    #service. Also attempts to force a reindex (by calling the slice view directly) before returning.
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def writeTestData(obj):
        test_data = {"documents": []}

        for x in xrange(0, DATA_MULTIPLIER):
            for testKey in obj.testKeys:
                for testIdentity in obj.identities:
                    obj.setupCount = obj.setupCount + 1
                    setupCountFlag = testName + "setupCount" + str(obj.setupCount)
                    testDoc = buildTestDoc(testIdentity + testName,
                                        [setupCountFlag, obj.testDataKey, testKey + testName, obj.otherKeys[0], obj.otherKeys[1]],
                                        "metadata",
                                        [obj.testSchema + testName])
                    test_data["documents"].append(testDoc)

        docs_json = json.dumps(test_data)

        #all the commented out code below is for debugging, principally to measure time to
        #publish and time to reindex, but also to determine whether the re-index call is
        #actually working
        #info = urlopen(obj.couch_url+"/resource_data/_design/learningregistry-slice/_info")
        #print "info, pre-publish: " + str(info.read())
        #start = time.clock()
        #print "about to publish " + str(len(test_data["documents"])) + " documents."
        response = obj.app.post('/publish', params=docs_json, headers={"Content-type": "application/json"})
        #pub_time = time.clock()
        #print "published, elapsed time: " + str(pub_time - start) + ". about to wait for index..."
        #info = urlopen(obj.couch_url+"/resource_data/_design/learningregistry-slice/_info")
        #print "info, post-publish: " + str(info.read())
        #This call is here to attempt to force a re-index. Not clear if it is working properly
        urlopen(obj.couch_url + "/resource_data/_design/learningregistry-slicelite/_view/by-date?reduce=false&descending=true&limit=1").read()
        #print "indexed, elapsed time: " + str(time.clock() - pub_time) + ", output is: " + str(url_result.read())
        #info = urlopen(obj.couch_url+"/resource_data/_design/learningregistry-slice/_info")
        #print "info, post-index: " + str(info.read())

        return response

    #for each identity in test identities, writes a doc with all 3 test keys
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def writeMultiKeyTestData(obj):
        test_data = {"documents": []}
        for testIdentity in obj.identities:
            obj.setupCount = obj.setupCount + 1
            setupCountFlag = testName + "setupCount" + str(obj.setupCount)
            testDoc = buildTestDoc(testIdentity + testName,
                                   [setupCountFlag, obj.testDataKey, obj.testKeys[0] + testName, obj.testKeys[1] + testName, obj.testKeys[2] + testName, obj.otherKeys[0], obj.otherKeys[1]], "metadata", [obj.testSchema + testName])
            test_data["documents"].append(testDoc)

        docs_json = json.dumps(test_data)
        response = obj.app.post('/publish', params=docs_json, headers=json_headers)
        urlopen(obj.couch_url + "/resource_data/_design/learningregistry-slicelite/_view/by-date?limit=1").read()
        return response

    #writes 150 docs for the purpose of resumption testing
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def writeResumptionTestData(obj):
        num_docs = 150
        #i=0
        test_data = {"documents": []}
        #while i<num_docs:
        for x in xrange(0, num_docs):
            obj.setupCount = obj.setupCount + 1
            setupCountFlag = testName + "setupCount" + str(obj.setupCount)
            testDoc = buildTestDoc(obj.identities[1] + testName, [setupCountFlag, obj.testDataKey, obj.testKeys[0] + testName, obj.testKeys[1] + testName, obj.testKeys[2] + testName, obj.otherKeys[0], obj.otherKeys[1]], "metadata", [obj.testSchema + testName])
            test_data["documents"].append(testDoc)
            #i = i+1
        docs_json = json.dumps(test_data)
        response = obj.app.post('/publish', params=docs_json, headers=json_headers)
        urlopen(obj.couch_url + "/resource_data/_design/learningregistry-slicelite/_view/by-date?limit=1").read()
        return response

    #simple template for writing test docs
    def buildTestDoc(submitter, keys, type, schemas):
        testDoc = {
                   "resource_data": "data",
                   "keys": keys, \
                   "TOS": {
                        "submission_attribution": "My Attribution",
                        "submission_TOS": "My TOS"
                    },
                   "payload_placement": "inline",
                   "active": True,
                   "resource_locator": "http://my.resource.locator",
                   "doc_type": "resource_data",
                   "resource_data_type": type,
                   "payload_schema_locator": "http://my.scehma.locator",
                   "payload_schema": schemas,
                   "doc_version": "0.23.0",
                   "identity": {
                               "submitter": submitter,
                               "submitter_type": "agent"
                               }
                   }
        return testDoc

    #attempt to delete all test data: get the list of doc ids to delete by slicing for the signature test
    #data key, then try to delete the doc for each id and for each id+"-distributable". A single pass can
    #fail to delete many docs, and the exception raised has an empty message, so the cause is not yet known.
    #Making multiple passes improves the number of documents successfully deleted, so we iterate until all
    #docs are deleted or we have made 10 attempts.
    def removeTestData(obj):
        deleteFail = 0
        deleteDistributableFail = 0
        deleteAttempts = 0
        while True:
            deleteFail = 0
            deleteDistributableFail = 0
            #del_key = quote("{\"tag\": \"metadata\"}")
            url = obj.couch_url + "/resource_data/_design/learningregistry-slicelite/_view/any-tags-by-date?reduce=false"
            #url = url % obj.testDataKey
            #fragment = &startkey=[\"%s\", 0]
            response = urlopen(url)
            data = json.loads(response.read())
            rows = data["rows"]
            for row in rows:
                doc_id = row["id"]
                try:
                    del obj.db[doc_id]
                except Exception:
                    #print "error deleting doc_id: " + doc_id + ". Message: " + e.message
                    deleteFail = deleteFail + 1

            deleteAttempts = deleteAttempts + 1
            if (deleteFail == 0 and deleteDistributableFail == 0) or deleteAttempts > 10:
                break
            else:
                pass  # print "deleteFail: " + str(deleteFail) + ", deleteDistributableFail: " + str(deleteDistributableFail)

    #a decorator that wraps each test case, writing test data before the test runs and removing it after
    #(see the usage sketch after this function)
    def test_decorator(fn):
        @wraps(fn)
        def test_decorated(self, *args, **kw):
            try:
                #print "Wrapper Before...."
                if(type == "Basic"):
                    self.test_data_response = writeTestData(self)
                elif(type == "Multi"):
                    self.test_data_response = writeMultiKeyTestData(self)
                elif(type == "Resumption"):
                    self.test_data_response = writeResumptionTestData(self)
                return fn(self, *args, **kw)
            except:
                raise
            finally:
                #removeTestData(self)
                removeTestData(self)
                self.test_data_response = None
                #print "Wrapper After...."

        return test_decorated
    return test_decorator
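
# Hedged usage sketch (not part of the original source): DataCleaner returns a
# decorator, so a test class applies it per test method roughly like this.
# TestSliceController and the method bodies are illustrative assumptions; only
# the decorator call pattern comes from the code above.
#
#     class TestSliceController(TestController):
#         @DataCleaner("SliceBasic")                          # 3x3 basic docs published first
#         @ForceCouchDBIndexing()
#         def test_slice_by_key(self):
#             ...  # self.test_data_response holds the publish response
#
#         @DataCleaner("SliceResumption", type="Resumption")  # 150 docs for paging tests
#         def test_resumption(self):
#             ...
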

# Example 4

class TestHarvestController(TestController):
    def __init__(self, *args, **kwargs):
        TestController.__init__(self, *args, **kwargs)
        self.controllerName = "obtain"

    @classmethod
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def setupClass(self):
        self.setup = True
        with open("lr/tests/data/nsdl_dc/data-000000000.json", 'r') as f:
            data = json.load(f)
        if hasattr(self, "attr"):
            app = self.app
        else:
            controller = TestHarvestController(methodName="test_empty")
            app = controller.app
        h = harvest()
        self.db = h.db
        self.server = h.server
        global db
        db = self.db
        result = app.post('/publish', params=json.dumps(data), headers=headers)
        result = json.loads(result.body)
        self.ids = []
        self.ids.extend(
            map(lambda doc: doc['doc_ID'], result['document_results']))
        result = app.post('/publish', params=json.dumps(data), headers=headers)
        result = json.loads(result.body)
        self.ids.extend(
            map(lambda doc: doc['doc_ID'], result['document_results']))
        self.resourceLocators = map(lambda doc: doc['resource_locator'],
                                    data['documents'])
        done = False
        distributableIds = map(lambda id: id + '-distributable', self.ids)
        while not done:
            view = self.db.view('_all_docs', keys=distributableIds)
            done = len(distributableIds) == len(view.rows)
            time.sleep(0.5)

    def test_empty(self):
        pass

    @classmethod
    def tearDownClass(self):
        for id in self.ids:
            try:
                del self.db[id]
            except:
                pass
            try:
                del self.db[id + '-distributable']
            except:
                pass

    def validate_getrecord_response_base(self, response):
        data = json.loads(response.body)
        assert data.has_key('OK') and data['OK']
        assert data.has_key('request')
        assert data['request'].has_key(
            'verb') and data['request']['verb'] == 'getrecord'
        assert data.has_key('getrecord')
        return data

    def validate_getrecord_response(self, response, targetId):
        data = self.validate_getrecord_response_base(response)
        for doc in data['getrecord']['record']:
            assert doc.has_key('resource_data')
            assert doc['resource_data']['doc_ID'] == targetId

    def validate_getrecord_response_resource_id(self, response,
                                                resourceLocator):
        data = self.validate_getrecord_response_base(response)
        for doc in data['getrecord']['record']:
            assert doc.has_key('resource_data')
            assert unquote_plus(
                doc['resource_data']['resource_locator']) == unquote_plus(
                    resourceLocator)

    def validate_getrecord_id_doesnot_exist(self, resp):
        doc = json.loads(resp.body)
        assert doc.has_key("OK") and not doc["OK"]
        assert doc.has_key("error") and doc['error'] == "idDoesNotExist"

    @ForceCouchDBIndexing()
    def test_getrecord_get_by_doc_id(self):
        response = self.app.get(
            url('harvest',
                id='getrecord',
                request_ID=self.ids[0],
                by_doc_ID=True))
        self.validate_getrecord_response(response, self.ids[0])

    @ForceCouchDBIndexing()
    def test_getrecord_get_by_resource_id(self):
        response = self.app.get(
            url('harvest',
                id='getrecord',
                request_ID=self.resourceLocators[0],
                by_doc_ID=False,
                by_resource_id=True))
        self.validate_getrecord_response_resource_id(response,
                                                     self.resourceLocators[0])

    @ForceCouchDBIndexing()
    def test_getrecord_get_by_resource_id_url_quoted(self):
        testID = quote_plus(self.resourceLocators[0])
        response = self.app.get(
            url('harvest',
                id='getrecord',
                request_ID=testID,
                by_doc_ID=False,
                by_resource_id=True))
        self.validate_getrecord_response_resource_id(response, testID)

    @ForceCouchDBIndexing()
    def test_getrecord_post_by_resource_id(self):
        data = json.dumps({
            'request_ID': self.resourceLocators[0],
            'by_resource_ID': True,
            'by_doc_ID': False
        })
        response = self.app.post(url(controller='harvest', action='getrecord'),
                                 params=data,
                                 headers=headers)
        self.validate_getrecord_response_resource_id(response,
                                                     self.resourceLocators[0])

    @ForceCouchDBIndexing()
    def test_getrecord_post_by_doc_id(self):
        data = json.dumps({'request_ID': self.ids[0], 'by_doc_ID': True})
        response = self.app.post(url(controller='harvest', action='getrecord'),
                                 params=data,
                                 headers=headers)
        self.validate_getrecord_response(response, self.ids[0])

    @ForceCouchDBIndexing()
    def test_getrecord_get_by_doc_id_fail(self):
        response = self.app.get(
            url('harvest', id='getrecord', request_ID="blah", by_doc_ID=True))
        self.validate_getrecord_id_doesnot_exist(response)

    @ForceCouchDBIndexing()
    def test_getrecord_get_by_resource_id_fail(self):
        response = self.app.get(
            url('harvest',
                id='getrecord',
                request_ID="blah",
                by_doc_ID=False,
                by_resource_id=True))
        self.validate_getrecord_id_doesnot_exist(response)

    @ForceCouchDBIndexing()
    def test_getrecord_post_by_resource_id_fail(self):
        data = json.dumps({
            'request_ID': "blah",
            'by_resource_ID': True,
            'by_doc_ID': False
        })
        response = self.app.post(url(controller='harvest', action='getrecord'),
                                 params=data,
                                 headers=headers)
        self.validate_getrecord_id_doesnot_exist(response)

    @ForceCouchDBIndexing()
    def test_getrecord_post_by_doc_id_fail(self):
        data = json.dumps({'request_ID': "blah", 'by_doc_ID': True})
        response = self.app.post(url(controller='harvest', action='getrecord'),
                                 params=data,
                                 headers=headers)
        self.validate_getrecord_id_doesnot_exist(response)

    def _validate_error_message(self, response):
        data = json.loads(response.body)
        assert (not data['OK'])

    def validate_listrecords_response(self, response):
        data = json.loads(response.body)
        if not data['OK']:
            print data
        assert data.has_key('OK') and data['OK']
        assert data.has_key('listrecords')
        assert len(data['listrecords']) > 0
        for doc in data['listrecords']:
            assert doc.has_key('record')
            record = doc['record']
            assert record.has_key('resource_data')
            resource = record['resource_data']
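            # Compare with the fractional-seconds suffix stripped so sub-second precision differences don't fail the range check.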
            nodeTimestamp = resource['node_timestamp']
            assert nodeTimestamp[:nodeTimestamp.rfind(
                '.')] >= self.from_date[:self.from_date.rfind('.')]
            assert nodeTimestamp[:nodeTimestamp.rfind(
                '.')] <= self.until_date[:self.until_date.rfind('.')]

    @ForceCouchDBIndexing()
    def test_listrecords_get(self):
        self.until_date = datetime.utcnow().isoformat() + "Z"
        response = self.app.get(url('harvest', id='listrecords'),
                                params={
                                    'from': self.from_date,
                                    'until': self.until_date
                                })
        self.validate_listrecords_response(response)

    @ForceCouchDBIndexing()
    def test_listrecords_post(self):
        self.until_date = datetime.utcnow().isoformat() + "Z"
        data = json.dumps({'from': self.from_date, 'until': self.until_date})
        response = self.app.post(url(controller='harvest',
                                     action='listrecords'),
                                 params=data,
                                 headers=headers)
        self.validate_listrecords_response(response)

    @ForceCouchDBIndexing()
    def test_listrecords_post_bad_from(self):
        self.until_date = datetime.utcnow().isoformat() + "Z"
        data = json.dumps({'from': "aaa", 'until': self.until_date})
        response = self.app.post(url(controller='harvest',
                                     action='listrecords'),
                                 params=data,
                                 headers=headers)
        self._validate_error_message(response)

    @ForceCouchDBIndexing()
    def test_listrecords_post_bad_until(self):
        data = json.dumps({'from': self.from_date, 'until': 'self.until_date'})
        response = self.app.post(url(controller='harvest',
                                     action='listrecords'),
                                 params=data,
                                 headers=headers)
        self._validate_error_message(response)

    def validate_listidentifiers_response(self, response):
        data = json.loads(response.body)
        if not data['OK']:
            print data
        assert data.has_key('OK') and data['OK']
        assert data.has_key('listidentifiers')
        assert len(data['listidentifiers']) > 0
        for doc in data['listidentifiers']:
            assert doc.has_key('header')
            record = doc['header']
            assert record.has_key('identifier')

    @ForceCouchDBIndexing()
    def test_listidentifiers_get(self):
        self.until_date = datetime.utcnow().isoformat() + "Z"
        response = self.app.get(url('harvest', id='listidentifiers'),
                                params={
                                    'from': self.from_date,
                                    'until': self.until_date
                                })
        self.validate_listidentifiers_response(response)

    @ForceCouchDBIndexing()
    def test_listidentifiers_post(self):
        self.until_date = datetime.utcnow().isoformat() + "Z"
        data = json.dumps({'from': self.from_date, 'until': self.until_date})
        response = self.app.post(url(controller='harvest',
                                     action='listidentifiers'),
                                 params=data,
                                 headers={'content-type': 'application/json'})
        self.validate_listidentifiers_response(response)

    @ForceCouchDBIndexing()
    def test_listidentifiers_post_bad_from(self):
        self.until_date = datetime.utcnow().isoformat() + "Z"
        data = json.dumps({'from': 'self.from_date', 'until': self.until_date})
        response = self.app.post(url(controller='harvest',
                                     action='listidentifiers'),
                                 params=data,
                                 headers={'content-type': 'application/json'})
        self._validate_error_message(response)

    @ForceCouchDBIndexing()
    def test_listidentifiers_post_bad_until(self):
        data = json.dumps({'from': self.from_date, 'until': 'self.until_date'})
        response = self.app.post(url(controller='harvest',
                                     action='listidentifiers'),
                                 params=data,
                                 headers={'content-type': 'application/json'})
        self._validate_error_message(response)

    @ForceCouchDBIndexing()
    def test_listidentifiers_flow_control_enabled(self):
        nodeDb = self.server[config["couchdb.db.node"]]
        serviceDoc = nodeDb[config["lr.harvest.docid"]]
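        # Temporarily enable flow control with an id_limit of 100; the original service doc settings are restored after the request.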
        flowControlCurrent = serviceDoc['service_data']['flow_control']
        serviceDoc['service_data']['flow_control'] = True
        idLimit = None
        if serviceDoc['service_data'].has_key('id_limit'):
            idLimit = serviceDoc['service_data']['id_limit']
        serviceDoc['service_data']['id_limit'] = 100
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        response = self.app.get(url('harvest', id='listidentifiers'))
        result = json.loads(response.body)
        serviceDoc['service_data']['flow_control'] = flowControlCurrent
        if idLimit is None:
            del serviceDoc['service_data']['id_limit']
        else:
            serviceDoc['service_data']['id_limit'] = idLimit
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        assert result.has_key('resumption_token')
        assert len(result['documents']) == 100

    @ForceCouchDBIndexing()
    def test_listidentifiers_flow_control_disabled(self):
        nodeDb = self.server[config["couchdb.db.node"]]
        serviceDoc = nodeDb[config["lr.harvest.docid"]]
        flowControlCurrent = serviceDoc['service_data']['flow_control']
        serviceDoc['service_data']['flow_control'] = False
        serviceDoc['service_data']['id_limit'] = 100
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        response = self.app.get(url('harvest', id='listidentifiers'))
        result = json.loads(response.body)
        serviceDoc['service_data']['flow_control'] = flowControlCurrent
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        assert not result.has_key('resumption_token')

    @ForceCouchDBIndexing()
    def test_listrecords_flow_control_enabled(self):
        nodeDb = self.server[config["couchdb.db.node"]]
        serviceDoc = nodeDb[config["lr.harvest.docid"]]
        flowControlCurrent = serviceDoc['service_data']['flow_control']
        serviceDoc['service_data']['flow_control'] = True
        idLimit = None
        if serviceDoc['service_data'].has_key('id_limit'):
            idLimit = serviceDoc['service_data']['id_limit']
        serviceDoc['service_data']['id_limit'] = 100
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        response = self.app.get(url('harvest', id='listrecords'))
        result = json.loads(response.body)
        serviceDoc['service_data']['flow_control'] = flowControlCurrent
        if idLimit is None:
            del serviceDoc['service_data']['id_limit']
        else:
            serviceDoc['service_data']['id_limit'] = idLimit
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        assert result.has_key('resumption_token')
        assert len(result['documents']) == 100

    @ForceCouchDBIndexing()
    def test_listrecords_flow_control_disabled(self):
        nodeDb = self.server[config["couchdb.db.node"]]
        serviceDoc = nodeDb[config["lr.harvest.docid"]]
        flowControlCurrent = serviceDoc['service_data']['flow_control']
        serviceDoc['service_data']['flow_control'] = False
        serviceDoc['service_data']['id_limit'] = 100
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        response = self.app.get(url('harvest', id='listrecords'))
        result = json.loads(response.body)
        serviceDoc['service_data']['flow_control'] = flowControlCurrent
        nodeDb[config["lr.harvest.docid"]] = serviceDoc
        assert not result.has_key('resumption_token')

    def validate_identify_response(self, response):
        data = json.loads(response.body)
        assert data.has_key('OK') and data['OK']
        assert data.has_key('identify')
        assert data['identify'].has_key('node_id')
        assert data['identify'].has_key('repositoryName')
        assert data['identify'].has_key('baseURL')
        assert data['identify'].has_key('protocolVersion')
        assert data['identify'].has_key('service_version')
        assert data['identify'].has_key('earliestDatestamp')
        assert data['identify'].has_key('deletedRecord')
        assert data['identify'].has_key('granularity')
        assert data['identify'].has_key('adminEmail')

    @ForceCouchDBIndexing()
    def test_identify_get(self):
        response = self.app.get(url(controller='harvest', action='identify'))
        self.validate_identify_response(response)

    @ForceCouchDBIndexing()
    def test_identify_post(self):
        response = self.app.post(url(controller='harvest', action='identify'),
                                 params={},
                                 headers=headers)
        self.validate_identify_response(response)

    def validate_listmetadataformats_response(self, response):
        data = json.loads(response.body)
        assert data.has_key('OK') and data['OK']
        assert data.has_key('listmetadataformats')
        metadata_formats = data['listmetadataformats']
        assert len(metadata_formats) >= 0
        for format in metadata_formats:
            assert format.has_key('metadataformat')
            assert format['metadataformat'].has_key('metadataPrefix')

    @ForceCouchDBIndexing()
    def test_listmetadataformats_get(self):
        response = self.app.get(url('harvest', id='listmetadataformats'))
        self.validate_listmetadataformats_response(response)

    @ForceCouchDBIndexing()
    def test_listmetadataformats_post(self):
        response = self.app.post(url(controller='harvest',
                                     action='listmetadataformats'),
                                 params={},
                                 headers=headers)
        self.validate_listmetadataformats_response(response)

    def validate_listsets_response(self, response):
        data = json.loads(response.body)
        assert data.has_key('OK') and not data['OK']

    @ForceCouchDBIndexing()
    def test_listsets_get(self):
        response = self.app.get(url('harvest', id='listsets'))
        self.validate_listsets_response(response)

    @ForceCouchDBIndexing()
    def test_listsets_post(self):
        response = self.app.post(url(controller='harvest', action='listsets'),
                                 params={},
                                 headers=headers)
        self.validate_listsets_response(response)


class TestExtractController(TestController):
    def _convertDateTime(self, dt):
        epoch = parse_date("1970-01-01T00:00:01Z")
        if isinstance(dt, str) or isinstance(dt, unicode):
            dt = parse_date(dt)
        dt = dt - epoch
        return int(math.floor(dt.total_seconds()))

    def _validateJsonStructure(self, data):
        assert "documents" in data
        assert len(data['documents']) > 0
        for doc in data['documents']:
            assert "result_data" in doc
            assert "resource_data" in doc

    def _validateDiscriminator(self, data, discriminator):
        for doc in data['documents']:
            assert doc['result_data']['discriminator'].startswith(discriminator)

    def _validateUntil(self, data, until):
        until = self._convertDateTime(until)
        for doc in data['documents']:
            for envelope in doc['resource_data']:
                timestamp = self._convertDateTime(envelope['node_timestamp'])
                assert timestamp <= until

    def _validateFrom(self, data, f):
        f = self._convertDateTime(f)
        for doc in data['documents']:
            for envelope in doc['resource_data']:
                timestamp = self._convertDateTime(envelope['node_timestamp'])
                assert timestamp >= f
    
    @classmethod
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def setupClass(self):
        self.setup = True
        with open("lr/tests/data/nsdl_dc/data-000000000.json",'r') as f:
            data = json.load(f)
        if hasattr(self, "attr"):
            app = self.app
        else:
            controller =  TestExtractController(methodName="test_empty")
            app = controller.app  
        h = harvest()
        self.db = h.db                      
        self.server = h.server
        global db
        db = self.db
        result = app.post('/publish', params=json.dumps(data), headers=headers)        
        result = json.loads(result.body)
        self.ids = []
        self.ids.extend(map(lambda doc: doc['doc_ID'],result['document_results']))
        result = app.post('/publish', params=json.dumps(data), headers=headers)        
        result = json.loads(result.body)
        self.ids.extend(map(lambda doc: doc['doc_ID'],result['document_results']))
        self.resourceLocators = map(lambda doc: doc['resource_locator'],data['documents'])
        done = False
        distributableIds = map(lambda id: id+'-distributable',self.ids)
        while not done:      
            view = self.db.view('_all_docs',keys=distributableIds)                
            done = len(distributableIds) == len(view.rows)
            time.sleep(0.5)

        #install data_service view  
        couchdb_url = config['couchdb.url.dbadmin']
        resource_data_db = config['couchdb.db.resourcedata']

        _pushCouchApp("../data_services/standards-alignment-dct-conformsTo", "{0}/{1}".format(couchdb_url,resource_data_db))

    def test_empty(self):
        pass
    @classmethod
    def tearDownClass(self):
        for id in self.ids:
            try:
                del self.db[id]
            except:
                pass
            try:
                del self.db[id+'-distributable']
            except:
                pass
    @ForceCouchDBIndexing()
    def test_get(self):
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json'))
        data = json.loads(response.body)
        self._validateJsonStructure(data)
    @ForceCouchDBIndexing()
    def test_get_resource(self):
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-resource/format/to-json'))
        data = json.loads(response.body)
        self._validateJsonStructure(data)        
    @ForceCouchDBIndexing()
    def test_get_with_discriminator(self):
        discriminator = "http://purl.org/ASN/"
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json',discriminator=discriminator))
        data = json.loads(response.body)
        self._validateJsonStructure(data)
        self._validateDiscriminator(data,discriminator)        
    @ForceCouchDBIndexing()
    def test_get_with_from(self):
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json'),params={"from":self.from_date})
        data = json.loads(response.body)
        self._validateJsonStructure(data)
        self._validateFrom(data,self.from_date)
    @ForceCouchDBIndexing()
    def test_get_with_until(self):
        until_date = datetime.utcnow().isoformat()+"Z"
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json',until=until_date))
        data = json.loads(response.body)
        self._validateJsonStructure(data)                
        self._validateUntil(data,until_date)
    @ForceCouchDBIndexing()
    def test_get_with_discriminator_until(self):
        discriminator = "http://purl.org/ASN/"
        until_date = datetime.utcnow().isoformat()+"Z"
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json',discriminator=discriminator,until=until_date))
        data = json.loads(response.body)
        self._validateJsonStructure(data)
        self._validateDiscriminator(data,discriminator)          
        self._validateUntil(data,until_date)
    @ForceCouchDBIndexing()
    def test_get_with_discriminator_from(self):
        discriminator = "http://purl.org/ASN/"
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json'),params={"discriminator":discriminator,"from":self.from_date})
        data = json.loads(response.body)
        self._validateJsonStructure(data)
        self._validateDiscriminator(data,discriminator)                  
        self._validateFrom(data,self.from_date)
    @ForceCouchDBIndexing()
    def test_get_with_discriminator_from_until(self):
        discriminator = "http://purl.org/ASN/"
        until_date = datetime.utcnow().isoformat()+"Z"
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json'),params={"discriminator":discriminator,"from":self.from_date, "until":until_date})
        data = json.loads(response.body)
        self._validateJsonStructure(data)
        self._validateDiscriminator(data,discriminator)                          
        self._validateFrom(data,self.from_date)
        self._validateUntil(data,until_date)
    @ForceCouchDBIndexing()
    def test_get_with_from_junk_date(self):
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json'),params={"from":"abc123"},status=500)
        data = json.loads(response.body)
        assert not data['OK']
    @ForceCouchDBIndexing()
    def test_get_with_until_junk_date(self):
        response = self.app.get(url('/extract/standards-alignment-dct-conformsTo/discriminator-by-ts/format/to-json'),params={"until":"abc123"},status=500)
        data = json.loads(response.body)
        assert not data['OK']        
    def test_invalid_Data_service(self):
        response = self.app.get(url('/extract/learningregistry-slice/docs/format/to-json'),status=406)


def DataCleaner(testName, type="Basic"):

    #write a document for each combination of test key and test identity (currently 3X3), multiplied
    #by the data multiplier. Returns the response from posting this array of docs to the publish
    #service. Also attempts to force a reindex (by calling the slice view directly) before returning.
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def writeTestData(obj):
        test_data = {"documents": []}

        for x in xrange(0, DATA_MULTIPLIER):
            for testKey in obj.testKeys:
                for testIdentity in obj.identities:
                    obj.setupCount = obj.setupCount + 1
                    setupCountFlag = testName + "setupCount" + str(
                        obj.setupCount)
                    testDoc = buildTestDoc(testIdentity + testName, [
                        setupCountFlag, obj.testDataKey, testKey + testName,
                        obj.otherKeys[0], obj.otherKeys[1]
                    ], "metadata", [obj.testSchema + testName])
                    test_data["documents"].append(testDoc)

        docs_json = json.dumps(test_data)

        #all the commented out code below is for debugging, principally to measure time to
        #publish and time to reindex, but also to determine whether the re-index call is
        #actually working
        #info = urlopen(obj.couch_url+"/resource_data/_design/learningregistry-slice/_info")
        #print "info, pre-publish: " + str(info.read())
        #start = time.clock()
        #print "about to publish " + str(len(test_data["documents"])) + " documents."
        response = obj.app.post('/publish',
                                params=docs_json,
                                headers={"Content-type": "application/json"})
        #pub_time = time.clock()
        #print "published, elapsed time: " + str(pub_time - start) + ". about to wait for index..."
        #info = urlopen(obj.couch_url+"/resource_data/_design/learningregistry-slice/_info")
        #print "info, post-publish: " + str(info.read())
        #This call is here to attempt to force a re-index. Not clear if it is working properly
        urlopen(
            obj.couch_url +
            "/resource_data/_design/learningregistry-slicelite/_view/by-date?reduce=false&descending=true&limit=1"
        ).read()
        #print "indexed, elapsed time: " + str(time.clock() - pub_time) + ", output is: " + str(url_result.read())
        #info = urlopen(obj.couch_url+"/resource_data/_design/learningregistry-slice/_info")
        #print "info, post-index: " + str(info.read())

        return response

    #for each identity in test identities, writes a doc with all 3 test keys
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def writeMultiKeyTestData(obj):
        test_data = {"documents": []}
        for testIdentity in obj.identities:
            obj.setupCount = obj.setupCount + 1
            setupCountFlag = testName + "setupCount" + str(obj.setupCount)
            testDoc = buildTestDoc(testIdentity + testName, [
                setupCountFlag, obj.testDataKey, obj.testKeys[0] + testName,
                obj.testKeys[1] + testName, obj.testKeys[2] + testName,
                obj.otherKeys[0], obj.otherKeys[1]
            ], "metadata", [obj.testSchema + testName])
            test_data["documents"].append(testDoc)

        docs_json = json.dumps(test_data)
        response = obj.app.post('/publish',
                                params=docs_json,
                                headers=json_headers)
        urlopen(
            obj.couch_url +
            "/resource_data/_design/learningregistry-slicelite/_view/by-date?limit=1"
        ).read()
        return response

    #writes 150 docs for the purpose of resumption testing
    @ModifiedServiceDoc(config['lr.publish.docid'], update_authz())
    def writeResumptionTestData(obj):
        num_docs = 150
        #i=0
        test_data = {"documents": []}
        #while i<num_docs:
        for x in xrange(0, num_docs):
            obj.setupCount = obj.setupCount + 1
            setupCountFlag = testName + "setupCount" + str(obj.setupCount)
            testDoc = buildTestDoc(obj.identities[1] + testName, [
                setupCountFlag, obj.testDataKey, obj.testKeys[0] + testName,
                obj.testKeys[1] + testName, obj.testKeys[2] + testName,
                obj.otherKeys[0], obj.otherKeys[1]
            ], "metadata", [obj.testSchema + testName])
            test_data["documents"].append(testDoc)
            #i = i+1
        docs_json = json.dumps(test_data)
        response = obj.app.post('/publish',
                                params=docs_json,
                                headers=json_headers)
        urlopen(
            obj.couch_url +
            "/resource_data/_design/learningregistry-slicelite/_view/by-date?limit=1"
        ).read()
        return response

    #simple template for writing test docs
    def buildTestDoc(submitter, keys, type, schemas):
        testDoc = {
                   "resource_data": "data",
                   "keys": keys, \
                   "TOS": {
                        "submission_attribution": "My Attribution",
                        "submission_TOS": "My TOS"
                    },
                   "payload_placement": "inline",
                   "active": True,
                   "resource_locator": "http://my.resource.locator",
                   "doc_type": "resource_data",
                   "resource_data_type": type,
                   "payload_schema_locator": "http://my.scehma.locator",
                   "payload_schema": schemas,
                   "doc_version": "0.23.0",
                   "identity": {
                               "submitter": submitter,
                               "submitter_type": "agent"
                               }
                   }
        return testDoc

    #attempt to delete all test data: get the list of doc ids to delete by slicing for the signature test
    #data key, then try to delete the doc for each id and for each id+"-distributable". A single pass can
    #fail to delete many docs, and the exception raised has an empty message, so the cause is not yet known.
    #Making multiple passes improves the number of documents successfully deleted, so we iterate until all
    #docs are deleted or we have made 10 attempts.
    def removeTestData(obj):
        deleteFail = 0
        deleteDistributableFail = 0
        deleteAttempts = 0
        while True:
            deleteFail = 0
            deleteDistributableFail = 0
            #del_key = quote("{\"tag\": \"metadata\"}")
            url = obj.couch_url + "/resource_data/_design/learningregistry-slicelite/_view/any-tags-by-date?reduce=false"
            #url = url % obj.testDataKey
            #fragment = &startkey=[\"%s\", 0]
            response = urlopen(url)
            data = json.loads(response.read())
            rows = data["rows"]
            for row in rows:
                doc_id = row["id"]
                try:
                    del obj.db[doc_id]
                except Exception:
                    #print "error deleting doc_id: " + doc_id + ". Message: " + e.message
                    deleteFail = deleteFail + 1

            deleteAttempts = deleteAttempts + 1
            if (deleteFail == 0
                    and deleteDistributableFail == 0) or deleteAttempts > 10:
                break
            else:
                pass  # print "deleteFail: " + str(deleteFail) + ", deleteDistributableFail: " + str(deleteDistributableFail)

    #a decorator that wraps each test case, writing test data before the test runs and removing it after
    def test_decorator(fn):
        @wraps(fn)
        def test_decorated(self, *args, **kw):
            try:
                #print "Wrapper Before...."
                if (type == "Basic"):
                    self.test_data_response = writeTestData(self)
                elif (type == "Multi"):
                    self.test_data_response = writeMultiKeyTestData(self)
                elif (type == "Resumption"):
                    self.test_data_response = writeResumptionTestData(self)
                return fn(self, *args, **kw)
            except:
                raise
            finally:
                #removeTestData(self)
                removeTestData(self)
                self.test_data_response = None
                #print "Wrapper After...."

        return test_decorated

    return test_decorator