def test_05_cache_success(self):
    # Exercise the cache path of workflow.lookup twice: first with a cache
    # mock that reports the item as queued, then with one that reports a
    # fully cached success record.
    identifiers = [{"id" : "10.cached"}]

    # load the plugins that this test relies on
    plugin_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "plugins", "test_workflow", "test_05"
    )
    plugin.PluginFactory.load_from_directory(plugin_dir=plugin_path)
    cache.check_cache = mock_queue_cache

    # a queued item should land in the "processing" bucket
    response = workflow.lookup(identifiers)
    assert len(response.processing) == 1
    record = response.processing[0]
    assert record['identifier']['id'] == "10.cached"
    assert record['identifier']['type'] == "doi"
    assert record['identifier']['canonical'] == "doi:10.cached"

    # swap in a cache mock that returns a real (non-stale) hit
    cache.check_cache = mock_success_cache
    saved_is_stale = workflow._is_stale
    workflow._is_stale = mock_is_stale_false

    identifiers = [{"id" : "10.cached"}]
    response = workflow.lookup(identifiers)
    workflow._is_stale = saved_is_stale

    # a properly cached item should land in "results"
    assert len(response.results) == 1
    record = response.results[0]
    assert record['identifier'][0]['id'] == "10.cached"
    assert record['identifier'][0]['type'] == "doi"
    assert record['identifier'][0]['canonical'] == "doi:10.cached", record
    assert record['title'] == "cached"
def test_05_cache_success(self):
    # Same scenario as the plugin-based variant, but with type detection
    # and canonicalisation wired up through config instead of a plugin dir:
    # a queued cache hit first, then a successful cache hit.
    identifiers = [{"id" : "10.cached"}]

    # configure the mocks for the first lookup
    config.type_detection = ["mock_doi_type", "mock_pmid_type"]
    config.canonicalisers = {"doi" : "mock_doi_canon", "pmid" : "mock_pmid_canon"}
    cache.check_cache = mock_queue_cache

    # queued items are reported via the "processing" bucket
    response = workflow.lookup(identifiers)
    assert len(response.processing) == 1
    record = response.processing[0]
    assert record['identifier']['id'] == "10.cached"
    assert record['identifier']['type'] == "doi"
    assert record['identifier']['canonical'] == "doi:10.cached"

    # switch the cache mock to a successful, non-stale hit
    cache.check_cache = mock_success_cache
    saved_is_stale = workflow._is_stale
    workflow._is_stale = mock_is_stale_false

    identifiers = [{"id" : "10.cached"}]
    response = workflow.lookup(identifiers)
    workflow._is_stale = saved_is_stale

    # cached successes are reported via "results"
    assert len(response.results) == 1
    record = response.results[0]
    assert record['identifier'][0]['id'] == "10.cached"
    assert record['identifier'][0]['type'] == "doi"
    assert record['identifier'][0]['canonical'] == "doi:10.cached", record
    assert record['title'] == "cached"
def test_06_cache_prior_error(self):
    # A record already cached with an error should cause the lookup to
    # report an error rather than reprocessing the identifier.
    global CACHE
    CACHE["doi:10.cached"] = {"error" : "prior error"}
    cache.check_cache = mock_check_cache_general

    identifiers = [{"id" : "10.cached"}]
    response = workflow.lookup(identifiers)
    assert len(response.errors) == 1
def test_07_lookup_error(self):
    # A lookup for an id whose declared type ("doi") fails validation in
    # the configured type-detection mocks should come back as an error
    # with the identifier details preserved.
    ids = [{"id" : "12345", "type" : "doi"}]
    config.type_detection = ["mock_doi_type", "mock_pmid_type"]
    rs = workflow.lookup(ids)
    assert len(rs.errors) == 1
    result = rs.errors[0]
    assert result['identifier']['id'] == "12345"
    assert result['identifier']['type'] == "doi"
    # `in` instead of dict.has_key (removed in Python 3; `in` works in both)
    assert "error" in result
def test_08_lookup_error(self):
    # Same error scenario as test_07 but driven by plugins loaded from a
    # test-specific plugin directory rather than config settings.
    ids = [{"id" : "12345", "type" : "doi"}]
    pdir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "plugins", "test_workflow", "test_08"
    )
    plugin.PluginFactory.load_from_directory(plugin_dir=pdir)
    rs = workflow.lookup(ids)
    assert len(rs.errors) == 1
    result = rs.errors[0]
    assert result['identifier']['id'] == "12345"
    assert result['identifier']['type'] == "doi"
    # `in` instead of dict.has_key (removed in Python 3; `in` works in both)
    assert "error" in result
def test_06_archive_success(self):
    # An identifier that misses the cache but is found in the archive
    # (and is not stale) should be returned as a full result.
    identifiers = [{"id" : "10.archived"}]

    # configure type detection / canonicalisation and mock out the cache
    # (miss) and the archive (hit)
    config.type_detection = ["mock_doi_type", "mock_pmid_type"]
    config.canonicalisers = {"doi" : "mock_doi_canon", "pmid" : "mock_pmid_canon"}
    cache.check_cache = mock_null_cache
    models.Record.check_archive = mock_check_archive

    saved_is_stale = workflow._is_stale
    workflow._is_stale = mock_is_stale_false
    response = workflow.lookup(identifiers)
    workflow._is_stale = saved_is_stale

    assert len(response.results) == 1
    record = response.results[0]
    assert record['identifier'][0]['id'] == "10.archived"
    assert record['identifier'][0]['type'] == "doi"
    assert record['identifier'][0]['canonical'] == "doi:10.archived", record
    assert record['title'] == "archived"
def api_lookup(path='', ids=None):
    """Look up a batch of identifiers and return the results.

    Identifiers may arrive three ways, checked in this order: the `ids`
    argument (a comma-separated string or a list of strings/dicts), the
    request's JSON body, or a comma-separated `path` segment. Each is
    normalised to a list of {"id": ...} dicts before being handed to
    workflow.lookup. Responds with rendered HTML for plain GETs, JSON
    otherwise. Aborts with 400 if more than 1000 ids are submitted.
    """
    # sentinel default instead of a mutable default argument (`ids=[]`)
    if ids is None:
        ids = []
    givejson = util.request_wants_json()
    path = path.replace('.json', '')

    idlist = []
    if ids and isinstance(ids, basestring):
        # comma-separated string of ids
        idlist = [{"id": i} for i in ids.split(',')]
    elif ids:
        # list of ids; entries may be bare strings or already-formed dicts
        for i in ids:
            if isinstance(i, basestring):
                idlist.append({"id": i})
            else:
                idlist.append(i)
    elif request.json:
        # JSON body: a list of strings and/or dicts
        for item in request.json:
            if isinstance(item, dict):
                idlist.append(item)
            else:
                idlist.append({"id": item})
    elif path and len(path) > 0:
        # fall back to ids embedded in the URL path
        idlist = [{"id": i} for i in path.split(',')]

    # refuse oversized batches
    if len(idlist) > 1000:
        abort(400)

    if idlist:
        results = workflow.lookup(idlist).json()
    else:
        results = json.dumps({})

    if request.method == 'GET' and not givejson:
        # plain browser GET: render the index page, flagging what triggered it
        if path:
            triggered = idlist
        else:
            triggered = False
        return render_template('index.html', results=results, triggered=triggered)
    else:
        resp = make_response(results)
        resp.mimetype = "application/json"
        return resp
def test_07_archive_success(self):
    # Archive-hit scenario driven by plugins loaded from a test-specific
    # directory: cache misses, archive hits, record is not stale.
    identifiers = [{"id" : "10.archived"}]

    plugin_path = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "plugins", "test_workflow", "test_07"
    )
    plugin.PluginFactory.load_from_directory(plugin_dir=plugin_path)
    cache.check_cache = mock_null_cache
    models.Record.check_archive = mock_check_archive

    saved_is_stale = workflow._is_stale
    workflow._is_stale = mock_is_stale_false
    response = workflow.lookup(identifiers)
    workflow._is_stale = saved_is_stale

    assert len(response.results) == 1
    record = response.results[0]
    assert record['identifier'][0]['id'] == "10.archived"
    assert record['identifier'][0]['type'] == "doi"
    assert record['identifier'][0]['canonical'] == "doi:10.archived", record
    assert record['title'] == "archived"
def test_12_check_cache_update_on_queued(self):
    # When an id is unknown to both cache and archive, the lookup should
    # queue it for processing and write a "queued" marker into the cache.
    global CACHE
    ids = [{"id" : "10.queued"}]

    # set up the configuration so that the type and canonical form are created
    # but no copy of the id is found in the cache or archive
    config.type_detection = ["mock_doi_type", "mock_pmid_type"]
    config.canonicalisers = {"doi" : "mock_doi_canon", "pmid" : "mock_pmid_canon"}
    cache.check_cache = mock_null_cache
    models.Record.check_archive = mock_null_archive

    # mock out the cache method to allow us to record calls to it
    cache.cache = mock_cache

    # mock out the _start_back_end so that we don't actually start the back end
    old_back_end = workflow._start_back_end
    workflow._start_back_end = mock_back_end

    # do the lookup
    rs = workflow.lookup(ids)

    # assert that the result is in the appropriate bit of the response
    assert len(rs.processing) == 1
    result = rs.processing[0]
    assert result['identifier']['id'] == "10.queued"

    # now check our cache and make sure that the item got cached correctly
    # (`in` instead of dict.has_key, which was removed in Python 3)
    assert "doi:10.queued" in CACHE
    assert CACHE["doi:10.queued"]['queued']

    # reset the test cache and reinstate the old back-end
    del CACHE["doi:10.queued"]
    workflow._start_back_end = old_back_end
def test_14_check_cache_update_on_queued(self):
    # Plugin-directory variant of the queued-item test: unknown id is
    # queued and a "queued" marker is written to the cache.
    global CACHE
    ids = [{"id" : "10.queued"}]

    # set up the plugins so that the type and canonical form are created
    # but no copy of the id is found in the cache or archive
    pdir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "plugins", "test_workflow", "test_14"
    )
    plugin.PluginFactory.load_from_directory(plugin_dir=pdir)
    cache.check_cache = mock_null_cache
    models.Record.check_archive = mock_null_archive

    # mock out the cache method to allow us to record calls to it
    cache.cache = mock_cache

    # mock out the _start_back_end so that we don't actually start the back end
    old_back_end = workflow._start_back_end
    workflow._start_back_end = mock_back_end

    # do the lookup
    rs = workflow.lookup(ids)

    # assert that the result is in the appropriate bit of the response
    assert len(rs.processing) == 1
    result = rs.processing[0]
    assert result['identifier']['id'] == "10.queued"

    # now check our cache and make sure that the item got cached correctly
    # (`in` instead of dict.has_key, which was removed in Python 3)
    assert "doi:10.queued" in CACHE
    assert CACHE["doi:10.queued"]['queued'], CACHE

    # reset the test cache and reinstate the old back-end
    del CACHE["doi:10.queued"]
    workflow._start_back_end = old_back_end
def api_lookup(path='', ids=None):
    """Look up a batch of identifiers and return the results.

    Identifiers may arrive via the `ids` argument (comma-separated string
    or list of strings/dicts), the request body (JSON, even if the client
    mislabeled the MIME type), or a comma-separated `path` segment; they
    are normalised to a list of {"id": ...} dicts for workflow.lookup.
    The batch limit comes from config (a higher one when the client asks
    for priority); oversized batches abort with 400. Responds with
    rendered HTML for plain GETs, JSON otherwise.
    """
    # sentinel default instead of a mutable default argument (`ids=[]`)
    if ids is None:
        ids = []
    givejson = util.request_wants_json()
    path = path.replace('.json', '')

    idlimit = config.LOOKUP_LIMIT
    # have we been asked to prioritise?
    priority = bool(request.values.get("priority", False))
    if priority:
        idlimit = config.PRIORITY_LOOKUP_LIMIT

    idlist = []

    # look for JSON in the incoming request data
    if request.json:
        # the MIME type of the request is set properly - this is how it
        # should be
        request_json = request.json
    else:
        request_json = None
        # check if somebody just POST-ed without bothering to request
        # the right MIME type
        try:
            request_json = json.loads(request.data)
        except ValueError:
            pass
        # now check if the client mislabeled the request really badly,
        # i.e. saying it's HTML form data when it's actually JSON
        # NOTE(review): json.loads(str(request.form)) parses the *string
        # representation* of the form object, and would overwrite a body
        # successfully parsed above - looks intentional as a last-ditch
        # fallback, but worth confirming
        try:
            request_json = json.loads(str(request.form))
        except ValueError:
            pass

    if ids and isinstance(ids, basestring):
        # comma-separated string of ids
        idlist = [{"id": i} for i in ids.split(',')]
    elif ids:
        # list of ids; entries may be bare strings or already-formed dicts
        for i in ids:
            if isinstance(i, basestring):
                idlist.append({"id": i})
            else:
                idlist.append(i)
    elif request_json:
        # JSON body: a list of strings and/or dicts
        for item in request_json:
            if isinstance(item, dict):
                idlist.append(item)
            else:
                idlist.append({"id": item})
    elif path and len(path) > 0:
        # fall back to ids embedded in the URL path
        idlist = [{"id": i} for i in path.split(',')]

    log.debug('LOOKUP: About to do a request size test. Len of idlist: ' + str(len(idlist)))
    # refuse oversized batches
    if len(idlist) > idlimit:
        abort(400)

    if idlist:
        results = workflow.lookup(idlist, priority).json()
    else:
        results = json.dumps({})

    if request.method == 'GET' and not givejson:
        # plain browser GET: render the index page, flagging what triggered it
        if path:
            triggered = idlist
        else:
            triggered = False
        return render_template('index.html', results=results, triggered=triggered)
    else:
        resp = make_response(results)
        resp.mimetype = "application/json"
        return resp