def testsearch(self):
    # Part of a unittest.TestCase subclass: checks dataset listing and the
    # restricted/public status lookups of ExtrasearchAPI.
    settings = Configuration()
    dv = "National"
    dv = "Micro"  # "National" above is immediately overridden; "Micro" is the alias under test
    sconnection = ExtrasearchAPI(settings.config['dataverseroot'], dv)
    datasets = sconnection.read_all_datasets()
    self.assertTrue(bool(datasets))
    # test if dataset is private
    self.assertTrue(bool(sconnection.has_restricted_data("V4Q8XE")))
    # test if dataset is public
    self.assertFalse(bool(sconnection.has_restricted_data("8FCYOX")))
    # test full handle pid
    self.assertTrue(bool(sconnection.has_restricted_data("hdl:10622/V4Q8XE")))
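# Hedged usage sketch, not part of the original test module: the same lookups
# exercised by testsearch() above, run as a standalone script. Only the names
# taken from the test (Configuration, ExtrasearchAPI, read_all_datasets,
# has_restricted_data, the "Micro" alias and the handle PIDs) are grounded in
# the source; the rest is illustrative.
if __name__ == '__main__':
    settings = Configuration()
    api = ExtrasearchAPI(settings.config['dataverseroot'], "Micro")
    print("datasets found:", bool(api.read_all_datasets()))
    for pid in ("V4Q8XE", "8FCYOX", "hdl:10622/V4Q8XE"):
        print(pid, "restricted:", bool(api.has_restricted_data(pid)))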
# Imports required by this view; the project-local helpers (Configuration,
# configuration, ExtrasearchAPI, Connection, search_by_keyword) are assumed to
# be imported elsewhere in this module.
import re

from flask import make_response, render_template, request


def datasetspace(settings=''):
    """Flask view listing public or restricted datasets for a Dataverse collection."""
    (where, query, datasets, metadata, s, permissions) = ({}, '', [], [], {}, 'yes')
    where = {'collab': '', 'iish': '', 'harvard': ''}
    pagetitle = "Public datasets"
    config = configuration()
    if config['error']:
        return config['error']
    root = config['apiroot']
    dataversename = 'global'

    # Query-string parameters
    if request.args.get('dv'):
        dataversename = request.args.get('dv')
    if request.args.get('q'):
        query = request.args.get('q')
    if request.args.get('permissions'):
        permissions = request.args.get('permissions')
    if request.args.get('where'):
        where[request.args.get('where')] = 'checked="checked"'

    settings = Configuration()
    sconnection = ExtrasearchAPI(settings.config['dataverseroot'], dataversename)

    if where['harvard']:
        # Extract host for the Harvard Dataverse connection
        findhost = re.search(r'(http://|https://)(.+)',
                             settings.config['harvarddataverse'])
        if findhost:
            settings.config['dataversehostname'] = findhost.group(2)
        connection = Connection(settings.config['dataversehostname'],
                                settings.config['harvardkey'])
    else:
        try:
            connection = Connection(config['hostname'], settings.config['key'])
        except Exception:
            return 'Error: no connection to Dataverse. Please try later...'

    handlestr = ''
    if query:
        s['q'] = query
        metadata = search_by_keyword(connection, s)
    else:
        # No free-text query: collect the identifiers of all datasets in the
        # requested dataverse and search for those.
        try:
            dataverse = connection.get_dataverse(dataversename)
            item = dataverse.get_contents()
            active = 'yes'
        except Exception:
            active = None

        if active:
            try:
                for item in dataverse.get_contents():
                    handlestr += item['identifier'] + ' '
                active = 'yes'
            except Exception:
                active = None
        if not active:
            handlestr = sconnection.read_all_datasets()
        if handlestr:
            s['q'] = handlestr
            s['per_page'] = 100
            metadata = search_by_keyword(connection, s)

    #return str(metadata['items'])
    for dataset in metadata['items']:
        active = ''
        # Private datasets
        if permissions == 'closed':
            pagetitle = "Restricted datasets"
            try:
                if sconnection.has_restricted_data(dataset['global_id']):
                    active = 'yes'
            except Exception:
                active = ''
        # Public data
        else:
            try:
                if not sconnection.has_restricted_data(dataset['global_id']):
                    active = 'yes'
            except Exception:
                active = ''

        if active:
            try:
                # Join all author names into a single comma-separated string.
                dataset['author'] = ''
                for author in dataset['authors']:
                    dataset['author'] += str(author) + ', '
                dataset['author'] = dataset['author'][:-2]
            except Exception:
                dataset['author'] = str(dataset['description'])
            datasets.append(dataset)
            # Harvard results are appended a second time, as in the original logic.
            if where['harvard']:
                datasets.append(dataset)

    (username, projectname) = ('', '')
    fields = {}
    resp = make_response(render_template('search.html', projectname=projectname,
                                          username=username, datasets=datasets,
                                          searchq=query, pagetitle=pagetitle,
                                          where=where, fields=fields))
    return resp
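# Hedged wiring sketch, not from the original module: datasetspace() reads
# request.args and returns a rendered template, so it runs as a Flask view.
# The application object name and the '/datasets' URL rule below are
# assumptions for illustration only.
from flask import Flask

app = Flask(__name__)
app.add_url_rule('/datasets', 'datasetspace', datasetspace)

# Example request once the app is running, using the query-string parameters
# the view reads above:
#   /datasets?dv=Micro&permissions=closed&where=iish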