def main(config):
    """Authenticate to Girder, start a taskflow, and wait for it to finish.

    Reads from *config*: girder_api_url, girder_user, girder_password,
    taskflow_start_params (optional path to a JSON file of start
    parameters) and taskflow_class.
    """
    client = GirderClient(apiUrl=config.girder_api_url)
    client.authenticate(config.girder_user, config.girder_password)

    # Load any parameters
    params = {}
    if config.taskflow_start_params is not None:
        with open(config.taskflow_start_params) as fp:
            params = json.load(fp)

    # Fixed: was a Python 2 `print params` statement, a SyntaxError on
    # Python 3 (the rest of this block already uses print() calls).
    print(params)

    try:
        print('Running %s taskflow ...' % config.taskflow_start_params)
        taskflow_id = create_taskflow(client, config.taskflow_class)

        # Start the task flow
        url = 'taskflows/%s/start' % (taskflow_id)
        client.put(url, data=json.dumps(params))

        # Wait for it to complete
        wait_for_complete(client, taskflow_id)
    except HttpError as ex:
        print(ex.responseText)
def main():
    """Command-line entry point: import analyses into minerva."""
    parser = argparse.ArgumentParser(
        description='Import analyses into minerva')
    # Simple optional flags share the same shape; declare them in one pass.
    for flag, default in (('--username', None),
                          ('--password', None),
                          ('--scheme', 'http'),
                          ('--host', 'localhost'),
                          ('--port', '8080')):
        parser.add_argument(flag, required=False, default=default)
    parser.add_argument('--api-root', required=False, default='/api/v1',
                        help='path to the Girder REST API')
    parser.add_argument('--path', required=True,
                        help='the path to import the analyses from')
    opts = parser.parse_args()

    client = GirderClient(host=opts.host, port=opts.port,
                          apiRoot=opts.api_root, scheme=opts.scheme)
    client.authenticate(opts.username, opts.password)
    import_analyses(client, opts.path)
class GirderClient(object):
    """Lazy proxy around the real girder_client client (``GC``).

    Connection settings come from the OpenChemistry token object and may
    be overridden by the OC_API_URL / OC_INTERNAL_API_URL / OC_API_KEY /
    GIRDER_TOKEN environment variables. Any attribute not defined here is
    delegated to the wrapped client.
    """

    def __init__(self):
        token_obj = get_oc_token_obj()
        url = token_obj.get('apiUrl')
        api_key = token_obj.get('apiKey')
        # Environment variables take precedence over the token object.
        url = os.environ.get('OC_API_URL', url)
        internal_url = os.environ.get('OC_INTERNAL_API_URL', url)
        api_key = os.environ.get('OC_API_KEY', api_key)
        token = os.environ.get('GIRDER_TOKEN')

        self.client = None
        self.url = url
        self.internal_url = internal_url
        if internal_url is not None:
            self.client = GC(apiUrl=internal_url)
            # Prefer an API key; fall back to a pre-existing session token.
            if api_key is not None:
                self.client.authenticate(apiKey=api_key)
            elif token is not None:
                self.client.token = token

    def __getattr__(self, name):
        # Delegate everything not defined on the proxy to the wrapped client.
        # NOTE(review): when no internal URL was configured, self.client is
        # None and this raises AttributeError on None — confirm intended.
        return getattr(self.client, name)
def get_girder_client() -> GirderClient:
    """Return a client for viame.kitware.com, authenticated with the
    GIRDER_API_KEY environment variable when set, else interactively."""
    gc = GirderClient(apiUrl="https://viame.kitware.com/api/v1")
    key = os.environ.get('GIRDER_API_KEY', None)
    auth_kwargs = {'apiKey': key} if key else {'interactive': True}
    gc.authenticate(**auth_kwargs)
    return gc
def import_calc(config):
    """Upload each file in config.datafile and register a molecule for it.

    Uses config.host/port/scheme/apiroot/apiKey for the connection;
    config.public marks the created molecules public. Stores the last
    created molecule id on config.moleculeId. HTTP failures are reported
    to stderr.
    """
    try:
        # Normalize optional connection settings to None when falsy.
        target_port = config.port if config.port else None
        target_scheme = config.scheme if config.scheme else None
        target_apiroot = config.apiroot if config.apiroot else None

        client = GirderClient(host=config.host, port=target_port,
                              scheme=target_scheme, apiRoot=target_apiroot)
        client.authenticate(apiKey=config.apiKey)

        me = client.get('/user/me')
        if not me:
            print('Error: Girder token invalid, please verify')
            return

        # Get the private folder id first.
        # Fixed: the folder was previously fetched twice (listResource with
        # hand-built params, then listFolder) with the first result thrown
        # away; a single lookup suffices.
        folder = next(client.listFolder(me['_id'], 'user', 'Private'))

        for file_name in config.datafile:
            print('\nUploading ' + file_name)
            file_id = {}
            # Fixed: open in binary mode — text mode can corrupt binary
            # uploads and mismatch the byte size on Python 3.
            with open(file_name, 'rb') as fp:
                fileNameBase = os.path.basename(file_name)
                size = os.path.getsize(file_name)
                file_id = client.uploadFile(folder['_id'], fp, fileNameBase,
                                            size, 'folder')

            body = {
                'fileId': file_id['_id']
            }
            if config.public:
                body['public'] = True
            mol = client.sendRestRequest('POST', 'molecules',
                                         data=json.dumps(body))

            if mol and '_id' in mol:
                config.moleculeId = mol['_id']
                print('Molecule ID: ' + mol['_id'])
            else:
                print(mol)
    except HttpError as error:
        print(error.responseText, file=sys.stderr)
def test_extract_download(data):
    """Download the zip fixture described by *data* (fileId, filename) if it
    is not cached locally, then extract it into localDataRoot."""
    file_id, zip_name = data
    target = localDataRoot / str(zip_name)
    if not target.exists():
        gc = GirderClient(apiUrl=source_api_root)
        gc.authenticate(apiKey=os.environ.get('GIRDER_API_KEY'))
        gc.downloadFile(file_id, str(target))
    with zipfile.ZipFile(target, 'r') as archive:
        archive.extractall(localDataRoot)
def _importAnalysis(self):
    """Setup and import analyses for bsve tests (idempotent)."""
    if self._import_done:
        return
    path = "/minerva_analysis/folder"
    response = self.request(path=path, method="POST", user=self._user)
    self.assertStatusOk(response)
    analyses_folder = response.json["folder"]

    # import the bsve analysis
    client = GirderClient("localhost", girder_port)
    client.authenticate("minervauser", "password")
    bsve_analysis_path = os.path.abspath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "../analyses/bsve")
    )
    import_analyses.import_analyses(client, bsve_analysis_path)

    path = "/item"
    params = {"folderId": analyses_folder["_id"]}
    response = self.request(path=path, method="GET", params=params, user=self._user)
    self.assertStatusOk(response)
    # Fixed: message previously said "Expecting only one analysis" while
    # asserting a count of 2.
    self.assertEquals(len(response.json), 2, "Expecting two analyses")
    for analysis in response.json:
        if analysis["name"] == "bsve search":
            search_analysis = analysis
        elif analysis["name"] == "MMWR data import":
            soda_analysis = analysis
        else:
            self.fail('Unexpected analysis found "%s".' % analysis["name"])

    expected_meta = {
        u"minerva": {
            u"analysis_type": u"bsve_search",
            u"analysis_name": u"bsve search",
            u"analysis_id": search_analysis["_id"],
        }
    }
    self.assertEquals(search_analysis["meta"], expected_meta,
                      "Unexpected value for search meta data")

    expected_meta = {
        u"minerva": {
            u"analysis_type": u"mmwr_import_data",
            u"analysis_name": u"MMWR data import",
            u"analysis_id": soda_analysis["_id"],
        }
    }
    self.assertEquals(soda_analysis["meta"], expected_meta,
                      "Unexpected value for soda meta data")

    # create the dataset folder
    path = "/minerva_dataset/folder"
    params = {"userId": self._user["_id"]}
    response = self.request(path=path, method="POST", params=params, user=self._user)
    self.assertStatusOk(response)
    # Fixed: previously set self._importDone, which never matched the
    # self._import_done guard above, so the import re-ran on every call.
    self._import_done = True
def main(ctx, api_key, api_url):
    """Openchemistry Client

    The client can be used to fetch molecules, add molecules, etc.
    """
    client = GirderClient(apiUrl=api_url)
    # Anonymous access is allowed; only authenticate when a key is given.
    if api_key is not None:
        client.authenticate(apiKey=api_key)
    ctx.obj = client
def _ingest(project, composite, dir, channel_map, api_url, api_key):
    """Ingest runs and then samples for *project*/*composite* from *dir*,
    using the channel-to-element mapping read from *channel_map*."""
    if dir[-1] != '/':
        dir += '/'
    client = GirderClient(apiUrl=api_url)
    client.authenticate(apiKey=api_key)

    # Normalize the mapping: channel names upper-case, elements lower-case.
    raw_map = json.load(channel_map)
    normalized_map = {
        chan.upper(): elem.lower() for chan, elem in raw_map.items()
    }

    experiments = _ingest_runs(client, project, composite, dir)
    _ingest_samples(client, project, composite, dir, experiments,
                    normalized_map)
def gc_init(api_url='https://girder.hub.yt/api/v1', api_key=None):
    """Return an authenticated GirderClient for the yt Hub.

    Args:
        api_key (str, optional): falls back to the GIRDER_API_KEY
            environment variable when not given.
        api_url (str, optional): yt Hub v1 API by default.

    Raises:
        KeyError: when no api_key is given and GIRDER_API_KEY is unset.
    """
    from girder_client import GirderClient
    if api_key is None:
        try:
            api_key = os.environ['GIRDER_API_KEY']
        except KeyError as err:
            msg = 'please define GIRDER_API_KEY environment variable\n'
            msg += ' or pass api_key kwargs.'
            # Fixed: chain the original lookup failure so the traceback
            # shows which key was actually missing.
            raise KeyError(msg) from err
    gc = GirderClient(apiUrl=api_url)
    gc.authenticate(apiKey=api_key)
    return gc
def main(args=None):
    """CLI entry point: mount a Girder folder as a FUSE filesystem.

    Authenticates with a token, API key, or username/password (in that
    order of precedence) and mounts the remote folder either through the
    REST API ('remote') or via direct assetstore access ('direct').
    """
    parser = argparse.ArgumentParser(
        description='Mount Girder filesystem assetstore.')
    parser.add_argument('--api-url', required=True, default=None,
                        help='full URL to the RESTful API of Girder server')
    parser.add_argument('--username', required=False, default=None)
    parser.add_argument('--password', required=False, default=None)
    parser.add_argument('--api-key', required=False, default=None)
    parser.add_argument('--token', required=False, default=None)
    parser.add_argument('-c', default='remote', choices=['remote', 'direct'],
                        help='command to run')
    parser.add_argument('--foreground', dest='foreground',
                        action='store_true')
    parser.add_argument('--hostns', dest='hostns', action='store_true')
    parser.add_argument('local_folder', help='path to local target folder')
    parser.add_argument('remote_folder', help='Girder\'s folder id')
    args = parser.parse_args()

    gc = GirderClient(apiUrl=args.api_url)
    # Credential precedence: explicit token, then API key, then user/pass.
    if args.token:
        gc.token = args.token
    elif args.api_key:
        gc.authenticate(apiKey=args.api_key)
    elif args.username and args.password:
        gc.authenticate(username=args.username, password=args.password)
    else:
        raise RuntimeError("You need to specify apiKey or user/pass")

    if args.hostns:
        # Join the host's mount namespace so the FUSE mount is visible there.
        targetns = os.path.join(os.environ.get('HOSTDIR', '/'),
                                'proc/1/ns/mnt')
        with open(targetns) as fd:
            setns(fd, CLONE_NEWNS)

    if args.c == 'remote':
        FUSE(RESTGirderFS(args.remote_folder, gc), args.local_folder,
             foreground=args.foreground, ro=True, allow_other=True)
    elif args.c == 'direct':
        FUSE(LocalGirderFS(args.remote_folder, gc), args.local_folder,
             foreground=args.foreground, ro=True, allow_other=True)
    else:
        print('No implementation for command %s' % args.c)
def upload_benchmark_results(benchmark_bin, api_key=None):
    """Push this host's benchmark JSON results up to data.kitware.com.

    Results are expected under <benchmark_bin>/BenchmarkResults/<hostname>;
    exits the process with status 1 when that directory is missing.
    """
    host = socket.gethostname().lower()
    results_dir = os.path.join(benchmark_bin, 'BenchmarkResults', host)
    if not os.path.exists(results_dir):
        sys.stderr.write('Expected results directory does not exist: ' + results_dir)
        sys.exit(1)

    from girder_client import GirderClient
    client = GirderClient(apiUrl='https://data.kitware.com/api/v1')
    client.authenticate(apiKey=api_key)

    # Parent folder: ITK/PerformanceBenchmarkingResults
    parent_id = '5af50c818d777f06857985e3'
    host_folder = client.loadOrCreateFolder(host, parent_id, 'folder')
    client.upload(os.path.join(results_dir, '*.json'), host_folder['_id'],
                  leafFoldersAsItems=False, reuseExisting=True)
def testClientMetadataExtractor(testData, user):
    """Exercise ClientMetadataExtractor end to end: strip the item's
    metadata, run the extractor over the REST client, and verify the
    MIME type metadata was written back."""
    item = Item().load(testData['item']['_id'], user=user)
    assert item['name'] == testData['name']
    # Drop any pre-existing metadata so we can prove the extractor set it.
    del item['meta']
    item = Item().save(item)
    assert 'meta' not in item
    client = GirderClient('localhost', int(os.environ['GIRDER_PORT']))
    client.authenticate(user['login'], 'password')
    extractor = ClientMetadataExtractor(client, testData['path'], testData['item']['_id'])
    extractor.extractMetadata()
    item = Item().load(testData['item']['_id'], user=user)
    assert item['name'] == testData['name']
    assert 'meta' in item
    assert item['meta']['MIME type'] == testData['mimeType']
def init_girder(api_key=None, api_url='https://girder.hub.yt/api/v1'):
    """Connect to the yt Hub and return an authenticated Girder client.

    Args:
        api_key (str, optional): falls back to the GIRDER_API_KEY env. var.
        api_url (str, optional): yt Hub v1 API endpoint by default.

    Return:
        GirderClient: initialized girder client
    """
    from girder_client import GirderClient
    if api_key is None:
        import os
        api_key = os.environ['GIRDER_API_KEY']
    client = GirderClient(apiUrl=api_url)
    client.authenticate(apiKey=api_key)
    return client
def testClientMetadataExtractor(self):
    """Run ClientMetadataExtractor against a live server and check that
    the item's MIME type metadata gets populated."""
    item = self.model('item').load(self.item['_id'], user=self.user)
    self.assertEqual(item['name'], self.name)
    self.assertNotHasKeys(item, ['meta'])
    # Import the Python client straight from the source checkout.
    clientPath = os.path.join(ROOT_DIR, 'clients', 'python')
    sys.path.insert(0, clientPath)
    from girder_client import GirderClient

    client = GirderClient('localhost', int(os.environ['GIRDER_PORT']))
    client.authenticate(self.user['login'], self.password)
    extractor = ClientMetadataExtractor(client, self.path, self.item['_id'])
    extractor.extractMetadata()
    sys.path.remove(clientPath)
    item = self.model('item').load(self.item['_id'], user=self.user)
    self.assertEqual(item['name'], self.name)
    self.assertHasKeys(item, ['meta'])
    self.assertEqual(item['meta']['MIME type'], self.mimeType)
def main():
    """Entry point: import analyses into minerva from a local path."""
    parser = argparse.ArgumentParser(description='Import analyses into minerva')
    # Plain optional flags, declared data-driven to avoid repetition.
    simple_flags = {
        '--username': None,
        '--password': None,
        '--scheme': 'http',
        '--host': 'localhost',
        '--port': '8080',
    }
    for flag, default in simple_flags.items():
        parser.add_argument(flag, required=False, default=default)
    parser.add_argument('--api-root', required=False, default='/api/v1',
                        help='path to the Girder REST API')
    parser.add_argument('--path', required=True,
                        help='the path to import the analyses from')
    options = parser.parse_args()

    client = GirderClient(host=options.host, port=options.port,
                          apiRoot=options.api_root, scheme=options.scheme)
    client.authenticate(options.username, options.password)
    import_analyses(client, options.path)
def testClientMetadataExtractor(self):
    """End-to-end check of ClientMetadataExtractor over the REST API."""
    item = Item().load(self.item['_id'], user=self.user)
    self.assertEqual(item['name'], self.name)
    self.assertNotHasKeys(item, ['meta'])
    # Import the Python client straight from the source checkout.
    clientPath = os.path.join(ROOT_DIR, 'clients', 'python')
    sys.path.insert(0, clientPath)
    from girder_client import GirderClient

    client = GirderClient('localhost', int(os.environ['GIRDER_PORT']))
    client.authenticate(self.user['login'], self.password)
    extractor = ClientMetadataExtractor(client, self.path, self.item['_id'])
    extractor.extractMetadata()
    sys.path.remove(clientPath)
    item = Item().load(self.item['_id'], user=self.user)
    self.assertEqual(item['name'], self.name)
    self.assertHasKeys(item, ['meta'])
    self.assertEqual(item['meta']['MIME type'], self.mimeType)
def test_upload_zip_data(dataset: dict):
    """Upload a zip dataset, run postprocess, and verify the resulting
    folder (or sub-dataset) metadata matches the expectations in *dataset*."""
    user = zipUser
    client = GirderClient(apiUrl='http://localhost:8010/api/v1')
    client.authenticate(username=user['login'], password=user['password'])
    dsPath = localDataRoot / str(dataset['path'])
    privateFolder = getTestFolder(client)
    newDatasetFolder = client.createFolder(
        privateFolder['_id'],
        dataset['name'],
        metadata={
            'fps': dataset['fps'],
            'type': dataset['type'],
        },
    )
    if Path(dsPath).is_file():
        client.uploadFileToFolder(newDatasetFolder['_id'], str(dsPath))
    client.post(f'dive_rpc/postprocess/{newDatasetFolder["_id"]}')
    wait_for_jobs(client, max_wait_timeout=30, expected_status=dataset['job_status'])
    resultFolder = client.getFolder(newDatasetFolder['_id'])
    # verify sub datasets if they exist
    if dataset.get('subDatasets', False):
        folders = list(client.listFolder(newDatasetFolder['_id']))
        for item in dataset["subDatasets"]:
            matches = [x for x in folders if x["name"] == item["name"]]
            if len(matches) > 0:
                meta = matches[0].get("meta", {})
                assert meta.get("fps", -1) == item["fps"]
                assert meta.get("type", "") == item["type"]
                assert meta.get("annotate", False)
    elif dataset['job_status'] == JobStatus.SUCCESS:
        assert resultFolder['meta'].get("annotate", False)
        # Fixed: use isinstance instead of type() comparisons (PEP 8 / E721).
        assert isinstance(resultFolder['meta'].get("fps"), (int, float))
        assert isinstance(resultFolder['meta'].get("type"), str)
    else:
        assert resultFolder['meta'].get("annotate", None) is None
def main():
    """Create the folder hierarchy with metadata in a Girder instance."""
    args = parser.parse_args()
    g = GirderClient(host=args.host, port=args.port, scheme=args.scheme)
    g.authenticate(args.username, args.password)

    def create_folder_on_demand(parent_folder_id, folder_name):
        # Return the existing child folder with this name, creating it
        # only when absent.
        found = list(g.listFolder(parent_folder_id, name=folder_name))
        if found:
            return found[0]
        return g.createFolder(parent_folder_id, name=folder_name)

    with open('metadata.json') as json_file:
        metadata = json.load(json_file)

    for subject_id, subject_metadata in metadata.items():
        subject_folder = create_folder_on_demand(args.parent_folder_id,
                                                 subject_id)
        # Only the scan time names a folder; date and weight are unused here.
        for scan_time, _scan_date, _scan_weight in subject_metadata['scans']:
            create_folder_on_demand(subject_folder['_id'], scan_time)
from girder_client import GirderClient c = GirderClient(host="localhost", port=9000) # Create an admin user if there isn't one try: c.authenticate("girder", "girder") except: c.sendRestRequest( "POST", "user", { "login": "******", "password": "******", "email": "*****@*****.**", "firstName": "Girder", "lastName": "Admin", }, ) c.authenticate("girder", "girder") # Create a tangelo hub collection if there isn't one coll_search = c.get("resource/search", parameters={"q": "Default", "types": '["collection"]'}) if len(coll_search["collection"]) == 0: collection = c.post( "collection", parameters={"name": "Default", "description": "Default workspace", "public": "true"} ) c.post( "folder", parameters={ "parentType": "collection",
def __init__(self, session_id, api_url, api_key):
    # Authenticate a dedicated client for this DM session, then build the
    # session-backed filesystem around it.
    super().__init__()
    self.session_id = session_id
    client = GirderClient(apiUrl=api_url)
    client.authenticate(apiKey=api_key)
    self._fs = WtDmsGirderFS(session_id, client)
def testBsveSearchAnalysis(self):
    """End-to-end test of the bsve search analysis: import it, run a
    (mocked) bsve search, wait for the async job, and convert the
    resulting dataset to geojson."""
    # create the analysis folder
    path = '/minerva_analysis/folder'
    response = self.request(path=path, method='POST', user=self._user)
    self.assertStatusOk(response)
    analyses_folder = response.json['folder']
    # import the bsve analysis
    client = GirderClient('localhost', girder_port)
    client.authenticate('minervauser', 'password')
    bsve_analysis_path = os.path.abspath(os.path.join(
        os.path.dirname(os.path.realpath(__file__)), '../analyses/bsve'))
    import_analyses.import_analyses(client, bsve_analysis_path)
    path = '/item'
    params = {
        'folderId': analyses_folder['_id']
    }
    response = self.request(path=path, method='GET', params=params, user=self._user)
    self.assertStatusOk(response)
    self.assertEquals(len(response.json), 1, 'Expecting only one analysis')
    analysis = response.json[0]
    self.assertEquals(analysis['name'], 'bsve search', 'Expecting analysis name to be "bsve search"')
    expected_meta = {
        u'minerva': {
            u'analysis_type': u'bsve_search',
            u'analysis_name': u'bsve search',
            u'analysis_id': analysis['_id']
        }
    }
    self.assertEquals(analysis['meta'], expected_meta, 'Unexpected value for meta data')
    # create the dataset folder
    path = '/minerva_dataset/folder'
    params = {
        'userId': self._user['_id'],
    }
    response = self.request(path=path, method='POST', params=params, user=self._user)
    self.assertStatusOk(response)

    # mock the calls to bsve search
    @urlmatch(netloc=r'(.*\.)?beta-search.bsvecosystem.net(.*)$')
    def bsve_mock(url, request):
        # The submit endpoint returns a request id; every other call
        # returns the canned search results fixture.
        if url.path.split('/')[-1] == 'request':
            return httmock.response(200, '12345')
        else:
            pluginTestDir = os.path.dirname(os.path.realpath(__file__))
            filepath = os.path.join(pluginTestDir, 'data', 'bsve_search.json')
            with open(filepath) as bsve_search_file:
                content = {
                    'status': 1,
                    'results': json.load(bsve_search_file)
                }
            headers = {
                'content-length': len(content),
                'content-type': 'application/json'
            }
            return httmock.response(200, content, headers, request=request)

    # The mock must stay active while the async job polls bsve below.
    with HTTMock(bsve_mock):
        response = self.request(
            path='/minerva_analysis/bsve_search',
            method='POST',
            params={
                'datasetName': 'test dataset',
                'bsveSearchParams': '{}'
            },
            user=self._user
        )
        # wait for the async job to complete
        searchResultsFinished = False
        count = 0
        while not searchResultsFinished and count < 5:
            # get the dataset and check if it has been updated
            path = '/minerva_dataset/%s/dataset' % str(response.json['dataset_id'])
            response = self.request(
                path=path,
                method='GET',
                user=self._user
            )
            dataset = response.json
            if 'json_row' in dataset:
                searchResultsFinished = True
            else:
                time.sleep(2)
                count += 1

        # ensure the first row of results was added to the dataset
        self.assertTrue('json_row' in dataset, 'json_row expected in dataset')
        self.assertTrue('data' in dataset['json_row'], 'data should be in json_row')
        self.assertTrue('Longitude' in dataset['json_row']['data'], 'data.Longitude should be in json_row')

        # ensure that we can map the Lat/Long to geojson, as this json has
        # unicode values for Lat/Long

        # update the minerva metadata with coordinate mapping
        metadata = {'minerva': dataset}
        metadata['minerva']['mapper'] = {
            "latitudeKeypath": "data.Latitude",
            "longitudeKeypath": "data.Longitude"
        }

        path = '/item/{}/metadata'.format(dataset['dataset_id'])
        response = self.request(
            path=path,
            method='PUT',
            user=self._user,
            body=json.dumps(metadata),
            type='application/json'
        )
        metadata = response.json

        # create geojson in the dataset
        path = '/minerva_dataset/{}/geojson'.format(dataset['dataset_id'])
        response = self.request(
            path=path,
            method='POST',
            user=self._user,
        )
        self.assertHasKeys(response.json, ['geojson_file'])
def getClient(name: str) -> GirderClient:
    """Return a GirderClient logged in as the named test user."""
    client = GirderClient(apiUrl='http://localhost:8010/api/v1')
    password = users[name]['password']
    client.authenticate(username=name, password=password)
    return client
def admin_client() -> GirderClient:
    """Return a GirderClient authenticated as the admin user."""
    client = GirderClient(apiUrl='http://localhost:8010/api/v1')
    client.authenticate(username='******', password='******')
    return client
girder_scheme = os.environ.get('GIRDER_SCHEME', 'http') girder_api_root = os.environ.get('GIRDER_API_ROOT', '/api/v1') girder_api_key = os.environ.get('GIRDER_API_KEY') girder_token = os.environ.get('GIRDER_TOKEN') app_base_url = os.environ.get('APP_BASE_URL') cluster_id = os.environ.get('CLUSTER_ID') jupyterhub_base_url = os.environ.get('JUPYTERHUB_BASE_URL') if girder_host: girder_client = GirderClient(host=girder_host, port=girder_port, scheme=girder_scheme, apiRoot=girder_api_root) if girder_api_key is not None: girder_client.authenticate(apiKey=girder_api_key) elif girder_token is not None: girder_client.token = girder_token girder_file = lookup_file(girder_client, jupyterhub_base_url) # TODO Need to use basis and theory def _fetch_calculation(molecule_id, type_=None, basis=None, theory=None, functional=None): parameters = {'moleculeId': molecule_id, 'sortByTheory': True} if type_ is not None:
parser.add_argument('--admin', help='name:pass for the admin user') parser.add_argument('--host', help='host to connect to') parser.add_argument('--port', type=int, help='port to connect to') parser.add_argument('--broker', help='girder worker broker URI') parser.add_argument('--s3', help='name of S3 bucket') parser.add_argument('--aws-key-id', help='aws key id') parser.add_argument('--aws-secret-key', help='aws secret key') args = parser.parse_args() client = GirderClient(host=args.host, port=args.port) user, password = args.admin.split(":", 1) if find_user('girder'): client.authenticate('girder', 'girder') ensure_user(client, login=user, password=password, email='*****@*****.**', firstName='Girder', lastName='Admin') client.authenticate(user, password) s3_assetstore_name = 's3' if find_assetstore(s3_assetstore_name) is None: client.post('assetstore', parameters=dict(name=s3_assetstore_name,
def main(args=None):
    """CLI entry point: mount a Girder/WholeTale filesystem.

    FUSE-backed mounts: remote, direct, wt_dms, wt_versions, wt_runs.
    davfs-backed mounts (shelling out to mount.davfs): wt_home, wt_work,
    wt_run. Authentication precedence: --token, --api-key, then
    --username/--password.
    """
    parser = argparse.ArgumentParser(description="Mount Girder filesystem assetstore.")
    parser.add_argument(
        "--api-url",
        required=True,
        default=None,
        help="full URL to the RESTful API of Girder server",
    )
    parser.add_argument("--username", required=False, default=None)
    parser.add_argument("--password", required=False, default=None)
    parser.add_argument("--api-key", required=False, default=None)
    parser.add_argument("--token", required=False, default=None)
    parser.add_argument("--foreground", dest="foreground", action="store_true")
    parser.add_argument("--hostns", dest="hostns", action="store_true")
    parser.add_argument(
        "--versions-mountpoint",
        dest="versions_mountpoint",
        required=False,
        help="Mountpoint for the versions FS. If relative, then it should be "
        "relative to the runs mountpoint",
        default="Versions",
    )
    parser.add_argument(
        "-c",
        default="remote",
        help="type of filesystem to mount",
        choices=[
            "remote",
            "direct",
            "wt_dms",
            "wt_home",
            "wt_work",
            "wt_run",
            "wt_versions",
            "wt_runs",
        ],
    )
    parser.add_argument("local_folder", help="path to local target folder")
    parser.add_argument(
        "remote_folder",
        help="Girder's folder id, a DM session id (for wt_dms), or a tale instance"
        "ID (for wt_versions)",
    )
    args = parser.parse_args()

    gc = GirderClient(apiUrl=args.api_url)
    # Credential precedence: explicit token, then API key, then user/pass.
    if args.token:
        gc.token = args.token
    elif args.api_key:
        gc.authenticate(apiKey=args.api_key)
    elif args.username and args.password:
        gc.authenticate(username=args.username, password=args.password)
    else:
        raise RuntimeError("You need to specify apiKey or user/pass")

    if args.hostns:
        # Join the host's mount namespace so the mount is visible there.
        targetns = os.path.join(os.environ.get("HOSTDIR", "/"), "proc/1/ns/mnt")
        with open(targetns) as fd:
            setns(fd, CLONE_NEWNS)

    if args.c == "remote":
        FUSE(
            RESTGirderFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=True,
            allow_other=True,
        )
    elif args.c == "direct":
        FUSE(
            LocalGirderFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=True,
            allow_other=True,
        )
    elif args.c == "wt_dms":
        FUSE(
            WtDmsGirderFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=True,
            allow_other=True,
        )
    elif args.c == "wt_run":
        user = gc.get("/user/me")
        # NOTE: rebinds `args` from the argparse namespace to the davfs
        # parameter dict; the namespace is not needed past this point.
        args = {
            "user": user["login"],
            "pass": "******".format(gc.token),
            "dest": args.local_folder,
            "runId": args.remote_folder,
            "opts": "-o uid=1000,gid=100,file_mode=0600,dir_mode=2700",  # FIXME
            "url": gc.urlBase.replace("api/v1", "runs").rstrip("/"),  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{runId} {dest}'
        cmd = cmd.format(**args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    elif args.c == "wt_work":
        user = gc.get("/user/me")
        # NOTE: rebinds `args` as above.
        args = {
            "user": user["login"],
            "pass": "******".format(gc.token),
            "dest": args.local_folder,
            "tale": args.remote_folder,
            "opts": "-o uid=1000,gid=100,file_mode=0600,dir_mode=2700",  # FIXME
            "url": gc.urlBase.replace("api/v1", "tales").rstrip("/"),  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{tale} {dest}'
        cmd = cmd.format(**args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    elif args.c == "wt_home":
        user = gc.get("/user/me")
        # NOTE: rebinds `args` as above.
        args = {
            "user": user["login"],
            "pass": "******".format(gc.token),
            "dest": args.local_folder,
            "opts": "-o uid=1000,gid=100,file_mode=0600,dir_mode=2700",  # FIXME
            "url": gc.urlBase.replace("api/v1", "homes").rstrip("/"),  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{user} {dest}'
        cmd = cmd.format(**args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    elif args.c == "wt_versions":
        FUSE(
            WtVersionsFS(args.remote_folder, gc),
            args.local_folder,
            foreground=args.foreground,
            ro=False,
            allow_other=True,
        )
    elif args.c == "wt_runs":
        FUSE(
            WtRunsFS(args.remote_folder, gc, args.versions_mountpoint),
            args.local_folder,
            foreground=args.foreground,
            ro=False,
            allow_other=True,
        )
    else:
        print("No implementation for command %s" % args.c)
def test_reset_integration_env(user: dict):
    """Reset integration state by deleting the test user's private folder."""
    client = GirderClient(apiUrl='http://localhost:8010/api/v1')
    client.authenticate(username=user['login'], password=user['password'])
    private = getTestFolder(client)
    client.delete(f"folder/{private['_id']}")
from girder_client import GirderClient

if __name__ == "__main__":
    # Bootstrap a local Girder for slicer_extension_manager testing:
    # authenticate as admin, create an assetstore, enable the plugin,
    # and restart the server.
    # NOTE(review): the credentials below appear redacted ('******') and
    # the GirderClient(...) call looks mangled by that redaction — it
    # mixes client kwargs with what was probably a createUser(...) call.
    # Confirm against the original script before running.
    login = '******'
    password = '******'
    gc = GirderClient(apiUrl='http://*****:*****@admin.com', firstName='admin', lastName='admin', password=password, admin=True)
    gc.authenticate(username=login, password=password)

    # Create an assetstore
    gc.post('assetstore', parameters={
        'name': 'TestAssetstore',
        'type': 0,
        'root': '/home/circleci/project/assetstore'
    })

    # Enable the 'slicer_extension_manager' plugin
    gc.put('system/plugins', parameters={"plugins": '["slicer_extension_manager"]'})
    # Restart the server
    gc.put('system/restart')
'table8': table8, 'table9': table9 }) def create_items_from_csv(path, data_dir, gc): with open(path) as csvfile: reader = csv.reader(csvfile, delimiter=',') next(reader) for row in reader: create_item_from_row(row, data_dir, gc) if __name__ == '__main__': if len(sys.argv) < 5: sys.exit( 'Sample call: python ingest.py ./data localhost 8080 admin letmein' ) data_dir = sys.argv[1] host = sys.argv[2] port = sys.argv[3] user = sys.argv[4] password = sys.argv[5] gc = GirderClient(apiUrl='http://{}:{}/api/v1'.format(host, port)) gc.authenticate(user, password) create_items_from_csv( './jgi_data/NMDC_metadata_datasets - NMDC_datasets_metadata.csv', data_dir, gc) # with open("envo_lookup.json", "w") as f: # f.write(json.dumps(lookup_table))
break for assetstore in assetstore_list: if assetstore['name'] == name: result = assetstore['_id'] break offset += limit return result client = GirderClient(host='localhost', port=8080) if find_user('girder'): client.authenticate('girder', 'girder') ensure_user(client, login='******', password='******', email='*****@*****.**', firstName='girder', lastName='girder') client.authenticate('girder', 'girder') if find_assetstore('local') is None: client.post('assetstore', parameters=dict(name='local', type=str(AssetstoreType.GRIDFS), db='sumoLocalStore',
class BaseIntegrationTest(unittest.TestCase):
    """Base class for Cumulus integration tests.

    Handles Girder authentication, creation of scripts/input/output
    folders and jobs, job submission with polling, and best-effort
    cleanup of everything created (unless cleanup=False).
    """

    def __init__(self, name, girder_url, girder_user, girder_password,
                 job_timeout=60, cleanup=True):
        super(BaseIntegrationTest, self).__init__(name)
        self._job_id = None
        self._script_id = None
        self._output_folder_id = None
        self._input_folder_id = None
        self._girder_url = girder_url
        self._girder_user = girder_user
        self._girder_password = girder_password
        self._job_timeout = job_timeout
        self._data = 'Need more input!'
        self._cleanup = cleanup

    def setUp(self):
        """Authenticate and locate the user's Private folder."""
        url = '%s/api/v1' % self._girder_url
        self._client = GirderClient(apiUrl=url)
        self._client.authenticate(self._girder_user, self._girder_password)

        user = self._client.get('user/me')
        self._user_id = user['_id']
        r = list(self._client.listFolder(self._user_id, 'user', name='Private'))
        self.assertEqual(len(r), 1)
        self._private_folder_id = r[0]['_id']

    def tearDown(self):
        """Best-effort removal of created job, script, and folders.

        Each deletion is independent; a failure to delete one resource
        must not prevent cleanup of the others.
        """
        if not self._cleanup:
            return
        if self._job_id:
            try:
                self._client.delete('jobs/%s' % self._job_id)
            except Exception:
                traceback.print_exc()
        if self._script_id:
            try:
                self._client.delete('scripts/%s' % self._script_id)
            except Exception:
                traceback.print_exc()
        if self._output_folder_id:
            try:
                self._client.delete('folder/%s' % self._output_folder_id)
            except Exception:
                traceback.print_exc()
        if self._input_folder_id:
            try:
                self._client.delete('folder/%s' % self._input_folder_id)
            except Exception:
                traceback.print_exc()

    def create_script(self, commands=None):
        """Create a server-side script and remember its id.

        Fixed: the default command list was a mutable default argument;
        use a None sentinel instead.
        """
        if commands is None:
            commands = ['sleep 10', 'cat CumulusIntegrationTestInput']
        body = {
            'commands': commands,
            'name': 'CumulusIntegrationTestLob'
        }
        r = self._client.post('scripts', data=json.dumps(body))
        self._script_id = r['_id']

    def create_input(self, folder_name='CumulusInput'):
        """Create the input folder and upload the test payload into it."""
        r = self._client.createFolder(self._private_folder_id, folder_name)
        self._input_folder_id = r['_id']

        size = len(self._data)
        item = self._client.uploadFile(self._input_folder_id,
                                       StringIO(self._data),
                                       'CumulusIntegrationTestInput', size,
                                       parentType='folder')
        self._item_id = item['itemId']

    def create_output_folder(self, folder_name='CumulusOutput'):
        """Create the output folder and remember its id."""
        r = self._client.createFolder(self._private_folder_id, folder_name)
        self._output_folder_id = r['_id']

    def create_job(self, job_name='CumulusIntegrationTestJob', tail=None):
        """Create a job from the script with input/output folders attached."""
        body = {
            'name': job_name,
            'scriptId': self._script_id,
            'output': [{
                'folderId': self._output_folder_id,
                'path': '.'
            }],
            'input': [
                {
                    'folderId': self._input_folder_id,
                    'path': '.'
                }
            ]
        }
        if tail:
            body['output'].append({
                "path": tail,
                "tail": True
            })
        job = self._client.post('jobs', data=json.dumps(body))
        self._job_id = job['_id']

    def submit_job(self, job_params=None, timeout=None):
        """Submit the job to the cluster and poll until it completes.

        Fixed: a None timeout previously made `time.time() - start >
        timeout` raise TypeError on Python 3; it now falls back to the
        configured job timeout. The dict default was also made a None
        sentinel instead of a mutable default.
        """
        if job_params is None:
            job_params = {}
        if timeout is None:
            timeout = self._job_timeout
        url = 'clusters/%s/job/%s/submit' % (self._cluster_id, self._job_id)
        self._client.put(url, data=json.dumps(job_params))
        start = time.time()
        while True:
            time.sleep(1)
            r = self._client.get('jobs/%s' % self._job_id)

            if r['status'] in ['error', 'unexpectederror']:
                r = self._client.get('jobs/%s/log' % self._job_id)
                self.fail(str(r))
            elif r['status'] == 'complete':
                break

            if time.time() - start > timeout:
                self.fail('Job didn\'t complete in timeout')

    def assert_output(self):
        """Verify the job produced the expected files and stdout content."""
        r = self._client.listItem(self._output_folder_id)
        self.assertEqual(len(r), 4)

        stdout_item = None
        for i in r:
            if i['name'].startswith('CumulusIntegrationTestJob-%s.o' % self._job_id):
                stdout_item = i
                break

        self.assertIsNotNone(stdout_item)
        # Clarified: use the matched item, not the loop variable left over
        # after break (same object, clearer intent).
        r = self._client.get('item/%s/files' % stdout_item['_id'])
        self.assertEqual(len(r), 1)

        path = os.path.join(tempfile.gettempdir(), self._job_id)
        try:
            self._client.downloadFile(r[0]['_id'], path)
            with open(path, 'rb') as fp:
                # NOTE(review): fp.read() yields bytes while self._data is
                # str — on Python 3 this comparison can never pass; confirm
                # which runtime this suite targets.
                self.assertEqual(fp.read(), self._data)
        finally:
            if os.path.exists(path):
                os.remove(path)
def main(args=None):
    """Mount a Girder filesystem assetstore via FUSE.

    :param args: optional argument list (defaults to ``sys.argv[1:]``).
        Bug fix: this parameter was previously accepted but ignored.
    :raises RuntimeError: when no usable credentials were supplied.
    """
    parser = argparse.ArgumentParser(
        description='Mount Girder filesystem assetstore.')
    parser.add_argument('--api-url', required=True, default=None,
                        help='full URL to the RESTful API of Girder server')
    parser.add_argument('--username', required=False, default=None)
    parser.add_argument('--password', required=False, default=None)
    parser.add_argument('--api-key', required=False, default=None)
    parser.add_argument('--token', required=False, default=None)
    parser.add_argument('--foreground', dest='foreground',
                        action='store_true')
    parser.add_argument('--hostns', dest='hostns', action='store_true')
    parser.add_argument('-c', default='remote', help='command to run',
                        choices=['remote', 'direct', 'wt_dms', 'wt_home'])
    parser.add_argument('local_folder', help='path to local target folder')
    parser.add_argument('remote_folder',
                        help='Girder\'s folder id or a DM session id')
    # Honor the injected argument list instead of always reading sys.argv.
    args = parser.parse_args(args)

    gc = GirderClient(apiUrl=args.api_url)
    # Credential precedence: explicit token, then API key, then user/pass.
    if args.token:
        gc.token = args.token
    elif args.api_key:
        gc.authenticate(apiKey=args.api_key)
    elif args.username and args.password:
        gc.authenticate(username=args.username, password=args.password)
    else:
        raise RuntimeError("You need to specify apiKey or user/pass")

    if args.hostns:
        # Join the host's mount namespace so the FUSE mount is visible there.
        targetns = os.path.join(os.environ.get('HOSTDIR', '/'),
                                'proc/1/ns/mnt')
        with open(targetns) as fd:
            setns(fd, CLONE_NEWNS)

    if args.c == 'remote':
        FUSE(RESTGirderFS(args.remote_folder, gc), args.local_folder,
             foreground=args.foreground, ro=True, allow_other=True)
    elif args.c == 'direct':
        FUSE(LocalGirderFS(args.remote_folder, gc), args.local_folder,
             foreground=args.foreground, ro=True, allow_other=True)
    elif args.c == 'wt_dms':
        FUSE(WtDmsGirderFS(args.remote_folder, gc), args.local_folder,
             foreground=args.foreground, ro=True, allow_other=True)
    elif args.c == 'wt_home':
        user = gc.get('/user/me')
        # Use a dedicated dict here; the original rebound `args`, shadowing
        # the argparse namespace.
        dav_args = {
            'user': user['login'],
            # NOTE(review): '******'.format(...) has no placeholder, so the
            # value is the literal '******' -- confirm this redaction is
            # intentional and the real token should not be sent.
            'pass': '******'.format(gc.token),
            'dest': args.local_folder,
            'opts': '-o uid=1000,gid=100',  # FIXME
            'url': gc.urlBase.replace('api/v1', 'homes').rstrip('/')  # FIXME
        }
        cmd = 'echo "{user}\n{pass}" | mount.davfs {opts} {url}/{user} {dest}'
        cmd = cmd.format(**dav_args)
        subprocess.check_output(cmd, shell=True)  # FIXME
    else:
        print('No implementation for command %s' % args.c)
def _start_and_wait(client, taskflow_class, message):
    """Create a taskflow of *taskflow_class*, start it, and block until it
    completes; prints *message* first. Returns the taskflow id."""
    print(message)
    taskflow_id = create_taskflow(client, taskflow_class)
    client.put('taskflows/%s/start' % taskflow_id)
    wait_for_complete(client, taskflow_id)
    return taskflow_id


def main(config):
    """Exercise the core taskflow REST endpoints end to end.

    Runs several sample taskflows to completion, then tests terminating and
    deleting a running taskflow. HTTP errors from the server are printed.

    :param config: object with girder_api_url / girder_user / girder_password.
    """
    client = GirderClient(apiUrl=config.girder_api_url)
    client.authenticate(config.girder_user, config.girder_password)

    prefix = 'cumulus.taskflow.core.test.mytaskflows'
    try:
        # First run the simple flow
        _start_and_wait(client, '%s.SimpleTaskFlow' % prefix,
                        'Running simple taskflow ...')

        # Then the linked flow
        _start_and_wait(client, '%s.LinkTaskFlow' % prefix,
                        'Running linked taskflow ...')

        # Test terminating a simple flow
        print('Running simple taskflow ...')
        taskflow_id = create_taskflow(client, '%s.SimpleTaskFlow' % prefix)
        client.put('taskflows/%s/start' % taskflow_id)
        # Give it a moment to actually start before terminating it.
        time.sleep(4)
        print('Terminate the taskflow')
        client.put('taskflows/%s/terminate' % taskflow_id)
        wait_for_terminated(client, taskflow_id)

        # Now delete it
        print('Delete the taskflow')
        url = 'taskflows/%s' % taskflow_id
        try:
            client.delete(url)
        except HttpError as ex:
            # 202 means deletion was accepted and is still in progress.
            if ex.status != 202:
                raise
        wait_for_deletion(client, taskflow_id)

        # Now try something with a chord
        _start_and_wait(client, '%s.ChordTaskFlow' % prefix,
                        'Running taskflow containing a chord ...')

        # Now try a workflow that is the two connected together
        _start_and_wait(client, '%s.ConnectTwoTaskFlow' % prefix,
                        'Running taskflow that connects to parts together ...')

        # TODO: the composite taskflow test is disabled (as in the original);
        # re-enable once MyCompositeTaskFlow is supported:
        # _start_and_wait(client, '%s.MyCompositeTaskFlow' % prefix,
        #                 'Running taskflow that is a composite ...')
    except HttpError as ex:
        print(ex.responseText)
def girder_client(request, api_url):
    """Fixture: yield a GirderClient authenticated as the (username, password)
    pair carried in ``request.param``."""
    user_name, user_password = request.param
    gc = GirderClient(apiUrl=api_url)
    gc.authenticate(user_name, user_password)
    yield gc
def testImportAnalyses(self):
    """
    Test importing a romanesco analysis.

    Imports the bundled analyses directory, then checks that exactly two
    analysis items ('add' and 'local') were created with the expected
    metadata. (Deprecated assertEquals calls replaced with assertEqual.)
    """
    client = GirderClient('localhost', girder_port)
    client.authenticate(self._username, self._password)
    path = os.path.dirname(os.path.realpath(__file__))
    analyses_path = os.path.join(path, 'analyses')
    import_analyses.import_analyses(client, analyses_path)

    # Get the analysis folder
    path = '/minerva_analysis/folder'
    response = self.request(path=path, method='GET', params={},
                            user=self._user)
    self.assertStatusOk(response)
    analyses_folder = response.json['folder']

    # List the items that were imported into it
    path = '/item'
    params = {
        'folderId': analyses_folder['_id']
    }
    response = self.request(path=path, method='GET', params=params,
                            user=self._user)
    self.assertStatusOk(response)
    self.assertEqual(len(response.json), 2, 'Expecting two analyses')

    analysis = response.json[0]
    self.assertEqual(analysis['name'], 'add',
                     'Expecting analysis one name to be "add"')
    expected_meta = {
        u'minerva': {
            u'analysis_type': u'add',
            u'analysis_name': u'add',
            u'analysis_id': analysis['_id']
        },
        u'analysis': {
            u'inputs': [{
                u'default': {
                    u'data': u'0',
                    u'format': u'json'
                },
                u'type': u'number',
                u'name': u'a',
                u'format': u'number'
            }, {
                u'type': u'number',
                u'name': u'b',
                u'format': u'number'
            }],
            u'script': u'c = a + b',
            u'mode': u'python',
            u'outputs': [{
                u'type': u'number',
                u'name': u'c',
                u'format': u'number'
            }],
            u'name': u'add'
        }
    }
    self.assertEqual(analysis['meta'], expected_meta,
                     'Unexpected value for meta data')

    analysis = response.json[1]
    self.assertEqual(analysis['name'], 'local',
                     'Expecting analysis two name to be "local"')
    expected_meta = {
        u'minerva': {
            u'analysis_type': u'local type',
            u'analysis_name': u'local',
            u'analysis_id': analysis['_id']
        }
    }
    self.assertEqual(analysis['meta'], expected_meta,
                     'Unexpected value for meta data')
 _ _________ __ _________ _ __ __ | | / / _/ | / |/ / ____/ | | / /___ _____/ /_____ _____ | | / // // /| | / /|_/ / __/ | | /| / / __ \/ ___/ //_/ _ \/ ___/ | |/ // // ___ |/ / / / /___ | |/ |/ / /_/ / / / ,< / __/ / |___/___/_/ |_/_/ /_/_____/ |__/|__/\____/_/ /_/|_|\___/_/ You are running in private standalone mode. Troubleshooting: Try running `docker pull kitware/viame-worker` to get the latest image Documentation: https://kitware.github.io/dive/Deployment-Docker-Compose/ Issues: https://github.com/Kitware/dive/issues Support: please email [email protected]
""")
# NOTE(review): the opening of the triple-quoted banner above (and any
# enclosing function) lies outside this chunk; the code below is documented
# in place without changes.

# Fetch Celery broker credentials from server
diveclient = GirderClient(apiUrl=dive_api_url)
diveclient.authenticate(username=dive_username, password=dive_password)
me = diveclient.get('user/me')
# Ask the server to provision (or return) this user's private RabbitMQ queue.
creds = diveclient.post(f'rabbit_user_queues/user/{me["_id"]}')
broker_url = creds['broker_url']
# Queue name convention: "<login>@private".
queue_name = f"{me['login']}@private"

# UserPrivateQueueEnabledMarker is defined elsewhere in this module;
# presumably a key on the user document -- verify against the server code.
if not me.get(UserPrivateQueueEnabledMarker, False):
    warn(" Private queues not enabled for this user.")
    # NOTE(review): 'viame.kitware/com' looks like a typo for
    # 'viame.kitware.com', but the message is user-facing runtime text and
    # is left unchanged here.
    warn(
        " You can visit https://viame.kitware/com/#jobs to change these settings"
    )
info("========================")

# Route all tasks to the user's private queue by default.
task_default_queue = queue_name
if broker_url is None:
    raise RuntimeError('CELERY_BROKER_URL must be set')
# Command-line interface: path to the Arbor web apps checkout plus the
# host/port of the Girder instance to configure.
parser = argparse.ArgumentParser()
parser.add_argument("path", type=str, help="path to Arbor web apps")
parser.add_argument("-g", "--girder-host", type=str, default='localhost',
                    help="host to Girder instance")
parser.add_argument("-p", "--girder-port", type=int, default=9000,
                    help="port to Girder instance")
args = parser.parse_args()

# Get the ID for our Analyses folder.
# NOTE(review): credentials are hard-coded ('girder'/'girder') -- this script
# assumes a default development Girder instance.
c = GirderClient(host=args.girder_host, port=args.girder_port)
c.authenticate('girder', 'girder')
folderSearch = c.get('resource/search', parameters={
    'q': 'Analyses',
    'types': '["folder"]'
})
# Assumes at least one folder named 'Analyses' exists; raises IndexError
# otherwise.
folderId = folderSearch['folder'][0]['_id']

# Disable authorization requirements for running romanesco tasks
c.put('system/setting', parameters={
    'key': 'flow.require_auth',
    'value': 'false'
})

# Check if these analyses already exist. If so, we won't re-upload them.
# (The flags are flipped and acted upon later in the file, beyond this chunk.)
uploadACR = False
uploadPGS = False
from girder_client import GirderClient
import json
import pymongo
import sys

# Setup script: expects the path to the ArborWebApps checkout as its only
# argument, then configures a local development Girder instance.
if len(sys.argv) < 2:
    # Bug fix: use the parenthesized print form, valid under both
    # Python 2 and Python 3 (the bare print statement is Python-2-only).
    print("%s /path/to/ArborWebApps" % sys.argv[0])
    sys.exit(1)
arborWebAppsPath = sys.argv[1]

# Get the ID for our Analyses folder.
# NOTE(review): credentials are hard-coded ('girder'/'girder') -- assumes a
# default development Girder instance on localhost:9000.
c = GirderClient(host='localhost', port=9000)
c.authenticate('girder', 'girder')
folderSearch = c.get('resource/search', parameters={
    'q': 'Analyses',
    'types': '["folder"]'
})
# Assumes at least one folder named 'Analyses' exists; IndexError otherwise.
folderId = folderSearch['folder'][0]['_id']

# Disable authorization requirements for running romanesco tasks
c.put('system/setting', parameters={
    'key': 'romanesco.require_auth',
    'value': 'false'
})

# Check if these analyses already exist. If so, we won't re-upload them.
# (The flags are used later in the file, beyond this chunk.)
uploadACR = False
uploadPGS = False
class ContourAnalysesTestCase(base.TestCase):
    """
    Tests of the minerva S3 dataset API endpoints.
    """

    def setUp(self):
        """
        Set up the test case with a user, import the NEX analyses, and
        register an S3 dataset whose single file the contour test will use.

        Fixes vs. the original: deprecated assertEquals replaced with
        assertEqual, and 'Excepting' typos in assertion messages corrected
        to 'Expecting'.
        """
        super(ContourAnalysesTestCase, self).setUp()

        self._username = '******'
        self._password = '******'
        self._user = self.model('user').createUser(
            self._username, self._password, 'minerva', 'user',
            '*****@*****.**')

        # Import the analyses
        self._client = GirderClient('localhost', girder_port)
        self._client.authenticate(self._username, self._password)
        path = os.path.dirname(os.path.realpath(__file__))
        analyses_path = os.path.join(path, '../analyses/NEX/')
        import_analyses.import_analyses(self._client, analyses_path)

        # Get the analysis folder
        path = '/minerva_analysis/folder'
        response = self.request(path=path, method='GET', params={},
                                user=self._user)
        self.assertStatusOk(response)
        analyses_folder = response.json['folder']

        path = '/item'
        params = {
            'folderId': analyses_folder['_id']
        }
        response = self.request(path=path, method='GET', params=params,
                                user=self._user)
        self.assertStatusOk(response)

        # Find the contour analysis
        for analysis in response.json:
            if analysis['name'] == 'contour':
                self._analysis = analysis

        # Now import an S3 prefix
        path = '/minerva_dataset/folder'
        params = {
            'userId': self._user['_id']
        }
        response = self.request(path=path, method='POST', params=params,
                                user=self._user)
        self.assertStatusOk(response)
        folder = response.json['folder']

        # create the item
        params = {
            'name': 'bobby',
            'folderId': folder['_id']
        }
        response = self.request(path='/item', method='POST', params=params,
                                user=self._user)
        self.assertStatusOk(response)
        itemId = response.json['_id']

        # create a s3 dataset from the item
        prefix = '/CMIP5/CommonGrid/hadcm3/rcp45/mon/r1i1p1/pr/'
        bucket = 'nasanex'
        params = {
            'name': 'nasanex',
            'bucket': bucket,
            'prefix': prefix,
            'accessKeyId': '',
            'secret': '',
            'service': '',
            'readOnly': True
        }
        path = '/minerva_dataset_s3/%s/dataset' % str(itemId)
        response = self.request(
            path=path,
            method='POST',
            user=self._user,
            params=params
        )
        self.assertStatusOk(response)
        import_folder_id = response.json['folderId']

        # Wait for import to occur
        # NOTE(review): a fixed sleep is race-prone; polling would be more
        # robust, but the timing behavior is intentionally unchanged here.
        time.sleep(1)

        path = '/item'
        params = {
            'folderId': import_folder_id
        }
        response = self.request(path=path, method='GET', user=self._user,
                                params=params)
        self.assertStatusOk(response)
        self.assertEqual(len(response.json), 1,
                         'Expecting only a single item')
        item_id = str(response.json[0]['_id'])

        # Now list the files
        path = '/item/%s/files' % item_id
        response = self.request(path=path, method='GET', user=self._user)
        self.assertStatusOk(response)
        self.assertEqual(len(response.json), 1,
                         'Expecting only a single file')
        self._dataset_file_id = response.json[0]['_id']

    def testContourAnalysis(self):
        """
        Test contour analysis: run the imported analysis through romanesco
        and compare the produced item against expected_contour.json.
        """
        inputs = {
            'host': {
                'format': 'json',
                'data': 'localhost'
            },
            'port': {
                'format': 'json',
                'data': girder_port
            },
            'token': {
                'format': 'json',
                'data': self._client.token
            },
            'fileId': {
                'format': 'json',
                'data': self._dataset_file_id
            },
            'variable': {
                'format': 'json',
                'data': 'pr'
            },
            'timestep': {
                'format': 'number',
                'data': 0
            }
        }

        outputs = {
            'result': {
                'format': 'json'
            }
        }
        analysis = self._analysis['meta']['analysis']
        result = romanesco.run(analysis, inputs=inputs, outputs=outputs)
        output_item_id = str(result['output_item_id']['data'])

        # Download the item and check it what we expect
        path = '/item/%s/download' % output_item_id
        response = self.request(path=path, method='GET', user=self._user)
        self.assertStatusOk(response)

        data_path = os.path.join(
            os.path.dirname(__file__),
            'data',
            'expected_contour.json'
        )
        with open(data_path, 'r') as fp:
            expected_result = json.load(fp)

        self.assertEqual(response.json, expected_result, 'Unexpected result')