def lookup_url(self, path=None, job_id=None, test=False):
    """Returns internal url for resource at specified path

    :param path: (string) Girder path, from user's root to resource
    :param job_id: (string) Girder job id, represents processing job
        submitted to remote machine
    :param test: (boolean) if True, raise exception if resource not found

    Either path or job_id must be specified (but not both!)
    """
    if path:
        resource = self.lookup_resource(path, test)
        if resource is None:
            return None

        # (else) construct "gaia" url
        resource_type = resource['_modelType']
        resource_id = resource['_id']
        gaia_url = 'girder://{}/{}'.format(resource_type, resource_id)
        return gaia_url

    elif job_id:
        job_endpoint = 'jobs/{}'.format(job_id)
        job_info = self.gc.get(job_endpoint)
        if not job_info:
            raise GaiaException('Job not found on girder')

        status = job_info.get('status')
        if status != 'complete':
            print('job_info:\n', job_info)
            raise GaiaException(
                'Job status not complete ({})'.format(status))

        output_folder_id = job_info.get('output', [])[0].get('folderId')
        # print('output_folder_id', output_folder_id)

        # Output filename is stored in metadata
        default_filename = 'output.tif'
        metadata = job_info.get('metadata', {})
        output_filename = metadata.get('outputFilename', default_filename)

        # Get item id
        params = dict(
            folderId=output_folder_id, name=output_filename, limit=1)
        output_list = self.gc.get('item', parameters=params)
        # print(output_list)
        if output_list:
            output_info = output_list[0]
            output_item_id = output_info.get('_id', 'missing')
            # print('Output file {} is item id {}'.format(
            #     output_filename, output_item_id))
        else:
            raise GaiaException(
                'Output file {} not found'.format(output_filename))

        # Create gaia object for output
        gaia_url = 'girder://item/{}'.format(output_item_id)
        return gaia_url

    else:
        raise MissingParameterError(
            'Must specify either path or job_id argument')

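# Illustrative usage sketch (not part of the original source): how lookup_url()
# might be called on an initialized GirderInterface. The import path, the
# Girder folder path, and the job id below are hypothetical placeholders.
def _example_lookup_url():
    from gaia.io import GirderInterface  # assumed import path
    girder = GirderInterface.get_instance()
    # Look up a resource by its Girder path (relative to the user's root)
    url = girder.lookup_url(path='Private/gaia/default/elevation.tif')
    print(url)  # e.g. 'girder://item/<resource id>'
    # Or look up the output of a completed processing job by its job id
    url = girder.lookup_url(job_id='<girder job id>')
    print(url)  # e.g. 'girder://item/<output item id>'
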
def validate_base(inputs, args, required_inputs=[], required_args=[],
                  optional_args=[]):
    """
    Ensure that all required inputs and arguments are present.
    """
    input_types = []
    errors = []

    for procInput in inputs:
        if not isinstance(procInput, GaiaDataObject):
            raise GaiaException('Not a GaiaDataObject')
        inputDataType = procInput._getdatatype()
        if inputDataType == types.PROCESS:
            for t in [i for i in dir(types) if not i.startswith("__")]:
                if any((True for x in procInput.default_output
                        if x in getattr(formats, t, []))):
                    inputDataType = getattr(types, t)
                    break
        input_types.append(inputDataType)

    for i, req_input in enumerate(required_inputs):
        if i >= len(input_types):
            errors.append("Not enough inputs for process")
        elif req_input['type'] != input_types[i]:
            errors.append("Input #{} is of incorrect type.".format(i + 1))

    if len(input_types) > len(required_inputs):
        if (required_inputs[-1]['max'] is not None and
                len(input_types) > len(required_inputs) +
                required_inputs[-1]['max'] - 1):
            errors.append("Incorrect # of inputs; expected {}".format(
                len(required_inputs)))
        else:
            for i in range(len(required_inputs) - 1, len(input_types)):
                if input_types[i] != required_inputs[-1]['type']:
                    errors.append(
                        "Input #{} is of incorrect type.".format(i + 1))

    if errors:
        raise GaiaException('\n'.join(errors))

    for item in required_args:
        arg, arg_type = item['name'], item['type']
        if arg not in args or args[arg] is None:
            raise GaiaException('Missing required argument {}'.format(arg))
        test_arg_type(args, arg, arg_type)
        if 'options' in item and args[arg] not in item['options']:
            raise GaiaException('Invalid value for {}'.format(item['name']))

    # Optional arguments are only validated when they are actually supplied
    for item in optional_args:
        arg, arg_type = item['name'], item['type']
        if arg in args and args[arg] is not None:
            test_arg_type(args, arg, arg_type)
            argval = args[arg]
            if 'options' in item and argval not in item['options']:
                raise GaiaException('Invalid value for {}'.format(
                    item['name']))

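# Illustrative sketch (not from the original source): the shape of the
# required_inputs / required_args / optional_args specifications that
# validate_base() expects. The argument names and values are hypothetical.
def _example_validate_base(inputs):
    required_inputs = [{'type': types.RASTER, 'max': 1}]
    required_args = [{'name': 'width', 'type': int}]
    optional_args = [{'name': 'resample', 'type': str,
                      'options': ['nearest', 'bilinear']}]
    args = {'width': 256, 'resample': 'nearest'}
    # Raises GaiaException if the inputs or args do not match the spec
    validate_base(inputs, args,
                  required_inputs=required_inputs,
                  required_args=required_args,
                  optional_args=optional_args)
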
def write_gaia_object(gaia_object, filename, **options):
    if gaia_object.__class__.__name__ == 'GirderDataObject':
        raise GaiaException('Writing not supported for GirderDataObject')

    data_type = gaia_object._getdatatype()
    if data_type == types.VECTOR:
        return write_vector_object(gaia_object, filename, **options)
    elif data_type == types.RASTER:
        return write_raster_object(gaia_object, filename, **options)
    else:
        raise GaiaException('Unsupported data type {}'.format(data_type))

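# Illustrative usage sketch (an assumption, not from the original source):
# write_gaia_object() dispatches on the object's data type, and the file
# extension then selects the driver inside the writer. File names below are
# hypothetical.
def _example_write_gaia_object(vector_obj, raster_obj):
    # A VECTOR object goes through write_vector_object(); the .geojson
    # extension selects the corresponding geopandas driver
    write_gaia_object(vector_obj, '/tmp/parcels_copy.geojson')
    # A RASTER object goes through write_raster_object(); the .tif
    # extension selects the corresponding GDAL driver
    write_gaia_object(raster_obj, '/tmp/elevation_copy.tif')
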
def _get_girder_client(cls):
    """Returns GirderClient instance

    For internal use only
    """
    if cls.instance is None:
        raise GaiaException('GirderInterface not initialized')

    if cls.instance.gc is None:
        raise GaiaException('GirderClient not initialized')

    return cls.instance.gc

def __call__(cls, *args, **kwargs):
    registry = GaiaReaderFactoryMetaclass._registry
    subclass = None
    instance = None
    if id(cls) != id(GaiaReader):
        # Allow for direct subclass instantiation
        instance = cls.__new__(cls, args, kwargs)
    else:
        if 'reader_class' in kwargs:
            classname = kwargs['reader_class']
            if classname in registry:
                subclass = registry[classname]
        else:
            for classname, classinstance in registry.items():
                if hasattr(classinstance, 'can_read'):
                    canReadMethod = getattr(classinstance, 'can_read')
                    if canReadMethod(*args, **kwargs):
                        subclass = classinstance
                        # FIXME: break

        if subclass:
            instance = subclass.__new__(subclass, args, kwargs)
        else:
            argsstr = 'args: %s, kwargs: %s' % (args, kwargs)
            msg = 'Unable to find GaiaReader subclass for: %s' % argsstr
            raise GaiaException(msg)

    if instance is not None:
        instance.__init__(*args, **kwargs)

    return instance

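# Illustrative sketch (not from the original source): because of the factory
# metaclass above, instantiating the GaiaReader base class returns an instance
# of a registered subclass. The reader class name and file path below are
# hypothetical placeholders.
def _example_reader_factory():
    # Let the registry pick a subclass via each candidate's can_read()
    reader = GaiaReader('/tmp/elevation.tif')
    # Or request a specific registered subclass by name
    reader = GaiaReader('/tmp/elevation.tif', reader_class='GaiaGDALReader')
    return reader
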
def verify(self):
    """
    Make sure that all PostgisIO columns exist in the actual table
    """
    for col in self._columns:
        if col not in self._table_obj.columns.keys():
            raise GaiaException('{} column not found in {}'.format(
                col, self._table_obj))

def validator(inputs=[], args={}):
    # First object must be GirderDataObject
    if (type(inputs[0]) is not GirderDataObject):
        raise GaiaException('girder process requires GirderDataObject')

    # Second object must have vector geometry
    if (isinstance(inputs[1], GaiaDataObject) and
            inputs[1].get_datatype() != gaia.types.VECTOR):
        template = """girder process cannot use datatype \"{}\" \
for crop geometry"""
        raise GaiaException(template.format(inputs[1].get_datatype()))

    # For now, second object/geometry must be on local filesystem
    if isinstance(inputs[1], GirderDataObject):
        raise GaiaException('crop geometry on girder not supported')

    # Otherwise call up the chain to let parent do common validation
    return v(inputs, args)

def test_arg_type(args, arg, arg_type):
    """
    Try to cast a process argument to its required type. Raise an
    exception if not successful.

    :param arg: The argument property
    :param arg_type: The required argument type (int, str, etc)
    """
    try:
        arg_type(args[arg])
    except Exception:
        raise GaiaException('Required argument {} must be of type {}'.format(
            arg, arg_type))

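# Illustrative sketch (not from the original source): test_arg_type() only
# checks that the value can be cast to the required type; it does not modify
# args. The argument names below are hypothetical.
def _example_test_arg_type():
    args = {'width': '256'}
    test_arg_type(args, 'width', int)   # ok: '256' casts to int
    args = {'width': 'wide'}
    test_arg_type(args, 'width', int)   # raises GaiaException
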
def write_raster_object(gaia_object, filename, **options):
    # Delete existing file (if any)
    if os.path.exists(filename):
        os.remove(filename)

    ext = os.path.splitext(filename)[1]
    if ext == '':
        ext = '.tif'  # default
    driver_name = GDAL_DRIVERS.get(ext)
    if driver_name is None:
        raise GaiaException('Unsupported file extension {}'.format(ext))

    # Have to create copy of dataset in order to write to file
    driver = gdal.GetDriverByName(driver_name)
    if driver is None:
        raise GaiaException('GDAL driver {} not found'.format(driver_name))

    gdal_dataset = gaia_object.get_data()
    output_dataset = driver.CreateCopy(filename, gdal_dataset, strict=0)

    # Setting the dataset to None causes the write to disk
    # Add # noqa comment to ignore flake8 error that variable isn't used
    output_dataset = None  # writes to disk  # noqa: F841

def write_vector_object(gaia_object, filename, **options):
    # Delete existing file (if any)
    if os.path.exists(filename):
        os.remove(filename)

    data = gaia_object.get_data()
    ext = os.path.splitext(filename)[1]
    if ext == '':
        ext = '.geojson'  # default
    driver = GEOPANDAS_DRIVERS.get(ext)
    if driver is None:
        raise GaiaException('Unsupported file extension {}'.format(ext))

    data.to_file(filename, driver, **options)

def __init__(self): """Applies crude singleton pattern (raise exception if called twice) """ if GirderInterface.instance: msg = """GirderInterface already exists \ -- use get_instance() class method""" raise GaiaException(msg) GirderInterface.instance = self self.girder_url = None self.gc = None # girder client self.user = None # girder user object self.gaia_folder = None self.default_folder = None
def get_epsg(self):
    if not self._epsgComputed:
        if not self._data:
            self.get_data()
        projection = self._data.GetProjection()
        data_crs = osr.SpatialReference(wkt=projection)
        try:
            self.epsg = int(data_crs.GetAttrValue('AUTHORITY', 1))
            self._epsgComputed = True
        except (KeyError, TypeError, ValueError):
            # GetAttrValue() returns None when the projection has no
            # AUTHORITY node, so int() can raise TypeError/ValueError
            raise GaiaException("EPSG code could not be determined")
    return self.epsg

def __init__(self): """Recommend using separate instance for each job submission. """ self._girder_client = None self._nersc_scratch_folder = None self._private_folder_id = None # Internal, job-specific ids self._cluster_id = None self._input_folder_id = None self._job_folder_id = None self._job_id = None self._output_folder_id = None self._script_id = None girder_interface = GirderInterface.get_instance() if girder_interface.nersc_requests is None: msg = """GirderInterface is not configured for NERSC job submission -- \ must authenticate with NEWT session id.""" raise GaiaException(msg) # Get user's scratch directory data = {'executable': 'echo $SCRATCH', 'loginenv': 'true'} machine = 'cori' url = '%s/command/%s' % (NERSC_URL, machine) r = girder_interface.nersc_requests.post(url, data=data) r.raise_for_status() js = r.json() self._nersc_scratch_folder = js.get('output') # Get Girder client self._girder_client = girder_interface.gc # Get id for user's private girder folder user = self._girder_client.get('user/me') print('user', user) user_id = user['_id'] # r = self._girder_client.listFolder(user_id, 'user', name='Private') r = self._girder_client.listFolder(user_id, 'user', name='Public') # Getting mixed signals on what listFolder returns # I *think* it is a generator try: self._private_folder_id = next(r)['_id'] except Exception: # But just in case self._private_folder_id = r[0]['_id']
def initialize(
        self, girder_url, username=None, password=None, apikey=None,
        newt_sessionid=None):
    """Connect to girder server and authenticate with input credentials

    :param girder_url: The full path to the Girder instance, for example,
        'http://localhost:80' or 'https://my.girder.com'.
    :param username: The name for logging into Girder.
    :param password: The password for logging into Girder.
    :apikey: An api key, which can be used instead of username & password.
    :newt_sessionid: (string) Session token from NEWT web service at NERSC.
        (Girder must be connected to NEWT service to authenticate.)
    """
    if self.__class__.is_initialized():
        msg = """GirderInterface already initialized -- \
cannot initialize twice"""
        raise GaiaException(msg)

    self.girder_url = girder_url

    # Check that we have credentials
    api_url = '{}/api/v1'.format(girder_url)
    # print('api_url: {}'.format(api_url))
    self.gc = girder_client.GirderClient(apiUrl=api_url)
    if username is not None and password is not None:
        self.gc.authenticate(username=username, password=password)
    elif apikey is not None:
        self.gc.authenticate(apiKey=apikey)
    elif newt_sessionid is not None:
        self.nersc_requests = requests.Session()
        url = '{}/newt/authenticate/{}'.format(api_url, newt_sessionid)
        r = self.nersc_requests.put(url)
        r.raise_for_status()
        self.nersc_requests.cookies.update(dict(
            newt_sessionid=newt_sessionid))
        # self.nersc_requests.cookies.set('newt_sessionid', newt_sessionid)

        # Get scratch directory
        data = {
            'executable': '/usr/bin/echo $SCRATCH',
            'loginenv': 'true'
        }
        machine = 'cori'
        NERSC_URL = 'https://newt.nersc.gov/newt'
        url = '%s/command/%s' % (NERSC_URL, machine)
        r = self.nersc_requests.post(url, data=data)
        r.raise_for_status()
        print(r.json())

        self.gc.token = self.nersc_requests.cookies['girderToken']
    else:
        raise MissingParameterError('No girder credentials provided.')

    # Get user info
    self.user = self.gc.getUser('me')

    # Get or initialize Private/gaia/default folder
    private_list = self.gc.listFolder(
        # self.user['_id'], parentFolderType='user', name='Private')
        # HACK FOR DEMO - use public folder until we set up
        # mechanism to send girder token to js client
        self.user['_id'], parentFolderType='user', name='Public')
    try:
        private_folder = next(private_list)
    except StopIteration:
        raise GaiaException('User/Private folder not found')

    gaia_list = self.gc.listFolder(
        private_folder['_id'], parentFolderType='folder', name='gaia')
    try:
        self.gaia_folder = next(gaia_list)
    except StopIteration:
        description = 'Created by Gaia'
        self.gaia_folder = self.gc.createFolder(
            private_folder['_id'], 'gaia', description=description)

    default_list = self.gc.listFolder(
        self.gaia_folder['_id'], parentFolderType='folder', name='default')
    try:
        self.default_folder = next(default_list)
    except StopIteration:
        description = 'Created by Gaia'
        self.default_folder = self.gc.createFolder(
            self.gaia_folder['_id'], 'default', description=description)
        print('Created gaia/default folder')

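# Illustrative usage sketch (an assumption, not from the original source):
# initializing the GirderInterface singleton with an api key or with a NEWT
# session id. The import path, URL, api key, and session id are placeholders.
def _example_initialize():
    from gaia.io import GirderInterface  # assumed import path
    girder = GirderInterface.get_instance()
    # Standard Girder credentials (api key shown; username/password also work)
    girder.initialize('https://my.girder.com', apikey='<api key>')
    # Or, for NERSC-backed processing, authenticate via a NEWT session id
    # (initialize() may only be called once per process):
    # girder.initialize('https://my.girder.com',
    #                   newt_sessionid='<newt session id>')
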
def submit_crop(self, input_object, crop_object, nersc_repository,
                job_name='geolib'):
    """Submit a crop job for a Girder-hosted raster to a NERSC machine.
    """
    # Validate inputs
    if not isinstance(input_object, GirderDataObject):
        print('input object type', type(input_object))
        raise GaiaException("""submit_crop() currently only supports \
GirderDataObject input""")
    if not crop_object._getdatatype() == gaia.types.VECTOR:
        raise GaiaException('Crop object not type VECTOR')

    # Get input object's filename
    # For now (March 2019) we are storing a cache of files on cori
    # for the ESS-DIVE dev server
    item = self._girder_client.getItem(input_object.resource_id)
    input_filename = item.get('name')

    # Call internal methods in this order:
    #   create_cluster()
    #   create_slurm_script()
    #   create_job()
    #   upload_inputs()
    #   submit_job()
    print('Creating cluster on {}'.format(MACHINE))
    self.create_cluster(MACHINE)

    # Create SLURM commands
    print('Creating SLURM script {}'.format(job_name))
    command_list = list()
    command_list.append('ulimit -s unlimited')  # stack size
    command_list.append('module load python/3.6-anaconda-4.4')
    command_list.append('source activate {}'.format(CONDA_ENV_PATH))
    command_list.append('export PYTHONPATH={}'.format(GAIA_PATH))

    # Last command is the python script itself
    py_script = '{}/nersc/crop.py'.format(GAIA_PATH)
    # For now, we have cache copies of input files on cori:
    input_path = '{}/data/{}'.format(PROJECT_PATH, input_filename)
    geometry_filename = 'crop_geometry.geojson'
    output_filename = 'output.tif'
    py_command = 'python {} {} {} {}'.format(
        py_script, input_path, geometry_filename, output_filename)
    # Arguments
    #   -n number of nodes
    #   -c number of cpus per allocated process
    #   -u unbuffered (don't buffer terminal output - needed by cumulus)
    command_list.append('srun -n 1 -c 1 -u {}'.format(py_command))
    self.create_slurm_script('metadata', command_list)

    print('Creating job {}'.format(job_name))
    self.create_job(job_name)

    # Set job metadata - keywords used by smtk job panel
    job_metadata = dict()
    # job_metadata['solver'] = solver
    job_metadata['notes'] = ''
    number_of_nodes = 1
    job_metadata['numberOfNodes'] = number_of_nodes
    # Total number of cores (1 core per task times number of nodes)
    number_of_tasks = 1
    job_metadata['numberOfCores'] = number_of_nodes * number_of_tasks
    # Time stamp (seconds since epoch)
    job_metadata['startTimeStamp'] = time.time()
    # Plus one specific to our job
    job_metadata['outputFilename'] = output_filename
    self.set_job_metadata(job_metadata)

    print('Uploading geometry file')
    name = geometry_filename
    geom_string = crop_object.get_data().to_json()
    size = len(geom_string)
    # print('geom_string:', geom_string)
    geom_stream = io.StringIO(geom_string)
    self._girder_client.uploadFile(
        self._input_folder_id, geom_stream, name, size, parentType='folder')

    print('Submitting job')
    datecode = datetime.datetime.now().strftime('%y%m%d')
    output_dir = '{}/geolib/{}/{}'.format(
        self._nersc_scratch_folder, datecode, job_name)
    return self.submit_job(MACHINE, nersc_repository, output_dir)

def show(data_objects, **options):
    """Returns pygeojs scene for JupyterLab display

    :param data_objects: list of GeoData objects to display, in
        front-to-back rendering order.
    :param options: options passed to jupyterlab_geojs.Scene instance.
    :return: pygeojs.scene instance if running Jupyter; otherwise returns
        data_objects for default display
    """
    if not is_loaded():
        return data_objects

    # (else)
    if not hasattr(data_objects, '__iter__'):
        data_objects = [data_objects]
    # print(data_objects)

    scene = pygeojs.scene(**options)
    scene.createLayer('osm')

    if not data_objects:
        print('No data objects')
        return scene

    # feature_layer = scene.createLayer('feature')
    feature_layer = None

    combined_bounds = None
    # Reverse order so that first item ends on top
    for data_object in reversed(data_objects):
        if data_object._getdatatype() == gaia.types.VECTOR:
            # print('Adding vector object')
            # Special handling for vector datasets:
            # First, make a copy of the geopandas frame
            df = geopandas.GeoDataFrame.copy(data_object.get_data())

            # Convert to lon-lat (EPSG:4326) if needed
            epsg = data_object.get_epsg()
            if epsg != '4326':
                df[df.geometry.name] = df.geometry.to_crs(epsg='4326')

            # Strip any z coordinates (force to z = 1)
            df.geometry = df.geometry.scale(zfact=0.0).translate(zoff=1.0)
            # df.to_file('/home/john/temp/df.pandas')
            # print(df)
            # print(df.geometry)

            # Calculate bounds
            geopandas_bounds = df.geometry.total_bounds
            xmin, ymin, xmax, ymax = geopandas_bounds
            meta_bounds = [
                [xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]
            ]

            # Add map feature
            if feature_layer is None:
                feature_layer = scene.createLayer('feature')

            # Use __geo_interface__ to get the geojson
            feature_layer.readGeoJSON(df.__geo_interface__)
            # print(df.__geo_interface__)
        else:
            # Get bounds, in order to compute overall bounds
            meta = data_object.get_metadata()
            # print('meta: {}'.format(meta))
            # print(meta)
            meta_bounds = meta.get('bounds').get('coordinates')[0]

        # print(meta_bounds)
        assert meta_bounds, 'data_object missing bounds'

        # Bounds format is [xmin, ymin, xmax, ymax]
        bounds = [
            meta_bounds[0][0], meta_bounds[0][1],
            meta_bounds[2][0], meta_bounds[2][1]
        ]

        # print(bounds)
        if combined_bounds is None:
            combined_bounds = bounds
        else:
            combined_bounds[0] = min(combined_bounds[0], bounds[0])
            combined_bounds[1] = min(combined_bounds[1], bounds[1])
            combined_bounds[2] = max(combined_bounds[2], bounds[2])
            combined_bounds[3] = max(combined_bounds[3], bounds[3])

        # print('options:', options)
        rep = options.get('representation')
        if rep == 'outline':
            # Create polygon object
            rect = [
                [bounds[0], bounds[1]], [bounds[2], bounds[1]],
                [bounds[2], bounds[3]], [bounds[0], bounds[3]],
                [bounds[0], bounds[1]],
            ]
            geojs_polygon = geojson.Polygon([rect])
            properties = {
                'fillColor': '#fff',
                'fillOpacity': 0.1,
                'stroke': True,
                'strokeColor': '#333',
                'strokeWidth': 2
            }
            geojson_feature = geojson.Feature(
                geometry=geojs_polygon, properties=properties)
            geojson_collection = geojson.FeatureCollection([geojson_feature])
            # print(geojson_collection)

            if feature_layer is None:
                feature_layer = scene.createLayer('feature')
            feature_layer.createFeature(
                'geojson', geojson_collection, **options)

        elif data_object.__class__.__name__ == 'GirderDataObject':
            if data_object._getdatatype() == 'raster':
                # Use large-image display
                # Todo - verify that it is installed
                tiles_url = data_object._get_tiles_url()
                # print('tiles_url', tiles_url)
                opacity = data_object.opacity
                scene.createLayer(
                    'osm', url=tiles_url, keepLower=False, opacity=opacity)
            else:
                raise GaiaException(
                    'Cannot display GirderDataObject with data type {}'.format(
                        data_object._getdatatype()))

        elif data_object._getdatatype() == gaia.types.VECTOR:
            pass  # vector objects handled above

        else:
            msg = 'Cannot display dataobject, type {}'.format(
                data_object.__class__.__name__)
            raise GaiaException(msg)

    # Send custom message to (javascript) client to set zoom & center
    rpc = {'method': 'set_zoom_and_center', 'params': combined_bounds}
    scene.send(rpc)

    return scene

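# Illustrative usage sketch (an assumption, not from the original source):
# displaying a vector object and a Girder-hosted raster in a JupyterLab cell.
# The data objects are placeholders supplied by the caller.
def _example_show(vector_obj, girder_raster_obj):
    # Vector data is drawn on a feature layer; the Girder raster is added
    # as a tile layer. Front-to-back order follows the list order.
    scene = show([vector_obj, girder_raster_obj])
    return scene  # make this the last expression in a notebook cell to render
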
def gen_zonalstats(zones_json, raster):
    """
    Generator function that yields the statistics of a raster dataset
    within each polygon (zone) of a vector dataset.

    :param zones_json: Polygons in GeoJSON format
    :param raster: Raster dataset
    :return: Polygons with additional properties for calculated raster stats.
    """
    global_transform = True

    # Open data
    raster = get_dataset(raster)
    if type(zones_json) is str:
        shp = ogr.Open(zones_json)
        zones_json = json.loads(zones_json)
    else:
        shp = ogr.Open(json.dumps(zones_json))
    lyr = shp.GetLayer()

    # Get raster georeference info
    transform = raster.GetGeoTransform()
    xOrigin = transform[0]
    yOrigin = transform[3]
    pixelWidth = transform[1]
    pixelHeight = transform[5]

    # Reproject vector geometry to same projection as raster
    sourceSR = lyr.GetSpatialRef()
    targetSR = osr.SpatialReference()
    targetSR.ImportFromWkt(raster.GetProjectionRef())
    coordTrans = osr.CoordinateTransformation(sourceSR, targetSR)

    # Check for matching spatial references
    differing_SR = (sourceSR.ExportToWkt() != targetSR.ExportToWkt())

    # TODO: Use a multiprocessing pool to process features more quickly
    for feat, feature in zip(lyr, zones_json['features']):
        geom = feat.geometry()
        # geotransform of the feature by global
        if (global_transform and differing_SR):
            geom.Transform(coordTrans)

        # Get geometry type
        geom_type = geom.GetGeometryName()

        # Get extent of feat
        if geom_type == 'MULTIPOLYGON':
            pointsX = []
            pointsY = []
            for count, polygon in enumerate(geom):
                ring = geom.GetGeometryRef(count).GetGeometryRef(0)
                numpoints = ring.GetPointCount()
                for p in range(numpoints):
                    lon, lat, z = ring.GetPoint(p)
                    if abs(lon) != float('inf'):
                        pointsX.append(lon)
                    if abs(lat) != float('inf'):
                        pointsY.append(lat)
        elif geom_type == 'POLYGON':
            ring = geom.GetGeometryRef(0)
            numpoints = ring.GetPointCount()
            pointsX = []
            pointsY = []
            for p in range(numpoints):
                lon, lat, z = ring.GetPoint(p)
                if abs(lon) != float('inf'):
                    pointsX.append(lon)
                if abs(lat) != float('inf'):
                    pointsY.append(lat)
        else:
            raise GaiaException(
                "ERROR: Geometry needs to be either Polygon or Multipolygon")

        xmin = min(pointsX)
        xmax = max(pointsX)
        ymin = min(pointsY)
        ymax = max(pointsY)

        # Specify offset and rows and columns to read
        xoff = int((xmin - xOrigin) / pixelWidth)
        yoff = int((yOrigin - ymax) / pixelWidth)
        xcount = int((xmax - xmin) / pixelWidth) + 1
        ycount = int((ymax - ymin) / pixelWidth) + 1

        # Create memory target raster
        target_ds = gdal.GetDriverByName('MEM').Create(
            '', xcount, ycount, 1, gdal.GDT_Byte)
        if global_transform is False:
            # apply new geotransform of the feature subset
            target_ds.SetGeoTransform((
                (xOrigin + (xoff * pixelWidth)),
                pixelWidth,
                0,
                (yOrigin + (yoff * pixelHeight)),
                0,
                pixelHeight,
            ))
        else:
            # apply new geotransform of the global set
            target_ds.SetGeoTransform((
                xmin, pixelWidth, 0,
                ymax, 0, pixelHeight,
            ))

        # Create memory vector layer
        mem_ds = ogr.GetDriverByName('Memory').CreateDataSource('out')
        mem_layer = mem_ds.CreateLayer(
            geom.GetGeometryName(), None, geom.GetGeometryType())
        mem_layer.CreateFeature(feat.Clone())

        # Create for target raster the same projection as for the value raster
        raster_srs = osr.SpatialReference()
        raster_srs.ImportFromWkt(raster.GetProjectionRef())
        target_ds.SetProjection(raster_srs.ExportToWkt())

        # Rasterize zone polygon to raster
        gdal.RasterizeLayer(target_ds, [1], mem_layer, burn_values=[1])

        # Read raster as arrays
        banddataraster = raster.GetRasterBand(1)
        try:
            dataraster = banddataraster.ReadAsArray(
                xoff, yoff, xcount, ycount).astype(float)
        except AttributeError:
            # Nothing within bounds, move on to next polygon
            properties = feature['properties']
            for p in ['count', 'sum', 'mean', 'median', 'min', 'max',
                      'stddev']:
                properties[p] = None
            yield (feature)
        else:
            # Get no data value of array
            noDataValue = banddataraster.GetNoDataValue()
            if noDataValue:
                # Update no data value in array with new value
                dataraster[dataraster == noDataValue] = numpy.nan

            bandmask = target_ds.GetRasterBand(1)
            datamask = bandmask.ReadAsArray(
                0, 0, xcount, ycount).astype(float)

            # Mask zone of raster
            zoneraster = numpy.ma.masked_array(
                dataraster, numpy.logical_not(datamask))

            properties = feature['properties']
            properties['count'] = zoneraster.count()
            properties['sum'] = numpy.nansum(zoneraster)
            if type(properties['sum']) == MaskedConstant:
                # No non-null values for raster data in polygon, skip
                for p in ['sum', 'mean', 'median', 'min', 'max', 'stddev']:
                    properties[p] = None
            else:
                properties['mean'] = numpy.nanmean(zoneraster)
                properties['min'] = numpy.nanmin(zoneraster)
                properties['max'] = numpy.nanmax(zoneraster)
                properties['stddev'] = numpy.nanstd(zoneraster)
                median = numpy.ma.median(zoneraster)
                if hasattr(median, 'data'):
                    try:
                        properties['median'] = median.data.item()
                    except AttributeError:
                        if median:
                            properties['median'] = median
            yield (feature)

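# Illustrative usage sketch (an assumption, not from the original source):
# iterating the generator to collect per-zone statistics. The file paths are
# hypothetical; zones_json may be a GeoJSON string or an already-parsed dict,
# and the raster argument is passed through get_dataset().
def _example_gen_zonalstats():
    import json
    with open('/tmp/zones.geojson') as f:
        zones = json.load(f)
    features = []
    for feature in gen_zonalstats(zones, '/tmp/elevation.tif'):
        # Each yielded feature carries count/sum/mean/median/min/max/stddev
        features.append(feature)
    return {'type': 'FeatureCollection', 'features': features}
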
def initialize(self, girder_url, username=None, password=None, apikey=None):
    """Connect to girder server and authenticate with input credentials

    :param girder_url: The full path to the Girder instance, for example,
        'http://localhost:80' or 'https://my.girder.com'.
    :param username: The name for logging into Girder.
    :param password: The password for logging into Girder.
    :apikey: An api key, which can be used instead of username & password.
    """
    if self.__class__.is_initialized():
        msg = """GirderInterface already initialized -- \
cannot initialize twice"""
        raise GaiaException(msg)

    self.girder_url = girder_url

    # Check that we have credentials
    api_url = '{}/api/v1'.format(girder_url)
    # print('api_url: {}'.format(api_url))
    gc = girder_client.GirderClient(apiUrl=api_url)
    if username is not None and password is not None:
        gc.authenticate(username=username, password=password)
    elif apikey is not None:
        gc.authenticate(apiKey=apikey)
    else:
        raise MissingParameterError('No girder credentials provided.')

    # Get user info
    self.user = gc.getUser('me')

    # Get or initialize Private/gaia/default folder
    private_list = gc.listFolder(
        # self.user['_id'], parentFolderType='user', name='Private')
        # HACK FOR DEMO - use public folder until we set up
        # mechanism to send girder token to js client
        self.user['_id'], parentFolderType='user', name='Public')
    try:
        private_folder = next(private_list)
    except StopIteration:
        raise GaiaException('User/Private folder not found')

    gaia_list = gc.listFolder(
        private_folder['_id'], parentFolderType='folder', name='gaia')
    try:
        self.gaia_folder = next(gaia_list)
    except StopIteration:
        description = 'Created by Gaia'
        self.gaia_folder = gc.createFolder(
            private_folder['_id'], 'gaia', description=description)

    default_list = gc.listFolder(
        self.gaia_folder['_id'], parentFolderType='folder', name='default')
    try:
        self.default_folder = next(default_list)
    except StopIteration:
        description = 'Created by Gaia'
        self.default_folder = gc.createFolder(
            self.gaia_folder['_id'], 'default', description=description)
        print('Created gaia/default folder')

    # print('default_folder:', self.default_folder)
    self.gc = gc
