def near(self, params):
    """
    Search for items that are in proximity to a GeoJSON point.

    The field on which to search must be indexed by a '2dsphere' index.
    Anonymous users may not use 'ensureIndex' to create such an index.

    :param params: parameters to the API call, including 'field' and
        'geometry'.
    :type params: dict[str, unknown]
    :returns: filtered fields of the matching items with geospatial data
        appended to the 'geo' field of each item.
    :rtype: list[dict[str, unknown]]
    :raise RestException: on malformed or forbidden API call.
    """
    self.requireParams(('field', 'geometry'), params)
    condition = {'$geometry': self._getGeometry(params)}

    for param in ('maxDistance', 'minDistance'):
        if param in params:
            try:
                distance = float(params[param])
                if distance < 0.0:
                    raise ValueError
            except ValueError:
                raise RestException(
                    "Parameter '%s' must be a number." % param)
            condition['$' + param] = distance

    field = params['field'].strip()
    # Prepend the geospatial storage prefix unless the caller already
    # included it.  The previous fixed-width slice (field[:3]) could never
    # match the full '<GEOSPATIAL_FIELD>.' prefix unless GEOSPATIAL_FIELD
    # were exactly two characters long, so use startswith() instead.
    if not field.startswith('%s.' % GEOSPATIAL_FIELD):
        field = '%s.%s' % (GEOSPATIAL_FIELD, field)

    if params.get('ensureIndex', False):
        user = self.getCurrentUser()
        if not user:
            raise RestException('Index creation denied.', 403)
        self.model('item').collection.create_index([(field, GEOSPHERE)])

    query = {field: {'$near': condition}}
    limit, offset, sort = self.getPagingParameters(params, 'lowerName')

    try:
        items = self._find(query, limit, offset, sort)
    except OperationFailure:
        raise RestException("Field '%s' must be indexed by a 2dsphere"
                            " index." % field)

    return items
def authenticate(self, params):
    """Authenticate a user with a BSVE login token.

    This endpoint bypasses the normal authentication scheme and makes
    it possible to login as a user without his/her password.  This is a
    security vulnerability if a user creates an account with a login
    name that is later automatically assigned by the escaping mechanism
    defined in ``_generatePassword``.  Only auto-generated accounts
    should be allowed when this plugin is enabled and registration
    should be set to closed.
    """
    # Credentials arrive via HTTP Basic as "email:bsve-token".
    authHeader = cherrypy.request.headers.get('Authorization')
    if not authHeader or not authHeader[0:6] == 'Basic ':
        raise RestException('Use HTTP Basic Authentication', 401)

    try:
        credentials = base64.b64decode(authHeader[6:]).decode('utf8')
        if ':' not in credentials:
            # Force the same error path as a malformed base64 payload.
            raise TypeError
    except Exception:
        raise RestException('Invalid HTTP Authorization header', 401)

    email, token = credentials.split(':', 1)

    # Validate the token against the BSVE API; 'apiroot' may override
    # the default endpoint.
    data = self._bsveAuth(
        params.get('apiroot', 'https://dev.bsvecosystem.net/api'),
        email, token
    )
    if data is None:
        raise RestException('Invalid BSVE login', 403)

    User = ModelImporter.model('user')
    user = User.findOne({'email': email})
    if not user:
        # First login for this email: auto-provision a passwordless
        # account using whatever name data the BSVE API returned.
        login = self._generateLogin(email)
        firstName = data.get('firstName', 'First')
        lastName = data.get('lastName', 'Last')
        user = User.createUser(
            login=login, password=None, firstName=firstName,
            lastName=lastName, email=email
        )

    setattr(cherrypy.request, 'girderUser', user)
    # 'token' is reused here: it now holds the girder auth token document
    # rather than the BSVE login token string.
    token = self.sendAuthTokenCookie(user)

    user['authToken'] = {
        'token': token['_id'],
        'expires': token['expires']
    }
    return user
def create(self, folder, geoJSON):
    """Create items in *folder* from a GeoJSON feature or feature collection.

    Each feature becomes one item: the required 'name' property supplies
    the item name, an optional 'description' property supplies the
    description, the remaining properties become item metadata, and the
    feature geometry is stored under the geospatial field.

    :raise RestException: on invalid GeoJSON or invalid property names.
    """
    try:
        GeoJSON.to_instance(geoJSON, strict=True)
    except ValueError:
        raise RestException('Invalid GeoJSON passed in request body.')

    if geoJSON['type'] == 'Feature':
        features = [geoJSON]
    elif geoJSON['type'] == 'FeatureCollection':
        features = geoJSON['features']
    else:
        raise RestException('GeoJSON feature or feature collection must be '
                            'passed in request body.')

    data = []

    # First pass: validate every feature before creating anything, so a
    # bad feature late in the collection doesn't leave partial results.
    for feature in features:
        properties = feature['properties']
        if 'name' not in properties:
            raise RestException("All GeoJSON features must contain a"
                                " property named 'name'.")
        name = properties['name']
        del properties['name']
        if 'description' in properties:
            description = properties['description']
            del properties['description']
        else:
            description = ''

        # Property names become metadata keys; MongoDB forbids keys that
        # contain '.' or begin with '$'.
        for key in properties:
            if not len(key):
                raise RestException('Property names must be at least one'
                                    ' character long.')
            if '.' in key or key[0] == '$':
                raise RestException('The property name %s must not contain'
                                    ' a period or begin with a dollar sign.'
                                    % key)

        data.append({'name': name, 'description': description,
                     'metadata': properties,
                     'geometry': feature['geometry']})

    user = self.getCurrentUser()
    items = []

    # Second pass: create the items and attach metadata + geometry.
    for datum in data:
        newItem = self.model('item').createItem(
            folder=folder, name=datum['name'], creator=user,
            description=datum['description'])
        self.model('item').setMetadata(newItem, datum['metadata'])
        newItem[GEOSPATIAL_FIELD] = {'geometry': datum['geometry']}
        newItem = self.model('item').updateItem(newItem)
        items.append(newItem)

    return [self._filter(item) for item in items]
def points(self, params):
    """Return a GeoJSON FeatureCollection of points for matching items.

    The 'q' parameter is a JSON (Mongo-style) query; 'latitude' and
    'longitude' name the item fields holding coordinates, defaulting to
    'meta.latitude' / 'meta.longitude'.
    """
    self.requireParams(('q',), params)
    limit, offset, sort = self.getPagingParameters(params, 'name')
    latitude = params.get('latitude', 'meta.latitude')
    longitude = params.get('longitude', 'meta.longitude')

    # Spec consumed by the feature assembler; 'keys' whitelists which
    # item fields may be exposed in the output.
    spec = {
        'type': 'point',
        'latitude': latitude,
        'longitude': longitude,
        'keys': ['meta', 'name', 'description', '_id'],
        'flatten': ['meta']
    }

    try:
        query = bson.json_util.loads(params['q'])
    except ValueError:  # pragma: no cover
        raise RestException('The query parameter must be a JSON object.')

    # Allow plugins to rewrite the spec and/or query before searching.
    events.trigger('geojson.points', info={
        'spec': spec,
        'query': query
    })

    # make sure the lat/lon are whitelisted keys to prevent private
    # data leaking
    if spec['latitude'].split('.')[0] not in spec['keys'] or \
            spec['longitude'].split('.')[0] not in spec['keys']:
        raise RestException('Invalid latitude/longitude key.', code=402)

    coll = features.FeatureCollection(points=spec)

    item = ModelImporter().model('item')
    # Fetch everything (limit=0), then apply limit/offset during the
    # permission-filter pass.  NOTE(review): 'sort' is obtained above but
    # never applied — confirm whether that is intentional.
    cursor = item.find(
        query,
        limit=0
    )
    cursor = item.filterResultsByPermission(
        cursor, user=self.getCurrentUser(),
        level=AccessType.READ,
        limit=limit, offset=offset
    )

    try:
        obj = coll(points=cursor)
    except features.GeoJSONException:
        raise RestException(
            'Could not assemble a geoJSON object from spec.',
            code=401
        )

    return obj
def _validateJsonType(self, name, info, val):
    """Check a decoded JSON value against the parameter's declared shape.

    When the parameter declares a JSON schema, validate against it;
    otherwise enforce the requireObject / requireArray flags.

    :raise RestException: when the value fails validation.
    """
    schema = info.get('schema')
    if schema is not None:
        try:
            jsonschema.validate(val, schema)
        except jsonschema.ValidationError as e:
            raise RestException('Invalid JSON object for parameter %s: %s' % (
                name, e.message))
        return

    if info['requireObject'] and not isinstance(val, dict):
        raise RestException('Parameter %s must be a JSON object.' % name)
    if info['requireArray'] and not isinstance(val, list):
        raise RestException('Parameter %s must be a JSON array.' % name)
def _loadMetadata(params):
    """Parse the optional 'meta' request parameter as a JSON object.

    :returns: the decoded dict, or None when 'meta' is absent.
    :raise RestException: when 'meta' is present but is not valid JSON,
        or decodes to something other than a JSON object.
    """
    try:
        raw = params['meta']
    except KeyError:
        return None

    try:
        meta = bson.json_util.loads(raw)
    except ValueError:
        raise RestException('Parameter meta must be valid JSON.')

    if isinstance(meta, dict):
        return meta
    raise RestException('Parameter meta must be a JSON object.')
def promoteItemToDataset(self, item, params):
    """
    Take an Item in the user's Minerva Dataset folder, and promote it to
    a Minerva Dataset by adding proper Minerva metadata.

    :raise RestException: when the user has no dataset folder or the item
        is outside it.
    """
    user = self.getCurrentUser()
    folder = findDatasetFolder(user, user, create=True)
    if folder is None:
        raise RestException('User has no Minerva Dataset folder.')
    if folder['_id'] != item['folderId']:
        raise RestException("Items need to be in user's Minerva Dataset " +
                            "folder.")

    # Don't overwrite if minerva metadata already exists.
    if 'meta' in item and 'minerva' in item['meta']:
        return item

    minerva_metadata = {'source_type': 'item'}
    for file in self.model('item').childFiles(item=item, limit=0):
        # TODO This switching based on which file is found first is
        # fairly brittle and should only be called after first upload.
        if 'geojson' in file['exts']:
            # we found a geojson, assume this is geojson original
            minerva_metadata['original_type'] = 'geojson'
            minerva_metadata['dataset_type'] = 'geojson'
            minerva_metadata['original_files'] = [{
                'name': file['name'], '_id': file['_id']
            }]
            minerva_metadata['geojson_file'] = {
                'name': file['name'], '_id': file['_id']
            }
            minerva_metadata['source'] = {'layer_source': 'GeoJSON'}
            break
        elif 'json' in file['exts']:
            # Plain JSON: record the original file; no renderable layer.
            minerva_metadata['original_type'] = 'json'
            minerva_metadata['dataset_type'] = 'json'
            minerva_metadata['original_files'] = [{
                'name': file['name'], '_id': file['_id']
            }]
            break
        elif 'csv' in file['exts']:
            minerva_metadata['original_type'] = 'csv'
            minerva_metadata['dataset_type'] = 'csv'
            minerva_metadata['original_files'] = [{
                'name': file['name'], '_id': file['_id']
            }]
            break

    updateMinervaMetadata(item, minerva_metadata)
    return item
def createNotebook(self, folder, user, token, when=None, save=True):
    """Create (or return an existing) tmpnb notebook for *folder* and *user*.

    Posts to the configured tmpnb service to spawn the container, then
    records the resulting notebook document with private access for the
    owner.

    :param when: scheduled time of the notebook; defaults to now.
    :param save: when True, persist the new document before returning.
    :raise RestException: (502) when the tmpnb service returns an HTTP
        error or a non-JSON body.
    """
    # Reuse an existing notebook for this folder/user pair if present.
    existing = self.findOne({
        'folderId': folder['_id'],
        'userId': user['_id'],
    })
    if existing:
        return existing

    now = datetime.datetime.utcnow()
    when = when or now
    hub_url = self.model('setting').get(PluginSettings.TMPNB_URL)

    # The girder token is forwarded so the spawned container can talk
    # back to this server on behalf of the user.
    payload = {
        "girder_token": token['_id'],
        "folderId": str(folder['_id'])
    }

    resp = requests.post(hub_url, json=payload)
    content = resp.content
    if isinstance(content, six.binary_type):
        content = content.decode('utf8')

    try:
        resp.raise_for_status()
    except requests.HTTPError:
        raise RestException('Got %s code from tmpnb, response="%s"/' % (
            resp.status_code, content), code=502)

    try:
        nb = json.loads(content)
    except ValueError:
        raise RestException('Non-JSON response: %s' % content, code=502)

    notebook = {
        'folderId': folder['_id'],
        'userId': user['_id'],
        'containerId': nb['containerId'],
        'containerPath': nb['containerPath'],
        'mountPoint': nb['mountPoint'],
        'lastActivity': now,
        'status': NotebookStatus.RUNNING,  # be optimistic for now
        'created': now,
        'when': when,
    }

    # Notebooks are private to their owner.
    self.setPublic(notebook, public=False)
    self.setUserAccess(notebook, user=user, level=AccessType.ADMIN)
    if save:
        notebook = self.save(notebook)
    return notebook
def find_resource_pid(pid):
    """
    Find the PID of the resource map for a given PID, which may itself be
    a resource map.

    :param pid: identifier to resolve.
    :returns: the PID of the single usable resource map.
    :raise RestException: when the index has no object, multiple objects,
        or no single usable resource map for the identifier.
    """
    result = query(
        "identifier:\"{}\"".format(esc(pid)),
        fields=["identifier", "formatType", "formatId", "resourceMap"])
    result_len = int(result['response']['numFound'])

    if result_len == 0:
        raise RestException('No object was found in the index for {}.'.format(pid))
    elif result_len > 1:
        raise RestException(
            'More than one object was found in the index for the identifier '
            '{} which is an unexpected state.'.format(pid))

    # Find out if the PID is an OAI-ORE PID and return early if so
    try:
        if result['response']['docs'][0]['formatType'] == 'RESOURCE':
            return result['response']['docs'][0]['identifier']
    except KeyError:
        raise RestException('Unable to find a resource file in the data package')

    try:
        if len(result['response']['docs'][0]['resourceMap']) == 1:
            return result['response']['docs'][0]['resourceMap'][0]
    except KeyError:
        raise RestException('Unable to find a resource map for the data package')

    if len(result['response']['docs'][0]['resourceMap']) > 1:
        # Extract all of the candidate resource map PIDs (list of lists)
        resmaps = [doc['resourceMap'] for doc in result['response']['docs']]

        # Flatten the above result out and query
        # Flattening is required because the above 'resourceMap' field is a
        # Solr array type so the result is a list of lists
        nonobs = find_nonobsolete_resmaps(
            [item for items in resmaps for item in items]
        )

        # Only return if exactly one non-obsolete Resource Map was found.
        # If we find multiple, that implies the original PID we queried for
        # is a member of multiple packages and what to do isn't implemented
        if len(nonobs) == 1:
            return nonobs[0]

    # Error out if the document passed in has multiple resource maps. What I
    # can still do here is determine the most likely resource map given the
    # set. Usually we do this by rejecting any obsoleted resource maps and
    # that usually leaves us with one.
    # (Fixed the error message below: it previously read "were for".)
    raise RestException(
        "Multiple resource maps were found for the data package, which isn't "
        "supported.")
def createGeojsonDataset(self, item, params, postgresGeojson=None):
    """Turn an Item holding a (geo)json file into a Minerva geojson dataset.

    The first file whose extension or MIME type looks like JSON/GeoJSON is
    used; Minerva metadata describing the original file and the rendering
    info is attached to the item.

    :param postgresGeojson: optional dict describing a postgres-sourced
        geojson export; when it carries a non-None 'field', default vis
        properties keyed on that field are added.
    :raise RestException: when the item is outside the user's Minerva
        Dataset folder or contains no geojson file.
    """
    user = self.getCurrentUser()
    folder = findDatasetFolder(user, user, create=True)
    if folder is None:
        raise RestException('User has no Minerva Dataset folder.')
    if folder['_id'] != item['folderId']:
        raise RestException("Items need to be in user's Minerva Dataset " +
                            "folder.")

    minerva_metadata = {
        'original_type': 'geojson',
        'dataset_type': 'geojson',
    }

    # Use the first geojson or json file found as the dataset.
    for file in self.model('item').childFiles(item=item, limit=0):
        if ('geojson' in file['exts'] or 'json' in file['exts'] or
                file.get('mimeType') in (
                    'application/json',
                    'application/vnd.geo+json',
                )):
            minerva_metadata['original_files'] = [{
                'name': file['name'], '_id': file['_id']
            }]
            minerva_metadata['geojson_file'] = {
                'name': file['name'], '_id': file['_id']
            }
            minerva_metadata['geo_render'] = {
                'type': 'geojson', 'file_id': file['_id']
            }
            minerva_metadata['original_type'] = 'geojson'
            minerva_metadata['source'] = {'layer_source': 'GeoJSON'}
            minerva_metadata['source_type'] = 'item'
            if postgresGeojson is not None:
                if postgresGeojson['field'] is not None:
                    # Color every geometry type by the selected field.
                    minerva_metadata['visProperties'] = {
                        'line': {"fillColorKey": postgresGeojson['field']},
                        'polygon': {"fillColorKey": postgresGeojson['field']},
                        'point': {"fillColorKey": postgresGeojson['field']}
                    }
                minerva_metadata['postgresGeojson'] = postgresGeojson
            break

    if 'geojson_file' not in minerva_metadata:
        raise RestException('Item contains no geojson file.')

    updateMinervaMetadata(item, minerva_metadata)
    return item
def _extractZipPayload():
    """Stream the request body to a temp file, validate it as an exported
    Tale zipfile, and extract it.

    :returns: tuple of (temp_dir, manifest_file, manifest, environment)
        where temp_dir holds the extracted archive contents.
    :raise RestException: when the body is not a zipfile or lacks a
        readable manifest.json / environment.json.
    """
    # TODO: Move assetstore type to wholetale.
    # Prefer the wholetale assetstore's temp dir (type 101) so extraction
    # lands on the same filesystem as the workspace.
    assetstore = next((_ for _ in Assetstore().list() if _['type'] == 101),
                      None)
    if assetstore:
        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        tempDir = adapter.tempDir
    else:
        tempDir = None

    with tempfile.NamedTemporaryFile(dir=tempDir) as fp:
        # Stream the request body to disk in up-to-2 GiB chunks.
        for chunk in iterBody(2 * 1024**3):
            fp.write(chunk)
        fp.seek(0)
        if not zipfile.is_zipfile(fp):
            raise RestException("Provided file is not a zipfile")

        with zipfile.ZipFile(fp) as z:
            manifest_file = next(
                (_ for _ in z.namelist() if _.endswith('manifest.json')),
                None)
            if not manifest_file:
                raise RestException(
                    "Provided file doesn't contain a Tale manifest")
            try:
                manifest = json.loads(z.read(manifest_file).decode())
                # TODO: is there a better check?
                # NOTE(review): the startswith() result is discarded, so this
                # line only guards against a missing '@id' key (KeyError) —
                # confirm whether the URL prefix should actually be enforced.
                manifest['@id'].startswith('https://data.wholetale.org')
            except Exception as e:
                raise RestException(
                    "Couldn't read manifest.json or not a Tale: {}".format(
                        str(e)))

            env_file = next(
                (_ for _ in z.namelist() if _.endswith("environment.json")),
                None)
            # NOTE(review): env_file may be None here; z.read(None) then
            # fails and is reported via the except clause below.
            try:
                environment = json.loads(z.read(env_file).decode())
            except Exception as e:
                raise RestException(
                    "Couldn't read environment.json or not a Tale: {}".
                    format(str(e)))

            # Extract files to tmp on workspace assetstore
            temp_dir = tempfile.mkdtemp(dir=tempDir)
            # In theory malicious content like: abs path for a member, or
            # relative path with ../.. etc., is taken care of by
            # zipfile.extractall, but in the end we're still unzipping an
            # untrusted content. What could possibly go wrong...?
            z.extractall(path=temp_dir)
    return temp_dir, manifest_file, manifest, environment
def frame_shape(self, id, user, type):
    """Return the per-frame dimensions for a dataset of the given type.

    For 'electron' data the shape comes from the Nx/Ny detector
    attributes; for 'raw' data it comes from the dataset's own shape.

    :raise RestException: when detector attributes are missing (404) or
        the type is not recognized.
    """
    path = self._get_path_to_type(type)
    with self._open_h5py_file(id, user) as handle:
        node = handle[path]
        if type == 'raw':
            return node.shape[1], node.shape[2]
        if type == 'electron':
            attrs = node.attrs
            if 'Nx' in attrs and 'Ny' in attrs:
                return int(attrs['Nx']), int(attrs['Ny'])
            raise RestException('Detector dimensions not found!', 404)
    raise RestException('In frame_shape, unknown type: ' + type)
def searchCase(self, params):
    """Search TCGA cases by clinical metadata table / key / value.

    Four query shapes are supported: table only (table exists),
    table+key (key exists), table+key+value (exact match), and
    table+key+substring (regex containment).  'value' and 'substring'
    are mutually exclusive, and each requires 'key'.
    """
    user = self.getCurrentUser()
    limit, offset, sort = self.getPagingParameters(params, 'name')

    self.requireParams('table', params)
    table = params.get('table')
    key = params.get('key')
    value = params.get('value')
    substring = params.get('substring')

    if value and substring:
        raise RestException(
            'Cannot search by both value and substring'
        )

    if (value or substring) and not key:
        raise RestException(
            'A key must be provided to search by value'
        )

    # Reject keys that could inject operators or dotted paths into the
    # Mongo query below.
    if key and invalid_key_re.search(key):
        raise RestException(
            'Invalid key parameter'
        )

    query = {}
    if not key:
        # Table-existence query.
        query = {
            'tcga.meta.' + table: {
                '$exists': True
            }
        }
    elif not value and not substring:
        # Key-existence query.
        query = {
            'tcga.meta.' + table + '.' + key: {
                '$exists': True
            }
        }
    elif value:
        # Exact-value match.
        query = {
            'tcga.meta.' + table + '.' + key: value
        }
    else:
        # Substring containment; escape so regex metacharacters match
        # literally.
        query = {
            'tcga.meta.' + table + '.' + key:
                re.compile(re.escape(substring))
        }

    cursor = self.model('case', 'digital_slide_archive').find(
        query, user=user, offset=offset, limit=limit, sort=sort
    )
    return pagedResponse(cursor, limit, offset, sort)
def create(self, params):
    """Create a molecule from an uploaded file, an InChI string, or inline
    data in a supported input format.

    Sources are considered in priority order: 'fileId', then 'inchi',
    then the first body key naming a supported input format.  Exactly one
    molecule is created.

    :raise RestException: on unsupported input formats or when no usable
        input is present in the request body.
    """
    body = self.getBodyJson()
    user = self.getCurrentUser()
    public = body.get('public', True)
    gen3d = body.get('generate3D', True)
    gen3d_forcefield = body.get('gen3dForcefield', 'mmff94')
    gen3d_steps = body.get('gen3dSteps', 100)
    provenance = body.get('provenance', 'uploaded by user')
    mol = None

    if 'fileId' in body:
        file_id = body['fileId']
        file = ModelImporter.model('file').load(file_id, user=user)
        # The extension determines the input format.
        input_format = file['name'].split('.')[-1]

        if input_format not in Molecule.input_formats:
            raise RestException('Input format not supported.', code=400)

        with File().open(file) as f:
            data_str = f.read().decode()

        mol = create_molecule(data_str, input_format, user, public, gen3d,
                              provenance, gen3d_forcefield, gen3d_steps,
                              body)
    elif 'inchi' in body:
        input_format = 'inchi'
        data = body['inchi']
        if not data.startswith('InChI='):
            data = 'InChI=' + data

        mol = create_molecule(data, input_format, user, public, gen3d,
                              provenance, gen3d_forcefield, gen3d_steps,
                              body)

    if mol is None:
        # Fall back to the first body key naming a supported format.
        # Guarding on 'mol is None' prevents creating a second, duplicate
        # molecule when 'fileId' or 'inchi' was already handled above
        # (the original ran this loop unconditionally).
        for key in body:
            if key in Molecule.input_formats:
                input_format = key
                data = body[input_format]
                # Convert to str if necessary
                if isinstance(data, dict):
                    data = json.dumps(data)
                mol = create_molecule(data, input_format, user, public,
                                      gen3d, provenance, gen3d_forcefield,
                                      gen3d_steps, body)
                break

    if not mol:
        raise RestException('Invalid request', code=400)

    return self._clean(mol)
def update(self, task, params):
    """Apply JSON-body updates to a task, rejecting immutable properties.

    Only the 'status' field is forwarded to the model update.
    """
    user = getCurrentUser()
    updates = getBodyJson()
    if not updates:
        raise RestException('A body must be provided', code=400)

    immutable = frozenset(('access', '_id', 'celeryTaskId', 'log'))
    for key in updates:
        if key in immutable:
            raise RestException('\'%s\' is an immutable property' % key, 400)

    return self._model.update_task(user, task, status=updates.get('status'))
def append_to_log(self, id, params):
    """Append the JSON request body as a log entry on a writable job.

    :raise RestException: 404 when the job is missing, 400 when the body
        is empty.
    """
    user = self.getCurrentUser()
    job = self._model.load(id, user=user, level=AccessType.WRITE)
    if not job:
        raise RestException('Job not found.', code=404)

    entry = getBodyJson()
    if not entry:
        raise RestException('Log entry must be provided', code=400)
    return self._model.append_to_log(user, id, entry)
def ingest(self, params):
    """Schedule a local job that ingests the TCGA dataset.

    :raise RestException: on unknown dataset, bad 'limit', or a missing
        localImportPath directory.
    """
    self.requireParams(('dataset', ), params)

    dataset = params['dataset']
    if dataset != 'tcga':
        raise RestException('Unknown dataset: %s' % dataset)

    progressEnabled = self.boolParam('progress', params, default=True)

    assetstoreId = params.get('assetstoreId') or None
    if assetstoreId:
        # Validate the asssetstore id
        self.model('assetstore').load(params['assetstoreId'],
                                      force=True, exc=True)

    if params.get('limit') == 'all':
        limit = None
    else:
        # NOTE(review): a missing 'limit' raises KeyError here rather than
        # the friendly message below — presumably the route declares a
        # default; confirm.
        try:
            limit = int(params['limit'])
            if limit < 1:
                raise ValueError()
        except ValueError:
            raise RestException('Parameter "limit" must be a positive integer '
                                'or "all".')

    localImportPath = \
        params['localImportPath'] \
        if params.get('localImportPath') \
        else None
    if localImportPath and not os.path.isdir(localImportPath):
        raise RestException('Directory "%s" not found.' % localImportPath)

    # NOTE(review): 'async' became a reserved keyword in Python 3.7, so
    # this call only parses on Python 2 / <=3.6; newer girder renamed the
    # argument to 'asynchronous'.
    job = self.model('job', 'jobs').createLocalJob(
        module='girder.plugins.digital_slide_archive.worker',
        function='ingestRunner',
        kwargs={
            'dataset': dataset,
            'progressEnabled': progressEnabled,
            'assetstoreId': assetstoreId,
            'limit': limit,
            'localImportPath': localImportPath
        },
        title='Ingesting TCGA data',
        type='digital_slide_archive.ingest',
        user=self.getCurrentUser(),
        public=False,
        async=True)
    self.model('job', 'jobs').scheduleJob(job)
def login(self, params):
    """
    Login endpoint. Sends an auth cookie in the response on success.
    The caller is expected to use HTTP Basic Authentication when calling
    this endpoint.
    """
    user, token = self.getCurrentUser(returnToken=True)

    # Only create and send new cookie if user isn't already sending
    # a valid one.
    if not user:
        # 'Girder-Authorization' lets clients work around proxies or
        # intermediaries that consume the standard 'Authorization' header.
        authHeader = cherrypy.request.headers.get('Girder-Authorization')

        if not authHeader:
            authHeader = cherrypy.request.headers.get('Authorization')

        if not authHeader or not authHeader[0:6] == 'Basic ':
            raise RestException('Use HTTP Basic Authentication', 401)

        try:
            credentials = base64.b64decode(authHeader[6:]).decode('utf8')
            if ':' not in credentials:
                # Route a missing separator through the same error path as
                # malformed base64.
                raise TypeError
        except Exception:
            raise RestException('Invalid HTTP Authorization header', 401)

        login, password = credentials.split(':', 1)
        login = login.lower().strip()
        # An '@' means the caller supplied an email address, not a login.
        loginField = 'email' if '@' in login else 'login'

        user = self.model('user').findOne({loginField: login})
        if user is None:
            raise RestException('Login failed.', code=403)

        if not self.model('password').authenticate(user, password):
            raise RestException('Login failed.', code=403)

        setattr(cherrypy.request, 'girderUser', user)
        token = self.sendAuthTokenCookie(user)

    return {
        'user': self.model('user').filter(user, user),
        'authToken': {
            'token': token['_id'],
            'expires': token['expires'],
            'scope': token['scope']
        },
        'message': 'Login succeeded.'
    }
def getTile(self, image, z, x, y, params):
    """Return a single tile of the image at the given z/x/y coordinates.

    :raise RestException: when coordinates are malformed or negative, or
        the tile does not exist (404).
    """
    try:
        x, y, z = int(x), int(y), int(z)
    except ValueError:
        raise RestException('x, y, and z must be integers')
    if x < 0 or y < 0 or z < 0:
        raise RestException('x, y, and z must be positive integers')
    try:
        tileData, tileMime = ImageItem().getTile(image, x, y, z)
    except TileGeneralException as e:
        # str(e) instead of e.message: BaseException.message no longer
        # exists on Python 3, so the original raised AttributeError here
        # instead of the intended 404.
        raise RestException(str(e), code=404)
    setResponseHeader('Content-Type', tileMime)
    setRawResponse()
    return tileData
def within(self, field, geometry, center, radius, limit, offset, sort):
    """
    Search for items that fall within either a GeoJSON polygon or a
    circle described by a center point and a radius.

    :param field: geospatial field to search on; the GEOSPATIAL_FIELD
        prefix is added when not already present.
    :param geometry: GeoJSON polygon to search within, or None.
    :param center: GeoJSON point at the circle's center, or None.
    :param radius: circle radius (same units as _RADIUS_OF_EARTH; it is
        converted to radians for $centerSphere), or None.
    :returns: matching items.
    :raise RestException: on malformed parameters.
    """
    if geometry is not None:
        try:
            GeoJSON.to_instance(geometry, strict=True)
            if geometry['type'] != 'Polygon':
                raise ValueError
        except (TypeError, ValueError):
            raise RestException("Invalid GeoJSON passed as 'geometry' parameter.")

        condition = {
            '$geometry': geometry
        }
    elif center is not None and radius is not None:
        try:
            # Convert to radians for $centerSphere.  TypeError is caught
            # alongside ValueError so a non-numeric radius produces the
            # proper client error instead of an unhandled server error.
            radius /= self._RADIUS_OF_EARTH
            if radius < 0.0:
                raise ValueError
        except (TypeError, ValueError):
            raise RestException("Parameter 'radius' must be a number.")

        try:
            GeoJSON.to_instance(center, strict=True)
            if center['type'] != 'Point':
                raise ValueError
        except (TypeError, ValueError):
            raise RestException("Invalid GeoJSON passed as 'center' parameter.")

        condition = {
            '$centerSphere': [center['coordinates'], radius]
        }
    else:
        raise RestException("Either parameter 'geometry' or both parameters"
                            " 'center' and 'radius' are required.")

    # Prepend the geospatial storage prefix unless already present.  The
    # previous fixed-width slice (field[:3]) could never match the full
    # '<GEOSPATIAL_FIELD>.' prefix unless GEOSPATIAL_FIELD were exactly
    # two characters, so already-qualified fields got double-prefixed.
    if not field.startswith('%s.' % GEOSPATIAL_FIELD):
        field = '%s.%s' % (GEOSPATIAL_FIELD, field)

    query = {
        field: {
            '$geoWithin': condition
        }
    }

    return self._find(query, limit, offset, sort)
def detach(self, volume, params):
    """Detach a volume from the cluster it is attached to.

    Validates that the volume is attached at the provider, that its
    cluster id and mount path are recorded, and that the cluster's
    master instance is running, then fires the async ansible detach
    task and marks the volume DETACHING.

    :raise RestException: (400) when any precondition fails.
    """
    profile_id = parse('profileId').find(volume)[0].value

    profile, secret_key = _get_profile(profile_id)

    # Callback info lets the async task report back to this server.
    girder_callback_info = {
        'girder_api_url': cumulus.config.girder.baseUrl,
        'girder_token': get_task_token()['_id']
    }
    log_write_url = '%s/volumes/%s/log' % (cumulus.config.girder.baseUrl,
                                           volume['_id'])

    p = CloudProvider(dict(secretAccessKey=secret_key, **profile))

    aws_volume = p.get_volume(volume)

    # Sanity-check the provider-side state before trying to detach.
    if aws_volume is None or aws_volume['state'] != VolumeState.INUSE:
        raise RestException('This volume is not attached '
                            'to a cluster', 400)

    if 'clusterId' not in volume:
        raise RestException('clusterId is not set on this volume!', 400)

    try:
        volume['path']
    except KeyError:
        raise RestException('path is not set on this volume!', 400)

    cluster = ModelImporter.model('cluster',
                                  'cumulus').load(volume['clusterId'],
                                                  user=getCurrentUser(),
                                                  level=AccessType.ADMIN)

    master = p.get_master_instance(cluster['_id'])

    if master['state'] != InstanceState.RUNNING:
        raise RestException('Master instance is not running!', 400)

    user = getCurrentUser()
    cluster = ModelImporter.model('cluster',
                                  'cumulus').filter(cluster, user,
                                                    passphrase=False)

    # Run the actual detach asynchronously via celery/ansible.
    cumulus.ansible.tasks.volume.detach_volume\
        .delay(profile, cluster, master,
               self._model.filter(volume, user),
               secret_key, log_write_url, girder_callback_info)

    volume['status'] = VolumeState.DETACHING
    volume = self._model.update_volume(user, volume)

    return self._model.filter(volume, user)
def update(self, taskflow, params):
    """Apply JSON-body updates to a taskflow, rejecting immutable
    properties.
    """
    user = self.getCurrentUser()
    updates = getBodyJson()
    if not updates:
        raise RestException('A body must be provided', code=400)

    immutable = frozenset((
        'access', '_id', 'taskFlowClass', 'log', 'activeTaskCount'
    ))
    for key in updates:
        if key in immutable:
            raise RestException('\'%s\' is an immutable property' % key, 400)

    return self._model.update_taskflow(user, taskflow, updates)
def _getFirstFileByHash(self, algo, hash, user=None):
    """
    Return the first file that the user has access to given its hash and
    its associated hash sum algorithm name.

    :param algo: Algorithm the given hash is encoded with.
    :param hash: Hash of the file to find.
    :param user: User to test access against.
        Default (none) is the current user.
    :return: A file document, or None when no accessible file matches.
    """
    algo = algo.lower()

    if algo not in self.supportedAlgorithms:
        # (Fixed grammar in this message: "algorithm are" -> "algorithms
        # are".)
        msg = 'Invalid algorithm ("%s"). Supported algorithms are: %s.'\
              % (algo, self.supportedAlgorithms)
        raise RestException(msg, code=400)

    query = {algo: hash.lower()}  # Always convert to lower case
    fileModel = self.model('file')
    cursor = fileModel.find(query)

    if not user:
        user = self.getCurrentUser()

    # Return the first hit the user may read; hash collisions across
    # multiple file documents are expected (deduplicated storage).
    for file in cursor:
        if fileModel.hasAccess(file, user, AccessType.READ):
            return file

    return None
def changePassword(self, params):
    """Change the current user's password.

    The 'old' credential may be either the user's actual password or a
    temporary-access token id (from the password-reset flow); in the
    latter case the token is consumed on success.

    :raise RestException: when 'old' is empty.
    :raise AccessException: when 'old' matches neither the password nor
        a valid temporary token for this user.
    """
    self.requireParams(('old', 'new'), params)
    user = self.getCurrentUser()
    token = None

    if not params['old']:
        raise RestException('Old password must not be empty.')

    if (not self.model('password').hasPassword(user)
            or not self.model('password').authenticate(user,
                                                       params['old'])):
        # If not the user's actual password, check for temp access token
        token = self.model('token').load(
            params['old'], force=True, objectId=False, exc=False)
        if (not token or not token.get('userId')
                or token['userId'] != user['_id']
                or not self.model('token').hasScope(
                    token, TokenScope.TEMPORARY_USER_AUTH)):
            raise AccessException('Old password is incorrect.')

    self.model('user').setPassword(user, params['new'])

    if token:
        # Remove the temporary access token if one was used
        self.model('token').remove(token)

    return {'message': 'Password changed.'}
def createUser(self, params):
    """Create a new user.

    Admin callers may set the 'admin' flag on the new user and may create
    accounts even when the registration policy is closed; anonymous or
    non-admin callers are refused when registration is closed.  When an
    anonymous caller registers and the account can log in, the new user
    is logged in immediately and an auth cookie is sent.
    """
    self.requireParams(
        ('firstName', 'lastName', 'login', 'password', 'email'), params)

    currentUser = self.getCurrentUser()

    regPolicy = self.model('setting').get(SettingKey.REGISTRATION_POLICY)

    if currentUser is not None and currentUser['admin']:
        admin = self.boolParam('admin', params, default=False)
    else:
        admin = False
        # Only non-admin callers are subject to the closed-registration
        # policy.  The original checked this unconditionally, which
        # prevented even administrators from creating accounts and
        # contradicted the error message below.
        if regPolicy == 'closed':
            raise RestException(
                'Registration on this instance is closed. Contact an '
                'administrator to create an account for you.')

    user = self.model('user').createUser(
        login=params['login'], password=params['password'],
        email=params['email'], firstName=params['firstName'],
        lastName=params['lastName'], admin=admin)

    outputUser = self.model('user').filter(user, user)

    # Anonymous self-registration: log the new user in right away.
    if not currentUser and self.model('user').canLogin(user):
        setCurrentUser(user)
        token = self.sendAuthTokenCookie(user)
        outputUser['authToken'] = {
            'token': token['_id'],
            'expires': token['expires']
        }
    return outputUser
def mapReduce(self, item, mapScript, reduceScript, params=None):
    """Run a map/reduce job over the data file referenced by *item*.

    When the file lives in a recognized database assetstore (currently
    only mongo), the map/reduce runs in the database; otherwise the file
    is downloaded and processed locally.

    :param params: optional extra parameters for the map/reduce run.
    :raise RestException: for database types without map/reduce support.
    """
    # Default to None instead of the original 'params={}' to avoid the
    # shared-mutable-default-argument pitfall; callers see no difference.
    if params is None:
        params = {}

    # Get the current or the anonymous user
    user = self.getCurrentUser()
    if user is None:
        user = self.app.anonymousAccess.getAnonymousUser()

    # Figure out what kind of assetstore the file is in
    fileInfo = self.model('file').load(item['meta']['rlab']['fileId'],
                                       level=AccessType.READ,
                                       user=user,
                                       exc=True)
    dbInfo = getDbInfoForFile(fileInfo)

    # Okay, figure out how/where we want to run our mapreduce code
    if dbInfo is not None:
        if dbInfo['type'] == 'mongo':
            collection = self.getMongoCollection(dbInfo)
            result = self.mongoMapReduce(mapScript, reduceScript,
                                         collection, params)
        else:
            raise RestException('MapReduce for ' + dbInfo['type'] +
                                ' databases is not yet supported')
    else:
        result = self.mapReduceViaDownload(item, user, mapScript,
                                           reduceScript, params)

    return result
def addImage(self, dataset, params):
    """Add a single image, streamed in the request body, to a dataset.

    Requires 'filename' and 'signature' parameters and dataset-creation
    permission for the current user.

    :raise ValidationException: when filename or signature is blank.
    :raise RestException: when the request body is empty.
    """
    params = self._decodeParams(params)
    self.requireParams(['filename', 'signature'], params)

    user = self.getCurrentUser()
    User().requireCreateDataset(user)

    filename = params['filename'].strip()
    if not filename:
        raise ValidationException('Filename must be specified.', 'filename')

    # NOTE(review): 'signature' appears to be an uploader attestation
    # string recorded alongside the image — confirm its exact semantics
    # against the Dataset model.
    signature = params['signature'].strip()
    if not signature:
        raise ValidationException('Signature must be specified.', 'signature')

    # Wrap the raw request body so its length can be checked and it can
    # be streamed into the assetstore without buffering it all here.
    imageDataStream = RequestBodyStream(cherrypy.request.body)
    imageDataSize = len(imageDataStream)
    if not imageDataSize:
        raise RestException('No data provided in request body.')

    image = Dataset().addImage(
        dataset=dataset,
        imageDataStream=imageDataStream,
        imageDataSize=imageDataSize,
        filename=filename,
        signature=signature,
        user=user)

    return Image().filter(image, user=user)
def log(self, task, params):
    """Append the raw JSON request body as a log entry on the task.

    :raise RestException: 400 when the body is empty.
    """
    payload = cherrypy.request.body.read().decode('utf8')
    if not payload:
        raise RestException('Log entry must be provided', code=400)
    self._model.append_to_log(task, json.loads(payload))
def update(self, id, params):
    """Update the mutable fields of a molecule from the JSON request body.

    'name' is replaced ($set) and 'logs' entries are appended uniquely
    ($addToSet); all other body keys are ignored.

    :raise RestException: 404 when the molecule is missing or unwritable.
    """
    user = self.getCurrentUser()
    mol = MoleculeModel().load(id, user=user, level=AccessType.WRITE)
    if not mol:
        raise RestException('Molecule not found.', code=404)

    body = self.getBodyJson()
    updates = {}
    if 'name' in body:
        updates['$set'] = {'name': body['name']}
    if 'logs' in body:
        updates['$addToSet'] = {'logs': body['logs']}

    # Call the base-class update() to run the raw Mongo update, skipping
    # any extra processing MoleculeModel.update might add.
    super(MoleculeModel, MoleculeModel()).update({'_id': mol['_id']}, updates)

    # Reload so the caller sees the post-update document.
    return self._clean(MoleculeModel().load(id, user=user))
def find_id(self, id, params):
    """Fetch a single molecule by id, requiring read access.

    :raise RestException: 404 when no accessible molecule matches.
    """
    molecule = self._model.load(id, level=AccessType.READ,
                                user=getCurrentUser())
    if not molecule:
        raise RestException('Molecule not found.', code=404)
    return self._clean(molecule)