def handle_post(data, params):
    """ Handle an HTTP POST request.

    NOTE: we only support execute requests as POST for the moment
    """
    try:
        request = cwt.wps.CreateFromDocument(data)
    except Exception as e:
        raise WPSError('Malformed WPS execute request')

    if isinstance(request, cwt.wps.CTD_ANON_11):
        raise WPSError('GetCapabilities POST not supported')
    elif isinstance(request, cwt.wps.CTD_ANON_12):
        raise WPSError('DescribeProcess POST not supported')

    data_inputs = {}

    for x in request.DataInputs.Input:
        data_inputs[x.Identifier.value()] = x.Data.LiteralData.value()

    api_key = params.get('api_key')

    logger.info('Handling POST request for API key %s', api_key)

    with metrics.WPS_REQUESTS.labels('Execute', 'POST').time():
        response = handle_execute(api_key, request.Identifier.value(), data_inputs)

    return response

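# Example: the kind of WPS 1.0.0 Execute POST body that handle_post() parses
# with cwt.wps.CreateFromDocument(). This is a hedged sketch; the process
# identifier, the 'variable' input and the api_key value below are placeholders,
# not values taken from this codebase.
EXAMPLE_EXECUTE_XML = """<?xml version="1.0" encoding="UTF-8"?>
<wps:Execute service="WPS" version="1.0.0"
    xmlns:wps="http://www.opengis.net/wps/1.0.0"
    xmlns:ows="http://www.opengis.net/ows/1.1">
  <ows:Identifier>CDAT.subset</ows:Identifier>
  <wps:DataInputs>
    <wps:Input>
      <ows:Identifier>variable</ows:Identifier>
      <wps:Data>
        <wps:LiteralData>[...]</wps:LiteralData>
      </wps:Data>
    </wps:Input>
  </wps:DataInputs>
</wps:Execute>
"""

# Hypothetical call; params normally come from the request query string.
# response = handle_post(EXAMPLE_EXECUTE_XML, {'api_key': 'my-api-key'})
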
def generate_grid(self, gridder, spatial, chunk):
    try:
        grid_type, grid_param = gridder.grid.split('~')
    except ValueError:
        raise WPSError('Error generating grid "{name}"', name=gridder.grid)

    if grid_type.lower() == 'uniform':
        result = re.match('^(.*)x(.*)$', grid_param)

        if result is None:
            raise WPSError('Failed to parse uniform configuration from {value}', value=grid_param)

        try:
            start_lat, nlat, delta_lat = self.parse_uniform_arg(result.group(1), -90.0, 180.0)
        except WPSError:
            raise

        try:
            start_lon, nlon, delta_lon = self.parse_uniform_arg(result.group(2), 0.0, 360.0)
        except WPSError:
            raise

        grid = cdms2.createUniformGrid(start_lat, nlat, delta_lat, start_lon, nlon, delta_lon)

        logger.info('Created target uniform grid {} from lat {}:{}:{} lon {}:{}:{}'.format(
            grid.shape, start_lat, delta_lat, nlat, start_lon, delta_lon, nlon))
    else:
        try:
            nlats = int(grid_param)
        except ValueError:
            raise WPSError('Error converting gaussian parameter to an int')

        grid = cdms2.createGaussianGrid(nlats)

        logger.info('Created target gaussian grid {}'.format(grid.shape))

    target = cdms2.MV2.ones(grid.shape)

    target.setAxisList(grid.getAxisList())

    lat = chunk.getLatitude()

    lon = chunk.getLongitude()

    try:
        lat_spec = spatial[lat.id]

        lon_spec = spatial[lon.id]
    except KeyError as e:
        logger.debug('Skipping subsetting the target grid')
    else:
        target = target(latitude=lat_spec, longitude=lon_spec)

    return target.getGrid()

def wait_operation(self, operation):
    # TODO something other than a constant timeout
    try:
        result = operation.wait(timeout=10 * 60)
    except cwt.CWTError as e:
        raise WPSError('Process {!r} failed with {!r}', operation.identifier, str(e))

    if not result:
        raise WPSError('Operation {!r} failed', operation.identifier)

    self.job.step_complete()

def initialize(self, user_id, job_id):
    try:
        self.user = models.User.objects.get(pk=user_id)
    except models.User.DoesNotExist:
        raise WPSError('User with id "{id}" does not exist', id=user_id)

    try:
        self.job = models.Job.objects.get(pk=job_id)
    except models.Job.DoesNotExist:
        raise WPSError('Job with id "{id}" does not exist', id=job_id)

    credentials.load_certificate(self.user)

def refresh_certificate(user):
    """ Refresh user certificate.

    Will try to refresh a user's certificate if they authenticated using OAuth2.

    Args:
        user: User object.

    Returns:
        The new certificate.
    """
    logger.info('Refreshing user certificate')

    if user.auth.type == 'myproxyclient':
        raise CertificateError(user, 'MyProxyClient certificate has expired')

    url, services = discover.discoverYadis(user.auth.openid_url)

    auth_service = openid.find_service_by_type(services, URN_AUTHORIZE)

    cert_service = openid.find_service_by_type(services, URN_RESOURCE)

    try:
        extra = json.loads(user.auth.extra)
    except ValueError as e:
        raise WPSError('Missing OAuth2 state, try authenticating with OAuth2 again')

    if 'token' not in extra:
        raise WPSError('Missing OAuth2 token, try authenticating with OAuth2 again')

    try:
        cert, key, new_token = oauth2.get_certificate(extra['token'], extra['state'],
                                                      auth_service.server_url,
                                                      cert_service.server_url,
                                                      refresh=True)
    except KeyError as e:
        raise WPSError('Missing OAuth2 {!r}', e)

    logger.info('Retrieved certificate and new token')

    extra['token'] = new_token

    user.auth.extra = json.dumps(extra)

    user.auth.cert = ''.join([cert, key])

    user.auth.save()

    return user.auth.cert

def check_error(self):
    if self.oph_client.last_error is not None and self.oph_client.last_error != '':
        error = '{}\n'.format(self.oph_client.last_error)

        res = self.oph_client.deserialize_response()

        try:
            for x in res['response'][2]['objcontent']:
                for y in x['rowvalues']:
                    error += '\t{}: {}\n'.format(y[-3], y[-1])
        except IndexError:
            raise WPSError('Failed to parse last error from Ophidia')

        raise WPSError(error)

def generate_user_defined_grid(self, gridder):
    try:
        grid_type, grid_param = gridder.grid.split('~')
    except AttributeError:
        return None
    except ValueError:
        raise WPSError('Error generating grid "{name}"', name=gridder.grid)

    logger.info('Generating grid %r %r', grid_type, grid_param)

    if grid_type.lower() == 'uniform':
        result = re.match('^(.*)x(.*)$', grid_param)

        if result is None:
            raise WPSError('Failed to parse uniform configuration from {value}', value=grid_param)

        try:
            start_lat, nlat, delta_lat = self.parse_uniform_arg(result.group(1), -90.0, 180.0)
        except WPSError:
            raise

        try:
            start_lon, nlon, delta_lon = self.parse_uniform_arg(result.group(2), 0.0, 360.0)
        except WPSError:
            raise

        grid = cdms2.createUniformGrid(start_lat, nlat, delta_lat, start_lon, nlon, delta_lon)

        logger.info('Created target uniform grid {} from lat {}:{}:{} lon {}:{}:{}'.format(
            grid.shape, start_lat, delta_lat, nlat, start_lon, delta_lon, nlon))
    elif grid_type.lower() == 'gaussian':
        try:
            nlats = int(grid_param)
        except ValueError:
            raise WPSError('Error converting gaussian parameter to an int')

        grid = cdms2.createGaussianGrid(nlats)

        logger.info('Created target gaussian grid {}'.format(grid.shape))
    else:
        raise WPSError('Unknown grid type for regridding: {}', grid_type)

    return grid

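# Example grid strings accepted by generate_user_defined_grid(). This is a
# minimal sketch: cwt.Gridder comes from esgf-compute-api, and the
# colon-separated uniform spec is an assumption since parse_uniform_arg() is
# not shown here.
import cwt

# Gaussian grid identified by its number of latitudes.
gaussian_gridder = cwt.Gridder(grid='gaussian~32')

# Uniform grid: the part before 'x' describes latitude, the part after
# describes longitude, split by the '^(.*)x(.*)$' regex above.
uniform_gridder = cwt.Gridder(grid='uniform~-90:180:1x0:360:1')
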
def get_input(self, name):
    if name in self.variable:
        return self.variable[name]
    elif name in self.intermediate:
        return self.intermediate[name]

    raise WPSError('Unable to locate input {!r}', name)

def load_user(self, user_id):
    try:
        user = models.User.objects.get(pk=user_id)
    except models.User.DoesNotExist:
        raise WPSError('User "{}" does not exist', user_id)

    return user

def authorization(request):
    if not request.user.is_authenticated:
        try:
            proto = request.META['HTTP_X_FORWARDED_PROTO']

            host = request.META['HTTP_X_FORWARDED_HOST']

            uri = request.META['HTTP_X_FORWARDED_URI'].strip('/')
        except KeyError as e:
            raise WPSError('Could not reconstruct forwarded url, missing {!s}', e)

        prefix = request.META.get('HTTP_X_FORWARDED_PREFIX', '').strip('/')

        logger.info('PROTO %r', proto)
        logger.info('HOST %r', host)
        logger.info('URI %r', uri)
        logger.info('PREFIX %r', prefix)

        forward = '{!s}://{!s}'.format(proto, host)

        if uri != '':
            forward = '{!s}/{!s}'.format(forward, uri)

        if prefix != '':
            forward = '{!s}/{!s}'.format(forward, prefix)

        redirect_url = '{!s}?next={!s}'.format(settings.WPS_LOGIN_URL, forward)

        return http.HttpResponseRedirect(redirect_url)

    return http.HttpResponse()

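# Worked example of the forward URL that authorization() rebuilds from the
# X-Forwarded-* headers; the header values here are illustrative.
proto, host, uri, prefix = 'https', 'example.com', 'wps', 'compute'

forward = '{!s}://{!s}'.format(proto, host)

if uri != '':
    forward = '{!s}/{!s}'.format(forward, uri)

if prefix != '':
    forward = '{!s}/{!s}'.format(forward, prefix)

assert forward == 'https://example.com/wps/compute'
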
def search_dataset(request):
    try:
        common.authentication_required(request)

        try:
            dataset_id = request.GET['dataset_id']

            index_node = request.GET['index_node']
        except KeyError as e:
            raise common.MissingParameterError(name=e.message)

        shard = request.GET.get('shard', None)

        query = request.GET.get('query', None)

        docs = search_solr(dataset_id, index_node, shard, query)

        dataset_variables = parse_solr_docs(docs)

        try:
            query_variable = dataset_variables.keys()[0]
        except IndexError as e:
            raise WPSError('Dataset "{dataset_id}" returned no variables', dataset_id=dataset_id)
    except WPSError as e:
        logger.exception('Error retrieving ESGF search results')

        return common.failed(e.message)
    else:
        return common.success(dataset_variables)

def filename(self):
    if self.variable is None:
        raise WPSError('No variable set')

    parts = urlparse.urlparse(self.variable.uri)

    return parts.path.split('/')[-1]

def load_job(self, job_id):
    try:
        job = models.Job.objects.get(pk=job_id)
    except models.Job.DoesNotExist:
        raise WPSError('Job {} does not exist', job_id)

    return job

def load_data_inputs(data_inputs, resolve_inputs=False):
    o, d, v = cwt.WPSClient.parse_data_inputs(data_inputs)

    v = dict((x.name, x) for x in v)

    d = dict((x.name, x) for x in d)

    o = dict((x.name, x) for x in o)

    logger.info('Loaded variables %r', v)

    logger.info('Loaded domains %r', d)

    logger.info('Loaded operations %r', o)

    if resolve_inputs:
        collected_inputs = list(y for x in o.values() for y in x.inputs)

        try:
            root_op = [o[x] for x in o.keys() if x not in collected_inputs][0]
        except IndexError as e:
            raise WPSError('Error resolving root operation')

        root_op.resolve_inputs(v, o)

        try:
            for x in o.values():
                if x.domain is not None:
                    x.domain = d[x.domain]
        except KeyError as e:
            raise WPSError('Error resolving domain')

    return o, d, v

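# A small sketch of the root-operation resolution used above, with plain dicts
# standing in for cwt.Process objects (the operation names are made up).
operations = {
    'subset': {'inputs': ['v0']},
    'max': {'inputs': ['subset']},
}

collected_inputs = [y for x in operations.values() for y in x['inputs']]

# 'max' is the only operation not consumed as an input by another operation,
# so it is the root of the workflow.
root = [name for name in operations if name not in collected_inputs][0]

assert root == 'max'
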
def listen_edas_output(self, poller, proc):
    edas_output_path = None

    proc.log('Listening for EDAS status')

    while True:
        events = dict(poller.poll(settings.EDAS_TIMEOUT * 1000))

        if len(events) == 0:
            raise WPSError('EDAS timed out waiting for heartbeat or output message')

        data = events.keys()[0].recv()

        check_exceptions(data)

        parts = data.split('!')

        if 'file' in parts:
            sub_parts = parts[-1].split('|')

            edas_output_path = sub_parts[-1]

            break
        elif 'response' in parts:
            proc.log('EDAS Heartbeat')

    proc.log('Received success from EDAS backend')

    return edas_output_path

def map_axis(axis, dimension, units):
    """ Maps an axis to a dimension.

    Args:
        axis (cdms2.axis): Axis to be mapped.
        dimension (cwt.Dimension): Dimension being mapped to.
        units (str): Units to be used if a time axis is being mapped.

    Returns:
        A slice that will be used as a cdms2 selector.
    """
    if axis.isTime() and units is not None:
        axis = axis.clone()

        axis.toRelativeTime(str(units))

    if dimension is None or dimension.crs == cwt.INDICES:
        selector = map_axis_indices(axis, dimension)
    elif dimension.crs == cwt.VALUES:
        selector = map_axis_values(axis, dimension)
    elif dimension.crs == cwt.TIMESTAMPS:
        selector = map_axis_timestamps(axis, dimension)
    else:
        raise WPSError('Unknown CRS {!r}', dimension.crs)

    return selector

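# Hedged sketch of the three dimension kinds dispatched above; the
# cwt.Dimension arguments are assumed to be (name, start, end, crs) and the
# values are placeholders.
import cwt

by_index = cwt.Dimension('time', 0, 120, cwt.INDICES)
by_value = cwt.Dimension('lat', -45.0, 45.0, cwt.VALUES)
by_timestamp = cwt.Dimension('time', '1980-01-01', '1990-01-01', cwt.TIMESTAMPS)

# map_axis(axis, by_index, units) would then dispatch to map_axis_indices().
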
def merge_dimensions(context, file_dimensions):
    """ Merge user and file dimensions.

    Args:
        context (OperationContext): Current context.
        file_dimensions (list, tuple): Dimension names in file.

    Returns:
        Complete list of dimension names.
    """
    try:
        user_dim = [x.name for x in context.domain.dimensions]
    except AttributeError:
        user_dim = []

    user_dim = set(user_dim)

    logger.info('User dimensions %r', user_dim)

    file_dim = set(file_dimensions)

    logger.info('File dimensions %r', file_dim)

    # Check that user dimensions are a subset of the file dimensions
    if not user_dim <= file_dim:
        raise WPSError('User defined axes {!r} were not found in the file', ', '.join(user_dim - file_dim))

    # Isolate dimensions not defined by the user
    file_dim -= user_dim

    # Return the union of user and file dimensions
    return user_dim | file_dim

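# Worked example of the set logic in merge_dimensions() (axis names are
# illustrative).
user_dim = {'time', 'lat'}
file_dim = {'time', 'lat', 'lon'}

assert user_dim <= file_dim        # every user-defined axis exists in the file

file_dim -= user_dim               # {'lon'} is left without a user constraint

assert (user_dim | file_dim) == {'time', 'lat', 'lon'}
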
def prepare_data_inputs(variable, domain, operation):
    # TODO Remove when https://github.com/ESGF/esgf-compute-api/issues/39 is
    # resolved.
    class Dummy(object):
        metadata = None

    operation.description = Dummy()

    data_inputs = '['

    operation.inputs = [x.name for x in variable]

    variable = [x.parameterize() for x in variable]

    if len(variable) > 0:
        data_inputs = '{}variable = {}'.format(data_inputs, json.dumps(variable))

    try:
        operation.domain = domain.name
    except AttributeError:
        raise WPSError('EDASK requires that a domain be supplied')

    data_inputs = '{};domain = {}'.format(data_inputs, json.dumps([domain.parameterize()]))

    data_inputs = '{};operation = {}]'.format(data_inputs, json.dumps([operation.parameterize()]))

    logger.info('Data Inputs "%r"', data_inputs)

    return data_inputs

def read_grid_from_file(gridder):
    url_validator = URLValidator(['https', 'http'])

    try:
        url_validator(gridder.grid.uri)
    except ValidationError:
        raise WPSError('Path to grid file is not an OpenDAP url: {}', gridder.grid.uri)

    try:
        with cdms2.open(gridder.grid.uri) as infile:
            data = infile(gridder.grid.var_name)
    except cdms2.CDMSError:
        raise WPSError('Failed to read the grid from {} in {}', gridder.grid.var_name, gridder.grid.uri)

    return data.getGrid()

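# Hedged usage sketch for read_grid_from_file(): the target grid is described
# by a variable in an OpenDAP file. The URL and variable name are placeholders,
# and cwt.Gridder accepting a cwt.Variable for grid is assumed here.
import cwt

target = cwt.Variable('https://example.com/thredds/dodsC/grids/t85.nc', 'tas')

gridder = cwt.Gridder(grid=target)

# grid = read_grid_from_file(gridder)
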
def status(request, job_id):
    try:
        job = models.Job.objects.get(pk=job_id)
    except models.Job.DoesNotExist:
        raise WPSError('Status for job "{job_id}" does not exist', job_id=job_id)

    return http.HttpResponse(job.report, content_type='text/xml')

def load_data_inputs(self, variable_raw, domain_raw, operation_raw):
    variable = {}

    for item in json.loads(variable_raw):
        v = cwt.Variable.from_dict(item)

        variable[v.name] = v

    logger.info('Loaded %r variables', len(variable))

    domain = {}

    for item in json.loads(domain_raw):
        d = cwt.Domain.from_dict(item)

        domain[d.name] = d

    logger.info('Loaded %r domains', len(domain))

    operation = {}

    for item in json.loads(operation_raw):
        o = cwt.Process.from_dict(item)

        operation[o.name] = o

    logger.info('Loaded %r operations', len(operation))

    for o in operation.values():
        if o.domain is not None:
            logger.info('Resolving domain %r', o.domain)

            o.domain = domain[o.domain]

        inputs = []

        for inp in o.inputs:
            if inp in variable:
                inputs.append(variable[inp])
            elif inp in operation:
                inputs.append(operation[inp])
            else:
                kwargs = {
                    'inp': inp,
                    'name': o.name,
                    'id': o.identifier,
                }

                raise WPSError('Unable to resolve input "{inp}" for operation "{name}" - "{id}"', **kwargs)

            logger.info('Resolved input %r', inp)

        o.inputs = inputs

    return variable, domain, operation

def connect_socket(context, socket_type, host, port):
    try:
        sock = context.socket(socket_type)

        sock.connect('tcp://{}:{}'.format(host, port))
    except zmq.ZMQError:
        raise WPSError('Failed to connect to EDAS {} on port {}', host, port)

    return sock

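# Minimal sketch of how the EDAS sockets might be created with connect_socket();
# the hostname and port numbers are placeholders.
import zmq

context = zmq.Context()

req_sock = connect_socket(context, zmq.REQ, 'edas.example.com', 5670)
pull_sock = connect_socket(context, zmq.PULL, 'edas.example.com', 5671)
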
def min_func(data, axes):
    for axis in axes:
        axis_index = data.getAxisIndex(axis)

        if axis_index == -1:
            raise WPSError('Unknown axis {!s}', axis)

        data = MV2.min(data, axis=axis_index)

    return data

def edas_wait(socket, timeout):
    if socket.poll(timeout * 1000, zmq.POLLIN) == 0:
        raise WPSError('Timed out waiting for response')

    data = socket.recv()

    logger.info('Received data, length %r, peek %r', len(data), edas_peek(data))

    return data

def check_exceptions(data):
    if '<exceptions>' in data:
        index = data.index('!')

        data = data[index + 1:]

        root = ET.fromstring(data)

        exceptions = root.findall('./exceptions/*')

        if len(exceptions) > 0:
            raise WPSError('EDAS exception: {error}', error=exceptions[0].text)

def wps(request):
    try:
        api_key, op, identifier, data_inputs = handle_request(request)

        logger.info('Handling WPS request {} for api key {}'.format(op, api_key))

        if op == 'getcapabilities':
            server = models.Server.objects.get(host='default')

            response = server.capabilities
        elif op == 'describeprocess':
            process = models.Process.objects.get(identifier=identifier)

            response = process.description
        else:
            try:
                user = models.User.objects.filter(auth__api_key=api_key)[0]
            except IndexError:
                raise WPSError('Missing API key for WPS execute request')

            response = wps_execute(user, identifier, data_inputs)
    except WPSExceptionError as e:
        failure = wps_lib.ProcessFailed(exception_report=e.report)

        exc_response = wps_xml.execute_response('', failure, '')

        response = exc_response.xml()
    except WPSError as e:
        exc_report = wps_lib.ExceptionReport(settings.VERSION)

        exc_report.add_exception(wps_lib.NoApplicableCode, str(e))

        failure = wps_lib.ProcessFailed(exception_report=exc_report)

        exc_response = wps_xml.execute_response('', failure, '')

        response = exc_response.xml()
    except Exception as e:
        exc_report = wps_lib.ExceptionReport(settings.VERSION)

        exc_report.add_exception(wps_lib.NoApplicableCode, 'Please report this as a bug: {}'.format(str(e)))

        failure = wps_lib.ProcessFailed(exception_report=exc_report)

        exc_response = wps_xml.execute_response('', failure, '')

        response = exc_response.xml()
    finally:
        return http.HttpResponse(response, content_type='text/xml')

def int_or_float(value):
    if isinstance(value, (int, float)):
        return value

    try:
        return int(value)
    except ValueError:
        pass

    try:
        return float(value)
    except ValueError:
        raise WPSError('Failed to parse "{value}" as a float or int', value=value)

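# Usage examples for int_or_float().
assert int_or_float(3) == 3
assert int_or_float('10') == 10
assert int_or_float('10.5') == 10.5
# int_or_float('ten') raises WPSError.
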
def load_model(obj, name, model_class):
    assert hasattr(obj, name)

    pk = getattr(obj, name, None)

    if pk is not None:
        try:
            value = model_class.objects.get(pk=pk)
        except model_class.DoesNotExist:
            raise WPSError('{!s} {!r} does not exist', model_class.__name__, pk)

        setattr(obj, name, value)

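# Hypothetical usage sketch for load_model(): resolve a stored primary key into
# a model instance in place. The Args class, its 'job' attribute and the pk
# value are made up, and running this requires a configured Django database.
class Args(object):
    job = 4

args = Args()

load_model(args, 'job', models.Job)   # args.job is now a models.Job instance
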
def edas_result(pull_socket):
    data = edas_wait(pull_socket, settings.WPS_EDAS_EXECUTE_TIMEOUT)

    try:
        id, type, msg = data.split('!')
    except ValueError:
        raise WPSError('Failed to parse EDASK response, expected 3 tokens')

    if type == 'error':
        raise WPSError(msg)

    parts = msg.split('|')

    var_name = parts[-3].replace('[', '%5b')

    var_name = var_name.replace(']', '%5d')

    try:
        output = set_output(var_name, parts[-1])
    except IndexError:
        raise WPSError('Failed to set the output of the EDASK operation')

    return output

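# Worked example of the EDASK response layout that edas_result() expects:
# three '!'-separated tokens whose message is '|'-delimited (the payload below
# is made up).
data = 'job-1!response!meta|other|tas[d0]|42|https://example.com/edas/out.nc'

id, type, msg = data.split('!')

parts = msg.split('|')

var_name = parts[-3].replace('[', '%5b').replace(']', '%5d')

assert var_name == 'tas%5bd0%5d'
assert parts[-1] == 'https://example.com/edas/out.nc'
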
def map_domain(self, context):
    """ Maps a domain.

    Args:
        context (OperationContext): Current operation context.

    Returns:
        An updated operation context.
    """
    for input in context.inputs:
        self.status('Mapping {!r}', input.filename)

        with input.open(context.user) as var:
            axes = var.getAxisList()

            input.mapped_order = [x.id for x in axes]

            logger.info('Axis mapped order %r', input.mapped_order)

            dimensions = merge_dimensions(context, input.mapped_order)

            logger.info('Merge dimensions %r', dimensions)

            for name in dimensions:
                try:
                    dim = context.domain.get_dimension(name)
                except AttributeError:
                    dim = None

                axis_index = var.getAxisIndex(name)

                if axis_index == -1:
                    raise WPSError('Axis {!r} was not found in remote file', name)

                axis = var.getAxis(axis_index).clone()

                try:
                    input.mapped[name] = map_axis(axis, dim, context.units)
                # except WPSError:
                #     raise
                except Exception:
                    input.mapped = None

                    break

                logger.info('Mapped %r to %r', name, input.mapped[name])

    return context
