def disable(self, task_id):
    id_list = []
    for each_id in task_id:
        conn = pecan.request.storage_conn
        tasks = conn.get_rpttasks(task_id=each_id)
        if not tasks:
            LOG.error("Failed to get task %s." % each_id)
            raise wsme.exc.ClientSideError(
                _("Report task with task_id=%s does not exist") % each_id,
                404)
        if tasks[0]['task_status'] == "Inactive":
            LOG.error("Task with task_id=%s already disabled", each_id)
            raise wsme.exc.ClientSideError(
                _("Report task with task_id=%s is already disabled!")
                % each_id,
                404)
        ctx = context.RequestContext()
        self.rpcclient.disable_task(ctx, task=tasks[0])
        # Sleep 1s to give the agent time to update the database.
        time.sleep(1)
        task_now = conn.get_rpttasks(task_id=each_id)
        id_list.append(each_id)
        # return ReportTask.from_dict(task_now[0])
    return id_list
def check_string_length(value, name=None, min_length=0, max_length=None):
    """Check the length of specified string

    :param value: the value of the string
    :param name: the name of the string
    :param min_length: the min_length of the string
    :param max_length: the max_length of the string
    """
    if not isinstance(value, six.string_types):
        if name is None:
            msg = _("The input is not a string or unicode")
        else:
            msg = _("%s is not a string or unicode") % name
        raise exception.InvalidInput(message=msg)

    if name is None:
        name = value

    if len(value) < min_length:
        msg = _("%(name)s has a minimum character requirement of "
                "%(min_length)s.") % {'name': name,
                                      'min_length': min_length}
        raise exception.InvalidInput(message=msg)

    if max_length and len(value) > max_length:
        msg = _("%(name)s has more than %(max_length)s "
                "characters.") % {'name': name, 'max_length': max_length}
        raise exception.InvalidInput(message=msg)
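# Hedged usage sketch (not part of the original module): validating a
# user-supplied field before persisting it. The field name and limits
# below are made up for illustration.
#
#     check_string_length('web-server-01', name='hostname',
#                         min_length=1, max_length=255)
#     # raises exception.InvalidInput if the value is not a string,
#     # is shorter than min_length, or exceeds max_length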
def post(self, data):
    ctx = context.RequestContext()
    conn = pecan.request.storage_conn
    tasks = list(conn.get_rpttasks(task_name=data.task_name))
    if tasks:
        raise wsme.exc.ClientSideError(
            _("Task with task_name='%s' already exists") % data.task_name,
            409)
    data = data.to_dict()
    now = datetime.now()
    data['task_createtime'] = now
    data['task_updatetime'] = now
    data['task_id'] = str(uuid.uuid4())
    try:
        conn.add_rpttask(data)
    except Exception as e:
        # NOTE: `data` is a dict at this point, so use key access rather
        # than attribute access, and include the caught exception.
        raise wsme.exc.ClientSideError(
            _("Adding task '%(name)s' failed: %(err)s")
            % {'name': data['task_name'], 'err': e},
            500)
    self.rpcclient.add_task(ctx, task=data)
    # Sleep 1s to give the agent time to update the database.
    time.sleep(1)
    task_now = conn.get_rpttasks(task_id=data['task_id'])
    return ReportTask.from_dict(task_now[0])
def get_meters(self, user=None, project=None, resource=None,
               source=None, metaquery=None, pagination=None):
    """Return an iterable of models.Meter instances

    :param user: Optional ID for user that owns the resource.
    :param project: Optional ID for project that owns the resource.
    :param resource: Optional resource filter.
    :param source: Optional source filter.
    :param metaquery: Optional dict with metadata to match on.
    :param pagination: Optional pagination query.
    """
    metaquery = metaquery or {}
    if pagination:
        raise report.NotImplementedError(_('Pagination not implemented'))
    with self.conn_pool.connection() as conn:
        resource_table = conn.table(self.RESOURCE_TABLE)
        q = hbase_utils.make_query(metaquery=metaquery, user_id=user,
                                   project_id=project,
                                   resource_id=resource, source=source)
        LOG.debug(_("Query Resource table: %s") % q)
        gen = resource_table.scan(filter=q)
        # We need a result set to be sure that the user doesn't receive
        # several identical meters. Please see bug
        # https://bugs.launchpad.net/report/+bug/1301371
        result = set()
        for ignored, data in gen:
            flatten_result, s, meters, md = hbase_utils.deserialize_entry(
                data)
            for m in meters:
                _m_rts, m_source, name, m_type, unit = m[0]
                meter_dict = {'name': name,
                              'type': m_type,
                              'unit': unit,
                              'resource_id': flatten_result['resource_id'],
                              'project_id': flatten_result['project_id'],
                              'user_id': flatten_result['user_id']}
                frozen_meter = frozenset(meter_dict.items())
                if frozen_meter in result:
                    continue
                result.add(frozen_meter)
                meter_dict.update(
                    {'source': m_source if m_source else None})
                yield models.Meter(**meter_dict)
def validate(reporttask):
    paramdata = {}
    paramdata['task_name'] = reporttask.task_name
    paramdata['task_type'] = reporttask.task_type
    paramdata['task_period'] = reporttask.task_period
    paramdata['task_language'] = reporttask.task_language
    paramdata['task_content'] = reporttask.task_content
    paramdata['task_status'] = reporttask.task_status
    paramdata['task_metadata'] = reporttask.task_metadata
    for k, value in paramdata.items():
        if value != wtypes.Unset:
            if not value:
                raise wsme.exc.ClientSideError(
                    _("Error in posting task: "
                      "parameter '%s' is null") % k)
    paramdata['task_id'] = reporttask.task_id
    paramdata['task_time'] = reporttask.task_time
    for k, value in paramdata.items():
        if value != wtypes.Unset:
            if len(value) >= 256:
                raise wsme.exc.ClientSideError(
                    _("Error in posting task: "
                      "parameter '%s' is too long, limited to 255 "
                      "characters") % k)
    try:
        paramdata['task_content'] = json.loads(paramdata['task_content'])
        paramdata['task_metadata'] = json.loads(paramdata['task_metadata'])
    except ValueError:
        raise wsme.exc.ClientSideError(
            _("Adding the task failed: task_content or task_metadata "
              "is not valid JSON."), 500)
    content_type = paramdata['task_content'].get('meterlist', [])
    if not content_type or not isinstance(content_type, list):
        raise wsme.exc.ClientSideError(
            _("task_content format error."))
    if paramdata['task_type'] == 'meter':
        report_list = meter_list
    elif paramdata['task_type'] == 'monitor':
        report_list = monitor_list
    else:
        report_list = alarm_list
    if not set(content_type).issubset(set(report_list)):
        raise wsme.exc.ClientSideError(
            _("task_content content error."))
    return reporttask
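# Hedged illustration (not part of the original module): the shape of the
# JSON strings validate() accepts. The meter names are hypothetical; real
# values must be members of meter_list / monitor_list / alarm_list for
# the chosen task_type.
#
#     reporttask.task_content  = '{"meterlist": ["cpu", "memory.usage"]}'
#     reporttask.task_metadata = '{"description": "weekly cpu report"}'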
def clear(self):
    LOG.debug(_('Dropping HBase schema...'))
    with self.conn_pool.connection() as conn:
        for table in [self.RESOURCE_TABLE, self.METER_TABLE]:
            try:
                conn.disable_table(table)
            except Exception:
                LOG.debug(_('Cannot disable table but ignoring error'))
            try:
                conn.delete_table(table)
            except Exception:
                LOG.debug(_('Cannot delete table but ignoring error'))
def get_my_linklocal(interface):
    try:
        if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show',
                         interface)
        # Use a raw string so the regex escapes survive intact.
        condition = r'\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
        links = [re.search(condition, x) for x in if_str[0].split('\n')]
        address = [w.group(1) for w in links if w is not None]
        # The list comprehension already filters out non-matches, so the
        # failure case is an empty list, not a None element.
        if address:
            return address[0]
        else:
            msg = _('Link Local address is not found.:%s') % if_str
            raise exception.NovaException(msg)
    except Exception as ex:
        msg = _("Couldn't get Link Local IP of %(interface)s"
                " :%(ex)s") % {'interface': interface, 'ex': ex}
        raise exception.NovaException(msg)
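# Hedged illustration (not part of the original module): a sample line of
# `ip -f inet6 -o addr show <interface>` output the regex above is meant
# to match; the interface name and address are made up.
#
#     "2: eth0    inet6 fe80::f816:3eff:fe4a:1b2c/64 scope link"
#
#     re.search(condition, line).group(1) -> 'fe80::f816:3eff:fe4a:1b2c'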
def wrapped(*args, **kwargs):
    try:
        return f(*args, **kwargs)
    except Exception as exc:
        if isinstance(exc, webob.exc.WSGIHTTPException):
            if isinstance(errors, int):
                t_errors = (errors,)
            else:
                t_errors = errors
            if exc.code in t_errors:
                raise
        elif isinstance(exc, exception.PolicyNotAuthorized):
            # Note(cyeoh): Special case to handle
            # PolicyNotAuthorized exceptions so every
            # extension method does not need to wrap authorize
            # calls. ResourceExceptionHandler silently
            # converts NotAuthorized to HTTPForbidden
            raise
        elif isinstance(exc, exception.ValidationError):
            # Note(oomichi): Handle a validation error, which
            # happens due to invalid API parameters, as an
            # expected error.
            raise
        print(_LE("Unexpected exception in API method"))
        msg = _('Unexpected API Error. Please report this at '
                'http://bugs.launchpad.net/nova/ and attach the Nova '
                'API log if possible.\n%s') % type(exc)
        raise webob.exc.HTTPInternalServerError(explanation=msg)
def action_peek_json(body):
    """Determine action to invoke."""
    try:
        decoded = jsonutils.loads(body)
    except ValueError:
        msg = _("cannot understand JSON")
        raise exception.MalformedRequestBody(reason=msg)

    # Make sure there's exactly one key...
    if len(decoded) != 1:
        msg = _("too many body keys")
        raise exception.MalformedRequestBody(reason=msg)

    # Return the action name. dict.keys() is not indexable on Python 3,
    # so materialize it first.
    return list(decoded)[0]
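# Hedged usage sketch (not part of the original module): a typical
# single-key action body and the action name action_peek_json() returns.
#
#     action_peek_json('{"reboot": {"type": "HARD"}}')  # -> 'reboot'
#     action_peek_json('{"a": 1, "b": 2}')  # raises MalformedRequestBody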
def get_samples(self, filterdict=None, limit=None):
    """Return an iterable of models.Sample instances.

    :param filterdict: Filter.
    :param limit: Maximum number of results to return.
    """
    if limit == 0:
        return
    with self.conn_pool.connection() as conn:
        meter_table = conn.table(self.METER_TABLE)
        q, start, stop, columns = (
            hbase_utils.make_sample_query_from_filterdict(
                filterdict, require_meter=False))
        LOG.debug(_("Query Meter Table: %s") % q)
        gen = meter_table.scan(filter=q, row_start=start, row_stop=stop,
                               limit=limit, columns=columns)
        # An equivalent hand-written scan, kept for reference:
        # q = ("SingleColumnValueFilter ('f', 'counter_name', =, "
        #      "'binary:\"cpu\"', true, true)")
        # gen = meter_table.scan(
        #     filter=q, limit=limit,
        #     columns=['f:name', 'f:resource_id', 'f:counter_name',
        #              'f:counter_volume', 'f:counter_unit',
        #              'f:timestamp'])
        for key, data in gen:
            yield data
def __call__(self, req):
    user_id = req.headers.get('X_USER')
    user_id = req.headers.get('X_USER_ID', user_id)
    if user_id is None:
        LOG.debug("Neither X_USER_ID nor X_USER found in request")
        # return webob.exc.HTTPUnauthorized()

    roles = self._get_roles(req)

    # Disabled tenant-header handling, kept for reference:
    # if 'X_TENANT_ID' in req.headers:
    #     # This is the new header since Keystone went to ID/Name
    #     project_id = req.headers['X_TENANT_ID']
    # else:
    #     # This is for legacy compatibility
    #     project_id = req.headers['X_TENANT']
    project_id = None
    project_name = req.headers.get('X_TENANT_NAME')
    user_name = req.headers.get('X_USER_NAME')

    req_id = req.environ.get(request_id.ENV_REQUEST_ID)

    # Get the auth token
    auth_token = req.headers.get('X_AUTH_TOKEN',
                                 req.headers.get('X_STORAGE_TOKEN'))

    # Build a context, including the auth_token...
    remote_address = req.remote_addr
    if CONF.use_forwarded_for:
        remote_address = req.headers.get('X-Forwarded-For',
                                         remote_address)

    service_catalog = None
    if req.headers.get('X_SERVICE_CATALOG') is not None:
        try:
            catalog_header = req.headers.get('X_SERVICE_CATALOG')
            service_catalog = jsonutils.loads(catalog_header)
        except ValueError:
            raise webob.exc.HTTPInternalServerError(
                _('Invalid service catalog json.'))

    # NOTE(jamielennox): This is a full auth plugin set by auth_token
    # middleware in newer versions.
    user_auth_plugin = req.environ.get('keystone.token_auth')

    ctx = context.RequestContext(user_id,
                                 project_id,
                                 user_name=user_name,
                                 project_name=project_name,
                                 roles=roles,
                                 auth_token=auth_token,
                                 remote_address=remote_address,
                                 service_catalog=service_catalog,
                                 request_id=req_id,
                                 user_auth_plugin=user_auth_plugin)

    req.environ['report.context'] = ctx
    return self.application
def convert_version_to_int(version):
    try:
        if isinstance(version, six.string_types):
            version = convert_version_to_tuple(version)
        if isinstance(version, tuple):
            return reduce(lambda x, y: (x * 1000) + y, version)
    except Exception:
        msg = _("Hypervisor version %s is invalid.") % version
        raise exception.NovaException(msg)
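# Hedged worked example (not part of the original module): each component
# is folded in base 1000, so "6.2.0" becomes ((6 * 1000) + 2) * 1000 + 0.
#
#     convert_version_to_int("6.2.0")  # -> 6002000
#     convert_version_to_int((1, 0))   # -> 1000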
def get_resources(self, user=None, project=None, source=None,
                  start_timestamp=None, start_timestamp_op=None,
                  end_timestamp=None, end_timestamp_op=None,
                  metaquery=None, resource=None, pagination=None):
    """Return an iterable of models.Resource instances

    :param user: Optional ID for user that owns the resource.
    :param project: Optional ID for project that owns the resource.
    :param source: Optional source filter.
    :param start_timestamp: Optional modified timestamp start range.
    :param start_timestamp_op: Optional start time operator, like ge, gt.
    :param end_timestamp: Optional modified timestamp end range.
    :param end_timestamp_op: Optional end time operator, like lt, le.
    :param metaquery: Optional dict with metadata to match on.
    :param resource: Optional resource filter.
    :param pagination: Optional pagination query.
    """
    if pagination:
        raise report.NotImplementedError('Pagination not implemented')

    q = hbase_utils.make_query(metaquery=metaquery, user_id=user,
                               project_id=project,
                               resource_id=resource, source=source)
    q = hbase_utils.make_meter_query_for_resource(start_timestamp,
                                                  start_timestamp_op,
                                                  end_timestamp,
                                                  end_timestamp_op,
                                                  source, q)
    with self.conn_pool.connection() as conn:
        resource_table = conn.table(self.RESOURCE_TABLE)
        LOG.debug(_("Query Resource table: %s") % q)
        for resource_id, data in resource_table.scan(filter=q):
            f_res, sources, meters, md = hbase_utils.deserialize_entry(
                data)
            resource_id = hbase_utils.encode_unicode(resource_id)
            # Unfortunately happybase doesn't keep ordered results from
            # HBase, so we have to find the min and max timestamps
            # manually.
            first_ts = min(meters, key=operator.itemgetter(1))[1]
            last_ts = max(meters, key=operator.itemgetter(1))[1]
            source = meters[0][0][1]
            # If we use QualifierFilter then HBase returns only the
            # qualifiers filtered by, not the whole entry. That's why we
            # need to ask for the additional qualifiers manually.
            if 'project_id' not in f_res and 'user_id' not in f_res:
                row = resource_table.row(
                    resource_id, columns=['f:project_id', 'f:user_id',
                                          'f:resource_metadata'])
                f_res, _s, _m, md = hbase_utils.deserialize_entry(row)
            yield models.Resource(
                resource_id=resource_id,
                first_sample_timestamp=first_ts,
                last_sample_timestamp=last_ts,
                project_id=f_res['project_id'],
                source=source,
                user_id=f_res['user_id'],
                metadata=md)
def delete(self, file_id):
    conn = pecan.request.storage_conn
    try:
        file_detail = conn.get_rptfiles(file_id=file_id)
        url_path = file_detail[0]['file_path']
        # file_path = (CONF.report_file.rptfile_path +
        #              urlparse(url_path).path)
        file_path = CONF.report_file.rptfile_path + url_path
        os.remove(file_path)
    except Exception:
        # LOG.error("File is already removed")
        raise wsme.exc.ClientSideError(
            _("File with file_id=%s doesn't exist!") % file_id,
            404)
    conn.del_rptfiles(file_id=file_id)
def _get_connection_pool(conf):
    """Return a connection pool to the database.

    .. note::

        The tests use a subclass to override this and return an
        in-memory connection pool.
    """
    LOG.debug(_('connecting to HBase on %(host)s:%(port)s') % (
        {'host': conf['host'], 'port': conf['port']}))
    return happybase.ConnectionPool(size=100, host=conf['host'],
                                    port=conf['port'],
                                    table_prefix=conf['table_prefix'])
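# Hedged usage sketch (not part of the original module): how callers in
# this driver consume the pool. happybase.ConnectionPool hands out
# connections through a context manager; the host, port, and table name
# below are made up.
#
#     pool = _get_connection_pool({'host': '127.0.0.1', 'port': 9090,
#                                  'table_prefix': 'report'})
#     with pool.connection() as conn:
#         resource_table = conn.table('resource')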
def dels(self, task_id):
    id_list = []
    for each_id in task_id:
        conn = pecan.request.storage_conn
        # Avoid shadowing the builtin `id`.
        tasks = conn.get_rpttasks(task_id=each_id)
        if not tasks:
            raise wsme.exc.ClientSideError(
                _("Task_id %s doesn't exist!") % each_id,
                404)
        ctx = context.RequestContext()
        self.rpcclient.del_task(ctx, task_id=each_id)
        id_list.append(each_id)
    return id_list
def __init__(self, ext_mgr=None, init_only=None):
    if ext_mgr is None:
        if self.ExtensionManager:
            ext_mgr = self.ExtensionManager()
        else:
            raise Exception(_("Must specify an ExtensionManager class"))

    mapper = ProjectMapper()
    self.resources = {}
    self._setup_routes(mapper, ext_mgr, init_only)
    self._setup_ext_routes(mapper, ext_mgr, init_only)
    self._setup_extensions(ext_mgr)
    super(APIRouter, self).__init__(mapper)
def validate_integer(value, name, min_value=None, max_value=None):
    """Make sure that value is a valid integer, potentially within range."""
    try:
        value = int(str(value))
    except (ValueError, UnicodeEncodeError):
        msg = _('%(value_name)s must be an integer')
        raise exception.InvalidInput(reason=(
            msg % {'value_name': name}))

    if min_value is not None:
        if value < min_value:
            msg = _('%(value_name)s must be >= %(min_value)d')
            raise exception.InvalidInput(
                reason=(msg % {'value_name': name,
                               'min_value': min_value}))
    if max_value is not None:
        if value > max_value:
            msg = _('%(value_name)s must be <= %(max_value)d')
            raise exception.InvalidInput(
                reason=(msg % {'value_name': name,
                               'max_value': max_value}))
    return value
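# Hedged usage sketch (not part of the original module): coercing and
# range-checking an API parameter. The parameter name and bounds below
# are made up.
#
#     validate_integer('42', 'task_period', min_value=1, max_value=3600)
#     # -> 42
#     validate_integer('abc', 'task_period')
#     # raises exception.InvalidInput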
def create_tables(conn, tables, column_families):
    for table in tables:
        try:
            conn.create_table(table, column_families)
        except ttypes.AlreadyExists:
            if conn.table_prefix:
                table = ("%(table_prefix)s"
                         "%(separator)s"
                         "%(table_name)s"
                         % dict(table_prefix=conn.table_prefix,
                                separator=conn.table_prefix_separator,
                                table_name=table))

            LOG.warn(_("Cannot create table %(table_name)s: "
                       "it already exists. Ignoring error.")
                     % {'table_name': table})
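# Hedged usage sketch (not part of the original module): creating the two
# tables this driver scans, each with a single 'f' column family as the
# queries above assume. The table names and family options are
# illustrative.
#
#     with pool.connection() as conn:
#         create_tables(conn, ['resource', 'meter'], {'f': dict()})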
def __init__(self, url):
    """Hbase Connection Initialization."""
    opts = self._parse_connection_url(url)

    if opts['host'] == '__test__':
        url = os.environ.get('CEILOMETER_TEST_HBASE_URL')
        if url:
            # Reparse URL, but from the env variable now
            opts = self._parse_connection_url(url)
            self.conn_pool = self._get_connection_pool(opts)
        else:
            # This is an in-memory usage for unit tests
            if Connection._memory_instance is None:
                LOG.debug(_('Creating a new in-memory HBase '
                            'Connection object'))
                Connection._memory_instance = (hbase_inmemory.
                                               MConnectionPool())
            self.conn_pool = Connection._memory_instance
    else:
        self.conn_pool = self._get_connection_pool(opts)
def put(self, task_id, data):
    """Update a task"""
    conn = pecan.request.storage_conn
    tasks = conn.get_rpttasks(task_id=task_id)
    if not tasks:
        LOG.error("Failed to get task %s." % task_id)
        raise wsme.exc.ClientSideError(
            _("Report task with task_id=%s does not exist") % task_id,
            404)
    data['task_updatetime'] = datetime.now()

    def _update(task):
        task_content = json.loads(task.get('task_content'))
        task_metadata = json.loads(task.get('task_metadata'))
        # Copy the keys first: popping from `data` while iterating over
        # data.keys() directly raises RuntimeError on Python 3.
        for key in list(data.keys()):
            if task.get(key):
                task[key] = data.pop(key)
            # elif task_content.get(key):
            #     task_content[key] = data.pop(key)
            # elif task_metadata.get(key):
            #     task_metadata[key] = data.pop(key)
        # task["task_content"] = json.dumps(task_content)
        # task["task_metadata"] = json.dumps(task_metadata)
        return task

    ctx = context.RequestContext()
    task_dict = _update(tasks[0])
    self.rpcclient.update_task(ctx, task_dict)
    # Sleep 1s to give the agent time to update the database.
    time.sleep(1)
    task_now = conn.get_rpttasks(task_id=task_id)
    # result = result.append(ReportTask.from_dict(task_now[0]))
    return ReportTask.from_dict(task_now[0])
def _set_read_deleted(self, read_deleted):
    if read_deleted not in ('no', 'yes', 'only'):
        raise ValueError(_("read_deleted can only be one of 'no', "
                           "'yes' or 'only', not %r") % read_deleted)
    self._read_deleted = read_deleted
def check_isinstance(obj, cls):
    """Checks that obj is of type cls, and lets PyLint infer types."""
    if isinstance(obj, cls):
        return obj
    raise Exception(_('Expected object of type: %s') % (str(cls)))
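# Hedged usage sketch (not part of the original module): the pass-through
# return value lets static analyzers pin the result's type. The names
# used below are arbitrary.
#
#     conn = check_isinstance(get_storage_conn(), Connection)
#     # conn is now known to be a Connection, or an Exception was raised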
def start(self):
    """Start serving a WSGI application.

    :returns: None
    """
    # The server socket object will be closed after server exits,
    # but the underlying file descriptor will remain open, and will
    # give bad file descriptor error. So duplicate the socket object
    # to keep the file descriptor usable.
    dup_socket = self._socket.dup()
    dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # sockets can hang around forever without keepalive
    dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

    # This option isn't available in the OS X version of eventlet
    if hasattr(socket, 'TCP_KEEPIDLE'):
        dup_socket.setsockopt(socket.IPPROTO_TCP,
                              socket.TCP_KEEPIDLE,
                              CONF.tcp_keepidle)

    if self._use_ssl:
        try:
            ca_file = CONF.ssl_ca_file
            cert_file = CONF.ssl_cert_file
            key_file = CONF.ssl_key_file

            if cert_file and not os.path.exists(cert_file):
                raise RuntimeError(
                    _("Unable to find cert_file : %s") % cert_file)

            if ca_file and not os.path.exists(ca_file):
                raise RuntimeError(
                    _("Unable to find ca_file : %s") % ca_file)

            if key_file and not os.path.exists(key_file):
                raise RuntimeError(
                    _("Unable to find key_file : %s") % key_file)

            if self._use_ssl and (not cert_file or not key_file):
                raise RuntimeError(
                    _("When running server in SSL mode, you must "
                      "specify both a cert_file and key_file "
                      "option value in your configuration file"))

            ssl_kwargs = {
                'server_side': True,
                'certfile': cert_file,
                'keyfile': key_file,
                'cert_reqs': ssl.CERT_NONE,
            }

            if CONF.ssl_ca_file:
                ssl_kwargs['ca_certs'] = ca_file
                ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED

            dup_socket = eventlet.wrap_ssl(dup_socket, **ssl_kwargs)
        except Exception:
            with excutils.save_and_reraise_exception():
                LOG.error(_LE("Failed to start %(name)s on %(host)s"
                              ":%(port)s with SSL support"),
                          {'name': self.name, 'host': self.host,
                           'port': self.port})

    wsgi_kwargs = {
        'func': eventlet.wsgi.server,
        'sock': dup_socket,
        'site': self.app,
        'protocol': self._protocol,
        'custom_pool': self._pool,
        'log': self._logger,
        'log_format': CONF.wsgi_log_format,
        'debug': False,
        'keepalive': CONF.wsgi_keep_alive,
        'socket_timeout': self.client_socket_timeout
    }

    if self._max_url_len:
        wsgi_kwargs['url_length_limit'] = self._max_url_len

    self._server = eventlet.spawn(**wsgi_kwargs)
def open():
    LOG.debug(_("Opening in-memory HBase connection"))
def _process_stack(self, request, action, action_args,
                   content_type, body, accept):
    """Implement the processing stack."""

    # Get the implementing method
    try:
        meth, extensions = self.get_method(request, action,
                                           content_type, body)
    except (AttributeError, TypeError):
        return Fault(webob.exc.HTTPNotFound())
    except KeyError as ex:
        msg = _("There is no such action: %s") % ex.args[0]
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))
    except exception.MalformedRequestBody:
        msg = _("Malformed request body")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    if body:
        msg = _("Action: '%(action)s', calling method: %(meth)s, body: "
                "%(body)s") % {'action': action,
                               'body': six.text_type(body, 'utf-8'),
                               'meth': str(meth)}
        print(strutils.mask_password(msg))
    else:
        # print() does no %-formatting of its own, so interpolate first.
        print("Calling method '%(meth)s'" % {'meth': str(meth)})

    # Now, deserialize the request body...
    try:
        contents = {}
        if self._should_have_body(request):
            # allow empty body with PUT and POST
            if request.content_length == 0:
                contents = {'body': None}
            else:
                contents = self.deserialize(meth, content_type, body)
    except exception.InvalidContentType:
        msg = _("Unsupported Content-Type")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))
    except exception.MalformedRequestBody:
        msg = _("Malformed request body")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    # Update the action args
    action_args.update(contents)

    project_id = action_args.pop("project_id", None)
    context = request.environ.get('nova.context')
    if (context and project_id and (project_id != context.project_id)):
        msg = _("Malformed request URL: URL's project_id '%(project_id)s'"
                " doesn't match Context's project_id"
                " '%(context_project_id)s'") % \
            {'project_id': project_id,
             'context_project_id': context.project_id}
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    # Run pre-processing extensions
    response, post = self.pre_process_extensions(extensions,
                                                 request, action_args)

    if not response:
        try:
            with ResourceExceptionHandler():
                action_result = self.dispatch(meth, request, action_args)
        except Fault as ex:
            response = ex

    if not response:
        # No exceptions; convert action_result into a
        # ResponseObject
        resp_obj = None
        if type(action_result) is dict or action_result is None:
            resp_obj = ResponseObject(action_result)
        elif isinstance(action_result, ResponseObject):
            resp_obj = action_result
        else:
            response = action_result

        # Run post-processing extensions
        if resp_obj:
            # Do a preserialize to set up the response object
            serializers = getattr(meth, 'wsgi_serializers', {})
            resp_obj._bind_method_serializers(serializers)
            if hasattr(meth, 'wsgi_code'):
                resp_obj._default_code = meth.wsgi_code
            resp_obj.preserialize(accept, self.default_serializers)

            # Process post-processing extensions
            response = self.post_process_extensions(post, resp_obj,
                                                    request, action_args)

        if resp_obj and not response:
            response = resp_obj.serialize(request, accept,
                                          self.default_serializers)

    if hasattr(response, 'headers'):
        for hdr, val in response.headers.items():
            # Headers must be utf-8 strings
            response.headers[hdr] = utils.utf8(str(val))

        if not request.api_version_request.is_null():
            response.headers[API_VERSION_REQUEST_HEADER] = \
                request.api_version_request.get_string()
            response.headers['Vary'] = API_VERSION_REQUEST_HEADER

    return response
def _from_json(self, datastring):
    try:
        return jsonutils.loads(datastring)
    except ValueError:
        msg = _("cannot understand JSON")
        raise exception.MalformedRequestBody(reason=msg)