def test_do_request_unicode(self):
    """do_request must round-trip unicode in URL, query, body and token.

    Uses mox record/replay: the expected httpclient.request call is
    recorded first, then do_request is driven against the stub.
    """
    self.client.format = self.format
    self.mox.StubOutWithMock(self.client.httpclient, "request")
    # u'\u7f51\u7edc' is "network" in Chinese -- a non-ASCII payload.
    unicode_text = u'\u7f51\u7edc'
    # url with unicode
    action = u'/test'
    expected_action = action
    # query string with unicode; safe_encode_dict byte-encodes the values
    # before urlencode sees them
    params = {'test': unicode_text}
    expect_query = urlparse.urlencode(utils.safe_encode_dict(params))
    # request body with unicode
    body = params
    expect_body = self.client.serialize(body)
    # auth token is stored and matched in its byte-encoded form
    self.client.httpclient.auth_token = encodeutils.safe_encode(
        unicode_text)
    expected_auth_token = encodeutils.safe_encode(unicode_text)
    self.client.httpclient.request(
        end_url(expected_action, query=expect_query, format=self.format),
        'PUT', body=expect_body,
        headers=mox.ContainsKeyValue(
            'X-Auth-Token',
            expected_auth_token)).AndReturn((MyResp(200), expect_body))
    self.mox.ReplayAll()
    res_body = self.client.do_request('PUT', action, body=body,
                                      params=params)
    self.mox.VerifyAll()
    self.mox.UnsetStubs()
    # test response with unicode
    self.assertEqual(res_body, body)
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None,
               print_header=True, print_border=True, out=sys.stdout):
    """Print a list or objects as a table, one row per object.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes
        that have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table,
        default to fields.
    :param print_header: print table header.
    :param print_border: print table border.
    :param out: stream to write output to.
    :raises ValueError: if field_labels and fields differ in length
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        # BUG FIX: the mapping was previously passed as a second argument
        # to ValueError, so the %(...)s placeholders were never filled in.
        # Interpolate before raising.
        raise ValueError(_("Field labels list %(labels)s has different number "
                           "of elements than fields list %(fields)s")
                         % {"labels": field_labels, "fields": fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {"sortby": field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = "l"
    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                if field in mixed_case_fields:
                    field_name = field.replace(" ", "_")
                else:
                    field_name = field.lower().replace(" ", "_")
                data = getattr(o, field_name, "")
                row.append(data)
        pt.add_row(row)

    if not print_border or not print_header:
        pt.set_style(prettytable.PLAIN_COLUMNS)
        pt.left_padding_width = 0
        pt.right_padding_width = 1
    outstr = pt.get_string(header=print_header, border=print_border,
                           **kwargs) + "\n"
    # safe_encode always yields bytes; decode back to text for py3 streams.
    if six.PY3:
        out.write(encodeutils.safe_encode(outstr).decode())
    else:
        out.write(encodeutils.safe_encode(outstr))
def print_list(objs, fields, formatters=None, sortby=None, align='c'):
    """Print dict-like objects as a table, one row per object.

    :param objs: iterable of dict-like objects
    :param fields: keys that correspond to columns, in order
    :param formatters: dict mapping a field either to a callable that
        renders the row, or to an alternate key to look up in the object
    :param sortby: optional field name to sort rows by
    :param align: prettytable column alignment
    """
    # BUG FIX: `formatters={}` was a mutable default argument shared
    # across calls; use None and create a fresh dict per call.
    formatters = formatters or {}
    pt = prettytable.PrettyTable([f for f in fields], caching=False)
    pt.align = align
    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                if callable(formatters[field]):
                    row.append(formatters[field](o))
                else:
                    # formatter is an alternate lookup key, not a callable
                    row.append(o.get(formatters[field], None))
            else:
                data = o.get(field, None)
                if data is None or data == "":
                    data = '-'
                # strip carriage returns so rows stay on one line
                data = six.text_type(data).replace("\r", "")
                row.append(data)
        pt.add_row(row)
    if sortby is not None and sortby in fields:
        result = encodeutils.safe_encode(pt.get_string(sortby=sortby))
    else:
        result = encodeutils.safe_encode(pt.get_string())
    if six.PY3:
        result = result.decode()
    print(result)
def print_dict(dct, dict_property="Property", wrap=0):
    """Print a `dict` as a table of two columns.

    :param dct: `dict` to print
    :param dict_property: name of the first column
    :param wrap: wrapping for the second column
    :raises ValueError: if wrap is negative
    """
    # ROBUSTNESS FIX: validate wrap up front. Previously the check lived
    # inside the row loop, so a negative wrap on an empty dict was
    # silently accepted, and validation ran once per item otherwise.
    if wrap < 0:
        raise ValueError(_("Wrap argument should be a positive integer"))
    pt = prettytable.PrettyTable([dict_property, 'Value'])
    pt.align = 'l'
    for k, v in dct.items():
        # convert dict to str to check length
        if isinstance(v, dict):
            v = six.text_type(v)
        if wrap > 0:
            v = textwrap.fill(six.text_type(v), wrap)
        # if value has a newline, add in multiple rows
        # e.g. fault with stacktrace
        if v and isinstance(v, six.string_types) and r'\n' in v:
            lines = v.strip().split(r'\n')
            col1 = k
            for line in lines:
                pt.add_row([col1, line])
                col1 = ''
        else:
            if v is None:
                v = '-'
            pt.add_row([k, v])
    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string()).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string()))
def print_dict(dct, dict_property="Property", wrap=0, dict_value='Value'):
    """Render `dct` as a two-column table on stdout.

    :param dct: `dict` to print
    :param dict_property: heading of the key column
    :param wrap: width to wrap values at (0 disables wrapping)
    :param dict_value: heading of the value column
    """
    table = prettytable.PrettyTable([dict_property, dict_value])
    table.align = 'l'
    for key, value in sorted(dct.items()):
        # stringify nested dicts so wrapping/length checks work on text
        if isinstance(value, dict):
            value = six.text_type(value)
        if wrap > 0:
            value = textwrap.fill(six.text_type(value), wrap)
        is_multiline = (value and isinstance(value, six.string_types)
                        and r'\n' in value)
        if is_multiline:
            # split stack-trace-like values into one table row per line,
            # showing the key only on the first row
            label = key
            for chunk in value.strip().split(r'\n'):
                table.add_row([label, chunk])
                label = ''
        else:
            table.add_row([key, value])
    rendered = encodeutils.safe_encode(table.get_string())
    if six.PY2:
        print(rendered)
    else:
        print(rendered.decode())
def test_do_request_unicode(self):
    """do_request must round-trip unicode in URL, query, body and token.

    mox record/replay variant: the expected request (including the
    request-id response header) is recorded before do_request runs.
    """
    self.mox.StubOutWithMock(self.client.httpclient, "request")
    # u"\u7f51\u7edc" is "network" in Chinese -- a non-ASCII payload.
    unicode_text = u"\u7f51\u7edc"
    # url with unicode
    action = u"/test"
    expected_action = action
    # query string with unicode; values are byte-encoded before urlencode
    params = {"test": unicode_text}
    expect_query = urlparse.urlencode(utils.safe_encode_dict(params))
    # request body with unicode
    body = params
    expect_body = self.client.serialize(body)
    # auth token is stored and matched in its byte-encoded form
    self.client.httpclient.auth_token = encodeutils.safe_encode(unicode_text)
    expected_auth_token = encodeutils.safe_encode(unicode_text)
    resp_headers = {"x-openstack-request-id": REQUEST_ID}
    self.client.httpclient.request(
        end_url(expected_action, query=expect_query, format=self.format),
        "PUT",
        body=expect_body,
        headers=mox.ContainsKeyValue("X-Auth-Token", expected_auth_token),
    ).AndReturn((MyResp(200, resp_headers), expect_body))
    self.mox.ReplayAll()
    result = self.client.do_request("PUT", action, body=body, params=params)
    self.mox.VerifyAll()
    self.mox.UnsetStubs()
    # test response with unicode
    self.assertEqual(body, result)
def test_do_request_unicode(self):
    """do_request must round-trip unicode in URL, query, body and token."""
    # non-ASCII payload ("network" in Chinese)
    payload_text = u'\u7f51\u7edc'
    # url with unicode
    action = u'/test'
    expected_action = action
    # query string with unicode
    params = {'test': payload_text}
    expect_query = urlparse.urlencode(utils.safe_encode_dict(params))
    # request body with unicode
    body = params
    expect_body = self.client.serialize(body)
    self.client.httpclient.auth_token = encodeutils.safe_encode(
        payload_text)
    expected_auth_token = encodeutils.safe_encode(payload_text)
    canned_response = (
        MyResp(200, {'x-openstack-request-id': REQUEST_ID}),
        expect_body,
    )
    with mock.patch.object(self.client.httpclient, "request",
                           return_value=canned_response) as req_mock:
        result = self.client.do_request('PUT', action, body=body,
                                        params=params)
    req_mock.assert_called_once_with(
        end_url(expected_action, query=expect_query),
        'PUT',
        body=expect_body,
        headers=ContainsKeyValue({'X-Auth-Token': expected_auth_token}))
    # test response with unicode
    self.assertEqual(body, result)
def _check_restore_vol_size(self, backup_base, restore_vol, restore_length,
                            src_pool):
    """Ensure that the restore volume is the correct size.

    If the restore volume was bigger than the backup, the diff restore
    will shrink it to the size of the original backup, so we post-process
    and resize it back to its expected size.
    """
    adjust_size = 0
    with rbd_driver.RADOSClient(self, self._ceph_backup_pool) as client:
        base_image = self.rbd.Image(client.ioctx,
                                    encodeutils.safe_encode(backup_base),
                                    read_only=True)
        try:
            # a size mismatch means the diff restore shrank the volume
            if restore_length != base_image.size():
                adjust_size = restore_length
        finally:
            base_image.close()

    if not adjust_size:
        return
    with rbd_driver.RADOSClient(self, src_pool) as client:
        vol_name = encodeutils.safe_encode(restore_vol)
        dest_image = self.rbd.Image(client.ioctx, vol_name)
        try:
            LOG.debug("Adjusting restore vol size")
            dest_image.resize(adjust_size)
        finally:
            dest_image.close()
def print_list(objs, fields, formatters=None, sortby_index=None):
    """Print objects as a table, one row per object.

    :param objs: iterable of objects with attributes matching fields
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: optional index into fields to sort rows by
    """
    # BUG FIX: `formatters={}` was a mutable default argument shared
    # across calls; use None and create a fresh dict per call.
    formatters = formatters or {}
    if sortby_index is None:
        sortby = None
    else:
        sortby = fields[sortby_index]
    # attributes whose names keep their original case (camelCase)
    mixed_case_fields = ['serverId']
    pt = prettytable.PrettyTable([f for f in fields], caching=False)
    pt.align = 'l'
    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                if field in mixed_case_fields:
                    field_name = field.replace(' ', '_')
                else:
                    field_name = field.lower().replace(' ', '_')
                data = getattr(o, field_name, '')
                if data is None:
                    data = '-'
                row.append(data)
        pt.add_row(row)
    if sortby is not None:
        result = encodeutils.safe_encode(pt.get_string(sortby=sortby))
    else:
        result = encodeutils.safe_encode(pt.get_string())
    if six.PY3:
        result = result.decode()
    print(result)
def azure_auth(datestr, path):
    """Build an Azure SharedKeyLite authorization header value.

    :param datestr: request date string signed into the message
    :param path: canonicalized resource path signed into the message
    :returns: byte string 'SharedKeyLite <account>:<b64 signature>'
    """
    key_bytes = encodeutils.safe_encode(secret_access_key)
    message = encodeutils.safe_encode(datestr + CRLF + path)
    digest = hmac.new(key_bytes, message, hashlib.sha256).digest()
    prefix = encodeutils.safe_encode('SharedKeyLite %s:' % access_key_id)
    return prefix + base64.b64encode(digest)
def decrypt(msg, key):
    '''Decrypt message using provided key.

    :param msg: encrypted message
    :param key: key used to decrypt
    :returns: decrypted message string
    '''
    # BUG FIX: the arguments were swapped -- Fernet() must be constructed
    # from the *key*, and decrypt() must receive the ciphertext. The old
    # code built Fernet from the message and decrypted the key.
    f = fernet.Fernet(encodeutils.safe_encode(key))
    decrypted = f.decrypt(encodeutils.safe_encode(msg))
    return encodeutils.safe_decode(decrypted)
def encode_headers(headers):
    """Encodes headers.

    Note: This should be used right before sending anything out.

    :param headers: Headers to encode
    :returns: Dictionary with encoded headers' names and values
    """
    # drop None-valued headers; byte-encode both names and values
    return {encodeutils.safe_encode(name): encodeutils.safe_encode(value)
            for name, value in six.iteritems(headers)
            if value is not None}
def _clone(self, volume, src_pool, src_image, src_snap):
    """Clone src_pool/src_image@src_snap into a new rbd volume."""
    LOG.debug('cloning %(pool)s/%(img)s@%(snap)s to %(dst)s',
              {'pool': src_pool,
               'img': src_image,
               'snap': src_snap,
               'dst': volume['name']})
    # librbd wants byte-encoded names
    src_name = encodeutils.safe_encode(src_image)
    snap_name = encodeutils.safe_encode(src_snap)
    dest_name = encodeutils.safe_encode(volume['name'])
    with RADOSClient(self, src_pool) as src_client, \
            RADOSClient(self) as dest_client:
        self.RBDProxy().clone(src_client.ioctx,
                              src_name,
                              snap_name,
                              dest_client.ioctx,
                              dest_name,
                              features=src_client.features)
def delete_snapshot(self, snapshot):
    """Deletes an rbd snapshot."""
    # NOTE(dosaboy): this was broken by commit cbe1d5f. Ensure names are
    # utf-8 otherwise librbd will barf.
    volume_name = encodeutils.safe_encode(snapshot['volume_name'])
    snap_name = encodeutils.safe_encode(snapshot['name'])
    with RBDVolumeProxy(self, volume_name) as vol:
        try:
            vol.unprotect_snap(snap_name)
        except self.rbd.ImageBusy:
            # a clone still depends on this snapshot
            raise exception.SnapshotIsBusy(snapshot_name=snap_name)
        vol.remove_snap(snap_name)
def take_action(self, args):
    """Export Ansible task results as a JUnit XML report.

    Queries TaskResult rows (optionally restricted to the given
    playbooks), maps each to a junit_xml TestCase, and writes the
    resulting TestSuite XML to stdout or to args.output_file.
    """
    test_cases = []
    if args.playbook is not None:
        # restrict results to the requested playbook ids
        playbooks = args.playbook
        results = (models.TaskResult().query
                   .join(models.Task)
                   .filter(models.TaskResult.task_id == models.Task.id)
                   .filter(models.Task.playbook_id.in_(playbooks)))
    else:
        results = models.TaskResult().query.all()

    for result in results:
        # fall back to the task action when the task has no name
        task_name = result.task.name
        if not task_name:
            task_name = result.task.action
        # extra context is serialized into the test case stdout field
        additional_results = {
            'host': result.host.name,
            'playbook_path': result.task.playbook.path
        }
        result_str = jsonutils.dumps(additional_results)
        test_path = \
            u'{playbook_file}.{play_name}'.format(
                playbook_file=os.path.basename(result.task.playbook.path),
                play_name=result.task.play.name)
        test_case = TestCase(
            name=task_name,
            classname=test_path,
            elapsed_sec=result.duration.seconds,
            stdout=result_str)
        if result.status == 'skipped':
            test_case.add_skipped_info(message=result.result)
        # failures: real failed/unreachable tasks (unless marked as
        # expected), plus 'ok' tasks whose result is inverted by the
        # 'TOGGLE RESULT' marker in the task name
        elif ((result.status in ('failed', 'unreachable') and
               result.ignore_errors is False and
               'EXPECTED FAILURE' not in task_name and
               'TOGGLE RESULT' not in task_name) or
              (result.status == 'ok' and 'TOGGLE RESULT' in task_name)):
            test_case.add_failure_info(message=result.result)
        test_cases.append(test_case)
    test_suite = TestSuite('Ansible Tasks', test_cases)

    # TODO: junit_xml doesn't order the TestCase parameters.
    # This makes it so the order of the parameters for the same exact
    # TestCase is not guaranteed to be the same and thus results in a
    # different stdout (or file). This is easily reproducible on Py3.
    xml_string = six.text_type(test_suite.to_xml_string([test_suite]))
    if args.output_file == '-':
        # write bytes: py2 stdout accepts bytes, py3 needs the buffer
        if six.PY2:
            sys.stdout.write(encodeutils.safe_encode(xml_string))
        else:
            sys.stdout.buffer.write(encodeutils.safe_encode(xml_string))
    else:
        with open(args.output_file, 'wb') as f:
            f.write(encodeutils.safe_encode(xml_string))
def list(self, filters=None, page_size=None, marker=None, sort_key=None,
         sort_dir=None, **kwargs):
    """Retrieve a listing of Namespace objects

    :param page_size: Number of items to request in each paginated request
    :param limit: Use to request a specific page size. Expect a response
                  to a limited request to return between zero and limit
                  items.
    :param marker: Specifies the namespace of the last-seen namespace.
             The typical pattern of limit and marker is to make an
             initial limited request and then to use the last
             namespace from the response as the marker parameter
             in a subsequent limited request.
    :param sort_key: The field to sort on (for example, 'created_at')
    :param sort_dir: The direction to sort ('asc' or 'desc')
    :returns generator over list of Namespaces
    :raises ValueError: on a non-integer page_size or invalid
                        sort_key/sort_dir
    """
    filters = {} if filters is None else filters
    if page_size:
        # BUG FIX: was a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; only conversion failures should
        # be reported as a bad limit.
        try:
            int(page_size)
        except (TypeError, ValueError):
            raise ValueError('limit must be an integer')
        filters['limit'] = page_size
    else:
        filters['limit'] = DEFAULT_PAGE_SIZE

    if marker:
        filters['marker'] = marker

    if sort_key is not None:
        if sort_key in SORT_KEY_VALUES:
            filters['sort_key'] = sort_key
        else:
            raise ValueError('sort_key must be one of the following: %s.'
                             % ', '.join(SORT_KEY_VALUES))
    if sort_dir is not None:
        if sort_dir in SORT_DIR_VALUES:
            filters['sort_dir'] = sort_dir
        else:
            raise ValueError('sort_dir must be one of the following: %s.'
                             % ', '.join(SORT_DIR_VALUES))

    # byte-encode filter values (joining list values with commas) so
    # urlencode produces a stable query string
    for param, value in six.iteritems(filters):
        if isinstance(value, list):
            filters[param] = encodeutils.safe_encode(','.join(value))
        elif isinstance(value, six.string_types):
            filters[param] = encodeutils.safe_encode(value)

    url = '/metadefs/namespaces?%s' % parse.urlencode(filters)
    return self._get(url, **kwargs)
def test_safe_encode_same_encoding_different_cases(self):
    """Encodings differing only in case must not trigger a re-decode."""
    with mock.patch.object(encodeutils, 'safe_decode', mock.Mock()):
        utf8 = encodeutils.safe_encode(
            six.u('foo\xf1bar'), encoding='utf-8')
        self.assertEqual(
            encodeutils.safe_encode(utf8, 'UTF-8', 'utf-8'),
            encodeutils.safe_encode(utf8, 'utf-8', 'UTF-8'),
        )
        self.assertEqual(
            encodeutils.safe_encode(utf8, 'UTF-8', 'utf-8'),
            encodeutils.safe_encode(utf8, 'utf-8', 'utf-8'),
        )
        # BUG FIX: assert_has_calls([]) is vacuously true for any mock;
        # assert_not_called() actually verifies safe_decode was never
        # invoked, which is the point of this test.
        encodeutils.safe_decode.assert_not_called()
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None):
    """Print a list of objects or dict as a table, one row per object or dict.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes
        that have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table,
        default to fields.
    :raises ValueError: if field_labels and fields differ in length
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        # BUG FIX: the mapping was previously passed as a second argument
        # to ValueError, so the %(...)s placeholders were never filled in.
        # Interpolate before raising.
        raise ValueError(
            _("Field labels list %(labels)s has different number "
              "of elements than fields list %(fields)s")
            % {"labels": field_labels, "fields": fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {"sortby": field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = "l"
    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                if field in mixed_case_fields:
                    field_name = field.replace(" ", "_")
                else:
                    field_name = field.lower().replace(" ", "_")
                # rows may be plain dicts or attribute-bearing objects
                if isinstance(o, dict):
                    data = o.get(field_name, "")
                else:
                    data = getattr(o, field_name, "")
                row.append(data)
        pt.add_row(row)
    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)))
def main():
    """CLI entry point: run the compute shell, mapping errors to exit codes."""
    try:
        decoded_args = [encodeutils.safe_decode(arg) for arg in sys.argv[1:]]
        OpenStackComputeShell().main(decoded_args)
    except KeyboardInterrupt:
        # conventional 130 = terminated by SIGINT
        print("... terminating nova client", file=sys.stderr)
        sys.exit(130)
    except Exception as exc:
        logger.debug(exc, exc_info=1)
        details = {'name': encodeutils.safe_encode(exc.__class__.__name__),
                   'msg': encodeutils.safe_encode(six.text_type(exc))}
        print("ERROR (%(name)s): %(msg)s" % details, file=sys.stderr)
        sys.exit(1)
def __init__(self, client, name, snapshot=None, read_only=False):
    """Open an rbd image, cleaning up the client connection on failure.

    :param client: connected RADOS client wrapper
    :param name: image name (byte-encoded for librbd)
    :param snapshot: optional snapshot name to open the image at
    :param read_only: open the image read-only when True
    """
    snap = None if snapshot is None else encodeutils.safe_encode(snapshot)
    try:
        self.image = client.rbd.Image(client.ioctx,
                                      encodeutils.safe_encode(name),
                                      snapshot=snap,
                                      read_only=read_only)
    except client.rbd.Error:
        LOG.exception(_LE("error opening rbd image %s"), name)
        # don't leak the cluster connection when the open fails
        client.disconnect()
        raise
    self.client = client
def __str__(self):
    '''Return a string representation of the parameter.'''
    val = self.value()
    # mask hidden (secret) parameters
    if self.hidden():
        return '******'
    return encodeutils.safe_encode(six.text_type(val))
def print_dict(d, dict_property="Property", dict_value="Value", wrap=0):
    """Render a dict as a sorted two-column table on stdout.

    :param d: dict to print
    :param dict_property: heading of the key column
    :param dict_value: heading of the value column
    :param wrap: width to wrap values at (0 disables wrapping)
    """
    table = prettytable.PrettyTable([dict_property, dict_value],
                                    caching=False)
    table.align = 'l'
    for key, value in sorted(d.items()):
        # convert dict to str to check length
        if isinstance(value, (dict, list)):
            value = jsonutils.dumps(value)
        if wrap > 0:
            value = textwrap.fill(six.text_type(value), wrap)
        if (value and isinstance(value, six.string_types)
                and (r'\n' in value or '\r' in value)):
            # '\r' would break the table, so remove it.
            if '\r' in value:
                value = value.replace('\r', '')
            # split stack-trace-like values into one row per line,
            # showing the key only on the first row
            label = key
            for chunk in value.strip().split(r'\n'):
                table.add_row([label, chunk])
                label = ''
        else:
            if value is None:
                value = '-'
            table.add_row([key, value])
    rendered = encodeutils.safe_encode(table.get_string())
    if six.PY3:
        rendered = rendered.decode()
    print(rendered)
def __call__(self, req):
    """Respond to a request for all OpenStack API versions."""
    media_types = [
        {
            "base": "application/json",
            "type": "application/vnd.openstack.clustering-v1+json"
        }
    ]
    # single published version: v1.0
    version = {
        "id": "1.0",
        "status": "CURRENT",
        "updated": "2016-01-18T00:00:00Z",
        "media-types": media_types,
        "links": [
            {
                "rel": "self",
                "href": "/v1/"
            }
        ],
        "min_version": _MIN_API_VERSION,
        "max_version": _MAX_API_VERSION,
    }
    body = jsonutils.dumps({"versions": [version]})
    # 300 Multiple Choices: the client picks a version from the list
    resp = webob.Response(request=req,
                          status=http_client.MULTIPLE_CHOICES,
                          content_type='application/json')
    resp.body = encodeutils.safe_encode(body)
    return resp
def _connect_to_rados(self, pool=None):
    """Connect to the ceph cluster and open an ioctx on `pool`.

    :param pool: pool name to open; defaults to the configured rbd_pool
    :returns: (client, ioctx) tuple
    :raises VolumeBackendAPIException: when the connection fails
    """
    conf = self.configuration
    LOG.debug("opening connection to ceph cluster (timeout=%s)." %
              (conf.rados_connect_timeout))
    client = self.rados.Rados(rados_id=conf.rbd_user,
                              conffile=conf.rbd_ceph_conf)
    if pool is None:
        target_pool = conf.rbd_pool
    else:
        target_pool = encodeutils.safe_encode(pool)
    try:
        # a negative timeout means "wait forever"
        if conf.rados_connect_timeout >= 0:
            client.connect(timeout=conf.rados_connect_timeout)
        else:
            client.connect()
        ioctx = client.open_ioctx(target_pool)
        return client, ioctx
    except self.rados.Error:
        msg = _("Error connecting to ceph cluster.")
        LOG.exception(msg)
        # shutdown cannot raise an exception
        client.shutdown()
        raise exception.VolumeBackendAPIException(data=msg)
def request_url(self, request, proxies):
    """Return the request URL, byte-encoded on Python 2.

    NOTE(flaper87): Make sure the url is encoded, otherwise python's
    standard httplib will fail with a TypeError.
    """
    url = super(HTTPSAdapter, self).request_url(request, proxies)
    if not six.PY2:
        return url
    return encodeutils.safe_encode(url)
def _construct_url(self, action, params=None):
    """
    Create a URL object we can use to pass to _do_request().
    """
    action = urlparse.quote(action)
    path = '/'.join([self.doc_root or '', action.lstrip('/')])
    scheme = "https" if self.use_ssl else "http"
    netloc = "%s:%d" % (self.host, self.port)

    query = None
    if isinstance(params, dict):
        # iterate over a snapshot: None-valued keys are removed from the
        # caller's dict while we walk it
        for key, value in list(params.items()):
            if value is None:
                del params[key]
                continue
            if not isinstance(value, six.string_types):
                value = str(value)
            params[key] = encodeutils.safe_encode(value)
        query = urlparse.urlencode(params)

    url = urlparse.ParseResult(scheme, netloc, path, '', query, '')
    LOG.debug(_("Constructed URL: %s"), url.geturl())
    return url
def list(self, stack_id, resource_name=None, **kwargs):
    """Get a list of events.

    :param stack_id: ID of stack the events belong to
    :param resource_name: Optional name of resources to filter events by
    :rtype: list of :class:`Event`
    """
    params = {}
    if 'filters' in kwargs:
        params.update(kwargs.pop('filters'))
    # remaining truthy kwargs become query parameters
    for key, value in kwargs.items():
        if value:
            params[key] = value

    if resource_name is None:
        url = '/stacks/%s/events' % stack_id
    else:
        stack_id = self._resolve_stack_id(stack_id)
        url = '/stacks/%s/resources/%s/events' % (
            parse.quote(stack_id),
            parse.quote(encodeutils.safe_encode(resource_name)))

    if params:
        # convert to a sorted dict for python3 predictible order
        ordered = collections.OrderedDict(sorted(params.items()))
        url += '?%s' % parse.urlencode(ordered, True)
    return self._list(url, 'events')
def create_snapshot(self, snapshot):
    """Creates an rbd snapshot."""
    # librbd wants a byte-encoded snapshot name
    snap_name = encodeutils.safe_encode(snapshot['name'])
    with RBDVolumeProxy(self, snapshot['volume_name']) as vol:
        vol.create_snap(snap_name)
        # protect the snapshot so clones can layer on it
        if self._supports_layering():
            vol.protect_snap(snap_name)
def _connect_to_rados(self, pool=None):
    """Connect to the ceph cluster and open an ioctx on `pool`.

    :param pool: pool name to open; defaults to the configured rbd_pool
    :returns: (client, ioctx) tuple
    :raises VolumeBackendAPIException: when the connection fails
    """
    conf = self.configuration
    LOG.debug("opening connection to ceph cluster (timeout=%s).",
              conf.rados_connect_timeout)
    # NOTE (e0ne): rados is binding to C lbirary librados.
    # It blocks eventlet loop so we need to run it in a native
    # python thread.
    client = tpool.Proxy(
        self.rados.Rados(
            rados_id=conf.rbd_user,
            clustername=conf.rbd_cluster_name,
            conffile=conf.rbd_ceph_conf))
    if pool is None:
        target_pool = conf.rbd_pool
    else:
        target_pool = encodeutils.safe_encode(pool)
    try:
        # a negative timeout means "wait forever"
        if conf.rados_connect_timeout >= 0:
            client.connect(timeout=conf.rados_connect_timeout)
        else:
            client.connect()
        ioctx = client.open_ioctx(target_pool)
        return client, ioctx
    except self.rados.Error:
        msg = _("Error connecting to ceph cluster.")
        LOG.exception(msg)
        # shutdown cannot raise an exception
        client.shutdown()
        raise exception.VolumeBackendAPIException(data=msg)
def print_dict(d, dict_property="Property", wrap=0):
    """Render a dict as a sorted two-column table on stdout.

    :param d: dict to print
    :param dict_property: heading of the key column
    :param wrap: width to wrap values at (0 disables wrapping)
    """
    table = prettytable.PrettyTable([dict_property, 'Value'],
                                    print_empty=False)
    table.align = 'l'
    for key, value in sorted(six.iteritems(d)):
        # convert dict to str to check length
        if isinstance(value, dict):
            value = jsonutils.dumps(value)
        if value and isinstance(value, six.string_types) and r'\n' in value:
            # if value has a newline, add in multiple rows
            # e.g. fault with stacktrace
            label = key
            for chunk in value.strip().split(r'\n'):
                if wrap > 0:
                    chunk = textwrap.fill(str(chunk), wrap)
                table.add_row([label, chunk])
                label = ''
        else:
            if wrap > 0:
                value = textwrap.fill(str(value), wrap)
            table.add_row([key, value])
    encoded = encodeutils.safe_encode(table.get_string())
    # FIXME(gordc): https://bugs.launchpad.net/oslo-incubator/+bug/1370710
    if six.PY3:
        encoded = encoded.decode()
    print(encoded)
def __unicode__(self):
    '''Return a human-readable string representation of the stack.'''
    text = 'Stack "%s" [%s]' % (self.name, self.id)
    # BUG FIX: __unicode__ must return text, not bytes. Returning
    # safe_encode()'s bytes made Python 2's unicode() re-decode the
    # result as ASCII, which fails for non-ASCII stack names; safe_decode
    # returns unicode unchanged and decodes byte strings.
    return encodeutils.safe_decode(text)
def cryptography_decrypt_v1(value, encryption_key=None):
    """Decrypt `value` with a Fernet key derived from `encryption_key`.

    :param value: ciphertext to decrypt
    :param encryption_key: optional key material; normalized/length-fixed
        by get_valid_encryption_key
    :returns: decrypted bytes
    """
    valid_key = get_valid_encryption_key(encryption_key, fix_length=True)
    # Fernet requires a urlsafe-base64-encoded 32-byte key
    fernet_key = base64.b64encode(valid_key.encode('utf-8'))
    cipher = fernet.Fernet(fernet_key)
    return cipher.decrypt(encodeutils.safe_encode(value))
def _run(self, client, cmd, stdin=None, stdout=None, stderr=None,
         raise_on_error=True, timeout=3600,
         keep_stdin_open=False, pty=False):
    """Execute `cmd` over an SSH session, pumping stdin/stdout/stderr.

    :param client: connected paramiko-style SSH client
    :param cmd: command line to execute remotely
    :param stdin: optional file-like object streamed to the remote stdin
    :param stdout: optional file-like object receiving remote stdout
    :param stderr: optional file-like object receiving remote stderr
    :param raise_on_error: raise SSHError on a non-zero exit status
    :param timeout: seconds before raising SSHTimeout (0/None disables)
    :param keep_stdin_open: do not close stdin/shutdown write at stdin EOF
    :param pty: request a pseudo-terminal for the session
    :returns: the remote command's exit status
    :raises SSHTimeout: when the deadline elapses
    :raises SSHError: on socket error or (optionally) non-zero exit
    """
    transport = client.get_transport()
    session = transport.open_session()
    if pty:
        session.get_pty()
    session.exec_command(cmd)
    start_time = time.time()

    # encode on transmit, decode on receive
    data_to_send = encodeutils.safe_encode("", incoming='utf-8')
    stderr_data = None

    # If we have data to be sent to stdin then `select' should also
    # check for stdin availability.
    if stdin and not stdin.closed:
        writes = [session]
    else:
        writes = []

    while True:
        # Block until data can be read/write.
        # Only the error list (index 2) is inspected; read/write
        # readiness is polled via the session below.
        e = select.select([session], writes, [session], 1)[2]

        if session.recv_ready():
            data = encodeutils.safe_decode(session.recv(4096), 'utf-8')
            self.log.debug("stdout: %r", data)
            if stdout is not None:
                stdout.write(data)
            continue

        if session.recv_stderr_ready():
            stderr_data = encodeutils.safe_decode(
                session.recv_stderr(4096), 'utf-8')
            self.log.debug("stderr: %r", stderr_data)
            if stderr is not None:
                stderr.write(stderr_data)
            continue

        if session.send_ready():
            if stdin is not None and not stdin.closed:
                if not data_to_send:
                    stdin_txt = stdin.read(4096)
                    if stdin_txt is None:
                        stdin_txt = ''
                    data_to_send = encodeutils.safe_encode(
                        stdin_txt, incoming='utf-8')
                    if not data_to_send:
                        # we may need to keep stdin open
                        if not keep_stdin_open:
                            stdin.close()
                            session.shutdown_write()
                            writes = []
                if data_to_send:
                    # send() may write fewer bytes than given; keep the
                    # unsent remainder for the next iteration
                    sent_bytes = session.send(data_to_send)
                    data_to_send = data_to_send[sent_bytes:]

        if session.exit_status_ready():
            break

        if timeout and (time.time() - timeout) > start_time:
            message = (
                'Timeout executing command %(cmd)s on host %(host)s' % {
                    "cmd": cmd, "host": self.host
                })
            raise exceptions.SSHTimeout(error_msg=message)
        if e:
            raise exceptions.SSHError(error_msg='Socket error')

    exit_status = session.recv_exit_status()
    if exit_status != 0 and raise_on_error:
        fmt = "Command '%(cmd)s' failed with exit_status %(status)d."
        details = fmt % {"cmd": cmd, "status": exit_status}
        if stderr_data:
            details += " Last stderr data: '%s'." % stderr_data
        raise exceptions.SSHError(error_msg=details)
    return exit_status
def _test_list_resources(self, resources, cmd, detail=False, tags=(),
                         fields_1=(), fields_2=(), page_size=None,
                         sort_key=(), sort_dir=(), response_contents=None,
                         base_args=None, path=None, cmd_resources=None,
                         parent_id=None, output_format=None, query=""):
    """Drive a list command and verify the GET request it produces.

    Builds the CLI argument list and the expected query string in
    lockstep from the keyword options, records the expected
    httpclient.request call with mox, runs the command, and returns the
    captured stdout.
    """
    self.mox.StubOutWithMock(cmd, "get_client")
    self.mox.StubOutWithMock(self.client.httpclient, "request")
    cmd.get_client().MultipleTimes().AndReturn(self.client)
    if not cmd_resources:
        cmd_resources = resources

    # canned response body unless the caller supplied one
    if response_contents is None:
        contents = [{self.id_field: 'myid1', },
                    {self.id_field: 'myid2', }, ]
    else:
        contents = response_contents
    reses = {resources: contents}
    resstr = self.client.serialize(reses)
    # url method body
    args = base_args if base_args is not None else []
    if detail:
        args.append('-D')
    if fields_1:
        for field in fields_1:
            args.append('--fields')
            args.append(field)

    if tags:
        # '--' separates positional tag values from the options above
        args.append('--')
        args.append("--tag")
    for tag in tags:
        args.append(tag)
        # each tag contributes a tag=<value> query parameter
        tag_query = urlparse.urlencode(
            {'tag': encodeutils.safe_encode(tag)})
        if query:
            query += "&" + tag_query
        else:
            query = tag_query
    if (not tags) and fields_2:
        args.append('--')
    if fields_2:
        args.append("--fields")
        for field in fields_2:
            args.append(field)
    if detail:
        query = query and query + '&verbose=True' or 'verbose=True'
    for field in itertools.chain(fields_1, fields_2):
        if query:
            query += "&fields=" + field
        else:
            query = "fields=" + field
    if page_size:
        args.append("--page-size")
        args.append(str(page_size))
        if query:
            query += "&limit=%s" % page_size
        else:
            query = "limit=%s" % page_size
    if sort_key:
        for key in sort_key:
            args.append('--sort-key')
            args.append(key)
            if query:
                query += '&'
            query += 'sort_key=%s' % key
    if sort_dir:
        # pad with 'asc' or truncate so sort_dir matches sort_key length
        len_diff = len(sort_key) - len(sort_dir)
        if len_diff > 0:
            sort_dir = tuple(sort_dir) + ('asc', ) * len_diff
        elif len_diff < 0:
            sort_dir = sort_dir[:len(sort_key)]
        for dir in sort_dir:
            args.append('--sort-dir')
            args.append(dir)
            if query:
                query += '&'
            query += 'sort_dir=%s' % dir
    if path is None:
        path = getattr(self.client, cmd_resources + "_path")
        if parent_id:
            path = path % parent_id
    if output_format:
        args.append('-f')
        args.append(output_format)
    self.client.httpclient.request(
        MyUrlComparator(end_url(path, query, format=self.format),
                        self.client),
        'GET',
        body=None,
        headers=mox.ContainsKeyValue('X-Auth-Token', TOKEN)).AndReturn(
            (MyResp(200), resstr))
    self.mox.ReplayAll()
    cmd_parser = cmd.get_parser("list_" + cmd_resources)
    shell.run_command(cmd, cmd_parser, args)
    self.mox.VerifyAll()
    self.mox.UnsetStubs()
    _str = self.fake_stdout.make_string()
    if response_contents is None:
        self.assertIn('myid1', _str)
    return _str
def request_url(self, request, proxies):
    """Return the request URL, byte-encoded only on Python 2.

    NOTE(flaper87): Make sure the url is encoded, otherwise
    python's standard httplib will fail with a TypeError.
    """
    url = super(HTTPSAdapter, self).request_url(request, proxies)
    # CONSISTENCY/BUG FIX: match the guarded sibling implementation --
    # unconditionally byte-encoding breaks Python 3, where requests
    # expects a text URL; only Python 2's httplib needs bytes.
    if six.PY2:
        url = encodeutils.safe_encode(url)
    return url
def _boot(self, resource_url, response_key, name, image, flavor,
          meta=None, files=None, userdata=None, reservation_id=None,
          return_raw=False, min_count=None, max_count=None,
          security_groups=None, key_name=None, availability_zone=None,
          block_device_mapping=None, block_device_mapping_v2=None,
          nics=None, scheduler_hints=None, config_drive=None,
          admin_pass=None, disk_config=None, **kwargs):
    """
    Create (boot) a new server.

    Builds the JSON request body from the many optional arguments and
    POSTs it via self._create. Only truthy options are added to the
    body; min_count defaults to 1 and max_count to min_count.
    """
    body = {"server": {
        "name": name,
        "imageRef": str(base.getid(image)) if image else '',
        "flavorRef": str(base.getid(flavor)),
    }}
    if userdata:
        # accept either an open file or a string/bytes blob
        if hasattr(userdata, 'read'):
            userdata = userdata.read()

        # NOTE(melwitt): Text file data is converted to bytes prior to
        # base64 encoding. The utf-8 encoding will fail for binary files.
        if six.PY3:
            try:
                userdata = userdata.encode("utf-8")
            except AttributeError:
                # In python 3, 'bytes' object has no attribute 'encode'
                pass
        else:
            try:
                userdata = encodeutils.safe_encode(userdata)
            except UnicodeDecodeError:
                pass

        userdata_b64 = base64.b64encode(userdata).decode('utf-8')
        body["server"]["user_data"] = userdata_b64
    if meta:
        body["server"]["metadata"] = meta
    if reservation_id:
        body["server"]["reservation_id"] = reservation_id
    if key_name:
        body["server"]["key_name"] = key_name
    if scheduler_hints:
        body['os:scheduler_hints'] = scheduler_hints
    if config_drive:
        body["server"]["config_drive"] = config_drive
    if admin_pass:
        body["server"]["adminPass"] = admin_pass
    if not min_count:
        min_count = 1
    if not max_count:
        max_count = min_count
    body["server"]["min_count"] = min_count
    body["server"]["max_count"] = max_count

    if security_groups:
        body["server"]["security_groups"] = [{'name': sg}
                                             for sg in security_groups]

    # Files are a slight bit tricky. They're passed in a "personality"
    # list to the POST. Each item is a dict giving a file name and the
    # base64-encoded contents of the file. We want to allow passing
    # either an open file *or* some contents as files here.
    if files:
        personality = body['server']['personality'] = []
        # sorted by path for a deterministic request body
        for filepath, file_or_string in sorted(files.items(),
                                               key=lambda x: x[0]):
            if hasattr(file_or_string, 'read'):
                data = file_or_string.read()
            else:
                data = file_or_string
            if six.PY3 and isinstance(data, str):
                data = data.encode('utf-8')
            cont = base64.b64encode(data).decode('utf-8')
            personality.append({
                'path': filepath,
                'contents': cont,
            })

    if availability_zone:
        body["server"]["availability_zone"] = availability_zone

    # Block device mappings are passed as a list of dictionaries
    if block_device_mapping:
        body['server']['block_device_mapping'] = \
            self._parse_block_device_mapping(block_device_mapping)
    elif block_device_mapping_v2:
        # Following logic can't be removed because it will leaves
        # a valid boot with both --image and --block-device
        # failed , see bug 1433609 for more info
        if image:
            bdm_dict = {'uuid': base.getid(image),
                        'source_type': 'image',
                        'destination_type': 'local',
                        'boot_index': 0,
                        'delete_on_termination': True}
            block_device_mapping_v2.insert(0, bdm_dict)
        body['server']['block_device_mapping_v2'] = block_device_mapping_v2

    if nics is not None:
        # NOTE(tr3buchet): nics can be an empty list
        all_net_data = []
        for nic_info in nics:
            net_data = {}
            # if value is empty string, do not send value in body
            if nic_info.get('net-id'):
                net_data['uuid'] = nic_info['net-id']
            if (nic_info.get('v4-fixed-ip') and
                    nic_info.get('v6-fixed-ip')):
                raise base.exceptions.CommandError(_(
                    "Only one of 'v4-fixed-ip' and 'v6-fixed-ip' may be"
                    " provided."))
            elif nic_info.get('v4-fixed-ip'):
                net_data['fixed_ip'] = nic_info['v4-fixed-ip']
            elif nic_info.get('v6-fixed-ip'):
                net_data['fixed_ip'] = nic_info['v6-fixed-ip']
            if nic_info.get('port-id'):
                net_data['port'] = nic_info['port-id']
            all_net_data.append(net_data)
        body['server']['networks'] = all_net_data

    if disk_config is not None:
        body['server']['OS-DCF:diskConfig'] = disk_config

    return self._create(resource_url, body, response_key,
                        return_raw=return_raw, **kwargs)
def find_resource(manager, name_or_id, **find_args): """Look for resource in a given manager. Used as a helper for the _find_* methods. Example: .. code-block:: python def _find_hypervisor(cs, hypervisor): #Get a hypervisor by name or ID. return cliutils.find_resource(cs.hypervisors, hypervisor) """ # first try to get entity as integer id try: return manager.get(int(name_or_id)) except (TypeError, ValueError, exceptions.NotFound): pass # now try to get entity as uuid try: if six.PY2: tmp_id = encodeutils.safe_encode(name_or_id) else: tmp_id = encodeutils.safe_decode(name_or_id) if uuidutils.is_uuid_like(tmp_id): return manager.get(tmp_id) except (TypeError, ValueError, exceptions.NotFound): pass # for str id which is not uuid if getattr(manager, 'is_alphanum_id_allowed', False): try: return manager.get(name_or_id) except exceptions.NotFound: pass try: try: return manager.find(human_id=name_or_id, **find_args) except exceptions.NotFound: pass # finally try to find entity by name try: resource = getattr(manager, 'resource_class', None) name_attr = resource.NAME_ATTR if resource else 'name' kwargs = {name_attr: name_or_id} kwargs.update(find_args) return manager.find(**kwargs) except exceptions.NotFound: msg = _("No %(name)s with a name or " "ID of '%(name_or_id)s' exists.") % \ { "name": manager.resource_class.__name__.lower(), "name_or_id": name_or_id } raise exceptions.CommandError(msg) except exceptions.NoUniqueMatch: msg = _("Multiple %(name)s matches found for " "'%(name_or_id)s', use an ID to be more specific.") % \ { "name": manager.resource_class.__name__.lower(), "name_or_id": name_or_id } raise exceptions.CommandError(msg)
def _process_stack(self, request, action, action_args,
                   content_type, body, accept):
    """Implement the processing stack.

    Resolves the controller method for ``action``, deserializes the
    request body, dispatches the method, converts the result into a
    response and normalizes its headers, returning either the response
    or a ``Fault`` wrapping an HTTP error.
    """

    # Get the implementing method
    try:
        meth = self.get_method(request, action, content_type, body)
    except (AttributeError, TypeError):
        return Fault(webob.exc.HTTPNotFound())
    except KeyError as ex:
        msg = _("There is no such action: %s") % ex.args[0]
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))
    except exception.MalformedRequestBody:
        msg = _("Malformed request body")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    if body:
        # mask_password keeps credentials out of the debug log.
        msg = _("Action: '%(action)s', calling method: %(meth)s, body: "
                "%(body)s") % {'action': action,
                               'body': six.text_type(body, 'utf-8'),
                               'meth': str(meth)}
        LOG.debug(strutils.mask_password(msg))
    else:
        LOG.debug("Calling method '%(meth)s'",
                  {'meth': str(meth)})

    # Now, deserialize the request body...
    try:
        contents = self._get_request_content(body, request)
    except exception.MalformedRequestBody:
        msg = _("Malformed request body")
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    # Update the action args
    action_args.update(contents)

    # Reject requests whose URL project_id disagrees with the
    # authenticated context's project_id.
    project_id = action_args.pop("project_id", None)
    context = request.environ.get('nova.context')
    if (context and project_id and (project_id != context.project_id)):
        msg = _("Malformed request URL: URL's project_id '%(project_id)s'"
                " doesn't match Context's project_id"
                " '%(context_project_id)s'") % \
            {'project_id': project_id,
             'context_project_id': context.project_id}
        return Fault(webob.exc.HTTPBadRequest(explanation=msg))

    response = None
    try:
        with ResourceExceptionHandler():
            action_result = self.dispatch(meth, request, action_args)
    except Fault as ex:
        response = ex

    if not response:
        # No exceptions; convert action_result into a
        # ResponseObject
        resp_obj = None
        if type(action_result) is dict or action_result is None:
            resp_obj = ResponseObject(action_result)
        elif isinstance(action_result, ResponseObject):
            resp_obj = action_result
        else:
            # The method returned a ready-made response; use it as-is.
            response = action_result

        # Run post-processing extensions
        if resp_obj:
            # Do a preserialize to set up the response object
            if hasattr(meth, 'wsgi_code'):
                resp_obj._default_code = meth.wsgi_code

        if resp_obj and not response:
            response = resp_obj.serialize(request, accept)

    if hasattr(response, 'headers'):
        for hdr, val in list(response.headers.items()):
            if not isinstance(val, six.text_type):
                val = six.text_type(val)
            if six.PY2:
                # In Py2.X Headers must be UTF-8 encoded string
                response.headers[hdr] = encodeutils.safe_encode(val)
            else:
                # In Py3.X Headers must be a string
                response.headers[hdr] = encodeutils.safe_decode(
                    encodeutils.safe_encode(val))

        # Advertise the microversion that served this request and mark
        # both version headers as Vary for caches.
        if not request.api_version_request.is_null():
            response.headers[API_VERSION_REQUEST_HEADER] = \
                'compute ' + request.api_version_request.get_string()
            response.headers[LEGACY_API_VERSION_REQUEST_HEADER] = \
                request.api_version_request.get_string()
            response.headers.add('Vary', API_VERSION_REQUEST_HEADER)
            response.headers.add('Vary', LEGACY_API_VERSION_REQUEST_HEADER)

    return response
def _safe_encode_without_obj(data):
    """Encode a string to bytes, passing non-string values through.

    :param data: value to normalize.
    :returns: ``encodeutils.safe_encode(data)`` when *data* is a string,
        otherwise *data* unchanged.
    """
    # Use six.string_types instead of bare ``str`` so that Python 2
    # ``unicode`` values are also encoded; this matches the sibling
    # implementation of this helper used elsewhere in the codebase.
    if isinstance(data, six.string_types):
        return encodeutils.safe_encode(data)
    return data
def _encode_headers(headers):
    """Return a copy of *headers* with names and values byte-encoded.

    Entries whose value is ``None`` are dropped entirely.
    """
    encoded = {}
    for name, value in six.iteritems(headers):
        if value is None:
            continue
        encoded[encodeutils.safe_encode(name)] = \
            encodeutils.safe_encode(value)
    return encoded
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None, table_label=None,
               print_header=True, print_border=True, out=sys.stdout):
    """Print a list or objects as a table, one row per object.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table, default to
        fields.
    :param table_label: Label to use as header for the whole table.
    :param print_header: print table header.
    :param print_border: print table border.
    :param out: stream to write output to.
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        raise ValueError(
            _("Field labels list %(labels)s has different number "
              "of elements than fields list %(fields)s"),
            {
                "labels": field_labels,
                "fields": fields
            })

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {"sortby": field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = "l"
    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                # Formatter callables take precedence over attribute lookup.
                row.append(formatters[field](o))
            else:
                # Map the column label to the attribute name; mixed-case
                # fields keep their casing (e.g. 'serverId').
                if field in mixed_case_fields:
                    field_name = field.replace(" ", "_")
                else:
                    field_name = field.lower().replace(" ", "_")
                data = getattr(o, field_name, "")
                row.append(data)
        pt.add_row(row)

    if not print_border or not print_header:
        pt.set_style(prettytable.PLAIN_COLUMNS)
        # PLAIN_COLUMNS padding tweaks keep columns visually separated.
        pt.left_padding_width = 0
        pt.right_padding_width = 1
    table_body = pt.get_string(header=print_header,
                               border=print_border,
                               **kwargs) + "\n"

    table_header = ""

    if table_label:
        # Width is the length of the first rendered line of the body.
        table_width = table_body.index("\n")
        table_header = make_table_header(table_label, table_width)
        table_header += "\n"

    # Round-trip through safe_encode so the output is writable both to
    # text streams (Py3, decoded back) and byte streams (Py2).
    if six.PY3:
        if table_header:
            out.write(encodeutils.safe_encode(table_header).decode())
        out.write(encodeutils.safe_encode(table_body).decode())
    else:
        if table_header:
            out.write(encodeutils.safe_encode(table_header))
        out.write(encodeutils.safe_encode(table_body))
def create_cloned_volume(self, volume, src_vref):
    """Create a cloned volume from another volume.

    Since we are cloning from a volume and not a snapshot, we must first
    create a snapshot of the source volume.

    The user has the option to limit how long a volume's clone chain can be
    by setting rbd_max_clone_depth. If a clone is made of another clone
    and that clone has rbd_max_clone_depth clones behind it, the source
    volume will be flattened.
    """
    # librbd expects byte strings for image names.
    src_name = encodeutils.safe_encode(src_vref['name'])
    dest_name = encodeutils.safe_encode(volume['name'])
    flatten_parent = False

    # Do full copy if requested
    if CONF.rbd_max_clone_depth <= 0:
        with RBDVolumeProxy(self, src_name, read_only=True) as vol:
            vol.copy(vol.ioctx, dest_name)
        return

    # Otherwise do COW clone.
    with RADOSClient(self) as client:
        depth = self._get_clone_depth(client, src_name)
        # If source volume is a clone and rbd_max_clone_depth reached,
        # flatten the source before cloning. Zero rbd_max_clone_depth means
        # infinite is allowed.
        if depth == CONF.rbd_max_clone_depth:
            LOG.debug("maximum clone depth (%d) has been reached - "
                      "flattening source volume" %
                      (CONF.rbd_max_clone_depth))
            flatten_parent = True

        src_volume = self.rbd.Image(client.ioctx, src_name)
        try:
            # First flatten source volume if required.
            if flatten_parent:
                _pool, parent, snap = self._get_clone_info(src_volume,
                                                           src_name)
                # Flatten source volume
                LOG.debug("flattening source volume %s" % (src_name))
                src_volume.flatten()
                # Delete parent clone snap
                parent_volume = self.rbd.Image(client.ioctx, parent)
                try:
                    parent_volume.unprotect_snap(snap)
                    parent_volume.remove_snap(snap)
                finally:
                    parent_volume.close()

            # Create new snapshot of source volume
            clone_snap = "%s.clone_snap" % dest_name
            LOG.debug("creating snapshot='%s'" % (clone_snap))
            src_volume.create_snap(clone_snap)
            # Protect the snap: RBD requires protected snapshots for COW
            # cloning.
            src_volume.protect_snap(clone_snap)
        except Exception as exc:
            # Only close if exception since we still need it.
            src_volume.close()
            raise exc

        # Now clone source volume snapshot
        try:
            LOG.debug("cloning '%(src_vol)s@%(src_snap)s' to "
                      "'%(dest)s'" %
                      {'src_vol': src_name, 'src_snap': clone_snap,
                       'dest': dest_name})
            self.RBDProxy().clone(client.ioctx, src_name, clone_snap,
                                  client.ioctx, dest_name,
                                  features=client.features)
        except Exception as exc:
            # Roll back the snapshot created above if the clone failed.
            src_volume.unprotect_snap(clone_snap)
            src_volume.remove_snap(clone_snap)
            raise exc
        finally:
            src_volume.close()

    # Grow the clone if the requested size differs from the source.
    if volume['size'] != src_vref['size']:
        LOG.debug("resize volume '%(dst_vol)s' from %(src_size)d to "
                  "%(dst_size)d" %
                  {'dst_vol': volume['name'], 'src_size': src_vref['size'],
                   'dst_size': volume['size']})
        self._resize(volume)

    LOG.debug("clone created successfully")
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None, json_flag=False):
    """Print a list of objects or dict as a table, one row per object or dict.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table, default to
        fields.
    :param json_flag: print the list as JSON instead of table
    """
    def _get_name_and_data(field):
        if field in formatters:
            # The value of the field has to be modified.
            # For example, it can be used to add extra fields.
            return (field, formatters[field](o))

        # Map the column label to the attribute/key name.
        field_name = field.replace(' ', '_')
        if field not in mixed_case_fields:
            # BUG FIX: lower-case the already-underscored name instead of
            # the raw field, so multi-word labels like "Task State" map to
            # "task_state" (previously the replace() result was discarded).
            # This matches the behavior of the other print_list variants.
            field_name = field_name.lower()
        if isinstance(o, dict):
            data = o.get(field_name, '')
        else:
            data = getattr(o, field_name, '')
        return (field_name, data)

    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        raise ValueError(_("Field labels list %(labels)s has different number "
                           "of elements than fields list %(fields)s"),
                         {'labels': field_labels, 'fields': fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {'sortby': field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = 'l'

    json_array = []
    for o in objs:
        row = []
        for field in fields:
            row.append(_get_name_and_data(field))
        if json_flag:
            json_array.append(dict(row))
        else:
            pt.add_row([r[1] for r in row])

    if json_flag:
        print(json.dumps(json_array, indent=4, separators=(',', ': ')))
    elif six.PY3:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)))
def create_snapshot(self, snapshot):
    """Create and protect an RBD snapshot of the snapshot's volume."""
    with RBDVolumeProxy(self, snapshot['volume_name']) as volume:
        # librbd expects a byte string for the snapshot name.
        snap_name = encodeutils.safe_encode(snapshot['name'])
        volume.create_snap(snap_name)
        volume.protect_snap(snap_name)
def _boot(self, resource_url, response_key, name, image, flavor,
          meta=None, files=None, userdata=None, return_raw=False,
          key_name=None, availability_zone=None, nics=None,
          admin_pass=None, disk_config=None, **kwargs):
    """Create (boot) a new baremetal server.

    Builds the ``{"server": {...}}`` request body and POSTs it to
    ``resource_url``.

    :param resource_url: URL to POST the create request to.
    :param response_key: key under which the created resource is returned.
    :param name: server name.
    :param image: image object or id; optional.
    :param flavor: flavor object or id.
    :param userdata: path to a user-data file, or the literal payload.
    :param files: mapping of path -> open file or contents, sent as the
        "personality" list.
    :param nics: network definitions passed through as-is.
    :param disk_config: JSON object string whose keys are merged into the
        server body (e.g. ``{"OS-DCF:diskConfig": "AUTO"}``).
    :returns: the result of ``self._create``.
    """
    body = {"server": {
        "name": name,
        "flavorRef": str(base.getid(flavor)),
    }}
    # BUG FIX: the previous code did ``image = str(base.getid(image))``
    # unconditionally, turning a missing image into the truthy string
    # 'None' and sending imageRef='None'. Only resolve the id when an
    # image was actually supplied.
    if image:
        body["server"]["imageRef"] = str(base.getid(image))

    if userdata:
        # userdata may be a path to a file or the literal payload.
        if os.path.exists(userdata):
            with open(userdata, "r") as fuserdata:
                userdata = fuserdata.read()

        # base64 operates on bytes, so encode text first.
        if six.PY3:
            userdata = userdata.encode("utf-8")
        else:
            userdata = encodeutils.safe_encode(userdata)

        userdata_b64 = base64.b64encode(userdata).decode('utf-8')
        body["server"]["user_data"] = userdata_b64

    if meta:
        body["server"]["metadata"] = meta
    if key_name:
        body["server"]["key_name"] = key_name
    if admin_pass:
        body["server"]["adminPass"] = admin_pass

    # Files are passed in a "personality" list to the POST. Each item is
    # a dict giving a file name and the base64-encoded contents of the
    # file. We allow passing either an open file *or* some contents.
    if files:
        personality = body['server']['personality'] = []
        # Sort by path for a deterministic request body.
        for filepath, file_or_string in sorted(files.items(),
                                               key=lambda x: x[0]):
            if hasattr(file_or_string, 'read'):
                data = file_or_string.read()
            else:
                data = file_or_string
            if six.PY3 and isinstance(data, str):
                data = data.encode('utf-8')
            cont = base64.b64encode(data).decode('utf-8')
            personality.append({
                'path': filepath,
                'contents': cont,
            })

    if availability_zone:
        body["server"]["availability_zone"] = availability_zone

    if nics is not None:
        # nics are passed through verbatim (no per-nic validation here).
        body['server']['networks'] = nics

    if disk_config is not None:
        # disk_config arrives as a JSON object string; merge its keys
        # (e.g. "OS-DCF:diskConfig") into the server body.
        disk_config_dict = json.loads(disk_config)
        for k, v in disk_config_dict.items():
            body['server'][k] = v

    return self._create(resource_url, body, response_key,
                        return_raw=return_raw, **kwargs)
def _print(pt, order): if sys.version_info >= (3, 0): print(pt.get_string(sortby=order)) else: print(encodeutils.safe_encode(pt.get_string(sortby=order)))
def delete_volume(self, volume):
    """Deletes a logical volume.

    Raises ``VolumeIsBusy`` when the volume has non-clone snapshots or
    is locked by a crashed client; a volume that still has COW clones is
    renamed to ``<name>.deleted`` instead of being removed.
    """
    # NOTE(dosaboy): this was broken by commit cbe1d5f. Ensure names are
    # utf-8 otherwise librbd will barf.
    volume_name = encodeutils.safe_encode(volume['name'])
    with RADOSClient(self) as client:
        try:
            rbd_image = self.rbd.Image(client.ioctx, volume_name)
        except self.rbd.ImageNotFound:
            # Already gone; deletion is idempotent.
            LOG.info(_LI("volume %s no longer exists in backend")
                     % (volume_name))
            return

        clone_snap = None
        parent = None

        # Ensure any backup snapshots are deleted
        self._delete_backup_snaps(rbd_image)

        # If the volume has non-clone snapshots this delete is expected to
        # raise VolumeIsBusy so do so straight away.
        try:
            snaps = rbd_image.list_snaps()
            for snap in snaps:
                if snap['name'].endswith('.clone_snap'):
                    LOG.debug("volume has clone snapshot(s)")
                    # We grab one of these and use it when fetching parent
                    # info in case the volume has been flattened.
                    clone_snap = snap['name']
                    break

                # Any other snapshot means the volume cannot be deleted.
                raise exception.VolumeIsBusy(volume_name=volume_name)

            # Determine if this volume is itself a clone
            _pool, parent, parent_snap = self._get_clone_info(rbd_image,
                                                              volume_name,
                                                              clone_snap)
        finally:
            # Always release the image handle, even on VolumeIsBusy.
            rbd_image.close()

        if clone_snap is None:
            LOG.debug("deleting rbd volume %s" % (volume_name))
            try:
                self.RBDProxy().remove(client.ioctx, volume_name)
            except self.rbd.ImageBusy:
                msg = (_("ImageBusy error raised while deleting rbd "
                         "volume. This may have been caused by a "
                         "connection from a client that has crashed and, "
                         "if so, may be resolved by retrying the delete "
                         "after 30 seconds has elapsed."))
                LOG.warn(msg)
                # Now raise this so that volume stays available so that we
                # delete can be retried.
                raise exception.VolumeIsBusy(msg, volume_name=volume_name)
            except self.rbd.ImageNotFound:
                msg = (_LI("RBD volume %s not found, allowing delete "
                           "operation to proceed.") % volume_name)
                LOG.info(msg)
                return

            # If it is a clone, walk back up the parent chain deleting
            # references.
            if parent:
                LOG.debug("volume is a clone so cleaning references")
                self._delete_clone_parent_refs(client, parent, parent_snap)
        else:
            # If the volume has copy-on-write clones we will not be able to
            # delete it. Instead we will keep it as a silent volume which
            # will be deleted when it's snapshot and clones are deleted.
            new_name = "%s.deleted" % (volume_name)
            self.RBDProxy().rename(client.ioctx, volume_name, new_name)
def _safe_encode_without_obj(data):
    """Byte-encode string values; leave any other object untouched."""
    if not isinstance(data, six.string_types):
        return data
    return encodeutils.safe_encode(data)
def __init__(self, image, pool, user, conf):
    """Store the connection parameters for this proxy.

    The pool, user and conf values are byte-encoded; the image handle
    is kept as given.
    """
    self.image = image
    # NOTE(review): byte strings are presumably required by the librados
    # layer these values are handed to — confirm against the callers.
    for attr, value in (('pool', pool), ('user', user), ('conf', conf)):
        setattr(self, attr, encodeutils.safe_encode(value))
def print_dict(obj, fields=None, formatters=None, mixed_case_fields=False,
               normalize_field_names=False,
               property_label="Property", value_label="Value",
               table_label=None, print_header=True, print_border=True,
               wrap=0, out=sys.stdout):
    """Print dict as a table.

    :param obj: dict to print
    :param fields: `dict` of keys to print from d. Defaults to all keys
    :param formatters: `dict` of callables for field formatting
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param normalize_field_names: If True, field names will be transformed,
        e.g. "Field Name" -> "field_name", otherwise they will be used
        unchanged.
    :param property_label: label of "property" column
    :param value_label: label of "value" column
    :param table_label: Label to use as header for the whole table.
    :param print_header: print table header.
    :param print_border: print table border.
    :param wrap: wrap values longer than this many characters (0 = no wrap).
    :param out: stream to write output to.
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []

    if not fields:
        if isinstance(obj, dict):
            fields = sorted(obj.keys())
        else:
            # For arbitrary objects, show all public, non-callable attrs.
            fields = [name for name in dir(obj)
                      if (not name.startswith("_") and
                          not callable(getattr(obj, name)))]

    pt = prettytable.PrettyTable([property_label, value_label], caching=False)
    pt.align = "l"
    for field_name in fields:
        if field_name in formatters:
            data = formatters[field_name](obj)
        else:
            field = field_name
            if normalize_field_names:
                if field not in mixed_case_fields:
                    field = field_name.lower()
                field = field.replace(" ", "_").replace("-", "_")

            if isinstance(obj, dict):
                data = obj.get(field, "")
            else:
                data = getattr(obj, field, "")

        # convert dict to str to check length
        if isinstance(data, (dict, list)):
            data = json.dumps(data)
        if wrap > 0:
            data = textwrap.fill(six.text_type(data), wrap)
        # if value has a newline, add in multiple rows
        # e.g. fault with stacktrace
        # NOTE(review): r"\n" matches the two-character sequence
        # backslash-n (escaped newlines as stored in the data), not a
        # real newline — confirm this is the intended behavior.
        if (data and isinstance(data, six.string_types) and
                (r"\n" in data or "\r" in data)):
            # "\r" would break the table, so remove it.
            if "\r" in data:
                data = data.replace("\r", "")
            lines = data.strip().split(r"\n")
            col1 = field_name
            for line in lines:
                pt.add_row([col1, line])
                # Only the first row carries the property name.
                col1 = ""
        else:
            if data is None:
                data = "-"
            pt.add_row([field_name, data])

    table_body = pt.get_string(header=print_header,
                               border=print_border) + "\n"

    table_header = ""

    if table_label:
        # Width is the length of the first rendered line of the body.
        table_width = table_body.index("\n")
        table_header = make_table_header(table_label, table_width)
        table_header += "\n"

    # Round-trip through safe_encode so the output is writable both to
    # text streams (Py3, decoded back) and byte streams (Py2).
    if six.PY3:
        if table_header:
            out.write(encodeutils.safe_encode(table_header).decode())
        out.write(encodeutils.safe_encode(table_body).decode())
    else:
        if table_header:
            out.write(encodeutils.safe_encode(table_header))
        out.write(encodeutils.safe_encode(table_body))
def test_do_create(self):
    # Verify that do_create() forwards the fully-assembled attribute set
    # (placement data, metadata, base64 user_data, security group, disk
    # config) to the compute client's server_create().
    cc = mock.Mock()
    nc = mock.Mock()
    profile = server.ServerProfile('t', self.spec)
    profile._computeclient = cc
    profile._networkclient = nc
    self._stubout_profile(profile, mock_image=True, mock_flavor=True,
                          mock_keypair=True, mock_net=True)
    node_obj = mock.Mock(id='FAKE_NODE_ID', index=123,
                         cluster_id='FAKE_CLUSTER_ID',
                         data={
                             'placement': {
                                 'zone': 'AZ1',
                                 'servergroup': 'SERVER_GROUP_1'
                             }
                         })
    # 'name' collides with a Mock constructor kwarg, so set it afterwards.
    node_obj.name = 'TEST_SERVER'
    cc.server_create.return_value = mock.Mock(id='FAKE_ID')

    server_id = profile.do_create(node_obj)

    # Expected server_create() arguments, derived from self.spec plus the
    # node's placement data.
    attrs = dict(
        adminPass='******',
        availability_zone='AZ1',
        config_drive=False,
        flavorRef='FAKE_FLAVOR_ID',
        imageRef='FAKE_IMAGE_ID',
        key_name='FAKE_KEYNAME',
        metadata={
            'cluster_id': 'FAKE_CLUSTER_ID',
            'cluster_node_id': 'FAKE_NODE_ID',
            'cluster_node_index': '123',
            'meta var': 'meta val'
        },
        name='FAKE_SERVER_NAME',
        networks=[{
            'fixed_ip': 'FAKE_IP',
            'port': 'FAKE_PORT',
            'uuid': 'FAKE_NETWORK_ID',
        }],
        personality=[{
            'path': '/etc/motd',
            'contents': 'foo'
        }],
        scheduler_hints={
            'same_host': 'HOST_ID',
            'group': 'SERVER_GROUP_1',
        },
        security_groups=[{
            'name': 'HIGH_SECURITY_GROUP'
        }],
        user_data='FAKE_USER_DATA',
    )
    # user_data is sent base64-encoded; mirror the production encoding.
    ud = encodeutils.safe_encode('FAKE_USER_DATA')
    attrs['user_data'] = encodeutils.safe_decode(base64.b64encode(ud))
    attrs['OS-DCF:diskConfig'] = 'AUTO'

    cc.server_create.assert_called_once_with(**attrs)
    self.assertEqual('FAKE_ID', server_id)
def _list(self, drafts, type_name=None, type_version=None, **kwargs):
    """Retrieve a listing of artifact objects.

    :param drafts: when true, list draft artifacts instead of
        published ones.
    :param type_name: artifact type name (validated by
        ``_check_type_params``).
    :param type_version: artifact type version.
    :param page_size: Number of artifacts to request in each
        paginated request.
    :param limit: maximum total number of artifacts to yield.
    :returns: generator over :class:`ArtifactType` instances.
    """
    type_name, type_version = self._check_type_params(type_name,
                                                      type_version)
    limit = kwargs.get('limit')
    page_size = kwargs.get('page_size') or self.default_page_size

    def paginate(url, page_size, limit=None):
        next_url = url

        while True:
            if limit and page_size > limit:
                # Don't request a full page when fewer items remain.
                next_url = next_url.replace("limit=%s" % page_size,
                                            "limit=%s" % limit)

            resp, body = self.http_client.get(next_url)
            for artifact in body['artifacts']:
                yield ArtifactType(**artifact)

                if limit:
                    limit -= 1
                    if limit <= 0:
                        # BUG FIX: 'raise StopIteration' inside a
                        # generator violates PEP 479 and becomes a
                        # RuntimeError on Python 3.7+; a plain return
                        # ends the generator cleanly.
                        return

            try:
                next_url = body['next']
            except KeyError:
                # No 'next' link: this was the last page.
                return

    filters = kwargs.get('filters', {})
    filters['limit'] = page_size

    url_params = []
    for param, items in six.iteritems(filters):
        # Each filter value becomes its own query parameter; scalar
        # values are wrapped in a single-element list first.
        values = [items] if not isinstance(items, list) else items
        for value in values:
            if isinstance(value, six.string_types):
                value = encodeutils.safe_encode(value)
            url_params.append({param: value})

    if drafts:
        url = '/v0.1/artifacts/%s/' \
              'v%s/drafts?' % (type_name, type_version)
    else:
        url = '/v0.1/artifacts/%s/v%s?' % (type_name, type_version)

    for param in url_params:
        url = '%s&%s' % (url, parse.urlencode(param))

    if 'sort' in kwargs:
        url = '%s&sort=%s' % (url, self._validate_sort_param(
            kwargs['sort']))

    for artifact in paginate(url, page_size, limit):
        yield artifact
def do_create(self, obj):
    """Create a server for the node object.

    :param obj: The node object for which a server will be created.
    :returns: the id of the created server.
    :raises exc.EResourceCreation: when server creation or any of the
        post-create steps fail.
    """
    kwargs = {}
    # Start from the declared properties, skipping unset ones.
    for key in self.KEYS:
        if self.properties[key] is not None:
            kwargs[key] = self.properties[key]

    # Resolve image/flavor names to the Ref ids the API expects.
    image_ident = self.properties[self.IMAGE]
    if image_ident is not None:
        image = self._validate_image(obj, image_ident, 'create')
        kwargs.pop(self.IMAGE)
        kwargs['imageRef'] = image.id

    flavor_ident = self.properties[self.FLAVOR]
    flavor = self._validate_flavor(obj, flavor_ident, 'create')
    kwargs.pop(self.FLAVOR)
    kwargs['flavorRef'] = flavor.id

    keypair_name = self.properties[self.KEY_NAME]
    if keypair_name:
        keypair = self._validate_keypair(obj, keypair_name, 'create')
        kwargs['key_name'] = keypair.name

    kwargs['name'] = obj.name

    metadata = self._build_metadata(obj, {})
    kwargs['metadata'] = metadata

    # Variables substituted into the user_data jinja2 template below.
    jj_vars = {}
    cluster_data = self._get_cluster_data(obj)
    kwargs['networks'] = [{'uuid': cluster_data[self.PRIVATE_NETWORK]}]

    # Get user_data parameters from metadata
    jj_vars['KUBETOKEN'] = cluster_data[self.KUBEADM_TOKEN]
    jj_vars['MASTER_FLOATINGIP'] = cluster_data[
        self.KUBE_MASTER_FLOATINGIP]

    block_device_mapping_v2 = self.properties[self.BLOCK_DEVICE_MAPPING_V2]
    if block_device_mapping_v2 is not None:
        kwargs['block_device_mapping_v2'] = self._resolve_bdm(
            obj, block_device_mapping_v2, 'create')

    # user_data = self.properties[self.USER_DATA]
    user_data = base.loadScript('./scripts/master.sh')
    if user_data is not None:
        # Use jinja2 to replace variables defined in user_data
        try:
            jj_t = jinja2.Template(user_data)
            user_data = jj_t.render(**jj_vars)
        except (jinja2.exceptions.UndefinedError, ValueError) as ex:
            # TODO(anyone) Handle jinja2 error
            # NOTE(review): on a template error the raw, un-rendered
            # script is sent as user_data — confirm that is acceptable.
            pass
        ud = encodeutils.safe_encode(user_data)
        # The API expects base64-encoded user_data as text.
        kwargs['user_data'] = encodeutils.safe_decode(base64.b64encode(ud))

    sgid = self._get_security_group(obj)
    kwargs['security_groups'] = [{'name': sgid}]

    server = None
    resource_id = None
    try:
        server = self.compute(obj).server_create(**kwargs)
        self.compute(obj).wait_for_server(server.id)
        server = self.compute(obj).server_get(server.id)
        # The first address on the unnamed ('') network is the master ip.
        self._update_master_ip(obj, server.addresses[''][0]['addr'])
        self._associate_floatingip(obj, server)
        LOG.info("Created master node: %s" % server.id)
        return server.id
    except exc.InternalError as ex:
        # Report the partially-created server id (if any) so callers can
        # clean it up.
        if server and server.id:
            resource_id = server.id
        raise exc.EResourceCreation(type='server',
                                    message=six.text_type(ex),
                                    resource_id=resource_id)
def exit(msg=''):
    """Terminate the process with exit status 1.

    :param msg: optional message written to stderr before exiting;
        when empty, nothing is printed.
    """
    if msg:
        encoded = encodeutils.safe_encode(msg)
        print(encoded, file=sys.stderr)
    sys.exit(1)
def take_action(self, args):
    # Convert recorded task results into a subunit v2 stream, one test
    # event per task result.

    # Setup where the output stream must go
    if args.output_file == '-':
        output_stream = sys.stdout
    else:
        # NOTE(review): this file handle is never explicitly closed;
        # confirm whether flushing/closing should happen after
        # stopTestRun().
        output_stream = open(args.output_file, 'wb')

    # Create the output stream
    output = StreamResultToBytes(output_stream)

    # Create the test run
    output.startTestRun()

    # Restrict to the requested playbooks when given, else everything.
    if args.playbook is not None:
        playbooks = args.playbook
        results = (models.TaskResult().query
                   .join(models.Task)
                   .filter(models.TaskResult.task_id == models.Task.id)
                   .filter(models.Task.playbook_id.in_(playbooks)))
    else:
        results = models.TaskResult().query.all()

    for result in results:
        # Generate a fixed length identifier for the task
        test_id = utils.generate_identifier(result)

        # Assign the test_status value
        # NOTE(review): non-ignored failures map to 'xfail' and ignored
        # ones to 'fail', which reads inverted — confirm this matches the
        # intended subunit semantics.
        if result.status in ('failed', 'unreachable'):
            if result.ignore_errors is False:
                test_status = 'xfail'
            else:
                test_status = 'fail'
        elif result.status == 'skipped':
            test_status = 'skip'
        else:
            test_status = 'success'

        # Determine the play file path
        if result.task.playbook and result.task.playbook.path:
            playbook_path = result.task.playbook.path
        else:
            playbook_path = ''

        # Determine the task file path
        if result.task.file and result.task.file.path:
            task_path = result.task.file.path
        else:
            task_path = ''

        # Assign the file_bytes value
        test_data = {
            'host': result.host.name,
            'playbook_id': result.task.playbook.id,
            'playbook_path': playbook_path,
            'play_name': result.task.play.name,
            'task_action': result.task.action,
            'task_action_lineno': result.task.lineno,
            'task_id': result.task.id,
            'task_name': result.task.name,
            'task_path': task_path
        }
        file_bytes = encodeutils.safe_encode(jsonutils.dumps(test_data))

        # Assign the start_time and stop_time value
        # The timestamp needs to be an epoch, so we need
        # to convert it.
        start_time = datetime.datetime.fromtimestamp(
            float(result.time_start.strftime('%s'))
        ).replace(tzinfo=iso8601.UTC)

        end_time = datetime.datetime.fromtimestamp(
            float(result.time_end.strftime('%s'))
        ).replace(tzinfo=iso8601.UTC)

        # Output the start of the event
        output.status(test_id=test_id, timestamp=start_time)

        # Output the end of the event
        output.status(test_id=test_id,
                      test_status=test_status,
                      test_tags=None,
                      runnable=False,
                      file_name=test_id,
                      file_bytes=file_bytes,
                      timestamp=end_time,
                      eof=True,
                      mime_type='text/plain; charset=UTF8')

    output.stopTestRun()
def list(self, **kwargs):
    """Retrieve a listing of Image objects.

    :param page_size: Number of images to request in each
        paginated request.
    :param limit: maximum total number of images to yield.
    :param filters: dict of query filters; the 'tag' key may hold a
        list of tags, each sent as its own query parameter.
    :param sort: combined sort spec (mutually exclusive with
        sort_key/sort_dir).
    :returns: generator over list of Images.
    """
    ori_validate_fun = self.model.validate
    empty_fun = lambda *args, **kwargs: None

    limit = kwargs.get('limit')
    # NOTE(flaper87): Don't use `get('page_size', DEFAULT_SIZE)` otherwise,
    # it could be possible to send invalid data to the server by passing
    # page_size=None.
    page_size = kwargs.get('page_size') or DEFAULT_PAGE_SIZE

    def paginate(url, page_size, limit=None):
        next_url = url

        while True:
            if limit and page_size > limit:
                # NOTE(flaper87): Avoid requesting 2000 images when limit
                # is 1
                next_url = next_url.replace("limit=%s" % page_size,
                                            "limit=%s" % limit)

            resp, body = self.http_client.get(next_url)
            for image in body['images']:
                # NOTE(bcwaldon): remove 'self' for now until we have
                # an elegant way to pass it into the model constructor
                # without conflict.
                image.pop('self', None)
                yield self.model(**image)
                # NOTE(zhiyan): In order to resolve the performance issue
                # of JSON schema validation for image listing case, we
                # don't validate each image entry but do it only on first
                # image entry for each page.
                self.model.validate = empty_fun

                if limit:
                    limit -= 1
                    if limit <= 0:
                        # BUG FIX: 'raise StopIteration' inside a
                        # generator violates PEP 479 and becomes a
                        # RuntimeError on Python 3.7+; a plain return
                        # ends the generator cleanly.
                        return

            # NOTE(zhiyan); Reset validation function.
            self.model.validate = ori_validate_fun

            try:
                next_url = body['next']
            except KeyError:
                # No 'next' link: this was the last page.
                return

    filters = kwargs.get('filters', {})
    # NOTE(flaper87): We paginate in the client, hence we use
    # the page_size as Glance's limit.
    filters['limit'] = page_size

    # Each tag becomes its own 'tag=' query parameter.
    tags = filters.pop('tag', [])
    tags_url_params = []

    for tag in tags:
        if isinstance(tag, six.string_types):
            tags_url_params.append({'tag': encodeutils.safe_encode(tag)})

    for param, value in six.iteritems(filters):
        if isinstance(value, six.string_types):
            filters[param] = encodeutils.safe_encode(value)

    url = '/v2/images?%s' % parse.urlencode(filters)

    for param in tags_url_params:
        url = '%s&%s' % (url, parse.urlencode(param))

    if 'sort' in kwargs:
        if 'sort_key' in kwargs or 'sort_dir' in kwargs:
            raise exc.HTTPBadRequest("The 'sort' argument is not supported"
                                     " with 'sort_key' or 'sort_dir'.")
        url = '%s&sort=%s' % (url, self._validate_sort_param(
            kwargs['sort']))
    else:
        sort_dir = self._wrap(kwargs.get('sort_dir', []))
        sort_key = self._wrap(kwargs.get('sort_key', []))

        if len(sort_key) != len(sort_dir) and len(sort_dir) > 1:
            raise exc.HTTPBadRequest(
                "Unexpected number of sort directions: "
                "either provide a single sort direction or an equal "
                "number of sort keys and sort directions.")
        for key in sort_key:
            url = '%s&sort_key=%s' % (url, key)

        for dir in sort_dir:
            url = '%s&sort_dir=%s' % (url, dir)

    for image in paginate(url, page_size, limit):
        yield image
def list(self, **kwargs):
    """Retrieve a listing of Image objects.

    :param page_size: Number of images to request in each
                      paginated request.
    :param limit: Maximum number of images to yield overall.
    :param filters: dict of query filters; a 'tag' entry is split out
                    into repeated ``tag=`` URL parameters.
    :param sort: combined sort spec; mutually exclusive with
                 sort_key/sort_dir.
    :param marker: pagination marker appended to the request URL.
    :returns: generator over (Image, response) pairs.
    :raises exc.HTTPBadRequest: on invalid tags or malformed sort args.
    """
    limit = kwargs.get('limit')
    # NOTE(flaper87): Don't use `get('page_size', DEFAULT_SIZE)` otherwise,
    # it could be possible to send invalid data to the server by passing
    # page_size=None.
    page_size = kwargs.get('page_size') or DEFAULT_PAGE_SIZE

    def paginate(url, page_size, limit=None):
        next_url = url
        req_id_hdr = {}

        while True:
            if limit and page_size > limit:
                # NOTE(flaper87): Avoid requesting 2000 images when limit
                # is 1
                next_url = next_url.replace("limit=%s" % page_size,
                                            "limit=%s" % limit)

            resp, body = self.http_client.get(next_url, headers=req_id_hdr)
            # NOTE(rsjethani): Store current request id so that it can be
            # used in subsequent requests. Refer bug #1525259
            req_id_hdr['x-openstack-request-id'] = \
                utils._extract_request_id(resp)

            for image in body['images']:
                # NOTE(bcwaldon): remove 'self' for now until we have
                # an elegant way to pass it into the model constructor
                # without conflict.
                image.pop('self', None)
                # We do not validate the model when listing.
                # This prevents side-effects of injecting invalid
                # schema values via v1.
                yield self.unvalidated_model(**image), resp

                if limit:
                    limit -= 1
                    if limit <= 0:
                        # NOTE: PEP 479 — raising StopIteration inside a
                        # generator is a RuntimeError on Python 3.7+;
                        # 'return' is the correct way to end iteration.
                        return

            try:
                next_url = body['next']
            except KeyError:
                return

    filters = kwargs.get('filters', {})
    # NOTE(flaper87): We paginate in the client, hence we use
    # the page_size as Glance's limit.
    filters['limit'] = page_size

    tags = filters.pop('tag', [])
    tags_url_params = []

    for tag in tags:
        if not isinstance(tag, six.string_types):
            raise exc.HTTPBadRequest("Invalid tag value %s" % tag)
        tags_url_params.append({'tag': encodeutils.safe_encode(tag)})

    for param, value in filters.items():
        if isinstance(value, six.string_types):
            filters[param] = encodeutils.safe_encode(value)

    url = '/v2/images?%s' % parse.urlencode(filters)

    for param in tags_url_params:
        url = '%s&%s' % (url, parse.urlencode(param))

    if 'sort' in kwargs:
        if 'sort_key' in kwargs or 'sort_dir' in kwargs:
            raise exc.HTTPBadRequest("The 'sort' argument is not supported"
                                     " with 'sort_key' or 'sort_dir'.")
        url = '%s&sort=%s' % (url,
                              self._validate_sort_param(kwargs['sort']))
    else:
        sort_dir = self._wrap(kwargs.get('sort_dir', []))
        sort_key = self._wrap(kwargs.get('sort_key', []))

        if len(sort_key) != len(sort_dir) and len(sort_dir) > 1:
            raise exc.HTTPBadRequest(
                "Unexpected number of sort directions: "
                "either provide a single sort direction or an equal "
                "number of sort keys and sort directions.")
        for key in sort_key:
            url = '%s&sort_key=%s' % (url, key)

        # Renamed from 'dir' to avoid shadowing the builtin.
        for sort_direction in sort_dir:
            url = '%s&sort_dir=%s' % (url, sort_direction)

    if isinstance(kwargs.get('marker'), six.string_types):
        url = '%s&marker=%s' % (url, kwargs['marker'])

    for image, resp in paginate(url, page_size, limit):
        yield image, resp
def __init__(self, config):
    """Persist *config* as JSON in a named temporary file.

    The file is created with ``delete=False`` so it survives being
    closed; its path is stored in ``self.filename`` for later use.
    """
    with tempfile.NamedTemporaryFile(delete=False) as conf_file:
        conf_file.write(encodeutils.safe_encode(json.dumps(config)))
    self.filename = conf_file.name
def new_websocket_client(self):
    """Called after a new WebSocket connection has been established.

    Authenticates the request via a token (query string or cookie),
    validates the Origin header against allowed origins, connects to
    the target console host/port, performs the internal-access-path
    handshake and optional security-proxy negotiation, then starts
    proxying frames between client and target.

    :raises exception.ValidationError: if the Origin header is invalid.
    :raises exception.InvalidConnectionInfo: if the internal-access-path
        handshake does not return an HTTP 200 response.
    """
    # Reopen the eventlet hub to make sure we don't share an epoll
    # fd with parent and/or siblings, which would be bad
    from eventlet import hubs
    hubs.use_hub()

    # The nova expected behavior is to have token
    # passed to the method GET of the request
    parse = urlparse.urlparse(self.path)
    if parse.scheme not in ('http', 'https'):
        # From a bug in urlparse in Python < 2.7.4 we cannot support
        # special schemes (cf: http://bugs.python.org/issue9374)
        if sys.version_info < (2, 7, 4):
            raise exception.NovaException(
                _("We do not support scheme '%s' under Python < 2.7.4, "
                  "please use http or https") % parse.scheme)

    query = parse.query
    token = urlparse.parse_qs(query).get("token", [""]).pop()
    if not token:
        # NoVNC uses its own convention that forwards the token
        # from the request to a cookie header; we should check
        # also for this behavior
        hcookie = self.headers.get('cookie')
        if hcookie:
            cookie = Cookie.SimpleCookie()
            for hcookie_part in hcookie.split(';'):
                hcookie_part = hcookie_part.lstrip()
                try:
                    cookie.load(hcookie_part)
                except Cookie.CookieError:
                    # NOTE(stgleb): Do not print out cookie content
                    # for security reasons.
                    LOG.warning('Found malformed cookie')
                else:
                    if 'token' in cookie:
                        token = cookie['token'].value

    ctxt = context.get_admin_context()
    connect_info = self._get_connect_info(ctxt, token)

    # Verify Origin
    expected_origin_hostname = self.headers.get('Host')
    if ':' in expected_origin_hostname:
        e = expected_origin_hostname
        if '[' in e and ']' in e:
            # Strip the port off an IPv6 literal like "[::1]:6080".
            expected_origin_hostname = e.split(']')[0][1:]
        else:
            expected_origin_hostname = e.split(':')[0]
    # NOTE: build a fresh list instead of appending in place —
    # appending mutated the shared CONF list, growing it by one
    # entry on every request handled by this process.
    expected_origin_hostnames = (list(CONF.console.allowed_origins) +
                                 [expected_origin_hostname])
    origin_url = self.headers.get('Origin')
    # missing origin header indicates non-browser client which is OK
    if origin_url is not None:
        origin = urlparse.urlparse(origin_url)
        origin_hostname = origin.hostname
        origin_scheme = origin.scheme
        if origin_hostname == '' or origin_scheme == '':
            detail = _("Origin header not valid.")
            raise exception.ValidationError(detail=detail)
        if origin_hostname not in expected_origin_hostnames:
            detail = _("Origin header does not match this host.")
            raise exception.ValidationError(detail=detail)
        if not self.verify_origin_proto(connect_info, origin_scheme):
            detail = _("Origin header protocol does not match this host.")
            raise exception.ValidationError(detail=detail)

    self.msg(_('connect info: %s'), str(connect_info))
    host = connect_info['host']
    port = int(connect_info['port'])

    # Connect to the target
    self.msg(_("connecting to: %(host)s:%(port)s") % {'host': host,
                                                      'port': port})
    tsock = self.socket(host, port, connect=True)

    # Handshake as necessary
    if connect_info.get('internal_access_path'):
        tsock.send(encodeutils.safe_encode(
            "CONNECT %s HTTP/1.1\r\n\r\n" %
            connect_info['internal_access_path']))
        # NOTE: socket.recv returns bytes, so the delimiter and the
        # status search must be bytes too — str patterns raise
        # TypeError on Python 3 (and are equivalent on Python 2).
        end_token = b"\r\n\r\n"
        while True:
            # Peek so the proxied stream still starts at the first
            # byte after the handshake response.
            data = tsock.recv(4096, socket.MSG_PEEK)
            token_loc = data.find(end_token)
            if token_loc != -1:
                if data.split(b"\r\n")[0].find(b"200") == -1:
                    raise exception.InvalidConnectionInfo()
                # remove the response from recv buffer
                tsock.recv(token_loc + len(end_token))
                break

    if self.server.security_proxy is not None:
        tenant_sock = TenantSock(self)
        try:
            tsock = self.server.security_proxy.connect(tenant_sock, tsock)
        except exception.SecurityProxyNegotiationFailed:
            LOG.exception("Unable to perform security proxying, shutting "
                          "down connection")
            tenant_sock.close()
            tsock.shutdown(socket.SHUT_RDWR)
            tsock.close()
            raise
        tenant_sock.finish_up()

    # Start proxying
    try:
        self.do_proxy(tsock)
    except Exception:
        if tsock:
            tsock.shutdown(socket.SHUT_RDWR)
            tsock.close()
            self.vmsg(_("%(host)s:%(port)s: "
                        "Websocket client or target closed") %
                      {'host': host, 'port': port})
        raise
def sendall(self, data):
    """Encode *data* and hand it to the request handler as one frame."""
    frame = encodeutils.safe_encode(data)
    self.reqhandler.send_frames([frame])