def _submit_request(self, url_or_request):
    """Submit a request to the REST API and translate transport errors.

    Args:
        url_or_request (str or `urllib.request.Request`): the request
            to send.

    Returns:
        tuple of (int, str): The response status code and the json
            parsed body, or the error message.

    Raises:
        `CliException`: If any issues occur with the URL.
    """
    try:
        # read/decode stay inside the try: the connection can still drop
        # mid-read (RemoteDisconnected)
        response = urllib.urlopen(url_or_request)
        parsed_body = json.loads(response.read().decode())
        return (response.status, parsed_body)
    except HTTPError as e:
        # HTTP-level errors are reported back to the caller, not raised
        return (e.code, e.msg)
    except RemoteDisconnected as e:
        raise CliException(e)
    except URLError:
        raise CliException(
            'Unable to connect to "{}": '
            'make sure URL is correct'.format(self._base_url))
def _read_signer(key_filename):
    """Read a hex-encoded private key file and construct a Signer from it.

    Args:
        key_filename: The filename where the key is stored. If None,
            defaults to the default key for the current user.

    Returns:
        Signer: the signer

    Raises:
        CliException: If unable to read the file.
    """
    if key_filename is None:
        # fall back to the current user's default key location
        key_filename = os.path.join(
            os.path.expanduser('~'), '.sawtooth', 'keys',
            getpass.getuser() + '.priv')

    try:
        with open(key_filename, 'r') as key_file:
            hex_key = key_file.read().strip()
    except IOError as e:
        raise CliException('Unable to read key file: {}'.format(str(e)))

    try:
        private_key = Secp256k1PrivateKey.from_hex(hex_key)
    except ParseError as e:
        raise CliException('Unable to read key in file: {}'.format(str(e)))

    return CryptoFactory(create_context('secp256k1')).new_signer(private_key)
def _get_data(self, path, **queries): url = self._base_url + path params = self._format_queries(queries) limit = None if "limit" in params: limit = params["limit"] while url: code, json_result = self._submit_request( url, params=params, ) if code == 404: raise CliException( '{}: There is no resource with the identifier "{}"'.format( self._base_url, path.split('/')[-1])) elif code != 200: raise CliException("{}: {} {}".format(self._base_url, code, json_result)) for item in json_result.get('data', []): yield item if limit: limit = limit - len(json_result.get('data', [])) if limit <= 0: break url = json_result['paging'].get('next', None)
def do_keygen(args):
    """Generate a WIF private key plus address file for a user key.

    Honors args.key_name, args.key_dir, args.force and args.quiet;
    raises CliException on missing directories, existing files (without
    --force), or I/O failures.
    """
    key_name = args.key_name if args.key_name is not None \
        else getpass.getuser()

    if args.key_dir is not None:
        key_dir = args.key_dir
        if not os.path.exists(key_dir):
            raise CliException('no such directory: {}'.format(key_dir))
    else:
        key_dir = os.path.join(os.path.expanduser('~'), '.sawtooth', 'keys')
        if not os.path.exists(key_dir):
            if not args.quiet:
                print('creating key directory: {}'.format(key_dir))
            try:
                os.makedirs(key_dir)
            except IOError as e:
                raise CliException('IOError: {}'.format(str(e)))

    wif_filename = os.path.join(key_dir, key_name + '.wif')
    addr_filename = os.path.join(key_dir, key_name + '.addr')

    if not args.force:
        existing = [name for name in (wif_filename, addr_filename)
                    if os.path.exists(name)]
        for name in existing:
            print('file exists: {}'.format(name), file=sys.stderr)
        if existing:
            raise CliException(
                'files exist, rerun with --force to overwrite existing files')

    privkey = signing.generate_privkey()
    encoded = signing.encode_privkey(privkey)
    pubkey = signing.generate_pubkey(privkey)
    addr = signing.generate_identifier(pubkey)

    try:
        # write the WIF key and the address file the same way
        for filename, content in ((wif_filename, encoded),
                                  (addr_filename, addr)):
            already_there = os.path.exists(filename)
            with open(filename, 'w') as out_fd:
                if not args.quiet:
                    verb = 'overwriting' if already_there else 'writing'
                    print('{} file: {}'.format(verb, filename))
                out_fd.write(content)
                out_fd.write('\n')
    except IOError as ioe:
        raise CliException('IOError: {}'.format(str(ioe)))
def _do_config_proposal_vote(args):
    """Executes the 'proposal vote' subcommand.

    Looks up the proposal matching the given id, verifies this signing
    key has not already voted on it, then builds a single-transaction
    batch of sawtooth_config vote transactions and submits it to a
    validator.
    """
    pubkey, signing_key = _read_signing_keys(args.key)
    rest_client = RestClient(args.url)

    candidates = _get_proposals(rest_client).candidates
    proposal = next(
        (c for c in candidates if c.proposal_id == args.proposal_id),
        None)
    if proposal is None:
        raise CliException('No proposal exists with the given id')

    if any(vote.public_key == pubkey for vote in proposal.votes):
        raise CliException(
            'A vote has already been recorded with this signing key')

    txn = _create_vote_txn(
        pubkey, signing_key,
        args.proposal_id,
        proposal.proposal.setting,
        args.vote_value)
    batch = _create_batch(pubkey, signing_key, [txn])

    rest_client.send_batches(BatchList(batches=[batch]))
def do_keygen(args):
    """Generate a secp256k1 key pair, writing .priv and .pub hex files.

    Honors args.key_name, args.key_dir, args.force and args.quiet;
    raises CliException on missing directories, existing files (without
    --force), or I/O failures.
    """
    key_name = args.key_name if args.key_name is not None \
        else getpass.getuser()

    if args.key_dir is not None:
        key_dir = args.key_dir
        if not os.path.exists(key_dir):
            raise CliException('no such directory: {}'.format(key_dir))
    else:
        key_dir = os.path.join(os.path.expanduser('~'), '.sawtooth', 'keys')
        if not os.path.exists(key_dir):
            if not args.quiet:
                print('creating key directory: {}'.format(key_dir))
            try:
                os.makedirs(key_dir)
            except IOError as e:
                raise CliException('IOError: {}'.format(str(e)))

    priv_filename = os.path.join(key_dir, key_name + '.priv')
    pub_filename = os.path.join(key_dir, key_name + '.pub')

    if not args.force:
        existing = [name for name in (priv_filename, pub_filename)
                    if os.path.exists(name)]
        for name in existing:
            print('file exists: {}'.format(name), file=sys.stderr)
        if existing:
            raise CliException(
                'files exist, rerun with --force to overwrite existing files')

    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    public_key = context.get_public_key(private_key)

    try:
        # write the private and public keys the same way
        for filename, key_hex in ((priv_filename, private_key.as_hex()),
                                  (pub_filename, public_key.as_hex())):
            already_there = os.path.exists(filename)
            with open(filename, 'w') as out_fd:
                if not args.quiet:
                    verb = 'overwriting' if already_there else 'writing'
                    print('{} file: {}'.format(verb, filename))
                out_fd.write(key_hex)
                out_fd.write('\n')
    except IOError as ioe:
        raise CliException('IOError: {}'.format(str(ioe)))
def do_genesis(args, data_dir=None):
    """Given the command args, take an series of input files containing
    GenesisData, combine all the batches into one GenesisData, and output the
    result into a new file.

    Args:
        args (:obj:`Namespace`): the parsed command line arguments,
            providing ``input_file`` and optionally ``output``.
        data_dir (str): optional data directory; defaults to
            ``get_data_dir()``.

    Raises:
        CliException: if the data directory does not exist or an input
            file cannot be read or parsed.
    """
    if data_dir is None:
        data_dir = get_data_dir()

    if not os.path.exists(data_dir):
        raise CliException(
            "Data directory does not exist: {}".format(data_dir))

    genesis_batches = []
    for input_file in args.input_file:
        print('Processing {}...'.format(input_file))
        input_data = BatchList()
        try:
            with open(input_file, 'rb') as in_file:
                input_data.ParseFromString(in_file.read())
        # was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit and dropped the causing exception
        except Exception as e:
            raise CliException('Unable to read {}'.format(input_file)) from e

        genesis_batches += input_data.batches

    _validate_depedencies(genesis_batches)

    if args.output:
        genesis_file = args.output
    else:
        genesis_file = os.path.join(data_dir, 'genesis.batch')

    print('Generating {}'.format(genesis_file))
    output_data = GenesisData(batches=genesis_batches)
    with open(genesis_file, 'wb') as out_file:
        out_file.write(output_data.SerializeToString())
def do_keygen(args):
    """Executes the key generation operation, given the parsed arguments.

    Writes a secp256k1 private/public key pair as hex into the validator
    key directory.

    Args:
        args (:obj:`Namespace`): The parsed args.
    """
    key_name = args.key_name if args.key_name is not None else 'validator'

    key_dir = get_key_dir()
    if not os.path.exists(key_dir):
        raise CliException("Key directory does not exist: {}".format(key_dir))

    priv_filename = os.path.join(key_dir, key_name + '.priv')
    pub_filename = os.path.join(key_dir, key_name + '.pub')

    if not args.force:
        existing = [name for name in (priv_filename, pub_filename)
                    if os.path.exists(name)]
        for name in existing:
            print('file exists: {}'.format(name), file=sys.stderr)
        if existing:
            raise CliException(
                'files exist, rerun with --force to overwrite existing files')

    context = create_context('secp256k1')
    private_key = context.new_random_private_key()
    public_key = context.get_public_key(private_key)

    try:
        # write the private and public keys the same way
        for filename, key_hex in ((priv_filename, private_key.as_hex()),
                                  (pub_filename, public_key.as_hex())):
            already_there = os.path.exists(filename)
            with open(filename, 'w') as out_fd:
                if not args.quiet:
                    verb = 'overwriting' if already_there else 'writing'
                    print('{} file: {}'.format(verb, filename))
                out_fd.write(key_hex)
                out_fd.write('\n')
    except IOError as ioe:
        raise CliException('IOError: {}'.format(str(ioe)))
def do_keygen(args):
    """Executes the key generation operation, given the parsed arguments.

    Writes a WIF private key and an address file into the validator key
    directory.

    Args:
        args (:obj:`Namespace`): The parsed args.
    """
    key_name = args.key_name if args.key_name is not None else 'validator'

    key_dir = get_key_dir()
    if not os.path.exists(key_dir):
        raise CliException("Key directory does not exist: {}".format(key_dir))

    wif_filename = os.path.join(key_dir, key_name + '.wif')
    addr_filename = os.path.join(key_dir, key_name + '.addr')

    if not args.force:
        existing = [name for name in (wif_filename, addr_filename)
                    if os.path.exists(name)]
        for name in existing:
            print('file exists: {}'.format(name), file=sys.stderr)
        if existing:
            raise CliException(
                'files exist, rerun with --force to overwrite existing files')

    privkey = signing.generate_privkey()
    encoded = signing.encode_privkey(privkey)
    pubkey = signing.generate_pubkey(privkey)
    addr = signing.generate_identifier(pubkey)

    try:
        # write the WIF key and the address file the same way
        for filename, content in ((wif_filename, encoded),
                                  (addr_filename, addr)):
            already_there = os.path.exists(filename)
            with open(filename, 'w') as out_fd:
                if not args.quiet:
                    verb = 'overwriting' if already_there else 'writing'
                    print('{} file: {}'.format(verb, filename))
                out_fd.write(content)
                out_fd.write('\n')
    except IOError as ioe:
        raise CliException('IOError: {}'.format(str(ioe)))
def _submit_request(self, url, params=None, data=None, headers=None,
                    method="GET"):
    """Submits the given request, and handles the errors appropriately.

    Args:
        url (str): the request to send.
        params (dict): params to be passed along to get/post
        data (bytes): the data to include in the request.
        headers (dict): the headers to include in the request.
        method (str): the method to use for the request, "POST" or "GET".

    Returns:
        tuple of (int, str): The response status code and the json parsed
            body, or the error message.

    Raises:
        `CliException`: If any issues occur with the URL.
    """
    if headers is None:
        headers = {}
    if self._auth_header is not None:
        # NOTE: intentionally mutates the caller-supplied dict, matching
        # the existing contract
        headers['Authorization'] = self._auth_header

    try:
        if method == 'POST':
            result = requests.post(
                url, params=params, data=data, headers=headers)
        elif method == 'GET':
            result = requests.get(
                url, params=params, data=data, headers=headers)
        result.raise_for_status()
        return (result.status_code, result.json())
    except requests.exceptions.HTTPError as e:
        # HTTP-level errors are returned, not raised
        return (e.response.status_code, e.response.reason)
    except RemoteDisconnected as e:
        raise CliException(e) from e
    except (requests.exceptions.MissingSchema,
            requests.exceptions.InvalidURL) as e:
        raise CliException(e) from e
    except requests.exceptions.InvalidSchema as e:
        raise CliException(
            'Schema not valid in "{}": '
            'make sure URL has valid schema'.format(self._base_url)) from e
    except requests.exceptions.ConnectionError as e:
        raise CliException(
            'Unable to connect to "{}": '
            'make sure URL is correct'.format(self._base_url)) from e
def do_cluster_extend(args):
    """Adds args.count additional validator nodes to a running network.

    Raises:
        CliException: if the network is not in the Running desired
            state, or if the network manager update fails.
    """
    state = load_state()

    node_controller = get_node_controller(state, args)
    node_command_generator = SimpleNodeCommandGenerator()
    network_manager = ValidatorNetworkManager(
        node_controller=node_controller,
        node_command_generator=node_command_generator)

    existing_nodes = state["Nodes"]

    if state["DesiredState"] != "Running":
        raise CliException(
            "You must have a running network.\n" +
            "Use the cluster start command to start a validator network.")

    print("Extending network by {} nodes.".format(args.count))

    index_offset = len(existing_nodes)
    for i in range(0, args.count):
        node_index = i + index_offset
        node_name = "validator-{:0>3}".format(node_index)

        if node_name in existing_nodes \
                and network_manager.is_running(node_name):
            print("Already running: {}".format(node_name))
            continue

        # genesis is true for the first node
        genesis = (node_index == 0)
        gossip_port = 5500 + node_index
        http_port = 8800 + node_index

        node_command_generator.start(NodeArguments(
            node_name, http_port=http_port, gossip_port=gossip_port,
            genesis=genesis))

        state["Nodes"][node_name] = {
            "Status": "Running",
            "Index": i,
            "HttpPort": str(http_port),
            "GossipPort": str(gossip_port)
        }

    save_state(state)

    try:
        network_manager.update()
    except ManagementError as e:
        raise CliException(str(e))
def _get(self, path):
    """GET the given path from the REST API and return the parsed JSON.

    Raises:
        CliException: on an HTTP error response or a connection failure.
    """
    try:
        response = urllib.urlopen(self._base_url + path)
    except HTTPError as e:
        raise CliException('({}) {}'.format(e.code, e.msg))
    except URLError:
        raise CliException(
            'Unable to connect to "{}" '
            'make sure URL is correct'.format(self._base_url))

    return json.loads(response.read().decode('utf-8'))
def _get(self, path, queries=None):
    """GET the given path (with optional query params) and return JSON.

    Args:
        path (str): path under the base URL.
        queries (dict): optional query parameters to urlencode.

    Raises:
        CliException: on an HTTP error response or a connection failure.
    """
    query_string = '?' + urlencode(queries) if queries else ''

    try:
        response = urllib.urlopen(self._base_url + path + query_string)
    except HTTPError as e:
        raise CliException('({}) {}'.format(e.code, e.msg))
    except URLError:
        raise CliException(
            'Unable to connect to "{}" '
            'make sure URL is correct'.format(self._base_url))

    return json.loads(response.read().decode('utf-8'))
def get_node_controller(state, args):
    """Constructs the node controller for the configured management type,
    optionally decorating it with a WrappedNodeController.

    Args:
        state (dict): the loaded cluster state ('Manage', 'Wrap', ...).
        args (:obj:`Namespace`): the parsed command line arguments.

    Returns:
        The constructed (possibly wrapped) node controller.

    Raises:
        CliException: if the management type is unknown or not
            implemented, or if wrap options conflict with state.
    """
    # Get base controller:
    manage_type = state['Manage']

    node_controller_types = {
        'docker': DockerNodeController,
        'subprocess': SubprocessNodeController
    }

    try:
        node_controller_type = node_controller_types[manage_type]
    # was a bare `except:`; only a missing key is expected here
    except KeyError:
        # manage_type hasn't been added to node_controller_types
        if manage_type in MANAGE_TYPES:
            # fixed typo: 'manamgement' -> 'management'
            error_msg = '{} management type not implemented'
        else:
            error_msg = 'Invalid management type: {}'
        raise CliException(error_msg.format(manage_type))

    node_controller = node_controller_type()

    # Optionally decorate with WrappedNodeController
    args_wrap = False if not hasattr(args, 'wrap') else args.wrap

    if 'Wrap' not in state:
        # if wrap has not been set in state, set it
        state['Wrap'] = args_wrap
    else:
        # state already knows about a wrapper
        if args_wrap is not False and args_wrap != state['Wrap']:
            raise CliException("Already wrapped to %s." % state["Wrap"])

    if state['Wrap'] is not False:
        # NOTE(review): empty tuple means nothing is currently wrappable,
        # so requesting --wrap always raises here — confirm intended
        wrappable_types = ()
        if not isinstance(node_controller, wrappable_types):
            msg = '--wrap currently only implemented for {} management types'
            raise CliException(msg.format(wrappable_types))

        # either args or state have indicated a WrappedNodeController
        if 'ManageWrap' not in state:
            state['ManageWrap'] = None

        node_controller = WrappedNodeController(
            node_controller, data_dir=state['Wrap'],
            clean_data_dir=state['ManageWrap'])

        if state['Wrap'] is None:
            state['Wrap'] = node_controller.get_data_dir()
            state['ManageWrap'] = True

        print('{} wrapped to {}'.format(args.cluster_command, state['Wrap']))

    # Return out construction:
    return node_controller
def _read_signing_keys(key_filename):
    """Reads the given file as a WIF formatted key.

    Args:
        key_filename: The filename where the key is stored.

    Returns:
        tuple (str, str): the public and private key pair

    Raises:
        CliException: If unable to read the file.
    """
    try:
        with open(key_filename, 'r') as key_file:
            wif_key = key_file.read().strip()
        # decode the WIF key and re-encode as hex for signing
        privkey_hex = signing.encode_privkey(
            signing.decode_privkey(wif_key, 'wif'), 'hex')
        pubkey_hex = signing.encode_pubkey(
            signing.generate_pubkey(privkey_hex), 'hex')
        return pubkey_hex, privkey_hex
    except IOError as e:
        raise CliException('Unable to read key file: {}'.format(str(e)))
def do_cluster_logs(args):
    """Prints docker logs for each container backing the requested nodes.

    Only implemented for the 'docker' management type; any other type
    prints a notice instead.

    Args:
        args (:obj:`Namespace`): The parsed args, providing node_names.

    Raises:
        CliException: if a docker command fails.
    """
    state = load_state()

    supported_types = ('docker', )
    if state['Manage'] not in supported_types:
        print("logs not implemented for {}".format(state['Manage']))
        return

    prefix = 'sawtooth-cluster-0'
    for node_name in args.node_names:
        try:
            node_num = node_name[len('validator-'):]
            processes = state['Processors'] + ['validator']
            containers = ['-'.join([prefix, proc, node_num])
                          for proc in processes]
            for container in containers:
                # fixed message: was missing the space before "of node"
                print("Logs for container: " + container +
                      " of node: " + node_name)
                cmd = ['docker', 'logs', container]
                # wait() blocks until exit instead of the previous
                # busy-wait poll() loop that burned CPU
                subprocess.Popen(cmd).wait()
        except subprocess.CalledProcessError as cpe:
            raise CliException(str(cpe))
def do_cluster_status(args):
    """Prints the status of the requested (or all) cluster nodes.

    Args:
        args (:obj:`Namespace`): The parsed args, providing node_names.

    Raises:
        CliException: if a requested node name is not known.
    """
    state = load_state()

    node_controller = get_node_controller(state, args)
    node_command_generator = SimpleNodeCommandGenerator()
    vnm = ValidatorNetworkManager(
        node_controller=node_controller,
        node_command_generator=node_command_generator)

    if args.node_names:
        node_names = args.node_names
        node_superset = vnm.get_node_names()
        for node_name in args.node_names:
            # was: a bare `node_name in node_superset` expression whose
            # result was discarded inside a try/except KeyError that
            # could never fire, so unknown names slipped through
            if node_name not in node_superset:
                raise CliException(
                    "{} is not a known node name".format(node_name))
    else:
        node_names = vnm.get_node_names()

    # Check expected status of nodes vs what is returned from vnm
    print("NodeName".ljust(15), "Status".ljust(10))
    for node_name in node_names:
        status = vnm.status(node_name)
        if status == "UNKNOWN":
            status = "Not Running"
        print(node_name.ljust(15), status.ljust(10))
def send_batches(self, batch_list):
    """Sends a list of batches to the validator.

    Args:
        batch_list (:obj:`BatchList`): the list of batches

    Returns:
        dict: the json result data, as a dict

    Raises:
        CliException: if the REST API responds with an error code.
    """
    payload = batch_list.SerializeToString()

    batch_request = urllib.Request(
        self._base_url + '/batches',
        data=payload,
        headers={
            'Content-Type': 'application/octet-stream',
            'Content-Length': "%d" % len(payload)
        },
        method='POST')

    code, json_result = self._submit_request(batch_request)
    # 200 (OK) and 202 (Accepted) both count as success
    if code in (200, 202):
        return json_result
    raise CliException("({}): {}".format(code, json_result))
def main(prog_name=os.path.basename(sys.argv[0]), args=None,
         with_loggers=True):
    """Entry point for the 'config' CLI: parse arguments, configure
    logging, and dispatch to the matching subcommand handler.

    Raises:
        CliException: if the subcommand is not recognized.
    """
    parser = create_parser(prog_name)
    if args is None:
        args = sys.argv[1:]
    args = parser.parse_args(args)

    if with_loggers is True:
        verbose_level = args.verbose if args.verbose is not None else 0
        setup_loggers(verbose_level=verbose_level)

    if args.subcommand == 'proposal':
        proposal_handlers = {
            'create': _do_config_proposal_create,
            'list': _do_config_proposal_list,
            'vote': _do_config_proposal_vote,
        }
        handler = proposal_handlers.get(args.proposal_cmd)
        if handler is not None:
            handler(args)
            return
    elif args.subcommand == 'genesis':
        _do_config_genesis(args)
        return

    raise CliException('"{}" is not a valid subcommand of "config"'.format(
        args.subcommand))
def do_peers(args):
    """Dispatches the 'peers' subcommands (list, graph).

    Args:
        args (:obj:`Namespace`): The parsed args.

    Raises:
        CliException: if the peers subcommand is not recognized.
    """
    if args.peers_command == 'list':
        _do_peers_list(args)
    elif args.peers_command == 'graph':
        _do_peers_graph(args)
    else:
        # was: formatted args.subcommand, which is not the attribute
        # actually being checked — report the offending value instead
        raise CliException(
            'Invalid command: {}'.format(args.peers_command))
def main(prog_name=os.path.basename(sys.argv[0]), args=None,
         with_loggers=True):
    """Top-level CLI entry point: parse arguments, configure logging,
    and route to the handler for the chosen command.

    Raises:
        CliException: if the command is not recognized.
    """
    parser = create_parser(prog_name)
    if args is None:
        args = sys.argv[1:]
    args = parser.parse_args(args)

    if with_loggers is True:
        verbose_level = args.verbose if args.verbose is not None else 0
        setup_loggers(verbose_level=verbose_level)

    # dispatch table replaces the long if/elif chain
    handlers = {
        'admin': do_admin,
        'keygen': do_keygen,
        'config': do_config,
        'block': do_block,
        'batch': do_batch,
        'transaction': do_transaction,
        'state': do_state,
        'identity': do_identity,
    }
    try:
        handler = handlers[args.command]
    except KeyError:
        raise CliException("invalid command: {}".format(args.command))
    handler(args)
def _do_identity_role_create(args):
    """Executes the 'role create' subcommand.

    Builds a batch containing one role-creation transaction, then
    either writes the BatchList to a file or submits it to a validator,
    depending on the supplied CLI arguments.
    """
    pubkey, signing_key = _read_signing_keys(args.key)

    role_txn = _create_role_txn(pubkey, signing_key, args.name, args.policy)
    batch = _create_batch(pubkey, signing_key, [role_txn])
    batch_list = BatchList(batches=[batch])

    if args.output is not None:
        try:
            with open(args.output, 'wb') as batch_file:
                batch_file.write(batch_list.SerializeToString())
        except IOError as e:
            raise CliException(
                'Unable to write to batch file: {}'.format(str(e)))
    elif args.url is not None:
        RestClient(args.url).send_batches(batch_list)
    else:
        raise AssertionError('No target for create set.')
def _do_config_proposal_create(args):
    """Executes the 'proposal create' subcommand.

    Turns each KEY=VALUE argument into a propose transaction, bundles
    them into a single batch, and either writes the BatchList to a file
    or submits it to a validator, depending on the supplied CLI
    arguments.
    """
    settings = [arg.split('=', 1) for arg in args.setting]

    pubkey, signing_key = _read_signing_keys(args.key)

    txns = [_create_propose_txn(pubkey, signing_key, setting)
            for setting in settings]
    batch = _create_batch(pubkey, signing_key, txns)
    batch_list = BatchList(batches=[batch])

    if args.output is not None:
        try:
            with open(args.output, 'wb') as batch_file:
                batch_file.write(batch_list.SerializeToString())
        except IOError as e:
            raise CliException(
                'Unable to write to batch file: {}'.format(str(e)))
    elif args.url is not None:
        RestClient(args.url).send_batches(batch_list)
    else:
        raise AssertionError('No target for create set.')
def _do_config_set(args):
    """Executes the 'set' subcommand.

    Given a key file, and a series of key/value pairs, it generates
    batches of sawtooth_config transactions in a BatchList instance,
    and stores it in a file.

    Raises:
        CliException: if the output file cannot be written.
    """
    settings = [s.split('=', 1) for s in args.setting]

    with open(args.key, 'r') as key_file:
        wif_key = key_file.read().strip()
        signing_key = signing.encode_privkey(
            signing.decode_privkey(wif_key, 'wif'), 'hex')
        pubkey = signing.encode_pubkey(
            signing.generate_pubkey(signing_key), 'hex')

    txns = [_create_config_txn(pubkey, signing_key, setting)
            for setting in settings]
    txn_ids = [txn.header_signature for txn in txns]

    batch_header = BatchHeader(
        signer_pubkey=pubkey,
        transaction_ids=txn_ids).SerializeToString()

    batch = Batch(
        header=batch_header,
        header_signature=signing.sign(batch_header, signing_key),
        transactions=txns)

    batch_list = BatchList(batches=[batch]).SerializeToString()

    try:
        with open(args.output, 'wb') as batch_file:
            batch_file.write(batch_list)
    # was a bare `except:`, which hid the real cause and also swallowed
    # KeyboardInterrupt/SystemExit; the write can only fail with IOError
    except IOError as e:
        raise CliException(
            'Unable to write to {}'.format(args.output)) from e
def do_block(args):
    """Runs the block list or block show command, printing output to the
    console

    Args:
        args: The parsed arguments sent to the command at runtime
    """
    rest_client = RestClient(args.url, args.user)

    if args.subcommand == 'list':
        blocks = rest_client.list_blocks()
        keys = ('num', 'block_id', 'batches', 'txns', 'signer')
        headers = tuple(k.upper() if k != 'batches' else 'BATS'
                        for k in keys)

        def parse_block_row(block):
            # flatten all transactions across all batches to count them
            batches = block.get('batches', [])
            txns = [txn for batch in batches
                    for txn in batch['transactions']]
            return (
                block['header'].get('block_num', 0),
                block['header_signature'],
                len(batches),
                len(txns),
                block['header']['signer_public_key'])

        if args.format == 'default':
            fmt.print_terminal_table(headers, blocks, parse_block_row)
        elif args.format == 'csv':
            fmt.print_csv(headers, blocks, parse_block_row)
        elif args.format in ('json', 'yaml'):
            data = [dict(zip(keys, parse_block_row(block)))
                    for block in blocks]
            if args.format == 'yaml':
                fmt.print_yaml(data)
            elif args.format == 'json':
                fmt.print_json(data)
            else:
                raise AssertionError(
                    'Missing handler: {}'.format(args.format))
        else:
            raise AssertionError('Missing handler: {}'.format(args.format))

    if args.subcommand == 'show':
        output = rest_client.get_block(args.block_id)

        if args.key:
            # look for the key at the top level first, then the header
            if args.key in output:
                output = output[args.key]
            elif args.key in output['header']:
                output = output['header'][args.key]
            else:
                raise CliException(
                    'key "{}" not found in block or header'.format(args.key))

        if args.format == 'yaml':
            fmt.print_yaml(output)
        elif args.format == 'json':
            fmt.print_json(output)
        else:
            raise AssertionError('Missing handler: {}'.format(args.format))
def _read_signing_keys(key_filename):
    """Reads the given file as a WIF formatted key.

    Args:
        key_filename: The filename where the key is stored. If None,
            defaults to the default key for the current user.

    Returns:
        tuple (str, str): the public and private key pair

    Raises:
        CliException: If unable to read the file.
    """
    filename = key_filename
    if filename is None:
        # fall back to the current user's default key location
        filename = os.path.join(
            os.path.expanduser('~'), '.sawtooth', 'keys',
            getpass.getuser() + '.priv')

    try:
        with open(filename, 'r') as key_file:
            signing_key = key_file.read().strip()
        public_key = signing.generate_public_key(signing_key)
        return public_key, signing_key
    except IOError as e:
        raise CliException('Unable to read key file: {}'.format(str(e)))
def _do_identity_policy_list(args):
    """Executes the 'policy list' subcommand, printing every on-chain
    identity policy in the requested output format."""
    rest_client = RestClient(args.url)
    state = rest_client.list_state(
        subtree=IDENTITY_NAMESPACE + _POLICY_PREFIX)

    head = state['head']

    # collect every policy from every stored PolicyList entry
    printable_policies = []
    for state_value in state['data']:
        policies_list = PolicyList()
        policies_list.ParseFromString(b64decode(state_value['data']))
        printable_policies.extend(policies_list.policies)

    printable_policies.sort(key=lambda p: p.name)

    if args.format == 'default':
        tty_width = tty.width()
        for policy in printable_policies:
            # Set value width to the available terminal space, or the min
            # width, whichever is larger
            width = max(tty_width - len(policy.name) - 3, _MIN_PRINT_WIDTH)

            value = "Entries:\n"
            for entry in policy.entries:
                entry_string = (" " * 4) + Policy.Type.Name(entry.type) \
                    + " " + entry.key
                if len(entry_string) > width:
                    entry_string = entry_string[:width] + '...'
                value += entry_string + "\n"
            print('{}: \n {}'.format(policy.name, value))
    elif args.format == 'csv':
        try:
            writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
            writer.writerow(['POLICY NAME', 'ENTRIES'])
            for policy in printable_policies:
                row = [policy.name]
                row.extend(Policy.Type.Name(entry.type) + " " + entry.key
                           for entry in policy.entries)
                writer.writerow(row)
        except csv.Error:
            raise CliException('Error writing CSV')
    elif args.format in ('json', 'yaml'):
        output = {}
        for policy in printable_policies:
            value = "Entries: "
            for entry in policy.entries:
                value += Policy.Type.Name(entry.type) + " " \
                    + entry.key + " "
            output[policy.name] = value

        policies_snapshot = {'head': head, 'policies': output}
        if args.format == 'json':
            print(json.dumps(policies_snapshot, indent=2, sort_keys=True))
        else:
            print(yaml.dump(policies_snapshot,
                            default_flow_style=False)[0:-1])
    else:
        raise AssertionError('Unknown format {}'.format(args.format))
def _do_config_list(args):
    """Lists the current on-chain configuration values.
    """
    rest_client = RestClient(args.url)
    state = rest_client.list_state(subtree=SETTINGS_NAMESPACE)

    prefix = args.filter
    head = state['head']

    proposals_address = _key_to_address('sawtooth.settings.vote.proposals')

    printable_settings = []
    for state_value in state['data']:
        if state_value['address'] == proposals_address:
            # This is completely internal setting and we won't list it here
            continue

        setting = Setting()
        setting.ParseFromString(b64decode(state_value['data']))
        printable_settings.extend(
            entry for entry in setting.entries
            if entry.key.startswith(prefix))

    printable_settings.sort(key=lambda s: s.key)

    if args.format == 'default':
        tty_width = tty.width()
        for setting in printable_settings:
            # Set value width to the available terminal space, or the min
            # width, whichever is larger
            width = max(tty_width - len(setting.key) - 3, _MIN_PRINT_WIDTH)
            value = setting.value
            if len(value) > width:
                value = value[:width] + '...'
            print('{}: {}'.format(setting.key, value))
    elif args.format == 'csv':
        try:
            writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
            writer.writerow(['KEY', 'VALUE'])
            for setting in printable_settings:
                writer.writerow([setting.key, setting.value])
        except csv.Error:
            raise CliException('Error writing CSV')
    elif args.format in ('json', 'yaml'):
        settings_snapshot = {
            'head': head,
            'settings': {setting.key: setting.value
                         for setting in printable_settings}
        }
        if args.format == 'json':
            print(json.dumps(settings_snapshot, indent=2, sort_keys=True))
        else:
            print(yaml.dump(settings_snapshot,
                            default_flow_style=False)[0:-1])
    else:
        raise AssertionError('Unknown format {}'.format(args.format))
def _get(self, path, **queries): code, json_result = self._submit_request(self._base_url + path + self._format_queries(queries)) if code == 200: return json_result elif code == 404: return None else: raise CliException("({}): {}".format(code, json_result))
def do_batch(args):
    """Runs the batch list or batch show command, printing output to the
    console

    Args:
        args: The parsed arguments sent to the command at runtime
    """
    rest_client = RestClient(args.url, args.user)

    if args.subcommand == 'list':
        batches = rest_client.list_batches()
        keys = ('batch_id', 'txns', 'signer')
        headers = tuple(k.upper() for k in keys)

        def parse_batch_row(batch):
            return (
                batch['header_signature'],
                len(batch.get('transactions', [])),
                batch['header']['signer_pubkey'])

        if args.format == 'default':
            fmt.print_terminal_table(headers, batches, parse_batch_row)
        elif args.format == 'csv':
            fmt.print_csv(headers, batches, parse_batch_row)
        elif args.format in ('json', 'yaml'):
            data = [dict(zip(keys, parse_batch_row(batch)))
                    for batch in batches]
            if args.format == 'yaml':
                fmt.print_yaml(data)
            elif args.format == 'json':
                fmt.print_json(data)
            else:
                raise AssertionError(
                    'Missing handler: {}'.format(args.format))
        else:
            raise AssertionError('Missing handler: {}'.format(args.format))

    if args.subcommand == 'show':
        output = rest_client.get_batch(args.batch_id)

        if args.key:
            # look for the key at the top level first, then the header
            if args.key in output:
                output = output[args.key]
            elif args.key in output['header']:
                output = output['header'][args.key]
            else:
                raise CliException(
                    'key "{}" not found in batch or header'.format(args.key))

        if args.format == 'yaml':
            fmt.print_yaml(output)
        elif args.format == 'json':
            fmt.print_json(output)
        else:
            raise AssertionError('Missing handler: {}'.format(args.format))