def events(args):
    """`conduct events` command: print the event table for a bundle."""
    log = logging.getLogger(__name__)
    request_url = conduct_url.url(
        'bundles/{}/events?count={}'.format(quote_plus(args.bundle), args.lines), args)
    response = conduct_request.get(args.dcos_mode, conductr_host(args), request_url,
                                   auth=args.conductr_auth,
                                   verify=args.server_verification_file,
                                   timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    # One row per event; a header row is prepended below.
    rows = [{'time': validation.format_timestamp(event['timestamp'], args),
             'event': event['event'],
             'description': event['description']}
            for event in json.loads(response.text)]
    rows.insert(0, {'time': 'TIME', 'event': 'EVENT', 'description': 'DESC'})
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * 2})
    for row in rows:
        log.screen('''\
{time: <{time_width}}{padding}\
{event: <{event_width}}{padding}\
{description: <{description_width}}{padding}'''.format(**dict(row, **column_widths)).rstrip())
    return True
def ps(args):
    """`sandbox ps` command: list ConductR core/agent process information."""
    log = logging.getLogger(__name__)
    core_info, agent_info = sandbox_common.resolve_conductr_info(args.image_dir)
    pids_info = sandbox_common.find_pids(core_info['extraction_dir'],
                                         agent_info['extraction_dir'])
    # Optionally restrict the listing to core-only or agent-only processes.
    if args.is_filter_core:
        rows = [info for info in pids_info if info['type'] == 'core']
    elif args.is_filter_agent:
        rows = [info for info in pids_info if info['type'] == 'agent']
    else:
        rows = list(pids_info)
    if args.is_quiet:
        # Quiet mode prints bare pids only.
        for row in rows:
            log.info(row['id'])
    else:
        rows.insert(0, {'id': 'PID', 'type': 'TYPE', 'ip': 'IP'})
        column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * 2})
        for row in rows:
            log.screen('''\
{id: <{id_width}}{padding}\
{type: >{type_width}}{padding}\
{ip: >{ip_width}}'''.format(**dict(row, **column_widths)).rstrip())
    return True
def events(args):
    """`conduct events` command: print the event table for a bundle."""
    log = logging.getLogger(__name__)
    path = 'bundles/{}/events?count={}'.format(quote_plus(args.bundle), args.lines)
    request_url = conduct_url.url(path, args)
    response = conduct_request.get(args.dcos_mode,
                                   conductr_host(args),
                                   request_url,
                                   auth=args.conductr_auth,
                                   verify=args.server_verification_file,
                                   timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    rows = []
    for event in json.loads(response.text):
        rows.append({'time': validation.format_timestamp(event['timestamp'], args),
                     'event': event['event'],
                     'description': event['description']})
    rows.insert(0, {'time': 'TIME', 'event': 'EVENT', 'description': 'DESC'})
    padding = 2
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * padding})
    for row in rows:
        log.screen('''\
{time: <{time_width}}{padding}\
{event: <{event_width}}{padding}\
{description: <{description_width}}{padding}'''.format(**dict(row, **column_widths)).rstrip())
    return True
def logs(args):
    """`conduct logs` command: print the log table for a bundle."""
    log = logging.getLogger(__name__)
    request_url = conduct_url.url(
        'bundles/{}/logs?count={}'.format(quote_plus(args.bundle), args.lines), args)
    # Explicitly pass the Host header: the requests library mishandles IPv6
    # addresses (https://github.com/kennethreitz/requests/issues/3002), and
    # setting the header ourselves is what the library would do anyway.
    response = requests.get(request_url,
                            timeout=DEFAULT_HTTP_TIMEOUT,
                            headers=conduct_url.request_headers(args))
    validation.raise_for_status_inc_3xx(response)
    rows = [{'time': validation.format_timestamp(entry['timestamp'], args),
             'host': entry['host'],
             'log': entry['message']}
            for entry in json.loads(response.text)]
    rows.insert(0, {'time': 'TIME', 'host': 'HOST', 'log': 'LOG'})
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * 2})
    for row in rows:
        log.screen('''\
{time: <{time_width}}{padding}\
{host: <{host_width}}{padding}\
{log: <{log_width}}{padding}'''.format(**dict(row, **column_widths)).rstrip())
    return True
def logs(args):
    """`conduct logs` command: print the log table for a bundle."""
    log = logging.getLogger(__name__)
    path = 'bundles/{}/logs?count={}'.format(quote_plus(args.bundle), args.lines)
    request_url = conduct_url.url(path, args)
    # Set the Host header explicitly to work around the requests library's
    # IPv6 handling bug (https://github.com/kennethreitz/requests/issues/3002);
    # the library would add this header itself anyway.
    response = requests.get(request_url,
                            timeout=DEFAULT_HTTP_TIMEOUT,
                            headers=conduct_url.request_headers(args))
    validation.raise_for_status_inc_3xx(response)
    rows = [{'time': 'TIME', 'host': 'HOST', 'log': 'LOG'}]
    for event in json.loads(response.text):
        rows.append({'time': validation.format_timestamp(event['timestamp'], args),
                     'host': event['host'],
                     'log': event['message']})
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * 2})
    for row in rows:
        log.screen('''\
{time: <{time_width}}{padding}\
{host: <{host_width}}{padding}\
{log: <{log_width}}{padding}'''.format(**dict(row, **column_widths)).rstrip())
    return True
def services(args):
    """`conduct services` command

    Lists every service URI exposed by the endpoints of the deployed bundles
    and warns when more than one endpoint claims the same service path
    (resolution for those is undefined).
    """
    log = logging.getLogger(__name__)
    url = conduct_url.url('bundles', args)
    # At the time when this comment is being written, we need to pass the Host header when making HTTP request due to
    # a bug with requests python library not working properly when IPv6 address is supplied:
    # https://github.com/kennethreitz/requests/issues/3002
    # The workaround for this problem is to explicitly set the Host header when making HTTP request.
    # This fix is benign and backward compatible as the library would do this when making HTTP request anyway.
    response = requests.get(url, timeout=DEFAULT_HTTP_TIMEOUT, headers=conduct_url.request_headers(args))
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))
    data = sorted([
        {
            'service': service,
            'bundle_id': bundle['bundleId'] if args.long_ids else bundle_utils.short_id(bundle['bundleId']),
            'bundle_name': bundle['attributes']['bundleName'],
            'status': 'Running' if execution['isStarted'] else 'Starting'
        }
        for bundle in json.loads(response.text)
        for execution in bundle['bundleExecutions']
        for endpoint_name, endpoint in bundle['bundleConfig']['endpoints'].items()
        if 'services' in endpoint
        for service in endpoint['services']
    ], key=lambda line: line['service'])
    # Group service URIs by path so duplicates can be reported.
    service_endpoints = {}
    for service in data:
        url = urlparse(service['service'])
        if not (url.path == '' or url.path == '/'):
            try:
                service_endpoints[url.path] |= {service['service']}
            except KeyError:
                service_endpoints[url.path] = {service['service']}
    duplicate_endpoints = [service for (service, endpoint) in service_endpoints.items() if len(endpoint) > 1] \
        if len(service_endpoints) > 0 else []
    data.insert(0, {'service': 'SERVICE', 'bundle_id': 'BUNDLE ID', 'bundle_name': 'BUNDLE NAME', 'status': 'STATUS'})
    padding = 2
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * padding})
    for row in data:
        # Fix: this format-string literal was broken across a physical line
        # break in the source (syntactically invalid); reconstructed here.
        log.screen(
            '{service: <{service_width}}{padding}'
            '{bundle_id: <{bundle_id_width}}{padding}'
            '{bundle_name: <{bundle_name_width}}{padding}'
            '{status: <{status_width}}'.format(**dict(row, **column_widths)).rstrip())
    if len(duplicate_endpoints) > 0:
        log.screen('')
        log.warning('Multiple endpoints found for the following services: {}'.format(', '.join(duplicate_endpoints)))
        log.warning('Service resolution for these services is undefined.')
    return True
def display_bundle_executions_without_endpoints(bundle):
    """Print a BUNDLE EXECUTIONS table (host, pid, started, uptime) for a bundle."""
    log = logging.getLogger(__name__)
    if 'bundleExecutions' in bundle and bundle['bundleExecutions']:
        rows = sorted(({'host': execution['host'],
                        'pid': execution['pid'] if 'pid' in execution else 'Unknown',
                        'is_started': 'Yes' if execution['isStarted'] else 'No',
                        'uptime': get_uptime(execution['startTime']) if 'startTime' in execution else 'Unknown'}
                       for execution in bundle['bundleExecutions']),
                      key=lambda v: v['host'])
        if rows:
            display_title_table('BUNDLE EXECUTIONS')
            header = {'host': 'HOST', 'pid': 'PID', 'is_started': 'STARTED', 'uptime': 'UPTIME'}
            rows.insert(0, header)
            column_widths = dict(screen_utils.calc_column_widths(rows),
                                 **{'padding': ' ' * DISPLAY_PADDING})
            for row in rows:
                log.screen('{host: <{host_width}}{padding}'
                           '{pid: >{pid_width}}{padding}'
                           '{is_started: >{is_started_width}}{padding}'
                           '{uptime: >{uptime_width}}{padding}'.format(**dict(row, **column_widths)).rstrip())
            log.screen('')
def services(args):
    """`conduct services` command

    Lists every service URI exposed by the endpoints of the deployed bundles
    and warns when more than one endpoint claims the same service path
    (resolution for those is undefined).
    """
    log = logging.getLogger(__name__)
    url = conduct_url.url('bundles', args)
    response = requests.get(url, timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))
    data = sorted([
        {
            'service': service,
            'bundle_id': bundle['bundleId'] if args.long_ids else bundle_utils.short_id(bundle['bundleId']),
            'bundle_name': bundle['attributes']['bundleName'],
            'status': 'Running' if execution['isStarted'] else 'Starting'
        }
        for bundle in json.loads(response.text)
        for execution in bundle['bundleExecutions']
        for endpoint_name, endpoint in bundle['bundleConfig']['endpoints'].items()
        # Fix: an endpoint may declare no 'services'; without this guard the
        # command crashes with KeyError. The sibling services implementation
        # in this file already applies the same guard.
        if 'services' in endpoint
        for service in endpoint['services']
    ], key=lambda line: line['service'])
    # Group service URIs by path so duplicates can be reported.
    service_endpoints = {}
    for service in data:
        url = urlparse(service['service'])
        if not (url.path == '' or url.path == '/'):
            try:
                service_endpoints[url.path] |= {service['service']}
            except KeyError:
                service_endpoints[url.path] = {service['service']}
    duplicate_endpoints = [service for (service, endpoint) in service_endpoints.items() if len(endpoint) > 1] \
        if len(service_endpoints) > 0 else []
    data.insert(0, {'service': 'SERVICE', 'bundle_id': 'BUNDLE ID', 'bundle_name': 'BUNDLE NAME', 'status': 'STATUS'})
    padding = 2
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * padding})
    for row in data:
        log.screen(
            '{service: <{service_width}}{padding}'
            '{bundle_id: <{bundle_id_width}}{padding}'
            '{bundle_name: <{bundle_name_width}}{padding}'
            '{status: <{status_width}}'.format(**dict(row, **column_widths)).rstrip())
    if len(duplicate_endpoints) > 0:
        log.screen('')
        log.warning('Multiple endpoints found for the following services: {}'.format(', '.join(duplicate_endpoints)))
        log.warning('Service resolution for these services is undefined.')
    return True
def display_key_value_table(entries):
    """Print (key, value) pairs as two aligned columns, skipping None values."""
    log = logging.getLogger(__name__)
    rows = [{'key': k, 'value': v} for k, v in entries if v is not None]
    column_widths = dict(screen_utils.calc_column_widths(rows),
                         **{'padding': ' ' * DISPLAY_PADDING})
    for row in rows:
        log.screen('''\
{key: <{key_width}}{padding}\
{value: <{value_width}}'''.format(**dict(row, **column_widths)).rstrip())
    log.screen('')
def info(args):
    """`conduct info` command: print a summary table of all bundles."""
    log = logging.getLogger(__name__)
    url = conduct_url.url("bundles", args)
    response = conduct_request.get(args.dcos_mode,
                                   conductr_host(args),
                                   url,
                                   auth=args.conductr_auth,
                                   verify=args.server_verification_file,
                                   timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))
    rows = []
    for bundle in json.loads(response.text):
        # A '! ' prefix marks bundles that reported an error.
        marker = "! " if bundle.get("hasError", False) else ""
        bundle_id = bundle["bundleId"] if args.long_ids else bundle_utils.short_id(bundle["bundleId"])
        started_flags = [execution["isStarted"] for execution in bundle["bundleExecutions"]]
        rows.append({
            "id": marker + bundle_id,
            "name": bundle["attributes"]["bundleName"],
            "replications": len(bundle["bundleInstallations"]),
            "starting": sum(not flag for flag in started_flags),
            "executions": sum(started_flags),
        })
    rows.insert(0, {"id": "ID", "name": "NAME", "replications": "#REP", "starting": "#STR", "executions": "#RUN"})
    column_widths = dict(screen_utils.calc_column_widths(rows), **{"padding": " " * 2})
    has_error = False
    for row in rows:
        has_error |= "!" in row["id"]
        log.screen("""\
{id: <{id_width}}{padding}\
{name: <{name_width}}{padding}\
{replications: >{replications_width}}{padding}\
{starting: >{starting_width}}{padding}\
{executions: >{executions_width}}""".format(**dict(row, **column_widths)).rstrip())
    if has_error:
        log.screen("There are errors: use `conduct events` or `conduct logs` for further information")
    return True
def info(args):
    """`conduct info` command: print a summary table of all bundles."""
    log = logging.getLogger(__name__)
    url = conduct_url.url('bundles', args)
    # Set the Host header explicitly to work around the requests library's
    # IPv6 handling bug (https://github.com/kennethreitz/requests/issues/3002);
    # the library would add this header itself anyway.
    response = requests.get(url,
                            timeout=DEFAULT_HTTP_TIMEOUT,
                            headers=conduct_url.request_headers(args))
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))

    def row_for(bundle):
        # A '! ' prefix marks bundles that reported an error.
        marker = '! ' if bundle.get('hasError', False) else ''
        bundle_id = bundle['bundleId'] if args.long_ids else bundle_utils.short_id(bundle['bundleId'])
        started = [execution['isStarted'] for execution in bundle['bundleExecutions']]
        return {'id': marker + bundle_id,
                'name': bundle['attributes']['bundleName'],
                'replications': len(bundle['bundleInstallations']),
                'starting': sum(not s for s in started),
                'executions': sum(started)}

    rows = [row_for(bundle) for bundle in json.loads(response.text)]
    rows.insert(0, {'id': 'ID', 'name': 'NAME', 'replications': '#REP', 'starting': '#STR', 'executions': '#RUN'})
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * 2})
    has_error = False
    for row in rows:
        has_error |= '!' in row['id']
        log.screen('''\
{id: <{id_width}}{padding}\
{name: <{name_width}}{padding}\
{replications: >{replications_width}}{padding}\
{starting: >{starting_width}}{padding}\
{executions: >{executions_width}}'''.format(**dict(row, **column_widths)).rstrip())
    if has_error:
        log.screen('There are errors: use `conduct events` or `conduct logs` for further information')
    return True
def display_tcp_acls(log, tcp_acls):
    """Print the TCP request ACL table, ordered by TCP port number."""
    def tcp_port_value(line):
        return int(line['tcp_port'])

    header = {
        'tcp_port': 'TCP/PORT',
        'system': 'SYSTEM',
        'system_version': 'SYSTEM VERSION',
        'endpoint_name': 'ENDPOINT NAME',
        'bundle_id': 'BUNDLE ID',
        'bundle_name': 'BUNDLE NAME',
        'status': 'STATUS'
    }
    # One row per declared TCP port, ordered numerically.
    rows = sorted([
        {
            'tcp_port': tcp_port,
            'system': tcp_acl['system'],
            'system_version': tcp_acl['system_version'],
            'endpoint_name': tcp_acl['endpoint_name'],
            'bundle_id': tcp_acl['bundle_id'],
            'bundle_name': tcp_acl['bundle_name'],
            'status': tcp_acl['status']
        }
        for tcp_acl in tcp_acls
        for tcp_port in tcp_acl['acl']['tcp']['requests']
    ], key=tcp_port_value)
    data = [header] + rows
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * 2})
    for row in data:
        log.screen(
            '{tcp_port: <{tcp_port_width}}{padding}'
            '{system: <{system_width}}{padding}'
            '{system_version: <{system_version_width}}{padding}'
            '{endpoint_name: <{endpoint_name_width}}{padding}'
            '{bundle_id: <{bundle_id_width}}{padding}'
            '{bundle_name: <{bundle_name_width}}{padding}'
            '{status: <{status_width}}'.format(**dict(row, **column_widths)).rstrip())
    duplicate_endpoints = find_duplicate_endpoints(tcp_acls)
    if duplicate_endpoints:
        log_duplicate_endpoints(log, duplicate_endpoints)
def agents(args):
    """`conduct agents` command: list agents, optionally filtered by role."""
    log = logging.getLogger(__name__)
    request_url = conduct_url.url('agents', args)
    response = conduct_request.get(args.dcos_mode,
                                   conductr_host(args),
                                   request_url,
                                   auth=args.conductr_auth,
                                   verify=args.server_verification_file,
                                   timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))
    rows = [{'address': 'ADDRESS', 'roles': 'ROLES', 'observed': 'OBSERVED BY'}]
    for entry in json.loads(response.text):
        # Only include agents carrying the requested role (if any was given).
        if args.role is None or args.role in entry['roles']:
            observers = [observer['node']['address'] for observer in entry['observedBy']]
            rows.append({'address': entry['address'],
                         'roles': ','.join(entry['roles']),
                         'observed': ','.join(observers)})
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * 2})
    for row in rows:
        log.screen('''\
{address: <{address_width}}{padding}\
{roles: <{roles_width}}{padding}\
{observed: >{observed_width}}{padding}'''.format(**dict(row, **column_widths)).rstrip())
    return True
def info(args):
    """`conduct info` command: print a summary table of all bundles."""
    log = logging.getLogger(__name__)
    url = conduct_url.url('bundles', args)
    response = conduct_request.get(args.dcos_mode, conductr_host(args), url,
                                   auth=args.conductr_auth,
                                   verify=args.server_verification_file,
                                   timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))
    rows = [{'id': 'ID', 'name': 'NAME', 'replications': '#REP', 'starting': '#STR', 'executions': '#RUN'}]
    for bundle in json.loads(response.text):
        executions = bundle['bundleExecutions']
        rows.append({
            # A '! ' prefix marks bundles that reported an error.
            'id': ('! ' if bundle.get('hasError', False) else '') +
                  (bundle['bundleId'] if args.long_ids else bundle_utils.short_id(bundle['bundleId'])),
            'name': bundle['attributes']['bundleName'],
            'replications': len(bundle['bundleInstallations']),
            'starting': sum(1 for execution in executions if not execution['isStarted']),
            'executions': sum(1 for execution in executions if execution['isStarted'])
        })
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * 2})
    has_error = False
    for row in rows:
        has_error |= '!' in row['id']
        log.screen('''\
{id: <{id_width}}{padding}\
{name: <{name_width}}{padding}\
{replications: >{replications_width}}{padding}\
{starting: >{starting_width}}{padding}\
{executions: >{executions_width}}'''.format(**dict(row, **column_widths)).rstrip())
    if has_error:
        log.screen('There are errors: use `conduct events` or `conduct logs` for further information')
    return True
def display_tcp_acls(log, tcp_acls, display_bundle_id=True, display_bundle_name=True, display_status=True):
    """Print the TCP request ACL table ordered by port.

    The bundle id, bundle name, and status columns can each be toggled off
    via the corresponding keyword flag.
    """
    header = {
        'tcp_port': 'TCP/PORT',
        'bundle_id': 'BUNDLE ID' if display_bundle_id else '',
        'bundle_name': 'BUNDLE NAME' if display_bundle_name else '',
        'status': 'STATUS' if display_status else ''
    }
    rows = sorted(
        [{
            'tcp_port': tcp_port,
            'bundle_id': tcp_acl['bundle_id'] if display_bundle_id else '',
            'bundle_name': tcp_acl['bundle_name'] if display_bundle_name else '',
            'status': tcp_acl['status'] if display_status else ''
        }
         for tcp_acl in tcp_acls
         for tcp_port in tcp_acl['acl']['tcp']['requests']],
        key=lambda line: int(line['tcp_port']))
    data = [header] + rows
    # Assemble the row template from the enabled columns only.
    segments = ['{tcp_port: <{tcp_port_width}}{padding}']
    if display_bundle_id:
        segments.append('{bundle_id: <{bundle_id_width}}{padding}')
    if display_bundle_name:
        segments.append('{bundle_name: <{bundle_name_width}}{padding}')
    if display_status:
        segments.append('{status: <{status_width}}')
    display_template = ''.join(segments)
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * 2})
    for row in data:
        log.screen(display_template.format(**dict(row, **column_widths)).rstrip())
def display_tcp_acls(log, tcp_acls):
    """Print the TCP request ACL table, ordered by TCP port number."""
    # Flatten to one row per declared TCP port, then sort numerically.
    acl_rows = []
    for tcp_acl in tcp_acls:
        for tcp_port in tcp_acl['acl']['tcp']['requests']:
            acl_rows.append({
                'tcp_port': tcp_port,
                'system': tcp_acl['system'],
                'system_version': tcp_acl['system_version'],
                'endpoint_name': tcp_acl['endpoint_name'],
                'bundle_id': tcp_acl['bundle_id'],
                'bundle_name': tcp_acl['bundle_name'],
                'status': tcp_acl['status']
            })
    acl_rows.sort(key=lambda line: int(line['tcp_port']))
    data = [{
        'tcp_port': 'TCP/PORT',
        'system': 'SYSTEM',
        'system_version': 'SYSTEM VERSION',
        'endpoint_name': 'ENDPOINT NAME',
        'bundle_id': 'BUNDLE ID',
        'bundle_name': 'BUNDLE NAME',
        'status': 'STATUS'
    }] + acl_rows
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * 2})
    for row in data:
        log.screen('{tcp_port: <{tcp_port_width}}{padding}'
                   '{system: <{system_width}}{padding}'
                   '{system_version: <{system_version_width}}{padding}'
                   '{endpoint_name: <{endpoint_name_width}}{padding}'
                   '{bundle_id: <{bundle_id_width}}{padding}'
                   '{bundle_name: <{bundle_name_width}}{padding}'
                   '{status: <{status_width}}'.format(**dict(row, **column_widths)).rstrip())
    duplicate_endpoints = find_duplicate_endpoints(tcp_acls)
    if duplicate_endpoints:
        log_duplicate_endpoints(log, duplicate_endpoints)
def display_service_names(log, data, duplicate_endpoints, display_bundle_id=True, display_bundle_name=True, display_status=True):
    """Print the SERVICE NAME table and warn about duplicate endpoints.

    Inserts a header row at index 0 of `data` (the list is mutated, matching
    the original behavior); optional columns are toggled by the keyword flags.
    """
    header = {
        'service_name': 'SERVICE NAME',
        'bundle_id': 'BUNDLE ID' if display_bundle_id else '',
        'bundle_name': 'BUNDLE NAME' if display_bundle_name else '',
        'status': 'STATUS' if display_status else ''
    }
    data.insert(0, header)
    # Assemble the row template from the enabled columns only.
    segments = ['{service_name: <{service_name_width}}{padding}']
    if display_bundle_id:
        segments.append('{bundle_id: <{bundle_id_width}}{padding}')
    if display_bundle_name:
        segments.append('{bundle_name: <{bundle_name_width}}{padding}')
    if display_status:
        segments.append('{status: <{status_width}}')
    display_template = ''.join(segments)
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * 2})
    for row in data:
        log.screen(display_template.format(**dict(row, **column_widths)).rstrip())
    if len(duplicate_endpoints) > 0:
        log.screen('')
        log.warning(
            'Multiple endpoints found for the following services: {}'.format(
                ', '.join(duplicate_endpoints)))
        log.warning('Service resolution for these services is undefined.')
def display_http_acls(log, http_acls):
    """Print the HTTP request ACL table.

    Rows are ordered the way requests are matched: path-regex mappings
    (most capture groups first), then path-beg mappings (deepest path
    first), then exact-path mappings (deepest path first).
    """
    # Flatten each ACL's request mappings into one row per mapping.
    http_request_mappings = [
        {
            'http_method': http_request_mapping['method'] if 'method' in http_request_mapping else ALL_HTTP_METHOD,
            'http_rewrite': http_request_mapping['rewrite'] if 'rewrite' in http_request_mapping else '',
            'http_acl': get_http_acl(http_request_mapping),
            'http_acl_type': get_http_acl_type(http_request_mapping),
            'system': http_acl['system'],
            'system_version': http_acl['system_version'],
            'endpoint_name': http_acl['endpoint_name'],
            'bundle_id': http_acl['bundle_id'],
            'bundle_name': http_acl['bundle_name'],
            'status': http_acl['status']
        }
        for http_acl in http_acls
        for http_request_mapping in http_acl['acl']['http']['requests']
    ]

    def capture_group_count_reverse(line):
        # More capture groups first (negated for descending sort).
        return len(re.findall('([^\\(\\)]+)', line['http_acl'])) * -1

    http_path_regex_sorted = sorted([
        request_mapping for request_mapping in http_request_mappings
        if request_mapping['http_acl_type'] == HTTP_ACL_PATH_REGEX
    ], key=capture_group_count_reverse)

    def path_depth_reverse(line):
        # Deeper paths first (negated for descending sort).
        return len(line['http_acl'].split('/')) * -1

    http_path_beg_sorted = sorted([
        request_mapping for request_mapping in http_request_mappings
        if request_mapping['http_acl_type'] == HTTP_ACL_PATH_BEG
    ], key=path_depth_reverse)
    http_path_sorted = sorted([
        request_mapping for request_mapping in http_request_mappings
        if request_mapping['http_acl_type'] == HTTP_ACL_PATH
    ], key=path_depth_reverse)
    data = [{
        'http_method': 'METHOD',
        'http_acl': 'PATH',
        'http_rewrite': 'REWRITE',
        'system': 'SYSTEM',
        'system_version': 'SYSTEM VERSION',
        'endpoint_name': 'ENDPOINT NAME',
        'bundle_id': 'BUNDLE ID',
        'bundle_name': 'BUNDLE NAME',
        'status': 'STATUS'
    }] + http_path_regex_sorted + http_path_beg_sorted + http_path_sorted
    padding = 2
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * padding})
    for row in data:
        # Fix: this format-string literal was broken across a physical line
        # break in the source (syntactically invalid); reconstructed here.
        log.screen(
            '{http_method: <{http_method_width}}{padding}'
            '{http_acl: <{http_acl_width}}{padding}'
            '{http_rewrite: <{http_rewrite_width}}{padding}'
            '{system: <{system_width}}{padding}'
            '{system_version: <{system_version_width}}{padding}'
            '{endpoint_name: <{endpoint_name_width}}{padding}'
            '{bundle_id: <{bundle_id_width}}{padding}'
            '{bundle_name: <{bundle_name_width}}{padding}'
            '{status: <{status_width}}'.format(**dict(row, **column_widths)).rstrip())
def service_names(args):
    """`conduct service-names` command

    Lists service names derived both from endpoint service URIs (first path
    segment) and from explicit `serviceName` endpoint declarations, warning
    when more than one endpoint claims the same service path.
    """
    log = logging.getLogger(__name__)
    url = conduct_url.url('bundles', args)
    # Fix: pass auth and server-verification settings like the other
    # `conduct` commands do, so this command also works against a secured
    # ConductR cluster.
    response = conduct_request.get(args.dcos_mode, conductr_host(args), url,
                                   auth=args.conductr_auth,
                                   verify=args.server_verification_file,
                                   timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))

    def execution_status(bundle_executions):
        # 'Running' as soon as any execution has started.
        for execution in bundle_executions:
            if execution['isStarted']:
                return 'Running'
        return 'Starting'

    def get_service_name_from_service_uri(service_uri):
        # The service name is the first path segment of the service URI.
        paths = urlparse(service_uri).path.split('/')
        if len(paths) > 1:
            return paths[1]
        else:
            return ''

    data_from_service_uri = [
        {
            'service_name': get_service_name_from_service_uri(service_uri),
            'service_uri': service_uri,
            'bundle_id': bundle['bundleId'] if args.long_ids else bundle_utils.short_id(bundle['bundleId']),
            'bundle_name': bundle['attributes']['bundleName'],
            'status': execution_status(bundle['bundleExecutions'])
        }
        for bundle in json.loads(response.text) if bundle['bundleExecutions']
        for endpoint_name, endpoint in bundle['bundleConfig']['endpoints'].items()
        if 'services' in endpoint
        for service_uri in endpoint['services']
    ]
    data_from_service_name = [
        {
            'service_name': endpoint['serviceName'],
            'service_uri': None,
            'bundle_id': bundle['bundleId'] if args.long_ids else bundle_utils.short_id(bundle['bundleId']),
            'bundle_name': bundle['attributes']['bundleName'],
            'status': execution_status(bundle['bundleExecutions'])
        }
        for bundle in json.loads(response.text) if bundle['bundleExecutions']
        for endpoint_name, endpoint in bundle['bundleConfig']['endpoints'].items()
        if 'serviceName' in endpoint
    ]
    data = data_from_service_uri + data_from_service_name
    data = sorted([entry for entry in data if entry['service_name']], key=lambda line: line['service_name'])
    # Group service URIs by path so duplicates can be reported.
    service_endpoints = {}
    for service in data:
        # Fix: serviceName-derived rows carry service_uri=None, and
        # urlparse(None) raises; skip them — they have no URI to de-duplicate.
        if service['service_uri'] is None:
            continue
        url = urlparse(service['service_uri'])
        if not (url.path == '' or url.path == '/'):
            try:
                # Fix: this statement was broken across a physical line break
                # in the source (syntactically invalid); reconstructed here.
                service_endpoints[url.path] |= {service['service_uri']}
            except KeyError:
                service_endpoints[url.path] = {service['service_uri']}
    duplicate_endpoints = [service for (service, endpoint) in service_endpoints.items() if len(endpoint) > 1] \
        if len(service_endpoints) > 0 else []
    data.insert(0, {'service_name': 'SERVICE NAME', 'bundle_id': 'BUNDLE ID',
                    'bundle_name': 'BUNDLE NAME', 'status': 'STATUS'})
    padding = 2
    column_widths = dict(screen_utils.calc_column_widths(data), **{'padding': ' ' * padding})
    for row in data:
        log.screen(
            '{service_name: <{service_name_width}}{padding}'
            '{bundle_id: <{bundle_id_width}}{padding}'
            '{bundle_name: <{bundle_name_width}}{padding}'
            '{status: <{status_width}}'.format(**dict(row, **column_widths)).rstrip())
    if len(duplicate_endpoints) > 0:
        log.screen('')
        log.warning('Multiple endpoints found for the following services: {}'.format(', '.join(duplicate_endpoints)))
        log.warning('Service resolution for these services is undefined.')
    return True
def info(args):
    """`conduct info` command: print a summary table of all bundles."""
    log = logging.getLogger(__name__)
    url = conduct_url.url('bundles', args)
    # The Host header is set explicitly to work around the requests library's
    # IPv6 handling bug (https://github.com/kennethreitz/requests/issues/3002);
    # the library would add this header itself anyway.
    headers = conduct_url.request_headers(args)
    response = requests.get(url, timeout=DEFAULT_HTTP_TIMEOUT, headers=headers)
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))
    bundles = json.loads(response.text)
    table = [{
        # A '! ' prefix marks bundles that reported an error.
        'id': ('! ' if bundle.get('hasError', False) else '') +
              (bundle['bundleId'] if args.long_ids else bundle_utils.short_id(bundle['bundleId'])),
        'name': bundle['attributes']['bundleName'],
        'replications': len(bundle['bundleInstallations']),
        'starting': sum([not execution['isStarted'] for execution in bundle['bundleExecutions']]),
        'executions': sum([execution['isStarted'] for execution in bundle['bundleExecutions']])
    } for bundle in bundles]
    header = {'id': 'ID', 'name': 'NAME', 'replications': '#REP', 'starting': '#STR', 'executions': '#RUN'}
    table.insert(0, header)
    column_widths = dict(screen_utils.calc_column_widths(table), **{'padding': ' ' * 2})
    has_error = False
    for row in table:
        has_error |= '!' in row['id']
        log.screen('''\
{id: <{id_width}}{padding}\
{name: <{name_width}}{padding}\
{replications: >{replications_width}}{padding}\
{starting: >{starting_width}}{padding}\
{executions: >{executions_width}}'''.format(**dict(row, **column_widths)).rstrip())
    if has_error:
        log.screen('There are errors: use `conduct events` or `conduct logs` for further information')
    return True
def display_bundles_default(args, is_license_success, conductr_license, bundles):
    """Render the default bundle table, preceded by license information."""
    log = logging.getLogger(__name__)
    if is_license_success:
        license_formatted = license.format_license(conductr_license)
        if conductr_license['isLicensed']:
            license_to_display = license_formatted
        else:
            license_to_display = '{}\n{}'.format(UNLICENSED_DISPLAY_TEXT, license_formatted)
        log.screen('{}\n'.format(license_to_display))
    # Show the TAG column only when every bundle carries a 'tags' attribute;
    # otherwise fall back to the VER (compatibility version) column.
    has_tags_key = all('tags' in bundle['attributes'] for bundle in bundles)
    rows = [{
        'id': display_bundle_id(args, bundle),
        'name': bundle['attributes']['bundleName'],
        'tag': display_tag_or_compatibility_version(bundle, has_tags_key),
        'roles': ', '.join(sorted(bundle['attributes']['roles'])),
        'replications': len(bundle['bundleInstallations']),
        'starting': sum([not execution['isStarted'] for execution in bundle['bundleExecutions']]),
        'executions': sum([execution['isStarted'] for execution in bundle['bundleExecutions']])
    } for bundle in bundles]
    header = {
        'id': 'ID',
        'name': 'NAME',
        'tag': 'TAG' if has_tags_key else 'VER',
        'roles': 'ROLES',
        'replications': '#REP',
        'starting': '#STR',
        'executions': '#RUN'
    }
    rows.insert(0, header)
    column_widths = dict(screen_utils.calc_column_widths(rows), **{'padding': ' ' * DISPLAY_PADDING})
    has_error = False
    for row in rows:
        has_error |= '!' in row['id']
        log.screen('''\
{id: <{id_width}}{padding}\
{name: <{name_width}}{padding}\
{tag: >{tag_width}}{padding}\
{replications: >{replications_width}}{padding}\
{starting: >{starting_width}}{padding}\
{executions: >{executions_width}}{padding}\
{roles: <{roles_width}}'''.format(**dict(row, **column_widths)).rstrip())
    if has_error:
        log.screen('There are errors: use `conduct events` or `conduct logs` for further information')
def display_http_acls(log, http_acls, display_bundle_id=True, display_bundle_name=True, display_status=True):
    """Print a table of HTTP request ACL mappings, most specific first.

    Regex paths are listed before path-beg before exact paths; within each
    group, mappings with more capture groups / deeper paths come first.
    The bundle id, bundle name and status columns are emitted only when
    the corresponding display_* flag is set.
    """
    mappings = []
    for acl in http_acls:
        for request_mapping in acl['acl']['http']['requests']:
            mappings.append({
                'http_method': request_mapping.get('method', ALL_HTTP_METHOD),
                'http_rewrite': request_mapping.get('rewrite', ''),
                'http_acl': get_http_acl(request_mapping),
                'http_acl_type': get_http_acl_type(request_mapping),
                'bundle_id': acl['bundle_id'] if display_bundle_id else '',
                'bundle_name': acl['bundle_name'] if display_bundle_name else '',
                'status': acl['status'] if display_status else ''
            })

    def capture_group_count(entry):
        # More non-parenthesis runs in the regex == more specific mapping.
        return len(re.findall('([^\\(\\)]+)', entry['http_acl']))

    def path_depth(entry):
        # Deeper paths are matched before shallower ones.
        return len(entry['http_acl'].split('/'))

    def select(acl_type, key):
        # Highest key first; sorted(..., reverse=True) is stable, which is
        # equivalent to the negated-key ascending sort it replaces.
        return sorted((m for m in mappings if m['http_acl_type'] == acl_type),
                      key=key,
                      reverse=True)

    rows = [{
        'http_method': 'METHOD',
        'http_acl': 'PATH',
        'http_rewrite': 'REWRITE',
        'bundle_id': 'BUNDLE ID' if display_bundle_id else '',
        'bundle_name': 'BUNDLE NAME' if display_bundle_name else '',
        'status': 'STATUS' if display_status else ''
    }]
    rows += select(HTTP_ACL_PATH_REGEX, capture_group_count)
    rows += select(HTTP_ACL_PATH_BEG, path_depth)
    rows += select(HTTP_ACL_PATH, path_depth)

    column_widths = dict(screen_utils.calc_column_widths(rows),
                         **{'padding': ' ' * 2})

    parts = ['{http_method: <{http_method_width}}{padding}',
             '{http_acl: <{http_acl_width}}{padding}',
             '{http_rewrite: <{http_rewrite_width}}{padding}']
    if display_bundle_id:
        parts.append('{bundle_id: <{bundle_id_width}}{padding}')
    if display_bundle_name:
        parts.append('{bundle_name: <{bundle_name_width}}{padding}')
    if display_status:
        parts.append('{status: <{status_width}}')
    template = ''.join(parts)

    for row in rows:
        log.screen(template.format(**dict(row, **column_widths)).rstrip())
def members(args):
    """`conduct members` command"""
    log = logging.getLogger(__name__)
    request_url = conduct_url.url('members', args)
    response = conduct_request.get(args.dcos_mode,
                                   conductr_host(args),
                                   request_url,
                                   auth=args.conductr_auth,
                                   verify=args.server_verification_file,
                                   timeout=DEFAULT_HTTP_TIMEOUT)
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))

    cluster = json.loads(response.text)
    # Nodes under 'unreachable' get 'No' in the REACHABLE column below.
    unreachable_nodes = [entry['node'] for entry in cluster['unreachable']]

    rows = [{
        'address': 'ADDRESS',
        'uid': 'UID',
        'roles': 'ROLES',
        'status': 'STATUS',
        'reachable': 'REACHABLE'
    }]
    for member in cluster['members']:
        # Skip members that do not carry the requested role, if one was given.
        if args.role is not None and args.role not in member['roles']:
            continue
        node = member['node']
        rows.append({
            'address': node['address'],
            'uid': node['uid'],
            'roles': ','.join(member['roles']),
            'status': member['status'],
            'reachable': 'Yes' if node not in unreachable_nodes else 'No'
        })

    column_widths = dict(screen_utils.calc_column_widths(rows),
                         **{'padding': ' ' * 2})
    for row in rows:
        log.screen('''\
{uid: <{uid_width}}{padding}\
{address: <{address_width}}{padding}\
{roles: <{roles_width}}{padding}\
{status: <{status_width}}{padding}\
{reachable: >{reachable_width}}'''.format(
            **dict(row, **column_widths)).rstrip())
    return True
def display_http_acls(log, http_acls):
    """Print the full HTTP request ACL table, most specific mapping first,
    then warn about any endpoints declared by more than one bundle.

    Regex paths are listed before path-beg before exact paths; within each
    group, mappings with more capture groups / deeper paths come first.
    """
    mappings = []
    for acl in http_acls:
        for request_mapping in acl['acl']['http']['requests']:
            mappings.append({
                'http_method': request_mapping.get('method', ALL_HTTP_METHOD),
                'http_rewrite': request_mapping.get('rewrite', ''),
                'http_acl': get_http_acl(request_mapping),
                'http_acl_type': get_http_acl_type(request_mapping),
                'system': acl['system'],
                'system_version': acl['system_version'],
                'endpoint_name': acl['endpoint_name'],
                'bundle_id': acl['bundle_id'],
                'bundle_name': acl['bundle_name'],
                'status': acl['status']
            })

    def capture_group_count(entry):
        # More non-parenthesis runs in the regex == more specific mapping.
        return len(re.findall('([^\\(\\)]+)', entry['http_acl']))

    def path_depth(entry):
        # Deeper paths are matched before shallower ones.
        return len(entry['http_acl'].split('/'))

    def select(acl_type, key):
        # Highest key first; sorted(..., reverse=True) is stable, which is
        # equivalent to the negated-key ascending sort it replaces.
        return sorted((m for m in mappings if m['http_acl_type'] == acl_type),
                      key=key,
                      reverse=True)

    rows = [{
        'http_method': 'METHOD',
        'http_acl': 'PATH',
        'http_rewrite': 'REWRITE',
        'system': 'SYSTEM',
        'system_version': 'SYSTEM VERSION',
        'endpoint_name': 'ENDPOINT NAME',
        'bundle_id': 'BUNDLE ID',
        'bundle_name': 'BUNDLE NAME',
        'status': 'STATUS'
    }]
    rows += select(HTTP_ACL_PATH_REGEX, capture_group_count)
    rows += select(HTTP_ACL_PATH_BEG, path_depth)
    rows += select(HTTP_ACL_PATH, path_depth)

    column_widths = dict(screen_utils.calc_column_widths(rows),
                         **{'padding': ' ' * 2})
    for row in rows:
        log.screen('{http_method: <{http_method_width}}{padding}'
                   '{http_acl: <{http_acl_width}}{padding}'
                   '{http_rewrite: <{http_rewrite_width}}{padding}'
                   '{system: <{system_width}}{padding}'
                   '{system_version: <{system_version_width}}{padding}'
                   '{endpoint_name: <{endpoint_name_width}}{padding}'
                   '{bundle_id: <{bundle_id_width}}{padding}'
                   '{bundle_name: <{bundle_name_width}}{padding}'
                   '{status: <{status_width}}'.format(
                       **dict(row, **column_widths)).rstrip())

    duplicate_endpoints = find_duplicate_endpoints(http_acls)
    if duplicate_endpoints:
        log_duplicate_endpoints(log, duplicate_endpoints)
def services(args):
    """`conduct services` command"""
    log = logging.getLogger(__name__)
    bundles_url = conduct_url.url('bundles', args)
    # Explicitly set the Host header to work around a requests library bug
    # when an IPv6 address is supplied
    # (https://github.com/kennethreitz/requests/issues/3002); the library
    # would send the header anyway, so this is benign and backward compatible.
    response = requests.get(bundles_url,
                            timeout=DEFAULT_HTTP_TIMEOUT,
                            headers=conduct_url.request_headers(args))
    validation.raise_for_status_inc_3xx(response)
    if log.is_verbose_enabled():
        log.verbose(validation.pretty_json(response.text))

    rows = [{
        'service': service,
        'bundle_id': bundle['bundleId'] if args.long_ids
                     else bundle_utils.short_id(bundle['bundleId']),
        'bundle_name': bundle['attributes']['bundleName'],
        'status': 'Running' if execution['isStarted'] else 'Starting'
    }
        for bundle in json.loads(response.text)
        for execution in bundle['bundleExecutions']
        for endpoint in bundle['bundleConfig']['endpoints'].values()
        if 'services' in endpoint
        for service in endpoint['services']]
    rows.sort(key=lambda row: row['service'])

    # Group service URIs by path to detect ambiguous resolution targets;
    # empty and root-only paths are ignored.
    endpoints_by_path = {}
    for row in rows:
        path = urlparse(row['service']).path
        if path not in ('', '/'):
            endpoints_by_path.setdefault(path, set()).add(row['service'])
    duplicate_endpoints = [path for path, svc_set in endpoints_by_path.items()
                           if len(svc_set) > 1]

    rows.insert(
        0, {
            'service': 'SERVICE',
            'bundle_id': 'BUNDLE ID',
            'bundle_name': 'BUNDLE NAME',
            'status': 'STATUS'
        })
    column_widths = dict(screen_utils.calc_column_widths(rows),
                         **{'padding': ' ' * 2})
    for row in rows:
        log.screen('{service: <{service_width}}{padding}'
                   '{bundle_id: <{bundle_id_width}}{padding}'
                   '{bundle_name: <{bundle_name_width}}{padding}'
                   '{status: <{status_width}}'.format(
                       **dict(row, **column_widths)).rstrip())
    if duplicate_endpoints:
        log.screen('')
        log.warning(
            'Multiple endpoints found for the following services: {}'.format(
                ', '.join(duplicate_endpoints)))
        log.warning('Service resolution for these services is undefined.')
    return True