def _discoverDatabases(solman, sysPairsBySysName, sendVector, reportError, resolveIps):
    try:
        logger.info("Discover Databases")
        query = GetDatabaseUsages()
        queryExecutor = TableQueryExecutor(solman)
        dbName = second
        usagePairsByDbName = groupby(dbName, queryExecutor.executeQuery(query))
        logger.info("Found %s databases in use" % len(usagePairsByDbName))
        query = GetDatabaseInstances()
        inDiscoveredSystems = F(_inDiscoveredSystems, fptools._, sysPairsBySysName)
        isUsedDb = comp(inDiscoveredSystems, usagePairsByDbName.get, _getDbInstance)
        findPlatform = comp(db_platform.findPlatformBySignature, _getDbVendor)
        dbInsts = queryExecutor.executeQuery(query)
        logger.info("Found %s database instances" % len(dbInsts))
        dbs_1, dbs_2, dbs_3 = tee(ifilter(isUsedDb, dbInsts), 3)
        platforms = imap(findPlatform, dbs_1)
        ips = imap(comp(resolveIps, _getAddress), dbs_2)
        dbs = ifilter(all, izip(dbs_3, platforms, ips))
        reported = len(map(comp(sendVector, _reportDatabase), dbs))
        logger.info("Reported %s databases" % reported)
    except Exception:
        msg = 'Failed to discover databases'
        logger.debugException(msg)
        reportError(msg)
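# Minimal sketch (not the fptools implementation) of the groupby semantics
# assumed throughout these snippets: fptools.groupby(key_fn, seq) is taken to
# return a plain dict mapping each key to the list of items that produced it,
# which matches the .get(...) and .iteritems() calls made on its result here.
# The sample data below is illustrative only.
def groupby_sketch(key_fn, seq):
    grouped = {}
    for item in seq:
        grouped.setdefault(key_fn(item), []).append(item)
    return grouped

usages = [('SYS1', 'DB1'), ('SYS2', 'DB1'), ('SYS3', 'DB2')]
by_db_name = groupby_sketch(lambda pair: pair[1], usages)
assert by_db_name == {'DB1': [('SYS1', 'DB1'), ('SYS2', 'DB1')],
                      'DB2': [('SYS3', 'DB2')]}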
def handler(self, items):
    get_db_info = operator.attrgetter('DB2_BDINFO')
    is_not_none = Fn(operator.is_not, fptools._, None)
    filtered_items = ifilter(comp(is_not_none, get_db_info), items)
    is_pid_matched = Fn(re.match, '%s\s\d+\s\d+' % self.pid, fptools._)
    item = fptools.findFirst(comp(is_pid_matched, get_db_info), filtered_items)
    if item:
        registry_path = RegistryPath(item.keyPath)
        if self.isNode(registry_path):
            node_registry_path = RegistryPath(registry_path.getPath())
            registry_path = RegistryPath(node_registry_path.getPath())
        return registry_path.name
def groupby_unique_key(lines, separator=':'):
    '''Groups key-value output into a list of dictionaries.
    The current dictionary is considered complete when the current key has
    already been added to it. A line is skipped if it does not contain the
    separator substring.

    @param lines: sequence of strings to use as input data to group
    @type lines: seq[basestring]
    @param separator: separator to be used while identifying key-value pairs
    @type separator: basestring
    @return: sequence of dictionaries
    @rtype: seq[dict]
    '''
    grouped = []
    if lines:
        sep_pattern = re.compile('\s*%s\s*' % separator)
        split_by_sep = fptools.comp(sep_pattern.split, methodcaller('strip'))
        lines = ifilter(identity, lines)
        _kwargs = {}
        for keyval in imap(split_by_sep, lines):
            if len(keyval) == 2:
                key, value = keyval
                if key in _kwargs:
                    grouped.append(_kwargs)
                    _kwargs = {}
                _kwargs[key] = value
        if _kwargs:
            grouped.append(_kwargs)
    return tuple(grouped)
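# Usage sketch for groupby_unique_key: a repeated key starts a new dictionary.
# The sample lines below are illustrative, not taken from any real command
# output.
sample = [
    'Name: db2inst1',
    'Port: 50000',
    'Name: db2inst2',
    'Port: 50001',
]
assert groupby_unique_key(sample) == (
    {'Name': 'db2inst1', 'Port': '50000'},
    {'Name': 'db2inst2', 'Port': '50001'},
)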
def parse_items(fields, wmi_class, output, separator='='):
    """Parses wmic command output

    @param fields: originally requested list of fields
    @type fields: seq[basestring]
    @param wmi_class: a WMI class descriptor
    @type wmi_class: namedtuple class enhanced with parse and get_type_by_name
        methods. See build_wmi_class_descriptor for details
    @param output: a command execution output result
    @type output: basestring
    @param separator: key-value separator substring
    @type separator: basestring
    @return: iterable of dictionaries
    @rtype: tuple[dict]
    """
    sep_pattern = re.compile('\s*%s\s*' % separator)
    split_by_sep = fptools.comp(sep_pattern.split, unicode.strip)
    lines = ifilter(identity, output.strip().splitlines())
    grouped = []
    _kwargs = {}
    for key, value in imap(split_by_sep, lines):
        if key in _kwargs:
            grouped.append(_kwargs)
            _kwargs = {}
        _kwargs[key] = value
    grouped.append(_kwargs)
    result = [parse_item(fields, wmi_class, item) for item in grouped]
    return tuple(result)
def report(host_id, mgr_version, server_configs, vm_configs):
    '@types: str, str, list[ShowServerCmd.Config], list[ShowVmCmd.Config] -> list[osh]'
    # take the manager UUID from one of the discovered server configs;
    # if present, the manager is built one way, otherwise another
    get_manager_uuid = ovm_cli.ShowServerCmd.Config.manager_uuid.fget
    config = findFirst(get_manager_uuid, server_configs)
    mgr_osh, oshs = _report_manager(config.manager_uuid, mgr_version, host_id)
    get_server_id = comp(first, ShowVmCmd.Config.server.fget)
    vm_configs_by_server_id = groupby(get_server_id, vm_configs)
    for server_config in server_configs:
        # report server pool
        p_osh = report_server_pool(server_config.server_pool, mgr_osh)
        oshs.append(p_osh)
        # report OVM server
        ips = (server_config.server.ip_address, )
        server_name = server_config.server.name
        host_osh, h_oshs = report_server_node(server_name, ips, p_osh)
        oshs.extend(h_oshs)
        # report hypervisor
        hypervisor_osh = report_hypervisor(server_config, host_osh)
        oshs.append(hypervisor_osh)
        # report domain config
        oshs.append(report_server_domain_config(server_config, host_osh))
        server_id = server_config.server.id
        # report VMs
        vms = vm_configs_by_server_id.get(server_id, ())
        for vm_config in vms:
            vm_host_osh, _oshs = report_vm_node(vm_config, hypervisor_osh)
            if _oshs:
                oshs.extend(_oshs)
                oshs.append(report_vm_domain_config(vm_config, vm_host_osh))
    return filter(None, oshs)
def remote(self, remote_n_port_id='all'):
    '''Creates a command with `fcmsutil <device_filename> get remote <remote_n_port_id>`
    cmdline and a handler returning a FcmsutilRemoteOptionDescriptor collection'''
    parse = parse_remote_option
    if remote_n_port_id == 'all':
        parse = parse_remote_all_option
    handler = comp(parse, self.get_default_handler())
    return Cmd(cmdline='%s get remote %s' % (self.cmdline, remote_n_port_id),
               handler=handler)
def get_remote_databases(executor, shell_interpreter, instance_name,
                         db2_home_path=None):
    r"""@types: command.CmdExecutor, shell_interpreter.Interpreter, unicode,
                file_topology.Path -> list(db2_model.Database)
    @raise ExecuteError: on DB2INSTANCE variable setting failure
    @raise command.ExecuteException: on db2 cmd execution failure
    """
    db2cmd = __get_configured_db2_cmd(shell_interpreter, instance_name,
                                      db2_home_path)
    list_db_directory = db2cmd.list_db_directory()
    dcs_entries = fptools.safeFunc(executor)(db2cmd.list_dcs_directory()) or ()
    lt_names = ((e.local_dbname, e.target_dbname) for e in dcs_entries)
    is_remote = fptools.comp(partial(operator.eq, DatabaseEntryTypes.REMOTE),
                             Db2.DatabaseEntry.entry_type.fget)
    remote_db_entries = list_db_directory | executor
    remote_db_entries = ifilter(is_remote, remote_db_entries)
    resolve_db_name = partial(_resolve_remotedb_name, dict(lt_names))
    remote_db_entries = imap(resolve_db_name, remote_db_entries)
    get_nodename = Db2.DatabaseEntry.node_name.fget
    dbs_by_nodename = fptools.groupby(get_nodename, remote_db_entries)
    get_dbname = Db2.DatabaseEntry.name.fget
    node_map_pairs = [(node, fptools.groupby(get_dbname, db_entries))
                      for node, db_entries in dbs_by_nodename.iteritems()]
    return _parse_remote_databases(node_map_pairs)
def f(self):
    '''Creates a command with `ioscan -f` cmdline and a handler returning
    fOptionDescriptor objects'''
    f_option_handler = FOptionHandlerWrapper(parse_f_option_row)
    handler = comp(f_option_handler, skip_f_option_header, self.handler)
    return Cmd('%s -f' % self.cmdline, handler=handler)
def DiscoveryMain(framework, creds_manager, cred_id):
    '@types: RichFramework, CredsManager, str -> list[osh]'
    config = (ovm_flow.DiscoveryConfigBuilder(framework)
              # parameters
              .bool_params(reportStoppedVMs=False)
              .int_params(commandExecutionDurationInMs=2000)
              # destination data
              .dest_data_params_as_int(protocol_port=None)
              .dest_data_params_as_str(hostId=None)).build()
    attr_name = Protocol.PROTOCOL_ATTRIBUTE_PORT
    port = int(config.protocol_port
               or creds_manager.get_attribute(cred_id, attr_name))
    host_id = config.hostId
    oshs = []
    warnings = []
    with closing(_createSshClient(framework, cred_id, port)) as client:
        execute = _get_initialized_execute_fn(
            client, config.commandExecutionDurationInMs)
        server_configs = _discover_servers(execute)
        vm_configs, msgs_ = _discover_vms(execute)
        warnings.extend(msgs_)
        mgr_version = ovm_cli.get_version(client)
    logger.info("Report topology")
    if not config.reportStoppedVMs:
        is_running = lambda c: ovm_cli.is_vm_running(c.vm.status)
        running, stopped = fptools.partition(is_running, vm_configs)
        get_vm = ovm_cli.ShowVmCmd.Config.vm.fget
        info_on_stopped = '\n'.join(imap(comp(str, get_vm), stopped))
        logger.info("Stopped VMs are not reported: %s" % info_on_stopped)
        vm_configs = running
    oshs.extend(report(host_id, mgr_version, server_configs, vm_configs))
    return oshs, warnings
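# Minimal sketch (not the fptools implementation) of the partition semantics
# assumed above: partition(pred, seq) is taken to return
# (items matching pred, the rest), which matches how `running, stopped` is
# unpacked here. The sample statuses are illustrative only.
def partition_sketch(pred, seq):
    matching, rest = [], []
    for item in seq:
        (matching if pred(item) else rest).append(item)
    return matching, rest

running, stopped = partition_sketch(lambda status: status == 'Running',
                                    ['Running', 'Stopped', 'Running'])
assert running == ['Running', 'Running'] and stopped == ['Stopped']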
def __init__(self, cmdline, handler=None):
    r'@types: str, ResultHandler'
    if not handler:
        if hasattr(self, 'handler'):
            handler = comp(self.handler, self.get_default_handler())
        else:
            handler = self.get_default_handler()
    command.Cmd.__init__(self, cmdline, handler=handler)
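# A minimal sketch of the handler chaining above, assuming fptools.comp is
# right-to-left composition (comp(f, g)(x) == f(g(x))), which is consistent
# with how comp is combined with reversed() elsewhere in these snippets.
# The two handlers below are stand-ins, not the real default handler of
# command.Cmd.
default_handler = lambda output: output.splitlines()          # runs first
subclass_handler = lambda lines: lines[0] if lines else None  # runs second
# comp(subclass_handler, default_handler) behaves like this nesting:
chained = lambda output: subclass_handler(default_handler(output))
assert chained('first\nsecond') == 'first'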
def _discoverServers(solman, hostnameToAddress, sysPairsBySysName, sendVector,
                     reportError, resolveIps):
    '''
    Discover SAP instances related to already discovered systems
    @type hostnameToAddress: dict[str, sap.Address]
    @type sysPairsBySysName: dict[str, tuple[System, osh]]
    '''
    try:
        # get servers by one of the specified queries
        queries = (GetServersWithNotActiveFlag(), GetServers())
        queryExecutor = TableQueryExecutor(solman)
        result = imap(Sfn(queryExecutor.executeQuery), queries)
        servers = findFirst(truth, result) or ()
        # group servers by system name
        pairsBySysName = groupby(GetServers.Server.systemName.fget, servers)
        inDiscoveredSystems = comp(sysPairsBySysName.get, first)
        pairs = ifilter(inDiscoveredSystems, pairsBySysName.iteritems())
        resolveIps = comp(resolveIps, GetServers.Server.hostname.fget)
        for sysName, servers in pairs:
            logger.info("Found %s servers for %s system" % (len(servers), sysName))
            # collect parsed names for each server
            parseServerName = comp(GetServers.parseServerName,
                                   GetServers.Server.name.fget)
            parsedServerNames = imap(parseServerName, servers)
            # resolve IPs for each server
            ips = imap(resolveIps, servers)
            # get information for each server where name and IPs are present
            infoSeq = ifilter(all, izip(servers, parsedServerNames, ips))
            # not interested in server nodes - only instances
            infoSeq = ifilterfalse(isServerNode, infoSeq)
            # report each server
            system, systemOsh = sysPairsBySysName.get(sysName)
            reportServer = F(_reportServer, fptools._, fptools._, fptools._,
                             system, systemOsh)
            vector = ObjectStateHolderVector()
            each(vector.addAll, starmap(reportServer, infoSeq))
            sendVector(vector)
    except (Exception, JException):
        msg = "Failed to discover servers"
        logger.warnException(msg)
        reportError(msg)
def handler(self, items):
    is_db2_home_dir = Fn(operator.eq, self.db2_home_path, fptools._)
    create_path = Fn(file_system.Path, fptools._, self.db2_home_path.path_tool)
    get_path_name = operator.attrgetter('DB2 Path Name')
    item = iteratortools.findFirst(
        comp(is_db2_home_dir, create_path, get_path_name), items)
    return item and RegistryPath(item.keyPath)
def parse_cpu(result):
    family, model, group = imap(result.get, ('Processor Family',
                                              'Processor Model',
                                              'CPU Compatibility Group'))
    count = opt_int(result.get('Processors'))
    speed_in_gz = opt_float(result.get('Processor Speed (GHz)'))
    cache_levels = ('L%s Cache Size' % i for i in xrange(1, 4))
    cache_levels = tuple(map(comp(opt_float, result.get), cache_levels))
    return ShowServerCmd.Cpu(family, model, group, speed_in_gz, cache_levels,
                             count)
def report_node_with_ips(ips, host_osh):
    '''@types: list[str], osh[node] -> osh[node], list[osh]'''
    ips = ifilter(ip_addr.isValidIpAddress, ips)
    ip_oshs = map(comp(modeling.createIpOSH, ip_addr.IPAddress), ips)
    oshs = ovm_node.report_node_and_ips(host_osh, ip_oshs)
    oshs.append(host_osh)
    oshs.extend(ip_oshs)
    return host_osh, oshs
def parse_list(output, entryname, attr_handler_fn):
    entry_pattern = re.compile('%s \d+ entry:' % entryname)
    sep_pattern = re.compile('\s*=\s*')
    split_by_sep = fptools.comp(sep_pattern.split, unicode.strip)
    for entry_info in entry_pattern.split(output)[1:]:
        db_entry_lines = ifilter(identity, entry_info.strip().splitlines())
        attrs = dict(imap(split_by_sep, db_entry_lines))
        parsed_item = attr_handler_fn(attrs, entry_info)
        if parsed_item:
            yield parsed_item
def get_vendor(self, vmhba, executor):
    handler = comp(*reversed((command.cmdlet.raiseOnNonZeroReturnCode,
                              command.cmdlet.raiseWhenOutputIsNone,
                              command.cmdlet.stripOutput,
                              _parse_vmkchdev_l)))
    lspci = command.UnixBaseCmd("vmkchdev -l | grep %s" % vmhba,
                                handler=handler)
    result = executor.process(lspci)
    result = result.handler(result)
    return vendors.find_name_by_id_in_hex(result)
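# The reversed() call above lets the handler steps be listed in the order they
# run: check the return code, then validate, strip, and finally parse the
# output. Assuming comp(f, g)(x) == f(g(x)), comp(*reversed((a, b, c)))(x)
# evaluates as c(b(a(x))), i.e. the first-listed step is applied first.
# A tiny self-contained illustration of that ordering with stand-in steps:
steps = (str.strip, str.upper)   # listed in the order they should run
value = '  vmhba0  '
for step in steps:               # same effect as comp(*reversed(steps))(value)
    value = step(value)
assert value == 'VMHBA0'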
def p(self, wwn):
    '''
    @param wwn: port wwn value to get remote port list for
    @type wwn: basestring
    @return: command to use for getting remote port list descriptors by
        passed port wwn value. Returns list of POptionDescriptors instances
        after handler processing
    @rtype: command.Cmd -> list[RemotePort.POptionDescriptors]
    '''
    return self._with_option('-p %s' % wwn,
                             comp(self.POptionDescriptor.parse_from_dicts,
                                  self.handler))
def get_fc_hba_descriptors(vmhbaname, executor):
    exec_ = command.get_exec_fn(executor)
    vmkmgmt_keyval_cls = vmkmgmt_keyval.find(executor)
    vmkmgmt_keyval_impl = vmkmgmt_keyval_cls()
    instances = exec_(vmkmgmt_keyval_impl.dumpInstances)
    fn = comp(methodcaller('startswith', 'qlnativefc'), methodcaller('lower'))
    for instance in ifilter(fn, instances):
        key_descriptors = exec_(vmkmgmt_keyval_impl.instance(instance).list)
        for descriptor in key_descriptors:
            if vmhbaname in descriptor.value:
                return _parse(vmhbaname, descriptor)
def process(self, namespace):
    '''Builds esxcli command
    @param namespace: esxcli namespace to execute
    @type namespace: esxcli.Namespace
    @return: esxcli command
    @rtype: command.Cmd
    '''
    handler = comp(namespace.handler, self.handler)
    cmdline = ' '.join((self.cmdline, namespace.cmdline))
    return command.UnixBaseCmd(cmdline, handler=handler)
def _parse_embedded_object(type_, object_value):
    m = re.match('instance of %s.+?{(.+)};' % type_,
                 object_value.strip(), re.DOTALL)
    key_value_pairs = m.group(1).strip()
    sep_pattern = re.compile('\s*=\s*')
    split_by_sep = fptools.comp(sep_pattern.split, methodcaller('strip'))
    lines = ifilter(identity, key_value_pairs.splitlines())
    res = {}
    for key, value in imap(split_by_sep, lines):
        # skip semicolon at the end
        value = value[:-1]
        res[key] = value
    return res
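# Illustrative call of _parse_embedded_object; the embedded-object text below
# is a made-up value shaped to match the `instance of <class> { ... };` form
# the regex above expects (it is not captured wmic output). Note that quotes
# around string values are preserved, only the trailing semicolon is dropped.
sample_value = '''instance of Win32_SampleClass
{
    DeviceID = "CPU0";
    Name = "SampleProcessor";
};'''
assert _parse_embedded_object('Win32_SampleClass', sample_value) == {
    'DeviceID': '"CPU0"',
    'Name': '"SampleProcessor"',
}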
def build(self, port):
    if port is None:
        raise ValueError("port is None")
    interfaceOsh = modeling.createInterfaceOSH(port.getMac(), name=port.getName())
    if interfaceOsh:
        _roleMethodFn = fptools.comp(self.roleMethods.get, lambda r: r.getId())
        roleMethods = itertools.ifilter(None,
                                        itertools.imap(_roleMethodFn,
                                                       port.iterRoles()))
        for method in roleMethods:
            method(interfaceOsh)
    return interfaceOsh
def create_wmic_command(self, wmicmd):
    '''Creates a wmic command with a WMI query built from the passed wmicmd
    @param wmicmd: a WMI command to create the wmic command for
    @type wmicmd: wmi_base_command.Cmd
    @return: new command object with wmic cmdline
    @rtype: command.Cmd
    '''
    handler = comp(wmicmd.handler,
                   partial(parse_items, wmicmd.fields, wmicmd.WMI_CLASS),
                   raise_when_empty,
                   self.handler)
    return command.Cmd(self.get_cmdline(wmicmd), handler)
def discoverServers(client, ips_set, ignoreNodesWithoutIP, allowDnsLookup):
    '''
    Discover host resources
    @types: Client, set[IPAddress], bool, bool -> generator
    @return: generator of tuples with such elements
        host osh
        seq[osh] - built IPs
        seq[osh] - built CPUs
        list[tuple[Port, osh]]
        list[tuple[HostHba, osh]]
        list[tuple[LogicalVolume, osh]]
    '''
    try:
        hosts = _query_hosts(client)
        name_to_host = zip(imap(_get_host_name, hosts), hosts)
        has_hostname = comp(any, first)
        name_to_host = _drop("Hosts without hostname", has_hostname, name_to_host)
        for (hostName, hostDnsName), host in name_to_host:
            logger.info("Discover (%s:%s) host topology" % (host.name, host.ip))
            ips = _discover_host_ips(host, hostName, hostDnsName, ips_set,
                                      allowDnsLookup)
            if ips:
                each(ips_set.add, ips)
                host = host._replace(ip=first(ips))
            elif ignoreNodesWithoutIP:
                logger.debug("(%s: %s) is ignored due to missing "
                             "or duplicated IP" % (host.id, host.name))
                continue
            hostOSH = _build_host(host, hostName)
            ipOshs = chain(*[_report_ips(hostOSH, ip, hostDnsName) for ip in ips])
            cpuOshs = _report_cpus(host, hostOSH)
            ports = _query_ports(partial(_query_host_ports, client, host.id))
            portOshs = (_report_port(hostOSH, port) for port in ports)
            port_2_osh = zip(ports, portOshs)
            host_hbas = _query_host_hbas(client, host.id)
            hbaOshs = (_report_host_hba(hostOSH, hba) for hba in host_hbas)
            hba_2_osh = zip(host_hbas, hbaOshs)
            volumes = _query_host_logical_volumes(client, host.id)
            volumes = ifilter(LogicalVolume.name.fget, volumes)
            volumeOshs = (_report_logical_volume(hostOSH, v) for v in volumes)
            volume_2_osh = zip(volumes, volumeOshs)
            yield hostOSH, ipOshs, cpuOshs, port_2_osh, hba_2_osh, volume_2_osh
    except:
        excInfo = logger.prepareJythonStackTrace('')
        logger.warn('[' + SCRIPT_NAME + ':discoverServers] Exception: <%s>'
                    % excInfo)
def parse_remote_all_option(lines):
    '''Parses `fcmsutil <device_filename> remote all` command output returning
    the descriptor objects

    @param lines: output of `fcmsutil <device_filename> remote all` command
        split into lines
    @type lines: list of str or unicode
    @return: descriptor of fcmsutil command output or None on parse failure
    @rtype: list[RemoteOptionDescriptor]
    '''
    result = []
    if len(lines) > 1:
        separator = '='
        sep_pattern = re.compile('\s*%s\s*' % separator)
        split_by_sep = fptools.comp(sep_pattern.split, methodcaller('strip'))
        lines = ifilter(identity, lines)
        grouped = []
        _kwargs = {}
        for keyval in imap(split_by_sep, lines):
            if len(keyval) == 2:
                key, value = keyval
                if key in _kwargs:
                    grouped.append(_kwargs)
                    _kwargs = {}
                _kwargs[key] = value
        grouped.append(_kwargs)
        for item in grouped:
            target_n_port_id = item.get('Target N_Port_id is')
            target_loop_id = item.get('Target Loop_id is')
            target_state = item.get('Target state')
            symbolic_port_name = item.get('Symbolic Port Name')
            symbolic_node_name = item.get('Symbolic Node Name')
            port_type = item.get('Port Type')
            fcp_2_support = item.get('FCP-2 Support')
            target_port_wwn = item.get('Target Port World Wide Name')
            target_node_wwn = item.get('Target Node World Wide Name')
            result.append(RemoteOptionDescriptor(target_n_port_id,
                                                 target_loop_id, target_state,
                                                 symbolic_port_name,
                                                 symbolic_node_name, port_type,
                                                 fcp_2_support,
                                                 target_port_wwn,
                                                 target_node_wwn))
    return tuple(result)
class ExecutorCmdlet(command.Cmdlet):
    r'''An executor for WMI commands'''

    def __init__(self, client, **kwargs):
        r'''
        @param client: WMI client with executeQuery and setNamespace methods
            returning ResultSet with asTable method available
        @type client: wmi.ClientWrapper
        '''
        assert client
        self.client = client

    def __call__(self, **kwargs):
        return self.__class__.__init__(self, self.client, **kwargs)

    def get_cmdline(self, wmi_cmd):
        r'''
        @param wmi_cmd: a command object to build cmdline for
        @type wmi_cmd: wmi_base_command.Cmd
        @return: commandline of the target command
        @rtype: basestring
        '''
        wmi_clsname = wmi_cmd.get_wmi_class_name()
        return 'SELECT %s FROM %s' % (', '.join(wmi_cmd.fields), wmi_clsname)

    def process(self, wmi_cmd):
        r'''
        @param wmi_cmd: a WMI command to execute
        @type wmi_cmd: wmi_base_command.Cmd
        @return: command execution result object
        @rtype: wmi.Result
        '''
        cmdline = self.get_cmdline(wmi_cmd)
        self.client.setNamespace(wmi_cmd.NAMESPACE)
        resultset = None
        exception = None
        try:
            resultset = self.client.executeQuery(cmdline)
        except JException, e:
            exception = e
        parse_items_ = partial(parse_items, wmi_cmd)
        return Result(resultset,
                      comp(build_default_handler(wmi_cmd.WMI_CLASS, Parser()),
                           raise_on_empty_items,
                           parse_items_,
                           operator.attrgetter('result_set'),
                           raise_on_none_resultset,
                           raise_on_error),
                      exception)
def _discoverInstanceDetails(client, baseTopology):
    r'@types: BaseSapJmxClient, str, System, osh, osh -> oshv'
    system, hostname, clusterOSH, systemOsh = baseTopology
    inst, servers = ServerProcessQuery().getSystemDetails(client)
    if not inst.hostname and hostname:
        inst = sap.Instance.replaceHostname(inst, hostname)
    instanceReporter = sap_jee.InstanceReporter(sap_jee.InstanceBuilder())
    # report host by resolved IPs
    hostname = inst.hostname
    if not hostname:
        logger.warn("Failed to determine hostname for %s" % inst)
        return ObjectStateHolderVector()
    dnsResolver = netutils.JavaDnsResolver()
    vector = ObjectStateHolderVector()
    try:
        ips = dnsResolver.resolveIpsByHostname(hostname)
    except netutils.ResolveException:
        logger.warn("Failed to resolve hostname of %s" % inst)
    else:
        hostReporter = sap.HostReporter(sap.HostBuilder())
        hostOSH, vector = hostReporter.reportHostWithIps(*ips)
        # report instance
        pdo = sap_jee.InstanceBuilder.InstancePdo(inst, system)
        instOsh = instanceReporter.reportInstancePdo(pdo, hostOSH)
        vector.add(instOsh)
        # report sap system
        systemOsh.setStringAttribute('data_note', 'This SAP System link to '
                                     + hostOSH.getAttributeValue('host_key'))
        vector.add(systemOsh)
        # report j2ee_cluster -membership-> sap_app_server
        linkReporter = sap.LinkReporter()
        vector.add(linkReporter.reportMembership(clusterOSH, instOsh))
        vector.add(linkReporter.reportMembership(systemOsh, instOsh))
        # report server processes
        oshs = [_reportServerProcess(s, inst, instOsh) for s in servers]
        each(vector.add, oshs)
        # discover applications
        serverToOshs = filter(comp(_isWorkerProcess, first), zip(servers, oshs))
        for server, osh in serverToOshs:
            id_ = server.id
            appNameToOsh = Sf(discoverEjbApplications)(client, id_, osh,
                                                       clusterOSH, vector)
            Sf(buildWebApplications)(client, id_, osh, clusterOSH,
                                     appNameToOsh, vector)
    return vector
def get_local_databases(executor, shell_interpreter, instance_name,
                        db2_home_path=None, db2cmdline=None):
    r"""@types: command.CmdExecutor, shell_interpreter.Interpreter, unicode,
                file_topology.Path -> list(db2_model.Database)
    @raise ExecuteError: on DB2INSTANCE variable setting failure
    @raise command.ExecuteException: on db2 cmd execution failure
    """
    db2cmd = __get_configured_db2_cmd(shell_interpreter, instance_name,
                                      db2_home_path)
    get_type = Db2.DatabaseEntry.entry_type.fget
    is_local = fptools.comp(DatabaseEntryTypes.is_local, get_type)
    if db2cmdline:
        local_db_entries = filter(is_local,
                                  Db2(db2cmdline).file_list_db_directory() | executor)
    else:
        local_db_entries = filter(is_local,
                                  db2cmd.list_db_directory() | executor)
    db_by_name = fptools.groupby(Db2.DatabaseEntry.name.fget, local_db_entries)
    return _parse_databases(db_by_name.iteritems())
def reportPartition(self, partition, container_osh, node_osh=None, pg_oshs=None):
    r'''@types: db2.topology.BaseBuilder.PartitionPdo, ObjectStateHolder,
        ObjectStateHolder, [ObjectStateHolder(db2_partition_group)]
        -> ObjectStateHolderVector'''
    vector = ObjectStateHolderVector()
    osh = self._builder.buildPartition(partition)
    osh.setContainer(container_osh)
    vector.add(osh)
    if node_osh:
        vector.add(self._link_reporter.report_containment(node_osh, osh))
    if pg_oshs:
        # add one membership link per partition group: report_membership is
        # partially applied so the partition osh becomes the fixed second
        # argument, and each resulting link is added to the vector
        report_membership = self._link_reporter.report_membership
        report_membership = partiallyApply(report_membership, fptools._, osh)
        report_membership = comp(vector.add, report_membership)
        map(report_membership, pg_oshs)
    return osh, vector
def parse_vpd_option(lines):
    '''Parses `fcmsutil <device_filename> vpd` command output returning the
    descriptor object

    @param lines: output of `fcmsutil <device_filename> vpd` command split
        into lines
    @type lines: list of str or unicode
    @return: descriptor of fcmsutil command output or None on parse failure
    @rtype: FcmsutilVpdOptionDescriptor
    '''
    lines = lines[3:]
    separator = '\:'
    sep_pattern = re.compile('\s*%s\s*' % separator)
    split_by_sep = fptools.comp(second, sep_pattern.split,
                                fptools.methodcaller('strip'))
    lines = ifilter(identity, lines)
    values = map(split_by_sep, lines)
    if len(values) == 13:
        return FcmsutilVpdOptionDescriptor(*values)
def __init__(self, cmdline=None, device_filename=None, bin_path='fcmsutil',
             handler=None):
    '''
    @param cmdline: commandline to use for this command
    @type cmdline: str or unicode
    @param device_filename: the Fibre Channel device special file associated
        with the Fibre Channel HBA port. The device file has the format
        /dev/FC_driverX, where X is the instance number of the Fibre Channel
        HBA port, as reported by the ioscan output
    @type device_filename: str or unicode
    @param bin_path: path to the fcmsutil binary
    @type bin_path: str or unicode
    @param handler: handler to use for the current command
    @type handler: callable[command.Result] -> ?. The default handler returns
        a FcmsutilDescriptor object
    '''
    if not cmdline and not device_filename:
        raise ValueError('Neither cmdline nor device_filename are provided')
    cmdline = cmdline or ' '.join((bin_path, device_filename))
    handler = handler or comp(parse_fcmsutil, self.get_default_handler())
    command.BaseCmd.__init__(self, cmdline, handler=handler)
def _discoverRfcDestinations(sapUtils, systemOsh, config):
    r'@types: SapUtils, osh, flow.DiscoveryConfigBuilder -> oshv'
    if not config.discoverRFCConnections:
        return ObjectStateHolderVector()
    logger.info('Discover RFC connections')
    getRfcCmd = sap_abap_discoverer.GetRfcDestinationsRfcCommand()
    connections = Sfn(getRfcCmd.getAllRfcConnections)(sapUtils) or ()
    logger.info("Found %s possible RFC connections" % len(connections))
    connections = filter(comp(sap_abap_discoverer.isEnglishVersion, third),
                         connections)
    logger.info("Found %s RFC connections with EN language" % len(connections))
    connByName = applyMapping(first, connections)
    destinations = getRfcCmd.getAllRfcDestinations(sapUtils)
    logger.info("Found %s RFC destinations" % len(destinations))
    # get destinations with valid host
    destinations = [d for d in destinations if _isDestFull(d)]
    logger.info("Found %s destinations with host available" % len(destinations))
    destinationsByHost = groupby(lambda d: d.targetHost, destinations)
    ips = map(Sfn(_resolve), destinationsByHost.iterkeys())
    pairIpToDestinations = zip(ips, destinationsByHost.itervalues())
    resolved, notResolved = partition(first, pairIpToDestinations)
    if notResolved:
        skippedDestsCount = sum([len(dests) for ip, dests in notResolved])
        logger.debug("%s destinations skipped due to not resolved %s hosts"
                     % (skippedDestsCount, len(notResolved)))
    vector = ObjectStateHolderVector()
    for ip, destinations in resolved:
        # TODO:
        # 1) query RFC connections (to get description) only for these
        #    destinations as it will reduce amount of data fetched from system
        #    One query for connections returns ~8K rows of data, while we are
        #    interested in less than ~50 or even less
        # 2) another improvement query only records in English language
        countOfDests = len(destinations)
        host = first(destinations).targetHost
        reportDst = Sfn(_reportRfcDestination)
        logger.debug("%s destinations resolved for %s" % (countOfDests, host))
        vectors = (reportDst(dst, ip, connByName, systemOsh)
                   for dst in destinations)
        each(vector.addAll, ifilter(None, vectors))
    return vector
def get_vendor_by_device_id(self, device_id, executor):
    '''Returns vendor name by device id
    @param device_id: id of device in form <domain>:<bus>:<slot>
    @type device_id: basestring
    @param executor: instance of a command executor
    @type executor: command.Executor
    @return: vendor name
    @rtype: basestring
    '''
    handler = comp(*reversed((command.cmdlet.raiseOnNonZeroReturnCode,
                              command.cmdlet.raiseWhenOutputIsNone,
                              command.cmdlet.stripOutput,
                              fptools.methodcaller('splitlines'),
                              _parse_lspci)))
    lspci = command.UnixBaseCmd("lspci -v -m -n -s %s" % device_id,
                                handler=handler)
    result = executor.process(lspci)
    return vendors.find_name_by_id_in_hex(result.get('vendor'))
def parse_fcmsutil(lines):
    '''Parses `fcmsutil <device_filename>` command output returning the
    descriptor object

    @param lines: output of `fcmsutil <device_filename>` command split into
        lines
    @type lines: list of str or unicode
    @return: descriptor of fcmsutil command output or None on parse failure
    @rtype: FcmsutilDescriptor
    '''
    separator = '='
    sep_pattern = re.compile('\s*%s\s*' % separator)
    split_by_sep = fptools.comp(second, sep_pattern.split,
                                fptools.methodcaller('strip'))
    lines = ifilter(identity, lines)
    values = map(split_by_sep, lines)
    topology_index = 7
    if len(values) > 7 and not values[topology_index] == 'PRIVATE_LOOP':
        values.insert(11, None)
    if len(values) == 26:
        return FcmsutilDescriptor(*values)