def DiscoveryMain(Framework):
    """Phase 2 of the Atrium integration: transform the intermediate Atrium
    CI XML files into result XML files via the mapping configuration."""
    logger.info('Start Phase 2 ... Apply Mapping transformation to Atrium CIs')
    baseDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\Atrium\\'
    interFiles = File(baseDir + 'inter\\').listFiles()
    resultFiles = File(baseDir + 'results\\').listFiles()
    ## Remove stale result files left over from a previous run
    if resultFiles != None:
        for staleFile in resultFiles:
            staleFile.delete()
    ## Proceed only when at least one non-empty XML file exists in 'inter'
    foundXml = 0
    for candidate in interFiles:
        candidateName = candidate.getName()
        if candidateName[-4:].lower() == '.xml' and candidate.length() > 0:
            foundXml = 1
    if not foundXml:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from Atrium or an error occurred in the atrium_query script.')
        return
    ## Apply the mapping transformation, writing output into 'results'
    serverIp = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    IntegrationAPI(serverIp, "atrium_map.py").processDir(baseDir)
    logger.info('End Phase 2 ... Apply Mapping transformation to Atrium CIs')
def syncNmapPortConfigFile(agentPath):
    '''
    Sync nmap port config with global probe's "port number to port name" mapping.

    agentPath -- probe root directory.
    Rewrites the probe's 'nmap-services' resource from portNumberToPortName.xml,
    exporting only ports flagged discover="1". Skips the rewrite when the
    nmap file is already newer than the mapping file.

    Fix: the FileOutputStream was never closed when XML parsing or writing
    raised; the close now happens in a finally block.
    '''
    logger.debug('synchronizing nmap port config file')
    portConfigFilename = agentPath + CollectorsParameters.getDiscoveryConfigFolder() + CollectorsParameters.FILE_SEPARATOR + 'portNumberToPortName.xml'
    mamservice = File(portConfigFilename)
    nmapservice = File(agentPath + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'nmap-services')
    if nmapservice.lastModified() > mamservice.lastModified():
        # nmap-services is already up to date with the mapping
        return
    nmapFile = FileOutputStream(nmapservice)
    try:
        document = SAXBuilder(0).build(mamservice)
        ports = XmlWrapper(document.getRootElement().getChildren('portInfo'))
        for port in ports:
            # Only ports flagged for discovery are exported
            if int(port.getAttributeValue("discover")):
                portNumber = port.getAttributeValue("portNumber")
                portName = port.getAttributeValue("portName")
                portProtocol = port.getAttributeValue("portProtocol")
                nmapFile.write("%s\t%s/%s\r\n" % (portName, portNumber, portProtocol))
    finally:
        # Release the stream even if parsing/writing fails
        nmapFile.close()
def DiscoveryMain(Framework):
    '''Phase 2 of the ARIS integration: apply the mapping file to the
    intermediate ARIS CI XML and regenerate the result XML files.'''
    logger.info('Start Phase 2 ....Apply Mapping file to ARIS CIs')
    baseDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\ARIS\\'
    interFiles = File(baseDir + 'inter\\').listFiles()
    oldResults = File(baseDir + 'results\\').listFiles()
    ## Drop result files left over from a previous run
    if oldResults != None:
        for oldFile in oldResults:
            oldFile.delete()
    ## Require at least one non-empty intermediate XML file
    haveXml = 0
    for interFile in interFiles:
        name = interFile.getName()
        if name[-4:].lower() == '.xml' and interFile.length() > 0:
            haveXml = 1
    if not haveXml:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from ARIS or an error occurred in the Pull_from_ARIS script.')
        return
    ## Retrieve the TQL results from the UCMDB server and write output XML
    ip = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    exportTQL(ip, baseDir)
    logger.info('End Phase 2 ....Apply Mapping file to ARIS CIs')
def DiscoveryMain(Framework):
    '''Apply the SIM mapping (phase 2): clear old results and export the
    TQL output for the CIs gathered by SIM_Discovery.'''
    sep = File.separator
    # Destination data layout: .../TQLExport/hpsim/{inter,results}/
    baseDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + sep + 'TQLExport' + sep + 'hpsim' + sep
    interFiles = File(baseDir + 'inter' + sep).listFiles()
    resultFiles = File(baseDir + 'results' + sep).listFiles()
    ## Delete results from a previous run
    if resultFiles != None:
        for old in resultFiles:
            old.delete()
    ## Verify that a non-empty intermediate XML file exists
    found = 0
    for interFile in interFiles:
        name = interFile.getName()
        if name[-4:].lower() == '.xml' and interFile.length() > 0:
            found = 1
    if not found:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from SIM or an error occurred in the SIM_Discovery script.')
        return
    ## Retrieve the TQL results from the UCMDB server into 'results'
    ip = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    exportTQL(ip, baseDir)
    info('End exportTQL_for_SIM_to_UCMDB.py')
def DiscoveryMain(Framework):
    """Phase 1 of the Atrium integration: query Remedy/Atrium for CI and
    relationship data and write intermediate XML files.

    Fixes:
    - the 'DryRunMode' parameter retrieval was commented out while
      dryrunMode was still referenced below, causing a NameError;
    - str.isnumeric() does not exist on Python 2 byte strings, so the
      ChunkSize/MaxCIs parsing always raised; use isdigit() instead.
    """
    logger.info('Start ', SCRIPT_NAME)
    logger.info('Start Phase 1 ... Query Remedy Atrium for data')
    dryrunMode = Framework.getParameter('DryRunMode')
    # Get BMC Namespace (default when unset or empty)
    bmcNamespace = Framework.getParameter('BMC_NameSpace')
    if bmcNamespace == None or bmcNamespace == "":
        bmcNamespace = "BMC.CORE"
    # Get chunk size - size of data in every query to Remedy/Atrium
    maxPerCall = Framework.getParameter('ChunkSize')
    if maxPerCall != None and maxPerCall.isdigit():
        maxPerCall = int(maxPerCall)
    else:
        maxPerCall = 500
    # Get MAX CI size - hard cap on data pulled from Remedy/Atrium
    maxCIs = Framework.getParameter('MaxCIs')
    if maxCIs != None and maxCIs.isdigit():
        maxCIs = int(maxCIs)
    else:
        maxCIs = 100000
    ucmdbServerIp = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    # File and directory names
    userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\Atrium\\'
    intermediatesDir = userExtDir + 'inter\\'
    mapingFilesListFileName = userExtDir + 'tqls.txt'
    mappingFileNameList = checkDiscoveryResources(mapingFilesListFileName, userExtDir, Framework, intermediatesDir)
    if not mappingFileNameList:
        return None
    # GET ARS context - login information, etc. that is needed to make ARS connection
    context = getArsContext(Framework, ucmdbServerIp)
    if context != None:
        for mappingFileName in mappingFileNameList:
            (classList, linkList) = getMapping(userExtDir + 'data\\' + mappingFileName + '.xml', bmcNamespace, ucmdbServerIp)
            if (dryrunMode != None):
                dryrunMode = dryrunMode.lower()
                if dryrunMode == 'true':
                    logger.info('[NOTE] UCMDB Integration is running in DryRun Mode, No query executed against ATRIUM.')
                    debugPrint(4, '[' + SCRIPT_NAME + ':DiscoveryMain] Got classList: <%s>' % classList)
                    debugPrint(4, '[' + SCRIPT_NAME + ':DiscoveryMain] Got linkList: <%s>' % linkList)
                    return
            # NOTE(review): returns after the first mapping file — pre-existing behavior, kept as-is
            return pullDataFromAtrium(context, classList, linkList, maxPerCall, maxCIs, intermediatesDir, mappingFileName)
    else:
        logger.error("Unable to create Remedy/Atrium login context. Check that username, password, server and port are defined correctly.")
        return None
    logger.info('End ', SCRIPT_NAME)
def testScript():
    '''Standalone test harness: runs the phase-2 export against a local
    backup copy of the Pull_From_Remedy data tree.'''
    baseDir = 'E:\\data\\Desktop\\Pull_From_Remedy_backup\\' + 'TQLExport\\Atrium\\'
    interFiles = File(baseDir + 'inter\\').listFiles()
    resultFiles = File(baseDir + 'results\\').listFiles()
    ## Remove results of a previous run
    if resultFiles != None:
        for stale in resultFiles:
            stale.delete()
    ## Require a non-empty intermediate XML file before exporting
    usable = 0
    for interFile in interFiles:
        name = interFile.getName()
        if name[-4:].lower() == '.xml' and interFile.length() > 0:
            usable = 1
    if not usable:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from Atrium or an error occurred in the atrium_query script.')
        return
    ip = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    exportTQL(ip, baseDir)
def getWindowsShellFileVer(shell, path):
    """Return the version string of a Windows file, or None.

    shell -- NTCMD shell used to run commands on the destination
    path  -- full path of the file whose version info is requested

    Copies getfilever.vbs to the destination when needed and runs it via
    cscript. Fix: the command executed the bare local name (fileVerVBS)
    instead of the copied remote path, so it depended on the remote
    shell's working directory; it now runs remoteFile, consistent with
    the other remote-tool helpers in this file.
    """
    fileVerVBS = 'getfilever.vbs'
    localFile = (CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + fileVerVBS)
    remoteFile = shell.copyFileIfNeeded(localFile)
    if not remoteFile:
        logger.warn('Failed copying file ' + fileVerVBS)
        return None
    errString = 'No version information available.'
    # Run the copied script by its remote path
    resultBuffer = shell.execCmd('Cscript.exe /nologo ' + remoteFile + ' \"' + path + '\"')
    if resultBuffer.find(errString) != -1 or shell.getLastCmdReturnCode() != 0:
        logger.warn('Failed getting file version info for file %s' % path)
        return None
    fileVersion = re.search(r'\s*([\d., ]+).*', resultBuffer)
    if fileVersion:
        return fileVersion.group(1).strip()
    logger.warn('Failed getting file version info for file %s' % path)
    return None
def DiscoveryMain(Framework):
    """Pull topology from ServiceNow, optionally driven by an explicit
    mapping file; errors are reported back through the Framework."""
    try:
        logger.debug('Replicating toplogy from ServiceNow')
        connectionDataManager = FrameworkBasedConnectionDataManager(Framework)
        mappingFileFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR,
                                         CollectorsParameters.getDiscoveryConfigFolder(),
                                         'servicenow')
        mappingFileManager = OldMappingFileManager(mappingFileFolder)
        global chunkSize
        chunkSize = getStepSizeFromFramework(Framework)
        mappingFile = getMappingFileFromFramework(Framework)
        if not mappingFile:
            # No explicit mapping file: replicate the full ServiceNow topology
            return replicateTopologyFromServiceNow(connectionDataManager, mappingFileManager)
        return replicateTopologyUsingMappingFile(os.path.join(mappingFileFolder, mappingFile),
                                                 connectionDataManager, mappingFileManager)
    except:
        Framework.reportError('Failed to pull data from ServiceNow. See RemoteProcess log on the Probe for details')
        logger.errorException('Failed to pull data from ServiceNow')
def buildFullPathToScenario(self, scenarioFileName):
    """Return the absolute path of a scenario file located under the
    probe's discovery resources folder."""
    parts = (CollectorsParameters.BASE_PROBE_MGR_DIR,
             CollectorsParameters.getDiscoveryResourceFolder(),
             CollectorsParameters.FILE_SEPARATOR,
             scenarioFileName)
    return ''.join(parts)
def queryRegistry(self, client, regKey, valueName):
    """Read a single registry value on the remote host.

    Tries the native reg.exe first and falls back to the probe-supplied
    reg_mam.exe when it fails. Returns the value as a stripped string,
    or None on any failure.
    """
    if not (client and regKey and valueName):
        logger.warn('registry query is incomplete')
        return
    logger.debug('RegistryBasedPlugin.queryRegistry')
    ntcmdErrStr = 'Remote command returned 1(0x1)'
    queryStr = ' query "%s" /v "%s"' % (regKey, valueName)
    system32Link = client.createSystem32Link() or ''
    buffer = client.execCmd(system32Link + "reg.exe" + queryStr)
    nativeFailed = client.getLastCmdReturnCode() != 0 or buffer.find(ntcmdErrStr) != -1
    if nativeFailed:
        # reg.exe failed; ship our own registry tool and retry
        localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'reg_mam.exe'
        remoteFile = client.copyFileIfNeeded(localFile)
        if not remoteFile:
            logger.warn('Failed copying reg_mam.exe to the destination')
            return
        buffer = client.execCmd(remoteFile + queryStr)
        if not buffer or client.getLastCmdReturnCode() != 0:
            logger.warn("Failed getting registry info.")
            return
    match = re.search(r'%s\s+%s\s+\w+\s+(.*)' % (regKey.replace('\\', '\\\\'), valueName), buffer, re.I)
    client.removeSystem32Link()
    if match:
        return match.group(1).strip()
    logger.warn('Cannot parse registry key')
def queryRegistryViaShell(shellUtils, key, filter=None):
    """Run 'reg query <key> /S' via the shell, with a reg_mam.exe fallback.

    shellUtils -- shell wrapper used to execute commands
    key        -- registry key to query recursively
    filter     -- optional substring; when given, output is piped through 'find'
    Returns the raw command output, or None when the key is missing or the
    query failed.

    Fix: the copy-failure warning named 're_mam.exe' instead of 'reg_mam.exe'.
    """
    missingKeyError = 'The system was unable to find the specified registry key or value'
    filter = filter and " | find \"%s\"" % filter or ''
    query = "query %s /S%s" % (key, filter)
    cmd = ' '.join(['reg', query])
    output, code = getCommandOutput(shellUtils, cmd)
    if output and re.search(missingKeyError, output):
        return
    if code != 0:
        output = None
    if not output:
        # Native reg failed; copy the probe's registry tool and retry
        localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'reg_mam.exe'
        remoteFile = shellUtils.copyFileIfNeeded(localFile)
        if not remoteFile:
            logger.warn("Failed copying reg_mam.exe")
            return
        cmd = ' '.join([remoteFile, query])
        output, code = getCommandOutput(shellUtils, cmd)
        # 'and' binds tighter than 'or': a missing-key match OR a non-zero
        # exit code both invalidate the output
        if output and re.search(missingKeyError, output) or code != 0:
            output = None
    return output
def osh_createIpOsh(lparOsh, tcpStacks):
    """Build OSHs for the IPs and interfaces of an LPAR.

    tcpStacks maps MAC -> [ip, network, mask, interface name, status,
    type, mac address]. The loopback address 127.0.0.1 is skipped.
    """
    ipstoexclude = ['127.0.0.1']
    str_name = 'name'
    str_mac_address = 'mac_address'
    _vector = ObjectStateHolderVector()
    for mac, tcpentry in tcpStacks.items():
        ipAddress = tcpentry[0].strip()
        if ipAddress in ipstoexclude:
            continue
        ipOsh = modeling.createIpOSH(ipAddress)
        # Tag the IP with the reporting probe's name when available
        probeName = CollectorsParameters.getValue(CollectorsParameters.KEY_COLLECTORS_PROBE_NAME)
        if isNotNull(probeName):
            ipOsh.setAttribute('ip_probename', probeName)
        _vector.add(lparOsh)
        _vector.add(ipOsh)
        _vector.add(modeling.createLinkOSH('contained', lparOsh, ipOsh))
        # Interface OSH; the MAC doubles as the key attribute
        ifOsh = ObjectStateHolder('interface')
        ifOsh.setAttribute(str_name, tcpentry[3].strip())
        ifOsh.setAttribute(str_mac_address, mac)
        ifOsh.setContainer(lparOsh)
        _vector.add(ifOsh)
        if tcpStacks.has_key(mac):
            _vector.add(modeling.createLinkOSH('containment', ifOsh, ipOsh))
    return _vector
def queryRegistryViaShell(shellUtils, key, filter = None):
    """Recursively query a registry key via 'reg', falling back to the
    probe-supplied reg_mam.exe when the native tool fails.

    Returns the raw query output, or None if the key does not exist or
    both attempts failed.

    Fix: the copy-failure warning said 're_mam.exe'; it now correctly
    names 'reg_mam.exe'.
    """
    missingKeyError = 'The system was unable to find the specified registry key or value'
    filter = filter and " | find \"%s\"" % filter or ''
    query = "query %s /S%s" % (key, filter)
    cmd = ' '.join(['reg', query])
    output, code = getCommandOutput(shellUtils, cmd)
    if output and re.search(missingKeyError, output):
        return
    if code != 0:
        output = None
    if not output:
        # Fall back to the probe-shipped registry tool
        localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'reg_mam.exe'
        remoteFile = shellUtils.copyFileIfNeeded(localFile)
        if not remoteFile:
            logger.warn("Failed copying reg_mam.exe")
            return
        cmd = ' '.join([remoteFile, query])
        output, code = getCommandOutput(shellUtils, cmd)
        # Either a missing-key match or a non-zero exit code invalidates the output
        if output and re.search(missingKeyError, output) or code != 0:
            output = None
    return output
def DiscoveryMain(Framework):
    """Run a local nmap scan against the destination IP and report the results.

    Reads job parameters (timeout, port scope, fingerprinting flags, nmap
    location), syncs the probe's nmap services file, runs nmap writing XML
    output to a uniquely named temp file, parses the results into
    OSHVResult, and always closes the client and deletes the temp file.
    """
    OSHVResult = ObjectStateHolderVector()
    logger.debug('Start nmap_osfingerprint.py')
    ip = Framework.getDestinationAttribute('ip_address')
    timeout = Framework.getParameter('nmap_host_timeout')
    if not str(timeout).isdigit():
        msg = "Timeout parameter value must be a digit"
        logger.debug(msg)
        errormessages.resolveAndReport(msg, ClientsConsts.LOCAL_SHELL_PROTOCOL_NAME, Framework)
        return OSHVResult
    timeout = int(timeout) * 1000  # presumably seconds -> milliseconds; TODO confirm units
    scanKnownPortsOnly = Boolean.parseBoolean(
        Framework.getParameter('scan_known_ports_only'))
    portstoscan = Framework.getParameter('scan_these_ports_only')
    doServiceFingerprints = Boolean.parseBoolean(
        Framework.getParameter('Perform_Port_Fingerprints'))
    createApp = Boolean.parseBoolean(
        Framework.getParameter('Create_Application_CI'))
    discoverOsName = Boolean.parseBoolean(
        Framework.getParameter('discover_os_name'))
    nmapLocation = Framework.getParameter('nmap_location')
    #discover_UDP_Ports = int(Framework.getParameter('Discover_UDP_Ports'))
    # UDP discovery is hard-disabled (parameter read above is commented out)
    discoverUdpPorts = 0
    agent_root_dir = CollectorsParameters.BASE_PROBE_MGR_DIR
    agent_ext_dir = agent_root_dir + CollectorsParameters.getDiscoveryResourceFolder(
    ) + CollectorsParameters.FILE_SEPARATOR
    # Unique temp file name: <ip with underscores><HHMMSS>nmap.xml
    tmp_file_name = agent_ext_dir + string.replace(
        ip, '.', '_') + time.strftime("%H%M%S", time.gmtime(
        time.time())) + 'nmap.xml'
    syncNmapPortConfigFile(agent_root_dir)
    logger.debug('temp file for storing nmap results: ', tmp_file_name)
    try:
        client = Framework.createClient(
            ClientsConsts.LOCAL_SHELL_PROTOCOL_NAME)
        try:
            performNmapDiscover(client, ip, tmp_file_name, timeout, agent_ext_dir, scanKnownPortsOnly, portstoscan, doServiceFingerprints, discoverUdpPorts, nmapLocation)
            if os.path.exists(tmp_file_name):
                logger.debug('start processing the nmap results')
                processNmapResult(tmp_file_name, OSHVResult, discoverOsName, doServiceFingerprints, createApp, Framework)
            else:
                raise ValueError, 'Error nmap result file is missing: %s' % tmp_file_name
        finally:
            # Always release the shell client and remove the temp results file
            client.close()
            File(tmp_file_name).delete()
    except Exception, e:
        # NOTE(review): getMessage() suggests a wrapped Java exception is
        # expected here — confirm pure-Python exceptions are handled upstream
        msg = str(e.getMessage())
        logger.debug(msg)
        errormessages.resolveAndReport(msg, ClientsConsts.LOCAL_SHELL_PROTOCOL_NAME, Framework)
def discoverMemory(shell, myVec, hostOSH):
    '''Discover physical memory over an NTCMD shell and report it.

    Copies meminfo.exe to the destination when needed, runs it, parses
    the "Total: <n> KB" line and reports the size (0 when not parsed).
    @command: meminfo
    '''
    cmdMemInfo = 'meminfo'
    ntcmdErrStr = 'Remote command returned 1(0x1)'
    localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'meminfo.exe'
    remoteFile = shell.copyFileIfNeeded(localFile)
    if not remoteFile:
        logger.warn('Failed copying %s' % cmdMemInfo)
        return
    output = shell.execCmd(remoteFile)  #V@@CMD_PERMISION ntcmd protocol execution
    logger.debug('Output of ', remoteFile, ': ', output)
    if output.find(ntcmdErrStr) != -1:
        logger.warn('Failed getting memory info')
        return
    logger.debug('Got memory info - parsing...')
    output = output.strip()
    size = 0
    try:
        totalMatch = re.search('Total: (\d+) KB', output)
        if totalMatch:
            size = int(totalMatch.group(1))
        memory.report(myVec, hostOSH, size)
    except:
        logger.errorException('Error in discoverMemory()')
def discovery(Framework, connectionManager=None): connectionDataManager = connectionManager try: logger.debug('Replicating topology from HP ucs') if not connectionDataManager: connectionDataManager = FrameworkBasedConnectionDataManager( Framework) if not connectionDataManager.validate(): return mappingFileFolder = os.path.join( CollectorsParameters.BASE_PROBE_MGR_DIR, CollectorsParameters.getDiscoveryConfigFolder(), MAPPING_CONFIG_FOLDER) mappingFileManager = UCSMappingFileManager(mappingFileFolder) mappingFile = getMappingFileFromFramework(Framework) if mappingFile: return replicateTopologyUsingMappingFile( os.path.join(mappingFileFolder, mappingFile), connectionDataManager, mappingFileManager) else: Framework.reportError('No mapping file found.') logger.errorException("No mapping file found.") except Exception, e: Framework.reportError('%s' % e) logger.errorException('%s' % e)
def DiscoveryMain(Framework):
    """Pull HP OneView topology into UCMDB via a mapping file; the client
    connection is always closed on exit."""
    connectionDataManager = None
    try:
        logger.debug('Replicating topology from HP OneView')
        connectionDataManager = FrameworkBasedConnectionDataManager(Framework)
        if not connectionDataManager.validate():
            return
        mappingFileFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR,
                                         CollectorsParameters.getDiscoveryConfigFolder(),
                                         HP_ONE_VIEW_CONFIG_FOLDER)
        mappingFileManager = OneviewMappingFileManager(mappingFileFolder)
        mappingFile = getMappingFileFromFramework(Framework)
        if not mappingFile:
            Framework.reportError('No mapping file found.')
            logger.errorException("No mapping file found.")
            return
        return replicateTopologyUsingMappingFile(os.path.join(mappingFileFolder, mappingFile),
                                                 connectionDataManager, mappingFileManager)
    except:
        Framework.reportError('Failed to pull data from OneView.')
        logger.errorException('Failed to pull data from OneView.')
    finally:
        # Release the client connection regardless of outcome
        if connectionDataManager:
            connectionDataManager.closeClient()
def DiscoveryMain(Framework):
    """Replicate the HP OneView topology using its mapping configuration."""
    manager = None
    try:
        logger.debug('Replicating topology from HP OneView')
        manager = FrameworkBasedConnectionDataManager(Framework)
        if not manager.validate():
            return
        configFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR,
                                    CollectorsParameters.getDiscoveryConfigFolder(),
                                    HP_ONE_VIEW_CONFIG_FOLDER)
        fileManager = OneviewMappingFileManager(configFolder)
        mappingFile = getMappingFileFromFramework(Framework)
        if mappingFile:
            return replicateTopologyUsingMappingFile(os.path.join(configFolder, mappingFile),
                                                     manager, fileManager)
        Framework.reportError('No mapping file found.')
        logger.errorException("No mapping file found.")
    except:
        Framework.reportError('Failed to pull data from OneView.')
        logger.errorException('Failed to pull data from OneView.')
    finally:
        # The connection is closed no matter which path was taken
        if manager:
            manager.closeClient()
def doQuery(self, queryStr):
    """Execute a registry query, first with the configured native reg tool
    and then with the probe-supplied reg_mam.exe fallback.

    Returns the command output, or None when both attempts fail.
    """
    cmdRemoteAgent = self.DEFAULT_REG_TOOL + queryStr
    ntcmdErrStr = 'Remote command returned 1(0x1)'
    timeout = 180000
    buffer = self.shell.execCmd(cmdRemoteAgent, timeout)  # @@CMD_PERMISION ntcmd protocol execution
    logger.debug('Outputing ', cmdRemoteAgent, ': ...')
    reg_mamRc = self.shell.getLastCmdReturnCode()
    if reg_mamRc == 0 and buffer.find(ntcmdErrStr) == -1:
        # Native tool succeeded
        return buffer
    logger.debug('reg ended unsuccessfully with return code:%d, error:%s' % (reg_mamRc, buffer))
    logger.debug('Failed getting services info using reg.exe trying the reg_mam.exe')
    localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + self.REG_MAM_REG_TOOL
    remoteFile = self.shell.copyFileIfNeeded(localFile)
    cmdRemote = self.REG_MAM_REG_TOOL
    if not remoteFile:
        logger.warn('Failed copying %s' % cmdRemote)
        return
    cmdRemoteAgent = remoteFile + queryStr
    buffer = self.shell.execCmd(cmdRemoteAgent, timeout)  # @@CMD_PERMISION ntcmd protocol execution
    regRc = self.shell.getLastCmdReturnCode()
    if (regRc != 0) or (buffer.find(ntcmdErrStr) != -1):
        errMessage = 'NTCMD: Failed getting services info.'
        errobj = errorobject.createError(errorcodes.FAILED_GETTING_INFORMATION, ['NTCMD', 'services info'], errMessage)
        logger.reportWarningObject(errobj)
        logger.debug('Failed getting services info, reg_mam.exe ended with %d, error:%s' % (regRc, buffer))
        return
    return buffer
def createCMDBConnection(i__LocalShell):
    """Open a UCMDB service connection using the integration user's generic
    protocol credentials.

    Tries HTTP first and falls back to HTTPS. Returns the connected
    service, or None when no matching credentials are configured.
    """
    logger.info('************* START createCMDBConnection *************')
    host = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    httpPort = int(CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_PORT_HTTP))
    httpsPort = int(CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_PORT_HTTPS))
    protocols = ProtocolDictionaryManager.getProtocolParameters(
        'genericprotocol', netutils.resolveIP(i__LocalShell, host))
    # Locate the credentials belonging to the dedicated integration user
    userName = ''
    for protocol in protocols:
        if protocol.getProtocolAttribute('protocol_username') == C__INTEGRATION_USER:
            userName = protocol.getProtocolAttribute('protocol_username')
            userPassword = protocol.getProtocolAttribute('protocol_password')
            break
    if not userName:
        logger.error('Error Username Protocol not initialized')
        return None
    try:
        logger.debug('Attempting HTTP connection')
        provider = UcmdbServiceFactory.getServiceProvider('http', host, httpPort)
    except:
        logger.debug('HTTP connection failed, trying HTTPS')
        UcmdbServiceFactory.initSSL()
        provider = UcmdbServiceFactory.getServiceProvider('https', host, httpsPort)
    credentials = provider.createCredentials(userName, userPassword)
    clientContext = provider.createClientContext("UD")
    service = provider.connect(credentials, clientContext)
    logger.info('************* END createCMDBConnection *************')
    return service
def DiscoveryMain(Framework):
    """Phase 1 of the ARIS integration: parse the ARIS XML export and write
    one intermediate XML file per mapping file into the 'inter' folder.

    Reads the 'ARIS_XML_file' and 'ARISLocaleId' job parameters, validates
    the input file, then for each mapping file extracts the requested
    object/relationship types from the ARIS XML and serializes them.
    """
    logger.info('Start Phase 1 ... Pull from ARIS')
    # Set global framework
    global theFramework
    theFramework = Framework
    ## Make sure we have an input data file from ARIS
    ARISfileName = Framework.getParameter('ARIS_XML_file') or None
    ARISfile = File(ARISfileName)
    if not (ARISfile and ARISfile.exists() and ARISfile.canRead()):
        excInfo = ('ARIS XML input file is not specified or is invalid!')
        Framework.reportError(excInfo)
        logger.error(excInfo)
        return None
    ## Check that the language parameter is set - default to US English
    # NOTE(review): the 'or' default here makes the 'if not' branch below unreachable
    requestedLocaleID = Framework.getParameter('ARISLocaleId') or '&LocaleId.USen;'
    if not requestedLocaleID:
        logger.warn('ARIS LocaleID parameter is not set...defaulting to US English')
        requestedLocaleID = '&LocaleId.USen;'
    # File and directory names
    userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\ARIS\\'
    intermediatesDir = userExtDir + 'inter\\'
    mapingFilesListFileName = userExtDir + 'tqls.txt'
    mappingFileNameList = checkDiscoveryResources(mapingFilesListFileName, userExtDir, Framework, intermediatesDir)
    if not mappingFileNameList:
        return None
    ## Get attribute names from mapping file(s)
    ## This is a list of extended attributes to be retrieved from ARIS
    for mappingFileName in mappingFileNameList:
        (requestedSourceObjectTypeList, requestedSourceRelationshipTypeList) = getMapping(userExtDir + 'data\\' + mappingFileName + '.xml')
        if requestedSourceObjectTypeList and requestedSourceRelationshipTypeList:
            arisObjectMap = processARISXML(ARISfile, requestedSourceObjectTypeList, requestedSourceRelationshipTypeList, requestedLocaleID)
            intermediateXmlDoc = None
            if arisObjectMap:
                intermediateXmlDoc = buildIntermediateXML(arisObjectMap)
                intermediateXmlLocation = intermediatesDir + mappingFileName + '.xml'
            else:
                Framework.reportWarning('No CIs found in the ARIS XML file')
            if intermediateXmlDoc:
                try:
                    # Serialize the intermediate document for phase 2
                    xmlOutputter = XMLOutputter()
                    xmlOutputter.output(intermediateXmlDoc,
                                        FileOutputStream(intermediateXmlLocation))
                except:
                    excInfo = logger.prepareJythonStackTrace('')
                    Framework.reportError('Error writing intermediate file: <%s>' % intermediateXmlLocation)
                    logger.warn('[' + SCRIPT_NAME + ':DiscoveryMain] Exception: <%s>' % excInfo)
                    pass
            else:
                Framework.reportWarning('Error creating intermediate XML')
        else:
            logger.warn('[' + SCRIPT_NAME + ':DiscoveryMain] Unable to process mapping file: <%s>' % mappingFileName)
            Framework.reportError(' Unable to process mapping file: <%s>' % mappingFileName)
    logger.info('End Phase 1.... Pull from ARIS')
def DiscoveryMain(Framework): OSHVResult = ObjectStateHolderVector() DebugMode = Framework.getParameter('DebugMode') userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder( ) + '\\' filePathDir = userExtDir + 'TQLExport\\ARIS\\results\\' directory = File(filePathDir) files = directory.listFiles() if files == None: logger.warn( 'Results XML not found. Perhaps no data was received from ARIS or an error occurred in the Pull_From_ARIS script.' ) return try: ## Start the work for file in files: if file != None or file != '': builder = SAXBuilder() doc = builder.build(file) # Process CIs # info( "Start processing CIs to update in the destination server..." ) allObjects = doc.getRootElement().getChild('data').getChild( 'objects').getChildren('Object') (objVector, ciDict) = processObjects(allObjects) OSHVResult.addAll(objVector) # Process Relations # info( "Start processing Relationships to update in the destination server..." ) allLinks = doc.getRootElement().getChild('data').getChild( 'links').getChildren('link') linkVector = processLinks(allLinks, ciDict) OSHVResult.addAll(linkVector) except: stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]) info(concatenate('Failure: ():\n', stacktrace)) if (DebugMode != None): DebugMode = DebugMode.lower() if DebugMode == "true": mam_utils.info( '[NOTE] UCMDB Integration is running in DEBUG mode. No data will be pushed to the destination server.' ) print OSHVResult.toXmlString() return None else: #print OSHVResult.toXmlString() return OSHVResult
def DiscoveryMain(Framework): OSHVResult = ObjectStateHolderVector() fileSeparator = File.separator DebugMode = Framework.getParameter('DebugMode') userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + fileSeparator filePathDir = userExtDir + 'TQLExport' + fileSeparator + 'hpsim' + fileSeparator + 'results' + fileSeparator directory = File(filePathDir) files = directory.listFiles() if files == None: logger.warn('Results XML not found. Perhaps no data was received from SIM or an error occurred in the SIM_Discovery script.') return ## Read ignored Ci types from integration configuration ignoredCiTypes = [] rawIgnoredCiTypes = Framework.getParameter('IgnoredCiTypes') tempIgnoredCiTypes = eval(rawIgnoredCiTypes) if tempIgnoredCiTypes is not None: for item in tempIgnoredCiTypes: item != 'None' and ignoredCiTypes.append(item) ## Identify UCMDB version ucmdbVersion = modeling.CmdbClassModel().version() try: ## Start the work for file in files: if file != None or file != '': builder = SAXBuilder() doc = builder.build(file) # Process CIs # info("Start processing CIs to update in the destination server...") allObjects = doc.getRootElement().getChild('data').getChild('objects').getChildren('Object') (objVector, ciDict) = processObjects(allObjects, ignoredCiTypes, ucmdbVersion) OSHVResult.addAll(objVector) # Process Relations # info("Start processing Relationships to update in the destination server...") allLinks = doc.getRootElement().getChild('data').getChild('links').getChildren('link') linkVector = processLinks(allLinks, ciDict) OSHVResult.addAll(linkVector) except: stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]) info(concatenate('Failure: ():\n', stacktrace)) if (DebugMode != None): DebugMode = DebugMode.lower() if DebugMode == "true": mam_utils.info ('[NOTE] UCMDB Integration is running in DEBUG mode. 
No data will be pushed to the destination server.') print OSHVResult.toXmlString() return None else: #print OSHVResult.toXmlString() return OSHVResult
def osh_createIpOsh(iSeriesOsh, defaultIp):
    """Create an IP OSH for defaultIp, link it to the iSeries OSH via a
    'containment' link, and return the resulting vector."""
    _vector = ObjectStateHolderVector()
    ipOsh = modeling.createIpOSH(defaultIp)
    # Record which probe discovered this IP
    probeName = CollectorsParameters.getValue(CollectorsParameters.KEY_COLLECTORS_PROBE_NAME)
    ipOsh.setAttribute('ip_probename', probeName)
    linkOsh = modeling.createLinkOSH('containment', iSeriesOsh, ipOsh)
    for osh in (ipOsh, iSeriesOsh, linkOsh):
        _vector.add(osh)
    return _vector
def DiscoveryMain(Framework):
    """Phase 3 of the Atrium integration: push the transformed result XML
    files to UCMDB.

    In DebugMode nothing is pushed and the function returns early.
    Fix: the per-file guard used 'or' ('file != None or file != ""'),
    which is always true; it now correctly skips null entries.
    """
    logger.info('Start Phase 3 ... Push transformed data to UCDMB')
    OSHVResult = ObjectStateHolderVector()
    DebugMode = Framework.getParameter('DebugMode')
    DateParsePattern = Framework.getParameter('DateParsePattern')
    userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\'
    if (DebugMode != None):
        DebugMode = DebugMode.lower()
        if DebugMode == "true":
            logger.info('[NOTE] UCMDB Integration is running in DEBUG mode. No data will be pushed to the destination server.')
            return
    filePathDir = userExtDir + 'TQLExport\\Atrium\\results\\'
    directory = File(filePathDir)
    files = directory.listFiles()
    try:
        for file in files:
            if file:  # skip null entries
                builder = SAXBuilder()
                doc = builder.build(file)
                # Process CIs #
                logger.info("Start processing CIs to update in the destination server...")
                allObjects = doc.getRootElement().getChild('data').getChild('objects').getChildren('Object')
                (objVector, ciDict) = processObjects(allObjects, DateParsePattern)
                OSHVResult.addAll(objVector)
                # Process Relations #
                logger.info("Start processing Relationships to update in the destination server...")
                allLinks = doc.getRootElement().getChild('data').getChild('links').getChildren('link')
                linkVector = processLinks(allLinks, ciDict)
                OSHVResult.addAll(linkVector)
        #print OSHVResult.toXmlString()
    except:
        stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
        logger.info('Failure in processing data %s' % stacktrace)
    logger.info('Ending Push to UCMDB')
    logger.info('End Phase 3 ... Push transformed data to UCDMB')
    return OSHVResult
def enable_shell_proxy(self):
    """Copy pdadmin_proxy.bat to the remote host and configure this wrapper
    to invoke pdadmin through it.

    Raises ValueError when the proxy cannot be copied or its remote
    location cannot be derived.
    """
    localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'pdadmin_proxy.bat'
    remote_file = self.shell.copyFileIfNeeded(localFile)
    if not remote_file:
        raise ValueError("Failed to set up pdadmin call proxy.")
    self.binary_name = 'pdadmin_proxy.bat'
    # Derive the remote directory portion after the admin-share '$'
    match = re.search('\$(.+)pdadmin_proxy.bat', remote_file)
    if match is None:
        raise ValueError("Failed to set up pdadmin call proxy.")
    self.prefix = '%SystemRoot%' + match.group(1)
def syncNmapPortConfigFile(agentPath):
    ''' Sync nmap port config with global probe's "port number to port name" mapping

    Rewrites the probe's nmap-services resource from portNumberToPortName.xml,
    emitting one "name<TAB>port/protocol" line per port flagged discover="1".
    Skipped when nmap-services is already newer than the XML mapping.
    @param agentPath: probe root directory (string)
    '''
    logger.debug('synchronizing nmap port config file')
    portConfigFilename = agentPath + CollectorsParameters.getDiscoveryConfigFolder() + CollectorsParameters.FILE_SEPARATOR + 'portNumberToPortName.xml'
    mamservice = File(portConfigFilename)
    nmapservice = File(agentPath + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'nmap-services')
    if nmapservice.lastModified() > mamservice.lastModified():
        return
    # BUGFIX: parse the XML before opening the output stream so a malformed
    # mapping file does not truncate the existing nmap-services file.
    document = SAXBuilder(0).build(mamservice)
    # document = parse(portConfigFilename)
    ports = XmlWrapper(document.getRootElement().getChildren('portInfo'))
    nmapFile = FileOutputStream(nmapservice)
    try:
        for port in ports:
            if int(port.getAttributeValue("discover")):
                portNumber = port.getAttributeValue("portNumber")
                portName = port.getAttributeValue("portName")
                portProtocol = port.getAttributeValue("portProtocol")
                nmapFile.write("%s\t%s/%s\r\n" % (portName, portNumber, portProtocol))
    finally:
        # BUGFIX: the stream was previously leaked when a write failed.
        nmapFile.close()
def DiscoveryMain(Framework):
    """Phase 2 of the ARIS integration: validate the Phase-1 intermediate
    XML and regenerate the mapped result files under results\\."""
    logger.info("Start Phase 2 ....Apply Mapping file to ARIS CIs")
    baseDir = (CollectorsParameters.BASE_PROBE_MGR_DIR
               + CollectorsParameters.getDiscoveryResourceFolder()
               + "\\TQLExport\\ARIS\\")
    interFiles = File(baseDir + "inter\\").listFiles()
    oldResults = File(baseDir + "results\\").listFiles()
    # Remove stale result files from a previous run.
    if oldResults != None:
        for stale in oldResults:
            stale.delete()
    # Require at least one non-empty .xml file produced by Phase 1.
    foundXml = 0
    for candidate in interFiles:
        candidateName = candidate.getName()
        if candidateName[-4:].lower() == ".xml" and candidate.length() > 0:
            foundXml = 1
    if not foundXml:
        logger.warn("Intermediate XML not found or invalid. Perhaps no data was received from ARIS or an error occurred in the Pull_from_ARIS script.")
        return
    # Connect to the UCMDB server and generate the output XML files.
    serverName = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    exportTQL(serverName, baseDir)
    logger.info("End Phase 2 ....Apply Mapping file to ARIS CIs")
def getNatIPFromConfigurationFile():
    """ Read IP or IP range from configuration file.
    @return: A list contains IPAddress objects and IPNetwork objects
    """
    configFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR,
                                CollectorsParameters.getDiscoveryConfigFolder())
    configPath = os.path.join(configFolder, 'NATIpAddress.xml')
    if not os.path.exists(configPath):
        logger.info("There is no NAT IP address defined.")
        return
    # Parse the configuration document once up front.
    root = SAXBuilder().build(configPath).getRootElement()
    singleElements = root.getChildren('Ip')
    rangeElements = root.getChildren('IpRange')
    result = []
    # Single addresses: keep only syntactically valid ones.
    if singleElements:
        for element in singleElements:
            candidate = element.getText()
            if ip_addr.isValidIpAddress(candidate):
                result.append(ip_addr.IPAddress(candidate))
    # Ranges ("start-end"): collapse each into summarized networks.
    if rangeElements:
        for element in rangeElements:
            bounds = element.getText().split('-')
            low = bounds[0]
            high = bounds[1]
            if ip_addr.isValidIpAddress(low) and ip_addr.isValidIpAddress(high):
                networks = ip_addr.summarize_address_range(
                    ip_addr.IPAddress(low), ip_addr.IPAddress(high))
                logger.debug(networks)
                result.extend(networks)
            else:
                logger.warn("IP Range should start and end with valid IP address")
    return result
def checkLock(Framework):
    # Verify/claim the scanner-node lock stored in the connected client's
    # options map.  Returns 1 when this probe/job may proceed, 0 when no
    # lock exists or another probe/job owns a live lock, or
    # ScannerNodeLockedByCallHome when a call-home job holds it.
    probe = CollectorsParameters.getValue(CollectorsParameters.KEY_PROBE_MGR_IP)
    if (probe is None) or (len(str(probe)) == 0):
        # Fall back to the probe name when no manager IP is configured.
        logger.debug('Probe manager ip is not specified in the DiscoveryProbe.properties file, using probe ID')
        probe = CollectorsParameters.getValue(CollectorsParameters.KEY_COLLECTORS_PROBE_NAME)
    jobType = INVENTORY_JOB_TYPE
    jobId = Framework.getDiscoveryJobId()
    lockTime = System.currentTimeMillis()
    lockExpiration = System.currentTimeMillis() + LOCK_AGE_PERIOD_MILLISECONDS
    # Our own candidate lock, used both for comparison and for renewal.
    lock = Lock(probe, jobType, jobId, lockTime, lockExpiration)
    logger.debug('Checking remote lock with current lock:', str(lock.getLockInfo()))
    triggerid = Framework.getTriggerCIData('id')
    logger.debug('Checking lock for probe ', probe, ' and jobid ', jobId, ' and triggerid ', triggerid)
    client = Framework.getConnectedClient()
    options = getClientOptionsMap(client)
    lockOption = options.get(ScannerNodeLock)
    if (lockOption is None) or (len(lockOption.strip()) == 0):
        # No lock present on the node at all.
        logger.debug('Lock on scanner node for probe "' + lock.probe + '" and job "' + lock.jobId + '" is not exists')
        return 0
    remoteLock = extractLock(lockOption)
    logger.debug('Found remote lock:', str(remoteLock.getLockInfo()))
    if remoteLock.isLockExpired():
        # Expired lock: overwrite it with our own fresh lock.
        logger.debug('Lock on remote node is already expired, renewing lock on the node')
        options = HashMap()
        options.put(ScannerNodeLock, lock.getLockInfo())
        client.setOptionsMap(options)
    elif not lock.isSameLock(remoteLock):
        # Someone else's live lock: report who holds it.
        logger.debug('Lock on remote node is owned by another probe/job (' + remoteLock.probe + '/' + remoteLock.jobId + ')')
        if remoteLock.jobType == CALLHOME_JOB_TYPE:
            return ScannerNodeLockedByCallHome
        return 0
    return 1
def DiscoveryMain(Framework):
    """Phase 2 of the Atrium integration: check the intermediate XML files
    and run the mapping transformation into the results directory."""
    logger.info('Start Phase 2 ... Apply Mapping transformation to Atrium CIs')
    baseDir = (CollectorsParameters.BASE_PROBE_MGR_DIR
               + CollectorsParameters.getDiscoveryResourceFolder()
               + '\\TQLExport\\Atrium\\')
    interFiles = File(baseDir + 'inter\\').listFiles()
    oldResults = File(baseDir + 'results\\').listFiles()
    # Drop results from any previous run.
    if oldResults != None:
        for stale in oldResults:
            stale.delete()
    # Phase 1 must have produced at least one non-empty XML file.
    haveInput = 0
    for candidate in interFiles:
        name = candidate.getName()
        if name[-4:].lower() == '.xml' and candidate.length() > 0:
            haveInput = 1
    if not haveInput:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from Atrium or an error occurred in the atrium_query script.')
        return
    # Apply the mapping, writing the output XML into results\.
    serverIp = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    integrationAPI = IntegrationAPI(serverIp, "atrium_map.py")
    integrationAPI.processDir(baseDir)
    logger.info('End Phase 2 ... Apply Mapping transformation to Atrium CIs')
def DiscoveryMain(Framework):
    """Phase 2 of the Troux integration: clear old result files and apply
    the mapping file via exportTQL (best-effort)."""
    logger.info('Start Phase 2 ....Apply Mapping file to Troux CIs')
    # Destination Data
    userExtUcmdbDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\Troux\\'
    outfilePathDir = userExtUcmdbDir + 'results\\'
    infilePathDir = userExtUcmdbDir + 'inter'
    directory = File(outfilePathDir)
    outfiles = directory.listFiles()
    directory = File(infilePathDir)
    infiles = directory.listFiles()
    ## Clean up the output directory before we run
    if (outfiles != None):
        for file in outfiles:
            file.delete()
    ## We can only process if Phase 1 created a Intermediate file to process
    ## Connect to the UCMDB Server, retrieve the results of the Mapping File
    ## and generate the output XML files in results directory
    try:
        ip = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
        exportTQL(ip, userExtUcmdbDir)
    except:
        # Keep the best-effort semantics, but record the failure instead of
        # swallowing it silently (was: bare `except: pass`).
        stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
        logger.warn('exportTQL failed: %s' % stacktrace)
    logger.info('End Phase 2 ....Apply Mapping file to Troux CIs')
def osh_createIpOsh(iSeriesOsh, defaultIp):
    """Build an IP OSH for defaultIp, link it to the iSeries host with a
    'containment' link, and return host, IP and link in one vector."""
    resultVector = ObjectStateHolderVector()
    # Create IP OSH ------------------------------------
    ipOsh = modeling.createIpOSH(defaultIp)
    # Tag the IP with the probe that reported it.
    probeName = CollectorsParameters.getValue(CollectorsParameters.KEY_COLLECTORS_PROBE_NAME)
    ipOsh.setAttribute('ip_probename', probeName)
    resultVector.add(ipOsh)
    resultVector.add(iSeriesOsh)
    resultVector.add(modeling.createLinkOSH('containment', iSeriesOsh, ipOsh))
    return resultVector
def executeSoftwareQueryByPath(client, reg_path, prefix=''):
    ''' Shell, str, str = '' -> list(list(str), str)
    Query the remote registry for installed-software keys under
    <reg_path>\Uninstall, first with the native reg.exe and, on failure,
    with the probe-supplied reg_mam.exe copied to the destination.
    Returns [keys, raw_buffer]; [[], ''] on failure.
    @command: <prefix>reg query <reg_path>\Uninstall /S
    @command: reg_mam query <reg_path>\Uninstall /S
    '''
    ntcmdErrStr = 'Remote command returned 1(0x1)'
    non64BitOsErrStr = 'The system was unable to find the specified registry key or value'
    queryStr = ' query '+reg_path+'\Uninstall /S'
    #First trying the default reg.exe(might not work on Win2k or NT)
    if len(prefix)>0 and (not prefix.endswith('\\')):
        prefix += '\\'
    cmdRemoteAgent = prefix+'reg' + queryStr
    buffer = client.execCmd(cmdRemoteAgent,120000)#@@CMD_PERMISION ntcmd protocol execution
    logger.debug('Outputting ', cmdRemoteAgent, ': ...')
    reg_mamRc = client.getLastCmdReturnCode()
    if (reg_mamRc != 0) or (buffer.find(ntcmdErrStr) != -1):
        if (reg_mamRc == 1) and (buffer.find('ERROR: More data is available.') != -1):
            # Partial output is still usable; warn and fall through to parsing.
            errMsg = 'reg command returned \'More data is available\' error, not all software might be reported'
            logger.warn(errMsg)
            pass
        else:
            logger.debug('reg query command ended unsuccessfully with return code:%d, error:%s' % (reg_mamRc,buffer))
            logger.debug('Failed getting software info using default reg.exe trying the reg_mam.exe')
            # Fallback: copy the bundled reg_mam.exe over and retry the query.
            cmdRemote = 'reg_mam'
            localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'reg_mam.exe'
            remoteFile = client.copyFileIfNeeded(localFile)
            if not remoteFile:
                logger.warn('Failed copying %s' % cmdRemote)
                return [[], '']
            cmdRemoteAgent = remoteFile + queryStr
            buffer = client.execCmd(cmdRemoteAgent,120000)#@@CMD_PERMISION ntcmd protocol execution
            regRc = client.getLastCmdReturnCode()
            if (regRc != 0 ) or (buffer.find(ntcmdErrStr) != -1):
                if (regRc == 1) and (buffer.find('ERROR: More data is available.') != -1):
                    errMsg = 'reg_mam command returned \'More data is available\' error, not all software might be reported'
                    logger.warn(errMsg)
                    pass
                else:
                    # NOTE(review): the "unable to find the specified registry
                    # key" case is treated as an empty result without logging
                    # -- confirm this reconstruction of the original line
                    # breaks against the upstream script.
                    if buffer.find(non64BitOsErrStr) == -1:
                        logger.debug('Failed getting software info, reg.exe ended with %d, error:%s' % (regRc,buffer))
                    return [[], '']
    logger.debug('got software buffer from remote registry - parsing...')
    # Split the raw buffer on the queried path: one chunk per registry key.
    keys = buffer.split(reg_path)
    return [keys, buffer]
def DiscoveryMain(Framework):
    """Phase 2 of the Troux integration: clear old result files and apply
    the mapping file via exportTQL (best-effort)."""
    logger.info('Start Phase 2 ....Apply Mapping file to Troux CIs')
    # Destination Data
    userExtUcmdbDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\Troux\\'
    outfilePathDir = userExtUcmdbDir + 'results\\'
    infilePathDir = userExtUcmdbDir + 'inter'
    directory = File(outfilePathDir)
    outfiles = directory.listFiles()
    directory = File(infilePathDir)
    infiles = directory.listFiles()
    ## Clean up the output directory before we run
    if (outfiles != None):
        for file in outfiles:
            file.delete()
    ## We can only process if Phase 1 created a Intermediate file to process
    ## Connect to the UCMDB Server, retrieve the results of the Mapping File
    ## and generate the output XML files in results directory
    try:
        ip = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
        exportTQL(ip, userExtUcmdbDir)
    except:
        # Keep the best-effort semantics, but record the failure instead of
        # swallowing it silently (was: bare `except: pass`).
        stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
        logger.warn('exportTQL failed: %s' % stacktrace)
    logger.info('End Phase 2 ....Apply Mapping file to Troux CIs')
def DiscoveryMain(Framework):
    """SIM integration: validate the Phase-1 intermediate XML files and
    export the TQL results for UCMDB."""
    sep = File.separator
    # Destination Data
    baseDir = (CollectorsParameters.BASE_PROBE_MGR_DIR
               + CollectorsParameters.getDiscoveryResourceFolder()
               + sep + 'TQLExport' + sep + 'hpsim' + sep)
    interFiles = File(baseDir + 'inter' + sep).listFiles()
    oldResults = File(baseDir + 'results' + sep).listFiles()
    # Clean out results from a previous run.
    if oldResults != None:
        for stale in oldResults:
            stale.delete()
    # Require a non-empty XML file from the SIM discovery phase.
    haveXml = 0
    for candidate in interFiles:
        name = candidate.getName()
        if name[-4:].lower() == '.xml' and candidate.length() > 0:
            haveXml = 1
    if not haveXml:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from SIM or an error occurred in the SIM_Discovery script.')
        return
    # Connect to the UCMDB server and generate the output XML files.
    serverIp = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    exportTQL(serverIp, baseDir)
    info('End exportTQL_for_SIM_to_UCMDB.py')
def queryRegistry(self, regQuery):
    """Run `reg query <regQuery>` on the remote host, falling back to the
    probe-supplied reg_mam.exe when the native reg.exe fails.
    Returns the raw command output, or None when both attempts fail."""
    ntcmdErrStr = 'Remote command returned 1(0x1)'
    queryStr = " query " + regQuery
    output = self.shell.execCmd(self.prefix64bit + "reg.exe " + queryStr)
    nativeFailed = (self.shell.getLastCmdReturnCode() != 0
                    or output.find(ntcmdErrStr) != -1)
    if nativeFailed:
        # Fallback: push reg_mam.exe to the destination and retry.
        localFile = (CollectorsParameters.BASE_PROBE_MGR_DIR
                     + CollectorsParameters.getDiscoveryResourceFolder()
                     + CollectorsParameters.FILE_SEPARATOR + 'reg_mam.exe')
        remoteFile = self.shell.copyFileIfNeeded(localFile)
        if not remoteFile:
            logger.warn('Failed copying reg_mam.exe to the destination')
            return
        output = self.shell.execCmd(remoteFile + queryStr)
        if not output or self.shell.getLastCmdReturnCode() != 0:
            logger.warn("Failed getting registry info.")
            return
    return output
def getNatIPFromConfigurationFile():
    """ Read IP or IP range from configuration file.
    @return: A list contains IPAddress objects and IPNetwork objects
    """
    NATIPConfigurationFileFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR,
                                                CollectorsParameters.getDiscoveryConfigFolder())
    NATIPConfigurationFile = os.path.join(NATIPConfigurationFileFolder, 'NATIpAddress.xml')
    if not os.path.exists(NATIPConfigurationFile):
        logger.info("There is no NAT IP address defined.")
        return
    # Read tags from xml file
    builder = SAXBuilder()
    configDoc = builder.build(NATIPConfigurationFile)
    rootElement = configDoc.getRootElement()
    ipElements = rootElement.getChildren('Ip')
    ipRangeElements = rootElement.getChildren('IpRange')
    NAT_IPs = []
    # Read IPAddress, add valid one to NAT_IPs list
    if ipElements:
        for ipElement in ipElements:
            ip = ipElement.getText()
            if ip_addr.isValidIpAddress(ip):
                ipObj = ip_addr.IPAddress(ip)
                NAT_IPs.append(ipObj)
    # Read IP Ranges, create IPNetwork and add to NAT_IPs list
    if ipRangeElements:
        for ipRangeElement in ipRangeElements:
            # Ranges are written as "start-end".
            ip_range_raw = ipRangeElement.getText()
            ips = ip_range_raw.split('-')
            ip_start = ips[0]
            ip_end = ips[1]
            if ip_addr.isValidIpAddress(ip_start) and ip_addr.isValidIpAddress(ip_end):
                ip_start = ip_addr.IPAddress(ip_start)
                ip_end = ip_addr.IPAddress(ip_end)
                # Collapse the inclusive range into a minimal set of networks.
                ips = ip_addr.summarize_address_range(ip_start, ip_end)
                logger.debug(ips)
                NAT_IPs.extend(ips)
            else:
                logger.warn("IP Range should start and end with valid IP address")
    return NAT_IPs
def queryRegistry(self, regQuery):
    # Execute `reg query <regQuery>` remotely; on failure of the native
    # reg.exe, copy the bundled reg_mam.exe to the host and retry.
    # Returns the raw command output, or None when both attempts fail.
    ntcmdErrStr = 'Remote command returned 1(0x1)'
    queryStr = " query " + regQuery
    buffer = self.shell.execCmd(self.prefix64bit + "reg.exe " + queryStr)
    if self.shell.getLastCmdReturnCode() != 0 or buffer.find(ntcmdErrStr) != -1:
        # Fallback path: push the probe's reg_mam.exe and rerun the query.
        localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'reg_mam.exe'
        remoteFile = self.shell.copyFileIfNeeded(localFile)
        if not remoteFile:
            logger.warn('Failed copying reg_mam.exe to the destination')
            return
        buffer = self.shell.execCmd(remoteFile + queryStr)
        if not buffer or self.shell.getLastCmdReturnCode() != 0:
            logger.warn("Failed getting registry info.")
            return
    return buffer
def DiscoveryMain(Framework): OSHVResult = ObjectStateHolderVector() DebugMode = Framework.getParameter('DebugMode') userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\' filePathDir = userExtDir + 'TQLExport\\ARIS\\results\\' directory = File(filePathDir) files = directory.listFiles() if files == None: logger.warn('Results XML not found. Perhaps no data was received from ARIS or an error occurred in the Pull_From_ARIS script.') return try: ## Start the work for file in files: if file != None or file != '': builder = SAXBuilder () doc = builder.build(file) # Process CIs # info("Start processing CIs to update in the destination server...") allObjects = doc.getRootElement().getChild('data').getChild('objects').getChildren('Object') (objVector, ciDict) = processObjects(allObjects) OSHVResult.addAll(objVector) # Process Relations # info("Start processing Relationships to update in the destination server...") allLinks = doc.getRootElement().getChild('data').getChild('links').getChildren('link') linkVector = processLinks(allLinks, ciDict) OSHVResult.addAll(linkVector) except: stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]) info(concatenate('Failure: ():\n', stacktrace)) if (DebugMode != None): DebugMode = DebugMode.lower() if DebugMode == "true": mam_utils.info ('[NOTE] UCMDB Integration is running in DEBUG mode. No data will be pushed to the destination server.') print OSHVResult.toXmlString() return None else: #print OSHVResult.toXmlString() return OSHVResult
def getUCSURLs():
    """Load the UCS URL list file and return its usable lines.
    A usable line is non-empty, not a comment, and starts with 'http'.
    Returns None when the list file does not exist."""
    folder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR,
                          CollectorsParameters.getDiscoveryConfigFolder(),
                          ucs_pull_base.MAPPING_CONFIG_FOLDER)
    listPath = os.path.join(folder, URL_LIST_FILE)
    if not os.path.exists(listPath):
        logger.error('UCS URL list file not found:', listPath)
        return None
    handle = open(listPath)
    stripped = [raw.strip() for raw in handle.readlines()]
    # Keep non-empty, non-comment lines that look like URLs.
    return [entry for entry in stripped
            if entry and not entry.startswith('#') and entry.startswith('http')]
def DiscoveryMain(Framework):
    # Run a local nmap scan against the trigger IP, parse the XML output and
    # report hosts/ports/applications into OSHVResult.
    OSHVResult = ObjectStateHolderVector()
    logger.debug('Start nmap_osfingerprint.py')
    ip = Framework.getDestinationAttribute('ip_address')
    timeout = Framework.getParameter('nmap_host_timeout')
    if not str(timeout).isdigit():
        msg = "Timeout parameter value must be a digit"
        logger.debug(msg)
        errormessages.resolveAndReport(msg, ClientsConsts.LOCAL_SHELL_PROTOCOL_NAME, Framework)
        return OSHVResult
    timeout = int(timeout) * 1000  # seconds -> milliseconds
    scanKnownPortsOnly = Boolean.parseBoolean(Framework.getParameter('scan_known_ports_only'))
    portstoscan = Framework.getParameter('scan_these_ports_only')
    doServiceFingerprints = Boolean.parseBoolean(Framework.getParameter('Perform_Port_Fingerprints'))
    createApp = Boolean.parseBoolean(Framework.getParameter('Create_Application_CI'))
    discoverOsName = Boolean.parseBoolean(Framework.getParameter('discover_os_name'))
    nmapLocation = Framework.getParameter('nmap_location')
    #discover_UDP_Ports = int(Framework.getParameter('Discover_UDP_Ports'))
    discoverUdpPorts = 0  # UDP discovery currently disabled (see commented line above)
    agent_root_dir = CollectorsParameters.BASE_PROBE_MGR_DIR
    agent_ext_dir = agent_root_dir + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR
    # Unique temp file name: <ip with underscores><HHMMSS>nmap.xml
    tmp_file_name = agent_ext_dir + string.replace(ip, '.', '_') + time.strftime("%H%M%S", time.gmtime(time.time())) + 'nmap.xml'
    syncNmapPortConfigFile(agent_root_dir)
    logger.debug('temp file for storing nmap results: ', tmp_file_name)
    try:
        client = Framework.createClient(ClientsConsts.LOCAL_SHELL_PROTOCOL_NAME)
        try:
            performNmapDiscover(client, ip, tmp_file_name, timeout, agent_ext_dir, scanKnownPortsOnly, portstoscan, doServiceFingerprints, discoverUdpPorts, nmapLocation)
            if os.path.exists(tmp_file_name):
                logger.debug('start processing the nmap results')
                processNmapResult(tmp_file_name, OSHVResult, discoverOsName, doServiceFingerprints, createApp, Framework)
            else:
                raise ValueError, 'Error nmap result file is missing: %s' % tmp_file_name
        finally:
            # Always close the shell client and remove the temp results file.
            client.close()
            File(tmp_file_name).delete()
    except Exception, e:
        msg = str(e.getMessage())
        logger.debug(msg)
        errormessages.resolveAndReport(msg, ClientsConsts.LOCAL_SHELL_PROTOCOL_NAME, Framework)
    # NOTE(review): OSHVResult is populated by processNmapResult but never
    # returned on the success path -- looks like a missing `return
    # OSHVResult`; confirm how the framework collects results here.
def DiscoveryMain(Framework):
    # Replicate topology from ServiceNow into UCMDB, either via an explicit
    # mapping file (job parameter) or via the default full replication.
    try:
        logger.debug('Replicating toplogy from ServiceNow')
        connectionDataManager = FrameworkBasedConnectionDataManager(Framework)
        mappingFileFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR, CollectorsParameters.getDiscoveryConfigFolder(), 'servicenow')
        mappingFileManager = OldMappingFileManager(mappingFileFolder)
        # Module-level chunk size is shared with the replication helpers.
        global chunkSize
        chunkSize = getStepSizeFromFramework(Framework)
        mappingFile = getMappingFileFromFramework(Framework)
        if mappingFile:
            return replicateTopologyUsingMappingFile(os.path.join(mappingFileFolder, mappingFile), connectionDataManager, mappingFileManager)
        else:
            return replicateTopologyFromServiceNow(connectionDataManager, mappingFileManager)
    except:
        # Report to the UI and log the full exception; returns None.
        Framework.reportError('Failed to pull data from ServiceNow. See RemoteProcess log on the Probe for details')
        logger.errorException('Failed to pull data from ServiceNow')
def DiscoveryMain(Framework):
    """Read uri-endpoint entries from the probe's URL configuration file and
    report a CI set for each endpoint (url, type, resolved/declared IPs).
    @return: ObjectStateHolderVector, or None when the config file is absent
    """
    OSHVResult = ObjectStateHolderVector()
    configFileFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR, CollectorsParameters.getDiscoveryConfigFolder())
    uriEndpointConf = os.path.join(configFileFolder, URL_FILE_NAME)
    logger.debug("uriEndpointConf:", uriEndpointConf)
    if not os.path.exists(uriEndpointConf):
        logger.error('UriEndpoint configuration file not found:', uriEndpointConf)
        return None
    listFile = open(uriEndpointConf)
    try:
        url_dom = minidom.parseString(listFile.read())
        nodeList = url_dom.getElementsByTagName('uri-endpoint')
        length = nodeList.length
        # BUGFIX: url/type were referenced before assignment when the first
        # endpoint omitted its <url>/<type> tag; initialize them up front
        # (values still carry over between endpoints, as before).
        url = None
        type = None
        ips = []
        while length > 0:
            node = nodeList.item(nodeList.length - length)
            if node.getElementsByTagName("url"):
                url = node.getElementsByTagName("url")[0].childNodes[0].nodeValue
                ips = resolveIpAddress(url)
            if node.getElementsByTagName("type"):
                type = node.getElementsByTagName("type")[0].childNodes[0].nodeValue
            if node.getElementsByTagName("ip-address"):
                ip_address = node.getElementsByTagName("ip-address")[0].childNodes[0].nodeValue
                ips.append(IPAddress(ip_address))
            if url and node and type and ips:
                OSHVResult.addAll(reportCIs(url, type, ips))
            length = length - 1
    except:
        msg = "Failed to read file:" + str(URL_FILE_NAME)
        logger.reportWarning(msg)
        logger.error(msg, str(sys.exc_info()[1]))
    finally:
        listFile.close()
    return OSHVResult
def getUCSURLs():
    # Return the usable (non-comment, http*) lines from the UCS URL list
    # file, or None when the file is missing.
    mappingFileFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR,
                                     CollectorsParameters.getDiscoveryConfigFolder(),
                                     ucs_pull_base.MAPPING_CONFIG_FOLDER)
    ucsURLListFile = os.path.join(mappingFileFolder, URL_LIST_FILE)
    if not os.path.exists(ucsURLListFile):
        logger.error('UCS URL list file not found:', ucsURLListFile)
        return None
    listFile = open(ucsURLListFile)
    lines = listFile.readlines()
    lines = map(str.strip, lines)
    def validLine(line):
        # Keep non-empty, non-comment lines that look like URLs.
        return line and not line.startswith('#') and line.startswith('http')
    return filter(validLine, lines)
def DiscoveryMain(Framework):
    """Phase 3: read the transformed Atrium result XML files and push the
    contained CIs and relationships to UCMDB.

    @param Framework: DDM framework handle (supplies job parameters)
    @return: ObjectStateHolderVector with all objects and links to push,
             or None in debug mode
    """
    logger.info('Start Phase 3 ... Push transformed data to UCDMB')
    OSHVResult = ObjectStateHolderVector()
    DebugMode = Framework.getParameter('DebugMode')
    DateParsePattern = Framework.getParameter('DateParsePattern')
    userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\'
    if (DebugMode != None):
        DebugMode = DebugMode.lower()
        if DebugMode == "true":
            # Debug mode: announce and push nothing.
            logger.info('[NOTE] UCMDB Integration is running in DEBUG mode. No data will be pushed to the destination server.')
            return
    filePathDir = userExtDir + 'TQLExport\\Atrium\\results\\'
    directory = File(filePathDir)
    files = directory.listFiles()
    try:
        for file in files:
            # BUGFIX: was `file != None or file != ''`, which is always true;
            # `and` expresses the intended "skip empty entries" guard.
            if file != None and file != '':
                builder = SAXBuilder()
                doc = builder.build(file)
                # Process CIs #
                logger.info("Start processing CIs to update in the destination server...")
                allObjects = doc.getRootElement().getChild('data').getChild('objects').getChildren('Object')
                (objVector, ciDict) = processObjects(allObjects, DateParsePattern)
                OSHVResult.addAll(objVector)
                # Process Relations #
                logger.info("Start processing Relationships to update in the destination server...")
                allLinks = doc.getRootElement().getChild('data').getChild('links').getChildren('link')
                linkVector = processLinks(allLinks, ciDict)
                OSHVResult.addAll(linkVector)
        #print OSHVResult.toXmlString()
    except:
        # Best-effort push: log the failure and return whatever was collected.
        stacktrace = traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
        logger.info('Failure in processing data %s' % stacktrace)
        logger.info('Ending Push to UCMDB')
    logger.info('End Phase 3 ... Push transformed data to UCDMB')
    return OSHVResult
def discovery(Framework, connectionManager=None):
    # Replicate UCS topology using a mapping file.  An existing connection
    # manager may be injected; otherwise one is built from the Framework
    # and validated before use.
    connectionDataManager = connectionManager
    try:
        logger.debug('Replicating topology from HP ucs')
        if not connectionDataManager:
            connectionDataManager = FrameworkBasedConnectionDataManager(Framework)
            # NOTE(review): reconstruction places validate() only on the
            # newly-built manager (injected managers are trusted) -- confirm
            # against the upstream script.
            if not connectionDataManager.validate():
                return
        mappingFileFolder = os.path.join(CollectorsParameters.BASE_PROBE_MGR_DIR, CollectorsParameters.getDiscoveryConfigFolder(), MAPPING_CONFIG_FOLDER)
        mappingFileManager = UCSMappingFileManager(mappingFileFolder)
        mappingFile = getMappingFileFromFramework(Framework)
        if mappingFile:
            return replicateTopologyUsingMappingFile(os.path.join(mappingFileFolder, mappingFile), connectionDataManager, mappingFileManager)
        else:
            # No mapping file is a hard error for this integration.
            Framework.reportError('No mapping file found.')
            logger.errorException("No mapping file found.")
    except Exception, e:
        Framework.reportError('%s' % e)
        logger.errorException('%s' % e)
def DiscoveryMain(Framework):
    """Read uri-endpoint entries from the probe's URL configuration file and
    report a CI set for each endpoint (url, type, resolved/declared IPs).
    @return: ObjectStateHolderVector, or None when the config file is absent
    """
    OSHVResult = ObjectStateHolderVector()
    configFileFolder = os.path.join(
        CollectorsParameters.BASE_PROBE_MGR_DIR,
        CollectorsParameters.getDiscoveryConfigFolder()
    )
    uriEndpointConf = os.path.join(configFileFolder, URL_FILE_NAME)
    logger.debug("uriEndpointConf:", uriEndpointConf)
    if not os.path.exists(uriEndpointConf):
        logger.error("UriEndpoint configuration file not found:", uriEndpointConf)
        return None
    listFile = open(uriEndpointConf)
    try:
        url_dom = minidom.parseString(listFile.read())
        nodeList = url_dom.getElementsByTagName("uri-endpoint")
        length = nodeList.length
        # BUGFIX: url/type were referenced before assignment when the first
        # endpoint omitted its <url>/<type> tag; initialize them up front
        # (values still carry over between endpoints, as before).
        url = None
        type = None
        ips = []
        while length > 0:
            node = nodeList.item(nodeList.length - length)
            if node.getElementsByTagName("url"):
                url = node.getElementsByTagName("url")[0].childNodes[0].nodeValue
                ips = resolveIpAddress(url)
            if node.getElementsByTagName("type"):
                type = node.getElementsByTagName("type")[0].childNodes[0].nodeValue
            if node.getElementsByTagName("ip-address"):
                ip_address = node.getElementsByTagName("ip-address")[0].childNodes[0].nodeValue
                ips.append(IPAddress(ip_address))
            if url and node and type and ips:
                OSHVResult.addAll(reportCIs(url, type, ips))
            length = length - 1
    except:
        msg = "Failed to read file:" + str(URL_FILE_NAME)
        logger.reportWarning(msg)
        logger.error(msg, str(sys.exc_info()[1]))
    finally:
        listFile.close()
    return OSHVResult
def testScript():
    """Standalone test driver: run the Atrium Phase-2 mapping against a
    fixed local backup directory instead of the probe directories."""
    baseDir = 'E:\\data\\Desktop\\Pull_From_Remedy_backup\\' + 'TQLExport\\Atrium\\'
    interFiles = File(baseDir + 'inter\\').listFiles()
    oldResults = File(baseDir + 'results\\').listFiles()
    # Clear results from any earlier run.
    if oldResults != None:
        for stale in oldResults:
            stale.delete()
    # Need at least one non-empty intermediate XML file.
    haveXml = 0
    for candidate in interFiles:
        name = candidate.getName()
        if name[-4:].lower() == '.xml' and candidate.length() > 0:
            haveXml = 1
    if not haveXml:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from Atrium or an error occurred in the atrium_query script.')
        return
    ip = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    exportTQL(ip, baseDir)
def osh_createIpOsh(lparOsh, tcpStacks): ipstoexclude = ['127.0.0.1'] # tcpStacks [ip, network, mask, interface name, status, type, mac address]str_name = 'name' str_name = 'name' str_mac_address = 'mac_address' _vector = ObjectStateHolderVector() for mac, tcpentry in tcpStacks.items(): ipAddress = tcpentry[0].strip() if ipAddress not in ipstoexclude: ipOsh = modeling.createIpOSH(ipAddress) probeName = CollectorsParameters.getValue( CollectorsParameters.KEY_COLLECTORS_PROBE_NAME) if isNotNull(probeName): ipOsh.setAttribute('ip_probename', probeName) containedOsh = modeling.createLinkOSH('contained', lparOsh, ipOsh) _vector.add(lparOsh) _vector.add(ipOsh) _vector.add(containedOsh) # create interface ---------------------------------------------------- ifOsh = ObjectStateHolder('interface') interfacename = tcpentry[3].strip() ifOsh.setAttribute(str_name, interfacename) # default the mac address attribute to linkName and update later if MAC found ifOsh.setAttribute( str_mac_address, mac) # if MAC not found for set #linkName as key ifOsh.setContainer(lparOsh) _vector.add(ifOsh) if tcpStacks.has_key(mac): parentLinkOsh = modeling.createLinkOSH('containment', ifOsh, ipOsh) _vector.add(parentLinkOsh) return _vector
def DiscoveryMain(Framework):
    # Push UCMDB IDs to NNMi: resolve the UCMDB server address, build both
    # connection configs, fetch the UCMDB->NNM ID mapping and update NNM.
    try:
        cmdbServerHost = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
        cmdbServerIp = None
        if netutils.isValidIp(cmdbServerHost):
            cmdbServerIp = cmdbServerHost
        else:
            # presumably resolves the host name with itself as the fallback
            # value -- verify netutils.getHostAddress's contract.
            cmdbServerIp = netutils.getHostAddress(cmdbServerHost, cmdbServerHost)
        protocol = getNnmProtocol(Framework, cmdbServerIp)
        cmdbConnection = getCmdbConnectionDetails(Framework, protocol, cmdbServerHost, cmdbServerIp)
        nnmConnection = getNnmConnectionDetails(Framework, protocol)
        logger.debug(str(cmdbConnection))
        cmdbToNnmIds = getUCMDBIDs(cmdbConnection, nnmConnection.serverIp, Framework)
        logger.debug(str(nnmConnection))
        updateNNM(cmdbToNnmIds, nnmConnection, Framework)
    except GeneralException, ex:
        # Surface integration failures to the UI; other exceptions propagate.
        Framework.reportError(str(ex))
def osh_createTcpConnectionsOsh(lparOsh, primaryIP, knownPortsConfigFile, connections):
    # Model TCP/UDP connections: for every connection row build destination
    # (server) host/IP/service-address OSHs and, for established TCP
    # connections, source (client) OSHs plus client-server (dependency) links.
    # Row indices used here: 0 src ip, 1 src port, 2 dst ip, 3 dst port,
    # 5 id, 9 state.  NOTE(review): other indices unverified from here.
    str_containment = 'containment'
    _vector = ObjectStateHolderVector()
    ignoreLocalConnections = 0  ## ER: parameterize
    probeName = CollectorsParameters.getValue(CollectorsParameters.KEY_COLLECTORS_PROBE_NAME)
    for conn in connections:
        dstPort = ''
        dstAddr = ''
        srcAddr = ''
        srcPort = ''
        id = conn[5]
        #(dstAddr, dstPort) = _getIpPortFromSocket(localSocket, primaryIP)
        dstAddr = conn[2].strip()
        if dstAddr == '0.0.0.0' or dstAddr == '127.0.0.1':
            # Wildcard/loopback listeners are attributed to the primary IP.
            dstAddr = primaryIP.strip()
        dstPort = conn[3].strip()
        state = conn[9].strip()
        #(srcAddr, srcPort) = _getIpPortFromSocket(foreignSocket, primaryIP)
        if upper(state) == 'ESTABLISH':
            srcAddr = conn[0].strip()
            srcPort = conn[1].strip()
            if srcAddr == '127.0.0.1':
                srcAddr = primaryIP.strip()
        if ignoreLocalConnections and (srcAddr == dstAddr):
            continue
        if isNotNull(dstAddr):
            destination = '%s:%s' % (dstAddr, dstPort)
            logger.debug('[', state, '] Current connection: ', srcAddr, ' -> ', destination)
            # create destination (server) IP and Host --------------------------
            dstIpOsh = modeling.createIpOSH(dstAddr)
            if isNotNull(probeName):
                dstIpOsh.setAttribute('ip_probename', probeName)
            dstHostOsh = None
            if isNotNull(lparOsh):
                dstHostOsh = lparOsh
            else:
                dstHostOsh = modeling.createHostOSH(dstAddr)
            dstContainedLinkOsh = modeling.createLinkOSH(str_containment, dstHostOsh, dstIpOsh)
            _vector.add(dstIpOsh)
            _vector.add(dstHostOsh)
            _vector.add(dstContainedLinkOsh)
            # create destination service address object ------------------------
            portTypeEnum = TCP_PORT_TYPE_ENUM
            portName = knownPortsConfigFile.getTcpPortName(int(dstPort))
            if upper(state) == 'UDP':
                portTypeEnum = UDP_PORT_TYPE_ENUM
                portName = knownPortsConfigFile.getUdpPortName(int(dstPort))
            if isNull(portName):
                # Fall back to the numeric port when no friendly name exists.
                portName = dstPort
            serverPortOsh = modeling.createServiceAddressOsh(dstHostOsh, dstAddr, int(dstPort), portTypeEnum, portName)
            _vector.add(serverPortOsh)
            if isNotNull(srcAddr):
                # create source (client) IP and Host ---------------------------
                srcIpOsh = modeling.createIpOSH(srcAddr)
                if isNotNull(probeName):
                    srcIpOsh.setAttribute('ip_probename', probeName)
                srcHostOsh = modeling.createHostOSH(srcAddr)
                srcContainedLinkOsh = modeling.createLinkOSH(str_containment, srcHostOsh, srcIpOsh)
                _vector.add(srcIpOsh)
                _vector.add(srcHostOsh)
                _vector.add(srcContainedLinkOsh)
                # create client-server links -----------------------------------
                _vector.add(_createClientServerLinkOsh(dstPort, serverPortOsh, portName, lower(state), srcIpOsh))
                # create client server dependency links ------------------------
                _vector.add(_createClientServerDependencyLinkOsh(dstHostOsh, dstPort, srcHostOsh, portName))
    return _vector
def __init__(self, fileName, Framework, fileEncoding=None):
    """File-backed data source; falls back to the probe's default OEM
    encoding when no (truthy) fileEncoding is given."""
    DataSource.__init__(self)
    self.fileName = fileName
    self.Framework = Framework
    if fileEncoding:
        self.encoding = fileEncoding
    else:
        self.encoding = CollectorsParameters.getDefaultOEMEncoding()
def discoverDisk(client, myVec, hostOSH):
    # Run the bundled diskinfo.exe on the remote host and add a disk OSH to
    # myVec for every record parsed from its "Name/Size/Free/Type" output.
    cmdDiskInfo = 'diskinfo.exe'
    localFile = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + CollectorsParameters.FILE_SEPARATOR + 'diskinfo.exe'
    remoteFile = client.copyFileIfNeeded(localFile)
    if not remoteFile:
        logger.debug('Failed copying %s' % cmdDiskInfo)
        return
    ntcmdErrStr = 'Remote command returned 1(0x1)'
    buffer = client.execCmd(remoteFile, DISKINFO_TIMEOUT)  # V@@CMD_PERMISION ntcmd protocol execution
    logger.debug('Output of ', remoteFile, ': ', buffer)
    if buffer.find(ntcmdErrStr) != -1:
        logger.warn('Failed getting disk info')
    else:
        logger.debug('Got disk info - parsing...')
        # One record per output line.
        disks = buffer.split('\n')
        for disk in disks:
            disk = disk.strip()
            name = ''
            size = 0
            usedSize = None
            diskType = ''
            try:
                # Get disk size
                matchSize = re.search('Size: (\d+) MB', disk)
                if matchSize:
                    size = int(matchSize.group(1))
                matchFreeSize = re.search('Free: (\d+) MB', disk)
                if matchFreeSize:
                    freeSize = int(matchFreeSize.group(1))
                    usedSize = size - freeSize
                # Get disk type
                matchType = re.search('Type: (.*)', disk)
                if matchType:
                    diskType = matchType.group(1)
                    diskType = diskType.strip()
                    # Heuristic: large "FLOPPY" devices are really flash drives.
                    if (diskType == 'FLOPPY' and size > 5):
                        diskType = 'FLASH'
                # Get disk name
                matchName = re.search(r'Name: (\w):\\,', disk)
                if matchName:
                    name = matchName.group(1)
                    name = name.strip()
                # Create DISK OSH
                if name != '':
                    if diskType in STORAGE_NAME_TO_STORAGE_TYPE:
                        storageType = STORAGE_NAME_TO_STORAGE_TYPE[diskType]
                    else:
                        storageType = modeling.OTHER_STORAGE_TYPE
                    diskOsh = modeling.createDiskOSH(hostOSH, name, storageType, size, name=name, usedSize=usedSize)
                    myVec.add(diskOsh)
            except:
                # Skip unparsable lines; keep processing the rest.
                logger.errorException('Error in discoverDisk()')