def onProxyConnectionSuccess(self):
    """
    Called when the connection through the proxy succeeded.

    Starts the websocket handshake when websocket support is enabled,
    otherwise registers directly to the controller.
    """
    self.info("Connection successful")
    if not self.wsSupport:
        self.doRegistration()
        return

    self.info("Do ws handshake")
    # pick the websocket resource path according to the client type
    # (agent vs probe) and whether ssl is enabled
    if self.getTypeClientAgent() == NetLayerLib.TYPE_AGENT_AGENT:
        if self.sslSupport:
            wspath = Settings.get('Server', 'websocket-secure-path')
        else:
            wspath = Settings.get('Server', 'websocket-path')
    else:
        if self.sslSupport:
            wspath = Settings.get('Server', 'websocket-secure-path-probe')
        else:
            wspath = Settings.get('Server', 'websocket-path-probe')
    self.handshakeWebSocket(resource=wspath, hostport=self.controllerIp)
def moveNewFile (self, data):
    """
    Move the file from the temp unix to the test result storage
    Deprecated function thanks to the migration to python3 on probe side

    @type data: dict
    @param data: must contain the keys 'result-path' and 'filename'
    """
    self.trace(" moving file" )
    try:
        testResult = data['result-path']
        fileName = data['filename']
        self.trace( 'move %s to %s' % ( fileName, testResult ) )
        # move file
        testsResultPath = '%s%s' % ( Settings.getDirExec(),Settings.get( 'Paths', 'testsresults' ) )
        shutil.copyfile( src = '/tmp/%s' % fileName,
                         dst = '%s/%s/%s' % (testsResultPath, testResult, fileName) )
    except Exception as e:
        self.error( "unable to move the new file: %s" % e )
    else:
        try:
            # now notify all connected users
            size_ = os.path.getsize( '%s/%s/%s' % (testsResultPath, testResult, fileName) )
            # normalize the result path and split it into
            # <projectId>/<mainPath>/<subPath>
            if testResult.startswith('/'):
                testResult = testResult[1:]
            tmp = testResult.split('/', 1)
            projectId = tmp[0]
            tmp = tmp[1].split('/', 1)
            mainPathTozip= tmp[0]
            subPathTozip = tmp[1]
            if Settings.getInt( 'Notifications', 'archives'):
                # nested folder/file tree describing the new archive entry
                m = [ { "type": "folder", "name": mainPathTozip, "project": "%s" % projectId,
                        "content": [ { "type": "folder", "name": subPathTozip, "project": "%s" % projectId,
                                       "content": [ { "project": "%s" % projectId, "type": "file",
                                                      "name": fileName, 'size': str(size_) } ]} ] } ]
                notif = {}
                notif['archive'] = m
                notif['stats-repo-archives'] = { 'nb-zip':1, 'nb-trx':0, 'nb-tot': 1,
                                                 'mb-used': RepoArchives.instance().getSizeRepo(folder=RepoArchives.instance().testsPath),
                                                 'mb-free': RepoArchives.instance().freeSpace(p=RepoArchives.instance().testsPath) }
                data = ( 'archive', ( None, notif) )
                ESI.instance().notifyByUserAndProject(body = data, admin=True, monitor=False,
                                                      tester=True, projectId="%s" % projectId)
        except Exception as e:
            self.error( "unable to notify users for this new file: %s" % e )
        # clean temp dir
        try:
            os.remove( '/tmp/%s' % fileName )
        except Exception as e:
            # best-effort cleanup: the temp file may already be gone
            pass
def getNbTester(self):
    """
    Return the number of tester-level users present in the database.

    @return: nb testers
    @rtype: int
    """
    self.trace( 'get nb tester from db' )
    testerLevel = Settings.get( 'Server', 'level-tester')
    return self.getNbUserOfType(userType=testerLevel)
def __init__(self):
    """
    Repository manager for log report files.

    Initializes the base repository manager with the reports path and
    the list of supported file extensions.
    """
    reportsPath = '%s%s' % ( Settings.getDirExec(),
                             Settings.get( 'Paths', 'reports' ) )
    extensions = [ RepoManager.TEST_RESULT_EXT,
                   RepoManager.TXT_EXT,
                   RepoManager.CAP_EXT,
                   RepoManager.ZIP_EXT,
                   RepoManager.PNG_EXT ]
    RepoManager.RepoManager.__init__(self,
                                     pathRepo=reportsPath,
                                     extensionsSupported=extensions)
def getNbAdmin(self):
    """
    Return the number of admin-level users present in the database.

    @return: nb admins
    @rtype: int
    """
    self.trace( 'get nb admin from db' )
    adminLevel = Settings.get( 'Server', 'level-admin')
    return self.getNbUserOfType(userType=adminLevel)
def querySQL ( self, query, insertData=False, columnName=False, debugCaller=False):
    """
    Make a SQL query, a new connection made each time.

    @param query: sql query
    @type query: string
    @param insertData: when True, return the last inserted row id
    @type insertData: boolean
    @param columnName: when True, return rows as dicts keyed by column name
    @type columnName: boolean
    @param debugCaller: when True, also trace the caller of the query
    @type debugCaller: boolean
    @return: (success flag, rows) - rows is None on failure
    @rtype: tuple
    """
    ret = False
    rows = None
    conn = None
    try:
        conn = MySQLdb.connect ( host = Settings.get( 'MySql', 'ip') ,
                                 user = Settings.get( 'MySql', 'user'),
                                 passwd = Settings.get( 'MySql', 'pwd'),
                                 db = Settings.get( 'MySql', 'db'),
                                 unix_socket=Settings.get( 'MySql', 'sock') )
        cursor = conn.cursor()
        if debugCaller:
            self.trace( "SQL QUERY: %s - %s" % (caller(), query) )
        else:
            self.trace( "SQL QUERY: %s" % (query) )
        cursor.execute ( query )
        if insertData:
            rows = cursor.lastrowid
        else:
            if columnName:
                rows = []
                for row in cursor.fetchall():
                    fields = map(lambda x:x[0], cursor.description)
                    rows.append( dict(zip(fields,row)) )
            else:
                rows = cursor.fetchall()
        cursor.close ()
        conn.commit ()
        ret = True
    except MySQLdb.Error as e:
        self.error( "unable to execute sql query: %s" % e )
    finally:
        # bugfix: the original leaked the connection when the query raised
        # MySQLdb.Error; always release it here
        if conn is not None:
            try:
                conn.close ()
            except MySQLdb.Error:
                pass
    return ret, rows
def addPyInitFile(self, pathFile, descr="", helper="", allmodules="", adps=False, mainInit=False):
    """
    Write the default __init__.py file of the repository.

    @param pathFile: destination folder of the __init__.py file
    @type pathFile: string
    @param descr: description injected in the template
    @type descr: string
    @param helper: helper text injected in the template
    @type helper: string
    @param allmodules: modules list, used only with mainInit
    @type allmodules: string
    @param adps: use the adapters-package template
    @type adps: boolean
    @param mainInit: use the main-init template
    @type mainInit: boolean
    @return: True on success, False otherwise
    @rtype: boolean
    """
    HEADER = ''
    tpl_path = "%s/%s/adapter_header.tpl" % ( Settings.getDirExec(),
                                              Settings.get( 'Paths', 'templates' ) )
    try:
        # bugfix: use a context manager so the template file is always
        # closed, even when read() raises
        with open( tpl_path , "r") as fd:
            HEADER = fd.read()
    except Exception as e:
        self.error( 'unable to read template adapter header: %s' % str(e) )
    try:
        if mainInit:
            default_init = MAIN_INIT % (HEADER, descr, helper, allmodules)
        else:
            default_init = ADP_INIT % (HEADER, descr, helper)
        # adps takes precedence over the previous choice
        if adps:
            default_init = ADPS_INIT % (HEADER,
                                        Settings.get( 'Default', 'current-adapters' ),
                                        Settings.get( 'Default', 'current-libraries' ),
                                        descr, helper)
        # bugfix: the original leaked the file handle if write() raised
        with open( '%s/__init__.py' % pathFile, 'w') as f:
            f.write( default_init )
    except Exception as e:
        self.error( e )
        return False
    return True
def checkGlobalSyntax(self):
    """
    Check the syntax of all adapters by running the doc generator script.

    @return: (True, '') on success, (False, error message) otherwise
    @rtype: tuple
    """
    tmpPath = "%s/%s" % (Settings.getDirExec(), Settings.get( 'Paths', 'tmp' ))
    __cmd__ = "%s %s/Core/docgenerator.py %s %s False False True True" % (
        Settings.get( 'Bin', 'python' ),
        Settings.getDirExec(),
        Settings.getDirExec(),
        tmpPath
    )
    p = os.popen(__cmd__)
    msg_err = p.readlines()
    if not msg_err:
        return True, ''
    # strip the .py suffixes from the reported file names
    msg_err = '\n'.join(msg_err).replace(".py", "")
    return False, msg_err
def __getScreen(self):
    """
    Internal function to retreive the screen from the device

    Asks the device-side json-rpc automation server (reachable through the
    forwarded local port) to take a screenshot and dump the ui hierarchy,
    then pulls both files with adb and triggers onScreenCaptured.
    """
    # full path of the adb executable (windows-style separators)
    __adbexe__ = '%s\%s\%s' % (Settings.getDirExec(), Settings.get('Paths', 'bin'), Settings.get('BinWin', 'adb-exe'))
    # local destination files; note: the "screncapture" typo is also used as
    # the remote filename, so it must stay consistent on both sides
    __ret__ = '%s\screncapture.png' % self.getTemp()
    __ret2__ = '%s\layout.xml' % self.getTemp()
    # adb pull commands for the screenshot and the layout dump
    __cmd__ = '"%s" pull /data/local/tmp/screncapture.png "%s"' % ( __adbexe__, __ret__)
    # NOTE(review): the remote path "/data/local/tmp/local/tmp/layout.xml"
    # looks duplicated - confirm against the device-side dump location
    __cmd2__ = '"%s" pull /data/local/tmp/local/tmp/layout.xml "%s"' % ( __adbexe__, __ret2__)
    # json-rpc request against the local automation endpoint
    req = urllib2.Request('http://127.0.0.1:%s/jsonrpc/0' % self.localPort)
    req.add_header('Content-Type', 'application/json; charset=utf-8')
    try:
        # take the screenshot (scale 1.0, quality 90)...
        response = urllib2.urlopen( req, b'{"jsonrpc":"2.0","method":"takeScreenshot","id":1, "params": [ "screncapture.png", 1.0, 90] }' )
        # ...and dump the window hierarchy as xml
        response2 = urllib2.urlopen( req, b'{"jsonrpc":"2.0","method":"dumpWindowHierarchy","id":1, "params": [ true, "layout.xml" ] }' )
        # pull the layout file from the device
        subprocess.call(__cmd2__, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    except Exception as e:
        self.error("error on adb get screen thread: %s" % e)
    else:
        # pull the screenshot; adb exits with 0 on success
        ret = subprocess.call(__cmd__, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        if not ret:
            self.onScreenCaptured(filename=__ret__, xml=__ret2__)
def listEthsNew(self):
    """
    Discovers all network interfaces of the server
    New function because ifconfig is deprecated

    Parses the output of `ip addr` and stores the interfaces found in
    self.networkInterfaces; nothing is returned.

    Example of the parsed output:
    [ current]# ip addr
    1: lo: <LOOPBACK,UP,LOWER_UP> mtu 16436 qdisc noqueue
        link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
        inet 127.0.0.1/8 scope host lo
    2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast qlen 1000
        link/ether 52:54:00:2a:d0:49 brd ff:ff:ff:ff:ff:ff
        inet 204.62.14.177/24 brd 204.62.14.255 scope global eth0
    3: eth1: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast qlen 1000
        link/ether 52:54:00:17:18:79 brd ff:ff:ff:ff:ff:ff
        inet 10.9.1.132/8 brd 10.255.255.255 scope global eth1
    """
    eths = []
    # run the `ip addr` binary configured in the settings
    ipaddr = subprocess.check_output(Settings.get('Bin', 'ipaddr'),
                                     stderr=subprocess.STDOUT, shell=True)
    ipaddr = ipaddr.strip()
    # python3 returns bytes; decode them
    if sys.version_info > (3, ):
        ipaddr = ipaddr.decode('utf8')
    self.trace('ipaddr: %s' % ipaddr)
    try:
        eth_tmp = {}
        for line in ipaddr.splitlines():
            if 'link/ether' in line:
                line_tmp = line.strip().split( ' ' )
                # link/loopback 00:00:00:00:00:00 brd 00:00:00:00:00:00
                eth_tmp.update({'mac': line_tmp[1]})
            if 'inet ' in line:
                line_tmp = line.strip().split( ' ' )
                # inet 204.62.14.177/24 brd 204.62.14.255 scope global eth0
                # interface name is the last token of the inet line
                eth_tmp.update({'name': line_tmp[-1:][0]})
                eth_tmp.update({'ip': line_tmp[1].split('/')[0]})
                if 'brd ' in line:
                    eth_tmp.update({'broadcast': line_tmp[3]})
                else:
                    # no broadcast field on the line (e.g. loopback)
                    eth_tmp.update({'broadcast': '255.0.0.0'})
                # mask is kept in CIDR prefix form, e.g. "/24"
                eth_tmp.update({'mask': "/%s" % line_tmp[1].split('/')[1]})
                # an inet line completes one interface entry
                eths.append(eth_tmp)
                eth_tmp = {}
    except Exception as e:
        self.error('unable to read network interfaces: %s' % e)
    # adding this network
    eths.append({'name': 'all', 'ip': '0.0.0.0', 'mask': '255.0.0.0'})
    # save all eths
    self.networkInterfaces = eths
def checkProjectsAuthorizationV2(self, user, projectId):
    """
    Check if the project id provided is authorized for the user.

    @param user: user login
    @type user: string
    @param projectId: project id to check
    @return: (authorized flag, rows of authorized projects)
    @rtype: tuple
    """
    ret = False
    rows = []
    # bugfix: the login placeholder was missing from the query, so the
    # string formatting raised TypeError (4 arguments, 3 placeholders).
    # SECURITY NOTE: `user` is interpolated directly into the SQL
    # statement; escape it (MySQLdb.escape_string) if it can come from
    # untrusted input.
    sql = 'SELECT r.project_id, p.name FROM `%s-users` u,`%s-relations-projects` r, `%s-projects` p WHERE u.login="%s" and u.id=r.user_id AND r.project_id=p.id ;' % (
        Settings.get( 'MySql', 'table-prefix'),
        Settings.get( 'MySql', 'table-prefix'),
        Settings.get( 'MySql', 'table-prefix'),
        user
    )
    retDb, rows = DbManager.instance().querySQL( query=sql, columnName=True )
    if not retDb:
        self.error( 'unable to get project from db for the user %s: %s' % (user, str(retDb)) )
    else:
        projectAuthorized = False
        for prj in rows:
            if "%s" % prj["project_id"] == "%s" % projectId:
                projectAuthorized = True
        self.trace( '[Login=%s] [ProjectID=%s] authorized projects list: %s' % (user, projectId, rows) )
        ret = projectAuthorized
    return (ret,rows)
def getProjects(self, user, b64=True):
    """
    Return the projects associated with a user.

    @param user: user login
    @type user: string
    @param b64: unused, kept for interface compatibility
    @return: rows of (project_id, name), empty list on error
    @rtype: list
    """
    prjs = []
    sql = 'SELECT r.project_id, p.name FROM `%s-users` u,`%s-relations-projects` r, `%s-projects` p ' % (
        Settings.get( 'MySql', 'table-prefix'),
        Settings.get( 'MySql', 'table-prefix'),
        Settings.get( 'MySql', 'table-prefix')
    )
    # bugfix: the login placeholder was missing, so formatting with (user)
    # raised "not all arguments converted during string formatting".
    # SECURITY NOTE: `user` is interpolated directly into the SQL
    # statement; escape it if it can come from untrusted input.
    sql += 'WHERE u.login="%s" and u.id=r.user_id AND r.project_id=p.id ;' % ( user )
    ret, rows = DbManager.instance().querySQL( query=sql, columnName=True )
    if not ret:
        self.error( 'unable to get project from db for the user %s: %s' % (user, str(ret)) )
    else:
        self.trace( "List of projects for user %s: %s" % (user,rows) )
        prjs = rows
    return prjs
def apiAuthorization(self, login, password):
    """
    Check credentials for the rest api and open a user session.

    @param login: user login
    @type login: string
    @param password: clear-text password
    @type password: string
    @return: (session id or error code, cookie expiry string)
    @rtype: tuple
    """
    self.trace('Rest authorization called for Login=%s' % (login))
    expires = ''

    # the login must exist in the users cache
    usersDb = UsersManager.instance().cache()
    if login not in usersDb:
        self.trace("Login=%s account not found" % login)
        return (self.CODE_NOT_FOUND, expires)

    user_profile = usersDb[login]

    # account disable ?
    if not user_profile['active']:
        self.trace("%s account not active" % login)
        return (self.CODE_DISABLED, expires)

    # stored password is sha1( salt + sha1(password) )
    hashed = hashlib.sha1()
    salted = "%s%s" % (Settings.get('Misc', 'salt'), password)
    hashed.update(salted.encode('utf8'))
    if user_profile['password'] != hashed.hexdigest():
        self.trace("incorrect password for %s account" % login)
        return (self.CODE_FAILED, expires)

    # credentials accepted: open the session
    session_id = self.generateSessionid()
    user_profile['last_activity'] = time.time()

    lease = int(Settings.get('Users_Session', 'max-expiry-age')) #in seconds
    end = time.gmtime(user_profile['last_activity'] + lease)
    expires = time.strftime("%a, %d-%b-%Y %T GMT", end)

    self.userSessions.update({session_id: user_profile})
    self.trace('Rest authorized for Login=%s SessionId=%s Expires=%s' % (login, session_id, expires))
    return (session_id, expires)
def getStatisticsFromDb(self):
    """
    Read the total number of projects from the database.

    @return: (CODE_OK, stats row) on success, (CODE_ERROR, message) on error
    @rtype: tuple
    """
    tablePrefix = Settings.get( 'MySql', 'table-prefix')
    sql = """SELECT COUNT(*) AS total_projects FROM `%s-projects`""" % (tablePrefix)
    dbRet, dbRows = DbManager.instance().querySQL( query = sql, columnName=True )
    if dbRet:
        return (self.context.CODE_OK, dbRows[0] )
    self.error( "unable to get statitics for projects" )
    return (self.context.CODE_ERROR, "unable to get statitics for projects")
def __init__(self, controllerIp, controllerPort, toolName, toolDesc,
             defaultTool, supportProxy=0, proxyIp=None, proxyPort=None,
             sslSupport=True):
    """
    Probe constructor.

    @param controllerIp: controller ip/host
    @type controllerIp: string
    @param controllerPort: controller port
    @type controllerPort: integer
    @param toolName: probe name
    @type toolName: string
    @param toolDesc: probe description
    @type toolDesc: string
    @param defaultTool: True if started by the server, False otherwise
    @type defaultTool: boolean
    """
    GenericTool.Tool.__init__(self, controllerIp, controllerPort,
                              toolName, toolDesc, defaultTool,
                              supportProxy=supportProxy,
                              proxyIp=proxyIp, proxyPort=proxyPort,
                              sslSupport=sslSupport,
                              toolType = "Probe")
    self.__type__ = __TYPE__
    self.__args__ = [ 'files' ]
    # path of the tail binary used to follow the probed files
    self.binTail = Settings.get( 'BinLinux', 'tail' )
    # spawned processes registry - empty at start
    self.__pids__ = {}
def __init__(self, listeningAddress, agentName='ASI', sslSupport=False, wsSupport=False, tsi=None, context=None):
    """
    Construct Agent Server Interface

    @param listeningAddress: ip/port the agent server listens on
    @type listeningAddress:
    @param agentName: name of this server agent (default 'ASI')
    @type agentName: string
    @param sslSupport: enable ssl on the agent channel
    @type sslSupport: boolean
    @param wsSupport: enable websocket on the agent channel
    @type wsSupport: boolean
    @param tsi: test server interface instance
    @param context: server context instance
    """
    NetLayerLib.ServerAgent.__init__( self, listeningAddress=listeningAddress,
                                      agentName=agentName,
                                      keepAliveInterval=Settings.getInt('Network', 'keepalive-interval'),
                                      inactivityTimeout=Settings.getInt('Network', 'inactivity-timeout'),
                                      responseTimeout=Settings.getInt('Network', 'response-timeout'),
                                      # NOTE(review): read with get() while the other timeouts use
                                      # getInt() - confirm a string value is accepted here
                                      selectTimeout=Settings.get('Network', 'select-timeout'),
                                      sslSupport=sslSupport,
                                      wsSupport=wsSupport,
                                      certFile='%s/%s' % (Settings.getDirExec(),
                                                          Settings.get('Agent_Channel', 'channel-ssl-cert')),
                                      keyFile='%s/%s' % (Settings.getDirExec(),
                                                         Settings.get('Agent_Channel', 'channel-ssl-key')),
                                      pickleVer=Settings.getInt('Network', 'pickle-version'))
    self.tsi = tsi
    self.context = context
    # locks protecting concurrent access to the agents data below
    self.__mutex = threading.RLock()
    self.__mutexNotif = threading.RLock()
    # registered agents - presumably keyed by agent name; verify usage
    self.agentsRegistered = {}
    # public ip per agent - presumably same keying; verify usage
    self.agentsPublicIp = {}
def __init__ (self):
    """
    Statistics Manager for tests.

    Builds the per-statistic database table names and reads the
    notification setting.
    """
    self.__mutex__ = threading.RLock()
    # hoisted: the table prefix was previously read from the settings
    # eight times in a row
    prefix = Settings.get( 'MySql', 'table-prefix')
    self.dbt_testcases = '%s-testcases-stats' % prefix
    self.dbt_testunits = '%s-testunits-stats' % prefix
    self.dbt_testabstracts = '%s-testabstracts-stats' % prefix
    self.dbt_testsuites = '%s-testsuites-stats' % prefix
    self.dbt_testplans = '%s-testplans-stats' % prefix
    self.dbt_testglobals = '%s-testglobals-stats' % prefix
    self.dbt_scripts = '%s-scripts-stats' % prefix
    self.dbt_writing = '%s-writing-stats' % prefix
    # whether connected users are notified on statistics updates
    self.notifyUsers = Settings.getInt( 'Notifications', 'statistics')
def generateSutLibraries():
    """
    Return help for all sut libraries

    Imports every sub-library of the SutLibraries package and builds the
    documentation structure for each one exposing a __HELPER__.
    """
    global latestlibRead2
    pkg = __import__("SutLibraries")
    descr_pkg = getattr(pkg, '__DESCRIPTION__')
    # import each sub-library; latestlibRead2 records the library currently
    # being imported - presumably used for error reporting by the caller
    # when an import fails; verify
    for libname in pkg.__all__:
        latestlibRead2 = libname
        # __import__("SutLibraries.X") returns the top-level package, so
        # `pkg` still refers to SutLibraries after the loop
        pkg = __import__("SutLibraries.%s" % libname)
    ret = [] # list of libraries
    for libname in pkg.__all__:
        sub_mods = getattr(pkg, libname)
        default_libs = False
        generic_libs = False
        descr_libs = getattr(sub_mods, '__DESCRIPTION__')
        help_libs = getattr(sub_mods, '__HELPER__')
        if help_libs:
            ret_mod = DocInspect.inspectLibrary( package=sub_mods,
                                                 modules=toInspect(help_libs, sub_mods))
        # NOTE(review): ret_mod is only assigned when help_libs is truthy; a
        # library without helpers reuses the previous one (or raises
        # NameError on the first iteration) - confirm intended
        # read default adapter from config file, new in v10.1
        if libname == Settings.get("Default", "current-libraries"):
            default_libs =True
        # read default adapter from config file, new in v12
        if libname == Settings.get("Default", "generic-libraries"):
            generic_libs =True
        # end of new
        cur_libs = { 'name': libname , 'type': 'libraries', 'desc': descr_libs,
                     'modules': ret_mod, 'is-default': default_libs,
                     'is-generic': generic_libs }
        ret.append( cur_libs )
    descr_pkg = { 'name': pkg.__name__ , 'libraries': ret ,
                  'type': 'package-libraries', 'desc': descr_pkg }
    return [descr_pkg]
def generateHelps(self):
    """
    Generate the cache of the documentation.

    Runs the DocBuild script in a subprocess and reports its outcome.

    @return: (success flag, error details)
    @rtype: tuple
    """
    self.trace("Generating help cache...")
    ret = False
    details = ''
    try:
        # args: path /tas, path /var/tmp/, sut installed
        tmpPath = "%s/%s" % (Settings.getDirExec(), Settings.get('Paths', 'tmp'))
        __cmd__ = "%s %s/ServerEngine/DocBuild.py %s %s %s False %s False" % (
            Settings.get('Bin', 'python'),
            Settings.getDirExec(),
            Settings.getDirExec(),
            tmpPath,
            SUTADAPTERS_INSTALLED,
            SUTLIBADAPTERS_INSTALLED
        )
        self.trace(__cmd__)
        p = subprocess.Popen(shlex.split(__cmd__),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        if out:
            self.trace("Helper (out): %s" % out)
        if err:
            self.error("Helper (err): %s" % err)
        if p.returncode == 1:
            self.error('Unable to generate help cache')
            # stderr is bytes under python3
            details = err.decode("utf8") if sys.version_info > (3, ) else err
        else:
            self.info('Documentation cache successfully generated')
            ret = True
    except Exception as e:
        self.error(e)
    return (ret, details)
def querySQL(query, db=Settings.get('MySql', 'db')):
    """
    Execute a SQL query against the database and commit it.

    Exits the program on database error.

    @param query: sql query to execute
    @type query: string
    @param db: database name (default read from settings at import time)
    @type db: string
    """
    try:
        conn = MySQLdb.connect(host=Settings.get('MySql', 'ip'),
                               user=Settings.get('MySql', 'user'),
                               passwd=Settings.get('MySql', 'pwd'),
                               db=db,
                               unix_socket=Settings.get('MySql', 'sock'))
        cursor = conn.cursor()
        cursor.execute(query)
        cursor.close()
        conn.commit()
        conn.close()
    # bugfix: `except MySQLdb.Error, e` is python2-only syntax and a
    # SyntaxError under python3; use the `as` form like the rest of the file
    except MySQLdb.Error as e:
        print("[querySQL] %s" % str(e))
        sys.exit(1)
def isUp():
    """
    Check if the web server if ready (apache)

    Polls the index page until it answers 200 OK or the configured boot
    timeout elapses; raises an Exception on timeout or any http error.
    """
    try:
        # init the http client with the boot timeout; ssl certificate
        # validation is deliberately disabled (self-signed local server)
        timeoutVal = Settings.getInt('Boot', 'timeout-http-server')
        http = httplib2.Http(timeout=timeoutVal,
                             disable_ssl_certificate_validation=True,
                             ca_certs="%s/Libs/cacerts.txt" % Settings.getDirExec())
        http.add_credentials(Settings.get('Web', 'login'), Settings.get('Web', 'password'))
        http.force_exception_to_status_code = True

        # build the url to test, scheme and port depend on the https setting
        scheme = 'http'
        portHttp = Settings.get('Web', 'http-port')
        if Settings.getInt('Web', 'https'):
            scheme = 'https'
            portHttp = Settings.get('Web', 'https-port')

        uri = '%s://%s:%s/%s/index.php' % (scheme,
                                           Settings.get( 'Web', 'fqdn'),
                                           portHttp,
                                           Settings.get('Web', 'path'))
        timeout = False
        go = False
        startTime = time.time()
        # poll the index page until it answers 200 or the timeout elapses
        while (not go) and (not timeout):
            # timeout elapsed ?
            if (time.time() - startTime) >= timeoutVal:
                timeout = True
            else:
                # test the web server
                Logger.debug("Get index: %s" % uri)
                resp, content = http.request(uri, "GET")
                if resp['status'] == '200':
                    Logger.debug("200 OK received")
                    go = True
                else:
                    Logger.debug("response incorrect (%s)" % resp['status'])
                    Logger.debug("response (%s)" % resp)
                    Logger.debug("response content (%s)" % content)
                    Logger.debug("retry in %s second" % Settings.get('Web', 'retry-connect'))
                    time.sleep(int(Settings.get('Web', 'retry-connect')))
        if timeout:
            raise Exception("timeout")
    except Exception as e:
        raise Exception("server web not ready: %s" % str(e))
def querySQL(query, db=Settings.get('MySql', 'db')):
    """
    Execute a SQL query against the database and commit it.

    Exits the program on database error.

    @param query: sql query
    @type query: string
    @param db: database name (default read from settings at import time)
    @type db: string
    """
    try:
        conn = MySQLdb.connect(host=Settings.get('MySql', 'ip'),
                               user=Settings.get('MySql', 'user'),
                               passwd=Settings.get('MySql', 'pwd'),
                               db=db,
                               unix_socket=Settings.get('MySql', 'sock'))
        # bugfix: the cursor creation and the commit were commented out, so
        # the function raised NameError on `cursor` and never committed
        cursor = conn.cursor()
        cursor.execute(query)
        cursor.close()
        conn.commit()
        conn.close()
        ret = True
    # bugfix: python2-only `except X, e` syntax replaced with the `as` form
    except MySQLdb.Error as e:
        print("[querySQL] %s" % str(e))
        sys.exit(1)
def updateSession(self, sessionId):
    """
    Refresh the last-activity timestamp of an existing session.

    @param sessionId: session identifier
    @return: new cookie expiry string, or '' when the session is unknown
    @rtype: string
    """
    if sessionId not in self.userSessions:
        return ''
    now = time.time()
    self.userSessions[sessionId]['last_activity'] = now
    lease = int(Settings.get('Users_Session', 'max-expiry-age')) #in seconds
    end = time.gmtime(now + lease)
    return time.strftime("%a, %d-%b-%Y %T GMT", end)
def getProjectsFromDB(self): """ Delete all projects """ # init some shortcut prefix = Settings.get( 'MySql', 'table-prefix') escape = MySQLdb.escape_string # get all projects sql = """SELECT * FROM `%s-projects`""" % ( prefix) dbRet, dbRows = DbManager.instance().querySQL( query = sql, columnName=True ) if not dbRet: self.error( "unable to read project's table" )
def createBackup(self, backupName):
    """
    Create a backup of all adapters

    Depending on the settings, the backup is written as a tar.gz archive
    and/or a zip file; admin users are notified when the zip succeeds.

    @type backupName:
    @param backupName: logical name of the backup
    @return: context return code (CODE_OK on success)
    @rtype:
    """
    ret = self.context.CODE_ERROR
    try:
        # build the backup file name: <prefix><index>_<name>_<timestamp>
        backupIndex = self.getLastBackupIndex( pathBackups=self.destBackup )
        backupDate = self.getTimestamp()
        backupFilename = '%s%s_%s_%s' % ( self.prefixBackup, backupIndex, backupName, backupDate )
        # new in v14.0.0: create tar gz
        if Settings.getInt( 'Backups', 'adapters-dest-tar-gz' ):
            self.trace( "backup adapters to %s/%s.tar.gz" % (self.destBackup,backupFilename) )
            DEVNULL = open(os.devnull, 'w')
            __cmd__ = "%s cvfz %s/%s.tar.gz -C %s ." % (Settings.get( 'Bin', 'tar' ),
                                                        self.destBackup, backupFilename,
                                                        self.testsPath)
            # subprocess.call returns a non-zero exit code on failure
            ret = subprocess.call(__cmd__, shell=True, stdout=DEVNULL, stderr=DEVNULL)
            if ret:
                raise Exception("unable to tar sut adapter pkg")
            ret = self.context.CODE_OK
        # create a zip file
        if Settings.getInt( 'Backups', 'adapters-dest-zip' ):
            self.trace( "backup adapters to %s/%s.zip" % (self.destBackup,backupFilename) )
            zipped = self.zipFolder(folderPath=self.testsPath,
                                    zipName="%s.zip" % backupFilename,
                                    zipPath=self.destBackup,
                                    ignoreExt=['.pyc', '.pyo'])
            ret = zipped
            if zipped == self.context.CODE_OK:
                self.info( "backup adapters successfull: %s" % backupFilename )
                # now notify all connected admin users
                backupSize = os.path.getsize( "%s/%s.zip" % (self.destBackup, backupFilename) )
                notif = {}
                notif['repo-adapters'] = {}
                notif['repo-adapters']['backup'] = {'name': backupName,
                                                    'date': backupDate,
                                                    'size': backupSize,
                                                    'fullname': "%s.zip" % backupFilename }
                data = ( 'repositories', ( None, notif) )
                ESI.instance().notifyAllAdmins(body = data)
            else:
                self.error( "backup adapters %s failed" % backupFilename )
    except Exception as e:
        raise Exception( "[createBackup] %s" % str(e) )
    return ret
def __init__(self, controllerIp, controllerPort, toolName, toolDesc, defaultTool, supportProxy=0, proxyIp=None, proxyPort=None, sslSupport=True, sikulixIp="127.0.0.1", sikulixPort=50001):
    """
    Dummy agent

    @param controllerIp: controller ip/host
    @type controllerIp: string
    @param controllerPort: controller port
    @type controllerPort: integer
    @param toolName: agent name
    @type toolName: string
    @param toolDesc: agent description
    @type toolDesc: string
    @param defaultTool: True if the agent is started by the server, False otherwise
    @type defaultTool: boolean
    @param sikulixIp: sikulix server ip (default 127.0.0.1)
    @type sikulixIp: string
    @param sikulixPort: sikulix server port (default 50001)
    @type sikulixPort: integer
    """
    GenericTool.Tool.__init__(self, controllerIp, controllerPort, toolName, toolDesc,
                              defaultTool, supportProxy=supportProxy, proxyIp=proxyIp,
                              proxyPort=proxyPort, sslSupport=sslSupport)
    self.__type__ = __TYPE__
    self.__mutex__ = threading.RLock()
    # previous java/sikulix binary checks, kept for reference:
    # if sys.platform == "win32" :
    #     self.binJava = Settings.get( 'BinWin', 'java' )
    # elif sys.platform == "linux2":
    #     if not os.path.exists( Settings.get( 'BinLinux', 'java' ) ):
    #         raise Exception('java is not installed')
    #     if not os.path.exists( Settings.get( 'BinLinux', 'sikulix' ) ):
    #         raise Exception('sikulix is not installed')
    # else:
    #     raise Exception( 'System %s not supported' % sys.platform )
    self.sikulixIp = sikulixIp
    self.sikulixPort = sikulixPort
    self.sikulixProcess = None
    # get the home folder of the user
    if sys.platform == "win32" :
        homepath = os.path.expanduser(os.getenv('USERPROFILE'))
    elif sys.platform == "linux2":
        # NOTE(review): python3 reports "linux", not "linux2" - this branch
        # may never run there; on any other platform `homepath` stays
        # undefined and self.homeFolder raises NameError. Confirm.
        homepath = os.getenv('HOME')
    self.nameFolder= Settings.get('Common', 'acronym-server').lower()
    # NOTE(review): windows-style "\\" separator used even on linux - confirm
    self.homeFolder = "%s\\%s" % (homepath, self.nameFolder)
    self.urlHost = "http://%s:%s" % (self.sikulixIp, self.sikulixPort)
def getInstalled(self, b64=False):
    """
    Return the list of probes embedded on the server.

    Scans the embedded tools folder for *Probe.py files and extracts the
    type and description declared in each plugin source.

    @param b64: unused, kept for interface compatibility
    @return: installed probes as [{'type': ..., 'description': ...}, ...]
    @rtype: list
    """
    self.trace("get probes installed")
    pluginsInstalled = []
    embeddedPath = '%s/%s/Embedded/' % (Settings.getDirExec(),
                                        Settings.get('Paths', 'tools'))
    if os.path.exists(embeddedPath):
        for f in os.listdir(embeddedPath):
            if not f.endswith('Probe.py'):
                continue
            p = {}
            # read the plugin source to extract the declared markers
            fp = open('%s%s' % (embeddedPath, f), 'r')
            data = fp.read()
            fp.close()
            # probe type is the text between __TYPE__=""" and """
            probeType = data.split('__TYPE__="""')
            if len(probeType) == 2:
                p['type'] = probeType[1].split('"""', 1)[0]
            # probe description likewise
            probeDescr = data.split('__DESCRIPTION__="""')
            if len(probeDescr) == 2:
                p['description'] = probeDescr[1].split('"""', 1)[0]
            if len(p) > 0:
                pluginsInstalled.append(p)
    return pluginsInstalled
def resetPwdUserInDB(self, userId):
    """
    Reset a user's password in the database to the empty password.

    @param userId: user id
    @return: (code, message)
    @rtype: tuple
    """
    self.trace( 'Reset user`\'s password in database Id=%s' % userId )

    # init some shortcut
    prefix = Settings.get( 'MySql', 'table-prefix')
    escape = MySQLdb.escape_string
    userId = str(userId)

    # find user by id
    sql = """SELECT * FROM `%s-users` WHERE id='%s'""" % ( prefix, escape(userId) )
    dbRet, dbRows = DbManager.instance().querySQL( query = sql, columnName=True )
    if not dbRet:
        self.error( "unable to read user id" )
        return (self.context.CODE_ERROR, "unable to read user id")
    if not len(dbRows):
        return (self.context.CODE_NOT_FOUND, "this user id does not exist")

    # disconnect user before
    # todo

    # update password: stored value is sha1( salt + sha1('') )
    emptypwd = hashlib.sha1()
    # bugfix: hashlib.update() requires bytes under python3; passing the
    # bare str raised TypeError (other blocks already encode to utf8)
    emptypwd.update( ''.encode('utf8') )
    sha1 = hashlib.sha1()
    sha1.update( ("%s%s" % ( Settings.get( 'Misc', 'salt'), emptypwd.hexdigest() )).encode('utf8') )
    # bugfix: the password placeholder was missing from the query, so
    # formatting 3 arguments into 2 placeholders raised TypeError
    sql = """UPDATE `%s-users` SET password='%s' WHERE id='%s'""" % (prefix, sha1.hexdigest(), userId)
    dbRet, _ = DbManager.instance().querySQL( query = sql )
    if not dbRet:
        self.error("unable to reset pwd")
        return (self.context.CODE_ERROR, "unable to reset pwd")

    # new in v19, refresh the cache
    self.loadCache()
    return (self.context.CODE_OK, "" )
def getStatisticsFromDb(self):
    """
    Get statistics users from database.

    @return: (CODE_OK, stats row) on success, (CODE_ERROR, message) on error
    @rtype: tuple
    """
    prefix = Settings.get( 'MySql', 'table-prefix')
    # total number of users, embedded as a sub-query
    subQuery = """SELECT COUNT(*) from `%s-users`""" % (prefix)
    sql = """SELECT COUNT(*) AS total_connections, (%s) AS total_users FROM `%s-users-stats`""" % (
        subQuery, prefix)
    dbRet, dbRows = DbManager.instance().querySQL( query = sql, columnName=True )
    if dbRet:
        return (self.context.CODE_OK, dbRows[0] )
    self.error( "unable to get statitics for users" )
    return (self.context.CODE_ERROR, "unable to get statitics for users")
def getRn(self, b64=False):
    """
    Return the release notes of the tools.

    @param b64: unused, kept for interface compatibility
    @return: release notes content, '' when tools are not installed
    @rtype: string
    """
    self.trace("read tools rn")
    if not self.TOOLS_INSTALLED:
        return ''
    rnPath = "%s/%s/" % (Settings.getDirExec(), Settings.get('Paths', 'tools'))
    return Context.instance().getRn(pathRn=rnPath)