def stopping(self):
    """
    On stopping the server: send SIGTERM to every child process that
    left a <pid>.pid file in the run directory, then remove the files.
    """
    self.info("Stopping server...")
    run_dir = "%s/%s" % (Settings.getDirExec(), Settings.get('Paths', 'run'))
    # cleanup all child processes
    for f in os.listdir(run_dir):
        if not f.endswith(".pid"):
            continue
        pid = f.split(".pid")[0]
        # kill the process
        if pid.isdigit():
            # fix: log message typo "chid" -> "child"
            self.info('Stopping child processes %s...' % pid)
            try:
                # resend SIGTERM until the process disappears;
                # os.kill raises OSError once the pid no longer exists
                while True:
                    os.kill(int(pid), signal.SIGTERM)
                    time.sleep(0.1)
            except OSError:
                pass
            time.sleep(1)
            # just to be sure, delete a second time
            try:
                os.remove("%s/%s.pid" % (run_dir, pid))
            except Exception:
                pass
def __init__(self, listeningAddress, agentName='ESI',
             sslSupport=False, wsSupport=False, context=None):
    """
    Event server interface

    @param listeningAddress:
    @type listeningAddress:

    @param agentName: agent name used on request
    @type agentName: string
    """
    # resolve the ssl material locations once, before the base ctor
    exec_dir = Settings.getDirExec()
    cert_path = '%s/%s' % (exec_dir,
                           Settings.get('Client_Channel', 'channel-ssl-cert'))
    key_path = '%s/%s' % (exec_dir,
                          Settings.get('Client_Channel', 'channel-ssl-key'))

    NetLayerLib.ServerAgent.__init__(
        self,
        listeningAddress=listeningAddress,
        agentName=agentName,
        keepAliveInterval=Settings.getInt('Network', 'keepalive-interval'),
        inactivityTimeout=Settings.getInt('Network', 'inactivity-timeout'),
        responseTimeout=Settings.getInt('Network', 'response-timeout'),
        selectTimeout=Settings.get('Network', 'select-timeout'),
        sslSupport=sslSupport,
        wsSupport=wsSupport,
        certFile=cert_path,
        keyFile=key_path,
        pickleVer=Settings.getInt('Network', 'pickle-version'))

    self.__mutex__ = threading.RLock()
    self.context = context
def installAdapter(self, name):
    """
    Install the sut-adapter <name>: pip dependencies, yum dependencies
    (only when /etc/os-release reports a centos/rhel id), repository
    init file and bundled samples.
    """
    RepoAdapters.initialize(context=None)

    folder_lib = "%s/%s/%s" % (Settings.getDirExec(),
                               Settings.get('Paths', 'packages-sutadapters'),
                               name)
    folder_lib = os.path.normpath(folder_lib)
    if os.path.exists(folder_lib):
        try:
            # install dependancies according to the plugin with pip
            pip_list = "%s/deps/pip_list.txt" % folder_lib
            pip_list = os.path.normpath(pip_list)
            if os.path.exists(pip_list) and os.path.getsize(pip_list) > 0:
                if platform.system() == "Windows":
                    cmd = "%s -m pip install -r \"%s\"" % (Settings.get(
                        'Bin', 'python-win'), pip_list)
                else:
                    cmd = "%s -m pip install -r %s" % (Settings.get(
                        'Bin', 'python'), pip_list)
                # NOTE(review): shell=True with settings-provided paths;
                # assumes the server configuration is trusted
                subprocess.call(cmd, shell=True)

            # system detect
            yum_list = "%s/deps/yum_list.txt" % folder_lib
            yum_list = os.path.normpath(yum_list)
            if os.path.exists(yum_list) and os.path.getsize(yum_list) > 0:
                if os.path.exists("/etc/os-release"):
                    # read the distribution id (ID=...) from os-release
                    os_id = ""
                    with open("/etc/os-release") as f:
                        for line in f:
                            if "=" in line:
                                # NOTE(review): unpacking assumes exactly one
                                # '=' per line; lines with '=' inside the
                                # value would raise -- verify inputs
                                k, v = line.rstrip().split("=")
                                if k == "ID":
                                    os_id = v.strip('"')
                                    break
                    if "centos" in os_id or "rhel" in os_id:
                        cmd = "yum install `cat %s | tr '\n' ' '`" % yum_list
                        subprocess.call(cmd, shell=True)

            RepoAdapters.instance().updateMainInit()

            # install samples according to the plugin
            if os.path.exists("%s/samples/" % folder_lib):
                folder_sample = "%s/%s/1/Samples/Adapter_%s" % (
                    Settings.getDirExec(),
                    Settings.get('Paths', 'tests'),
                    name)
                shutil.copytree("%s/samples/" % folder_lib, folder_sample)

            print("Sut Adapter installation process terminated")
        except Exception as e:
            print("unable to install adapter: %s" % e)
    else:
        print("Sut Adapter (%s) not found!" % name)

    # refresh the repository main init in all cases
    RepoAdapters.instance().updateMainInit()
def isUp(self):
    """
    Probe the sqlite database by opening (and closing) a connection.

    Raises on connection failure; traces success otherwise.
    """
    db_name = "%s/%s/%s" % (Settings.getDirExec(),
                            Settings.get('Paths', 'var'),
                            Settings.get('Database', 'db'))
    # fix: close the probe connection instead of leaking it
    conn = sqlite3.connect(db_name)
    conn.close()
    self.trace("database connection successful")
def getStaticArgs(envTmp=False):
    """
    Build the static python snippet (root path, results path,
    controller ip and port) injected into each test executable.
    """
    root_path = os.path.normpath(Settings.getDirExec())
    results_path = os.path.normpath(getTestsPath(envTmp=envTmp))
    te_args = """root = r'%s/../'
tests_result_path = r'%s'
controller_ip = '%s'
controller_port = %s
""" % (root_path,
       results_path,
       Settings.get('Bind', 'ip-tsi'),
       Settings.get('Bind', 'port-tsi'))
    return te_args
def __init__(self):
    """
    Background worker that expires inactive user sessions.
    """
    threading.Thread.__init__(self)
    self.event = threading.Event()
    self.mutex = threading.RLock()
    self.running = True
    # max session age, in seconds
    self.lease = int(Settings.get('Users_Session', 'max-expiry-age'))
    # cleanup interval, in seconds
    self.expire = int(Settings.get('Users_Session', 'timeout-cleanup'))
def doLdapAuth(self, login, password):
    """
    perform bind ldap authentication with multiple ldaps server
    and ssl mode

    @param login: user login, substituted into each bind dn template
    @param password: clear-text password used for the bind
    @return: True when one of the bind dn templates authenticates
    @rtype: boolean
    """
    auth_success = False

    # get ldap settings
    ldap_host_list = json.loads(Settings.get('Users_Session', 'ldap-host'))
    ldap_dn_list = json.loads(Settings.get('Users_Session', 'ldap-dn'))

    # define ldap server(s)
    servers_list = []
    for host in ldap_host_list:
        use_ssl = False
        # fix: the standard LDAP port is 389 (was 386, a typo)
        ldap_port = 389

        # parse the url to extract scheme host and port
        url_parsed = urllib.parse.urlparse(host)

        if url_parsed.scheme == "ldaps":
            use_ssl = True
            ldap_port = 636

        # an explicit port in the url overrides the scheme default
        if ":" in url_parsed.netloc:
            ldap_host, ldap_port = url_parsed.netloc.split(":")
        else:
            ldap_host = url_parsed.netloc

        server = ldap3.Server(ldap_host, port=int(ldap_port), use_ssl=use_ssl)
        servers_list.append(server)

    last_auth_err = ""
    for bind_dn in ldap_dn_list:
        c = ldap3.Connection(servers_list,
                             user=bind_dn % login,
                             password=password)
        # perform the Bind operation
        auth_success = c.bind()
        last_auth_err = c.result
        # fix: release the connection whatever the bind result
        c.unbind()
        if auth_success:
            break

    if not auth_success:
        self.trace(last_auth_err)
    return auth_success
def initialize(logPathFile=None, level="INFO", size="5",
               nbFiles="10", noSettings=False):
    """
    Initialize the global rotating-file logger LG.

    @param logPathFile: explicit log file path (overrides settings path)
    @param level: log level name (DEBUG/ERROR/INFO); only honoured when
                  noSettings is True, otherwise read from settings
    @param size: max size of one log file; in the settings branch the
                 value comes from settings as "<n>M", in the noSettings
                 branch it is used as-is for maxBytes
    @param nbFiles: backup file count used when noSettings is True
    @param noSettings: when True, do not read the Settings module at all
    """
    global LG
    if not noSettings:
        if logPathFile is not None:
            file = logPathFile
        else:
            file = "%s/%s/%s" % (Settings.getDirExec(),
                                 Settings.get(section='Paths', key='logs'),
                                 Settings.get(section='Trace', key='file'))
        level = Settings.get(section='Trace', key='level')
        size = Settings.get(section='Trace', key='max-size-file')
        # settings express the size as "<n>M" -> convert to bytes
        maxBytes = int(size.split('M')[0]) * 1024 * 1024
        nbFilesMax = Settings.getInt(section='Trace', key='nb-backup-max')
    else:
        # fix: dropped the no-op self assignments (level = level, ...)
        file = logPathFile
        maxBytes = size
        nbFilesMax = nbFiles

    LG = logging.getLogger('Logger')
    if level == 'DEBUG':
        # write everything messages
        LG.setLevel(logging.DEBUG)
    elif level == 'ERROR':
        # write anything that is an error or worse.
        LG.setLevel(logging.ERROR)
    elif level == 'INFO':
        # write anything that is an info message or worse.
        LG.setLevel(logging.INFO)

    handler = logging.handlers.RotatingFileHandler(file,
                                                   maxBytes=maxBytes,
                                                   backupCount=nbFilesMax)
    formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
    handler.setFormatter(formatter)
    LG.addHandler(handler)
def addPyInitFile(self, pathFile, descr="", helper="",
                  allmodules="", adps=False, mainInit=False):
    """
    Add the default __init__ file of the repository

    @param pathFile: destination folder of the generated __init__.py
    @param descr: description injected in the template
    @param helper: helper text (main init template only)
    @param allmodules: modules listing (main init template only)
    @param adps: unused, kept for interface compatibility
    @param mainInit: True to render MAIN_INIT instead of ADP_INIT
    @return: True on success, False when the file cannot be written
    @rtype: boolean
    """
    HEADER = ''
    tpl_path = "%s/%s/adapter_header.tpl" % (Settings.getDirExec(),
                                             Settings.get('Paths', 'templates'))
    try:
        # fix: with-statement closes the template even if read() fails
        with open(tpl_path, "r") as fd:
            HEADER = fd.read()
    except Exception as e:
        self.error('unable to read template adapter header: %s' % str(e))

    try:
        if mainInit:
            default_init = MAIN_INIT % (HEADER, descr, helper, allmodules)
        else:
            default_init = ADP_INIT % (HEADER, descr)

        # fix: with-statement guarantees the init file handle is released
        with open('%s/__init__.py' % pathFile, 'w') as f:
            f.write(default_init)
    except Exception as e:
        self.error(e)
        return False
    return True
def trace(self, txt):
    """
    Trace message
    """
    # guard clause: nothing to do before settings are initialized
    if Settings.instance() is None:
        return
    if Settings.get('Trace', 'debug-level') == 'VERBOSE':
        Logger.ClassLogger.trace(self, txt=txt)
def getTestsPath(envTmp=False):
    """
    Get the path of all tests result

    @return:
    @rtype: string
    """
    # pick the temporary or permanent results folder from settings
    path_key = 'testsresults-tmp' if envTmp else 'testsresults'
    trPath = '%s%s' % (Settings.getDirExec(), Settings.get('Paths', path_key))
    # normalize the path and return it
    return os.path.normpath(trPath)
def __init__(self):
    """
    Repository manager for public test files
    """
    public_repo = '%s%s' % (Settings.getDirExec(),
                            Settings.get('Paths', 'public'))
    RepoManager.RepoManager.__init__(self,
                                     pathRepo=public_repo,
                                     extensionsSupported=[])
def __init__(self, listeningAddress, agentName='ASI', sslSupport=False,
             wsSupport=False, tsi=None, context=None):
    """
    Construct Agent Server Interface

    @param listeningAddress:
    @type listeningAddress:

    @param agentName:
    @type agentName: string
    """
    # resolve the agent-channel ssl material once
    exec_dir = Settings.getDirExec()
    cert_path = '%s/%s' % (exec_dir,
                           Settings.get('Agent_Channel', 'channel-ssl-cert'))
    key_path = '%s/%s' % (exec_dir,
                          Settings.get('Agent_Channel', 'channel-ssl-key'))

    NetLayerLib.ServerAgent.__init__(
        self,
        listeningAddress=listeningAddress,
        agentName=agentName,
        keepAliveInterval=Settings.getInt('Network', 'keepalive-interval'),
        inactivityTimeout=Settings.getInt('Network', 'inactivity-timeout'),
        responseTimeout=Settings.getInt('Network', 'response-timeout'),
        selectTimeout=Settings.get('Network', 'select-timeout'),
        sslSupport=sslSupport,
        wsSupport=wsSupport,
        certFile=cert_path,
        keyFile=key_path,
        pickleVer=Settings.getInt('Network', 'pickle-version'))

    self.tsi = tsi
    self.context = context
    self.__mutex = threading.RLock()
    self.__mutexNotif = threading.RLock()
    # registered agents and their public addresses
    self.agentsRegistered = {}
    self.agentsPublicIp = {}
def updateSession(self, sessionId):
    """
    Refresh the last-activity timestamp of the given session and
    return the new expiry date string (empty string if unknown).
    """
    if sessionId not in self.userSessions:
        return ''
    session = self.userSessions[sessionId]
    session['last_activity'] = time.time()
    lease = int(Settings.get('Users_Session', 'max-expiry-age'))  # in seconds
    end = time.gmtime(session['last_activity'] + lease)
    expires = time.strftime("%a, %d-%b-%Y %T GMT", end)
    return expires
def __init__(self, context):
    """
    Storage data adapters
    """
    tmp_repo = '%s%s' % (Settings.getDirExec(), Settings.get('Paths', 'tmp'))
    RepoManager.RepoManager.__init__(self, pathRepo=tmp_repo, context=context)
    self.context = context
    self.prefixAdapters = "adapter"
    self.prefixAdaptersAll = "private_storage"
    self.adpDataPath = os.path.normpath("%s/AdaptersData" % self.testsPath)
    self.initStorage()
def __init__(self, context):
    """
    Repository manager for archives files
    """
    supported_exts = [RepoManager.TEST_RESULT_EXT,
                      RepoManager.TXT_EXT,
                      RepoManager.CAP_EXT,
                      RepoManager.ZIP_EXT,
                      RepoManager.PNG_EXT,
                      RepoManager.JPG_EXT]
    RepoManager.RepoManager.__init__(
        self,
        pathRepo='%s%s' % (Settings.getDirExec(),
                           Settings.get('Paths', 'testsresults')),
        extensionsSupported=supported_exts,
        context=context)
    self.context = context
    self.cacheUuids = {}
    # warm the test-result uuid cache on startup
    self.cachingUuid()
    self.trace("nb entries in testresult cache: %s" % len(self.cacheUuids))
def __init__(self, context):
    """
    Construct Adpaters Manager
    """
    adapters_repo = '%s/%s/' % (Settings.getDirExec(),
                                Settings.get('Paths', 'packages-sutadapters'))
    RepoManager.RepoManager.__init__(
        self,
        pathRepo=adapters_repo,
        extensionsSupported=[RepoManager.PY_EXT, RepoManager.TXT_EXT],
        context=context)
    self.context = context
    # update main init file
    self.updateMainInit()
def __init__(self, context):
    """
    Class Projects Manager
    """
    self.tb_projects = 'projects'
    self.repoTests = '%s/%s' % (Settings.getDirExec(),
                                Settings.get('Paths', 'tests'))
    self.context = context
    # load projects in cache, new in v19
    self.__cache = []
    self.loadCache()
    # Initialize the repository
    self.info('Deploying folders projects and reserved folders...')
    self.createDirProjects()
def reconfigureLevel():
    """
    Reconfigure the level log
    """
    try:
        global LG
        # map the setting value to a logging level; unknown values
        # leave the current level untouched (same as before)
        level_map = {'DEBUG': logging.DEBUG,
                     'ERROR': logging.ERROR,
                     'INFO': logging.INFO}
        level = Settings.get(section='Trace', key='level')
        if level in level_map:
            LG.setLevel(level_map[level])
    except Exception as e:
        sys.stdout.write("error: %s" % e)
def __init__(self, listeningAddress, agentName='TSI', context=None):
    """Constructs TCP Server Inferface"""
    NetLayerLib.ServerAgent.__init__(
        self,
        listeningAddress=listeningAddress,
        agentName=agentName,
        keepAliveInterval=Settings.getInt('Network', 'keepalive-interval'),
        inactivityTimeout=Settings.getInt('Network', 'inactivity-timeout'),
        responseTimeout=Settings.getInt('Network', 'response-timeout'),
        selectTimeout=Settings.get('Network', 'select-timeout'),
        pickleVer=Settings.getInt('Network', 'pickle-version'))
    self.context = context
    self.__mutex__ = threading.RLock()
    self.__fifoThread = None
    # test register: {'task-id': Boolean}, background running or not
    self.tests = {}
    # all tests connected
    self.testsConnected = {}
def getHelps(self):
    """
    Returns the documentation cache, zlib-compressed then
    base64-encoded, as an utf8 string (empty string on error).
    """
    self.trace("get helps")
    # fix: keep ret as bytes; it was initialised to '' (str), so the
    # final .decode('utf8') raised AttributeError on every error path
    ret = b''
    try:
        complete_path = '%s/%s/documentations.dat' % (Settings.getDirExec(),
                                                      Settings.get('Paths', 'var'))
        if os.path.exists(complete_path):
            with open(complete_path, "rb") as fd:
                data = fd.read()
            ret = base64.b64encode(zlib.compress(data))
        else:
            self.error('documentation cache does not exist')
    except Exception as e:
        self.error("unable to get helps: %s" % e)
    return ret.decode('utf8')
def __init__(self, context):
    """
    Repository manager for tests files
    """
    supported_exts = [
        RepoManager.TEST_SUITE_EXT,
        RepoManager.TEST_PLAN_EXT,
        RepoManager.TEST_CONFIG_EXT,
        RepoManager.TEST_DATA_EXT,
        RepoManager.TEST_UNIT_EXT,
        RepoManager.PNG_EXT,
        # RepoManager.TEST_YAML_EXT,
        RepoManager.TEST_GLOBAL_EXT
    ]
    RepoManager.RepoManager.__init__(
        self,
        pathRepo='%s%s' % (Settings.getDirExec(),
                           Settings.get('Paths', 'tests')),
        extensionsSupported=supported_exts,
        context=context)
    self.context = context
rows = [] for row in c.fetchall(): fields = map(lambda x: x[0], c.description) rows.append(dict(zip(fields, row))) c.close() conn.commit() conn.close() except Exception as e: print("[query] %s - %s" % (str(e), query)) sys.exit(1) return rows db_name = "%s/%s/%s" % (Settings.getDirExec(), Settings.get('Paths', 'var'), Settings.get('Database', 'db')) def error(msg): """ """ print("ERROR: %s" % msg) def str_presenter(dumper, data): if isinstance(data, str) and "\n" in data: return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|') return dumper.represent_scalar('tag:yaml.org,2002:str', data) yaml.add_representer(str, str_presenter)
def apiAuthorization(self, login, password):
    """
    Check authorization for rest api

    @param login: user login
    @param password: clear-text password
    @return: (session id or error code, expiry date string)
    @rtype: tuple
    """
    self.trace('Rest authorization called for Login=%s' % (login))
    expires = ''

    # check if this login exists on the database
    cache_users = UsersManager.instance().cache()
    if login not in cache_users:
        self.trace("Login=%s account not found" % login)
        return (self.CODE_NOT_FOUND, expires)

    user_profile = cache_users[login]

    # account disable ?
    if not user_profile['active']:
        self.trace("%s account not active" % login)
        return (self.CODE_DISABLED, expires)

    # 2 methods to authenticate the user
    # make a hash of the password and look inside the server
    # or communicate with a remote ldap authenticator
    if Settings.getInt('Users_Session', 'ldap-authbind') and LDAP_INSTALLED:
        auth_success = self.doLdapAuth(login, password)
        if not auth_success:
            self.trace("ldap auth failed for %s account" % login)
            return (self.CODE_FAILED, expires)
    elif Settings.getInt('Users_Session', 'ldap-authbind') and not LDAP_INSTALLED:
        self.error("python ldap3 library is not installed on your system")
        return (self.CODE_FAILED, expires)
    else:
        # check password, create a sha1 hash with salt: sha1(salt +
        # sha1(password))
        sha0 = hashlib.sha1()
        sha0.update(password.encode('utf8'))

        sha1 = hashlib.sha1()
        _pwd = "%s%s" % (self.cfg_db["auth-salt"], sha0.hexdigest())
        sha1.update(_pwd.encode('utf8'))

        # legacy scheme: sha1(salt + clear password)
        # fix: format the clear password itself; formatting
        # password.encode('utf8') produced the "b'...'" repr under
        # python3 and silently broke the backward-compatible check
        sha3 = hashlib.sha1()
        _pwd2 = "%s%s" % (self.cfg_db["auth-salt"], password)
        sha3.update(_pwd2.encode('utf8'))

        pwd_matched = False
        if user_profile['password'] == sha1.hexdigest():
            pwd_matched = True
        # keep this mode only for backward compatibility
        if user_profile['password'] == sha3.hexdigest():
            pwd_matched = True

        if not pwd_matched:
            self.trace("incorrect password for %s account" % login)
            return (self.CODE_FAILED, expires)

    session_id = self.generateSessionid()
    user_profile['last_activity'] = time.time()

    lease = int(Settings.get('Users_Session', 'max-expiry-age'))  # in seconds
    end = time.gmtime(user_profile['last_activity'] + lease)
    expires = time.strftime("%a, %d-%b-%Y %T GMT", end)

    self.userSessions.update({session_id: user_profile})

    self.trace('Rest authorized for Login=%s SessionId=%s Expires=%s' %
               (login, session_id, expires))
    return (session_id, expires)
def querySQL(self, query, insertData=False, columnName=False,
             debugCaller=False, args=(),
             arg1=None, arg2=None, arg3=None, arg4=None,
             arg5=None, arg6=None, arg7=None, arg8=None,
             arg9=None, arg10=None, arg11=None, arg12=None):
    """
    Execute a parameterized query against the sqlite database.

    @param query: sql statement with '?' placeholders
    @param insertData: when True, return the lastrowid instead of rows
    @param columnName: when True, return rows as dicts keyed by column
    @param debugCaller: when True, include the caller in the trace
    @param args: base tuple of placeholder values
    @param arg1..arg12: optional extra values appended after args
    @return: (success flag, rows/lastrowid or None)
    @rtype: tuple
    """
    ret = False
    rows = None
    db_name = "%s/%s/%s" % (Settings.getDirExec(),
                            Settings.get('Paths', 'var'),
                            Settings.get('Database', 'db'))
    try:
        conn = sqlite3.connect(db_name)
        cursor = conn.cursor()

        if Settings.get('Trace', 'debug-level') == 'VERBOSE':
            if debugCaller:
                self.trace("SQL QUERY: %s - %s" % (caller(), query))
            else:
                self.trace("SQL QUERY: %s" % (query))

        # fix: collapse the twelve copy-pasted "if argN is not None"
        # branches into one loop; order and behavior are unchanged
        sql_args = args
        for extra in (arg1, arg2, arg3, arg4, arg5, arg6,
                      arg7, arg8, arg9, arg10, arg11, arg12):
            if extra is not None:
                sql_args += (extra, )

        cursor.execute(query, sql_args)

        if insertData:
            rows = cursor.lastrowid
        else:
            if columnName:
                rows = []
                for row in cursor.fetchall():
                    fields = map(lambda x: x[0], cursor.description)
                    rows.append(dict(zip(fields, row)))
            else:
                rows = cursor.fetchall()

        cursor.close()
        conn.commit()
        conn.close()
        ret = True
    except Exception as e:
        self.error("unable to execute sqlite3 query: %s" % e)
    return ret, rows
""" try: conn = sqlite3.connect(db) c = conn.cursor() c.execute(query) c.close() conn.commit() conn.close() except Exception as e: print("[query] %s - %s" % (str(e), query)) sys.exit(1) db_name = "%s/%s/%s" % (Settings.getDirExec(), Settings.get( 'Paths', 'var'), Settings.get('Database', 'db')) def error(msg): """ """ print("ERROR: %s" % msg) class CliFunctions(Logger.ClassLogger): """ """ def __init__(self, parent): """ """ self.parent = parent
def get(section, key):
    """
    Return value according to the key
    """
    value = Settings.get(section, key)
    return value
# MA 02110-1301 USA # ------------------------------------------------------------------- from ea.libs import Settings import pickle import sys import DocInspect sys.path.insert(0, '../../../') # initialize settings module to read the settings.ini file Settings.initialize(path="./") cache_pathfile = "%s/%s/documentations.dat" % (Settings.getDirExec(), Settings.get('Paths', 'var')) def extractTestExecutor(lib): """ """ pkg_te = __import__("ea.testexecutorlib", fromlist=[lib]) descr_pkg = getattr(pkg_te, '__DESCRIPTION__') lib_obj = getattr(pkg_te, lib) lib_descr = getattr(lib_obj, '__DESCRIPTION__') classes = getattr(lib_obj, '__HELPER__') pkg_desc = DocInspect.describePackage(pkg_te, modules=[(lib, classes, lib_descr)], descr=descr_pkg)
clsmembers = inspect.getmembers(module, inspect.isclass) for c, o in clsmembers: funcs = getYamlDecorators(cls=o, deco=deco) if len(funcs): helper.append((c, funcs)) return helper # params for public functions swagger = [] swagger_version = "2.0" swagger_email = SWAGGER_EMAIL swagger_licence = "LGPL 2.1" swagger_info = [ ("description", "Control your test server with %s API" % Settings.get('Server', 'name')), ("version", Settings.getVersion()), ("title", "Swagger Tester - %s" % Settings.get('Server', 'name')), ] swagger_schemes = [ "https" ] swagger_host = '127.0.0.1' swagger_base_path = "/rest" swagger_paths = ["paths:"] swagger_tab = 2 # extract yaml python code in rest server interface py_tab = 4
def show_data_storage(self):
    """show data storage path"""
    storage_path = "%s%s" % (Settings.getDirExec(),
                             Settings.get('Paths', 'var'))
    sys.stdout.write("%s\n" % storage_path)