def test_result_error(self):
    """A handler that raises must mark the operation 'failed' and capture
    both its log records and the traceback of the nested call chain."""
    def raising_handler(op):
        # Three nested frames so the recorded traceback has some depth.
        # The frame names are asserted on below, so they must stay as-is.
        def deep():
            op.logger.info('Im deep and going deeper')
            deeper()
        def deeper():
            op.logger.info('Im in deep and continue')
            abyss()
        def abyss():
            op.logger.info('Im in abyss and trying to open file')
            open('/non/existed/file')
        deep()

    handle = operation.OperationAPI().create('test_result_error', raising_handler)
    handle.run()

    result = assert_op_result('failed')
    eq_(result['name'], 'test_result_error')
    # 3 info records from the nested frames + 1 failure record.
    eq_(len(result['logs']), 4)
    # The last log entry carries the failure reason.
    ok_(result['logs'][-1].endswith(
        'Reason: [Errno 2] No such file or directory: \'/non/existed/file\''
    ))
    # Every nested frame must show up in the recorded traceback.
    for frame in ('deep()', 'deeper()', 'abyss()'):
        ok_(frame in result['trace'])
def __init__(self):
    """Wire up the operation API, QueryEnv and node config, read the role
    name from the raw ini, and discover the redis instances on this node."""
    self._op_api = operation.OperationAPI()
    self._queryenv = bus.queryenv_service
    self._cnf = bus.cnf
    self._role_name = self._cnf.rawini.get(
        config.SECT_GENERAL, config.OPT_ROLE_NAME)
    self.redis_instances = redis_service.RedisInstances()
def __init__(self, path=None):
    """Set up the HAProxy config manager, init script and proxy bookkeeping.

    :param path: optional path handed to the HAProxy init script.
    """
    self.cfg = haproxy.HAProxyConfManager()
    # Truncate the conf file so we start from an empty configuration.
    # (Was `open(...).close()` plus a commented-out legacy constructor;
    # the context manager guarantees the handle is closed even on error.)
    with open(self.cfg.conf_path, 'w'):
        pass
    self.svc = haproxy.HAProxyInitScript(path)
    self.naming_mgr = SectionNamingMgr()
    self._op_api = operation.OperationAPI()
    # Maps proxy identifiers to their runtime bookkeeping entries.
    self._proxies_table = {}
def __init__(self):
    """Set up the MySQL service handler: service control, presets, events
    and the MySQL/operation APIs.
    """
    self.mysql = mysql_svc.MySQL()
    # MariaDB doesn't use the old preset system, so only mysql2/percona
    # get a cnf controller.
    cnf_ctl = MysqlCnfController() if __mysql__['behavior'] in ('mysql2', 'percona') else None
    ServiceCtlHandler.__init__(self, __mysql__['behavior'], self.mysql.service, cnf_ctl)
    self.preset_provider = mysql_svc.MySQLPresetProvider()
    preset_service.services[__mysql__['behavior']] = self.preset_provider

    bus.on(init=self.on_init, reload=self.on_reload)
    bus.define_events(
        'before_mysql_data_bundle',

        'mysql_data_bundle',

        # @param host: New master hostname
        'before_mysql_change_master',

        # @param host: New master hostname
        # @param log_file: log file to start from
        # @param log_pos: log pos to start from
        # BUG FIX: the trailing comma was missing here, so implicit string
        # concatenation fused this name with the next into a single bogus
        # event 'mysql_change_masterbefore_slave_promote_to_master'.
        'mysql_change_master',

        'before_slave_promote_to_master',

        'slave_promote_to_master'
    )

    self._mysql_api = mysql_api.MySQLAPI()
    self._op_api = operation_api.OperationAPI()
    self._backup_id = None
    self._data_bundle_id = None

    self.on_reload()
def __init__(self, **kwargs):
    """Initialise the updater: absorb keyword settings onto self, then set
    up the package manager, daemon control, operation API and state flags."""
    self._update_self_dict(kwargs)
    self.op_api = operation.OperationAPI()
    self.pkgmgr = pkgmgr.create_pkgmgr(self.repo_url)
    self.daemon = initdv2.Daemon('scalarizr')
    # Human-readable distribution string, e.g. "ubuntu 14.04 trusty".
    self.dist = '{name} {release} {codename}'.format(**linux.os)
    self.state = 'noop'
    self.early_bootstrapped = False
    self.shutdown_ev = threading.Event()
def test_result(self):
    """A completed operation exposes the handler's return value as its result."""
    payload = {'embed': 'data'}

    def handler(op):
        return payload

    handle = operation.OperationAPI().create('test_result', handler)
    handle.run()
    result = assert_op_result('completed')
    eq_(result['result'], payload)
def test_exclusive(self):
    """Starting an exclusive operation twice must raise AlreadyInProgressError.

    Two events coordinate with the worker thread:
    'started'  -> the exclusive operation has begun executing,
    'finished' -> the test allows it to complete.
    """
    finished = threading.Event()
    started = threading.Event()
    def fn(op):
        started.set()
        finished.wait()
    api = operation.OperationAPI()
    # Launch asynchronously so the operation is still in progress below.
    # NOTE: `async=` is a Python-2-only keyword argument name.
    api.run('test_exclusive', fn, exclusive=True, async=True)
    started.wait()
    # A second run of the same exclusive operation must be rejected.
    @raises(operation.AlreadyInProgressError)
    def asserts():
        api.run('test_exclusive', fn)
    asserts()
    # Let the first operation complete and give its thread a slice to exit.
    finished.set()
    time.sleep(.01) # Interrupt thread
def check_cancel(self, asserts=None, op_func=None, cancel_func=None):
    """Drive a cancellable operation through its lifecycle and verify the
    `canceled` flag flips after `op.cancel()` is called.

    :param asserts: optional callable with extra assertions, run at the end.
    :param op_func: optional callable invoked from inside the handler after
                    cancellation has been observed.
    :param cancel_func: optional cancel callback passed to `create()`.
    """
    # 'started'  -> the handler is running; 'canceled' -> cancel() was called.
    started = threading.Event()
    canceled = threading.Event()
    def fn(op):
        # Flag must be clear before cancellation and set after it.
        ok_(not op.canceled)
        started.set()
        canceled.wait()
        ok_(op.canceled)
        if op_func:
            op_func()
    op = operation.OperationAPI().create('test_cancel', fn, cancel_func=cancel_func)
    op.run_async()
    started.wait()
    op.cancel()
    # Release the handler only after cancel() so it observes canceled=True.
    canceled.set()
    time.sleep(.01) # Interrupt thread
    if asserts:
        asserts()
def __init__(self):
    """Set up the operation API and the MySQL init-script wrapper."""
    self._op_api = operation.OperationAPI()
    self._mysql_init = mysql_svc.MysqlInitScript()
def __init__(self):
    """Create the operation API used to track long-running tasks."""
    self._op_api = operation_api.OperationAPI()
def __init__(self):
    """Set up the lifecycle handler: logging, operation/system APIs,
    reboot bookkeeping, and registration of all lifecycle bus events."""
    super(LifeCycleHandler, self).__init__()
    self._logger = logging.getLogger(__name__)
    self._op_api = operation.OperationAPI()
    self._system_api = system_api.SystemAPI()
    # Set to True once a hostname has been assigned to this host.
    self._hostname_assigned = False
    # Serializes RebootFinish handling.
    self._reboot_finish_lock = threading.Lock()

    bus.define_events(
        # Fires before HostInit message is sent
        # @param msg
        "before_host_init",

        # Fires after HostInit message is sent
        "host_init",

        # Fires when HostInitResponse received
        # @param msg
        "host_init_response",

        # Fires before HostUp message is sent
        # @param msg
        "before_host_up",

        # Fires after HostUp message is sent
        "host_up",

        # Fires before RebootStart message is sent
        # @param msg
        "before_reboot_start",

        # Fires after RebootStart message is sent
        "reboot_start",

        # Fires before RebootFinish message is sent
        # @param msg
        "before_reboot_finish",

        # Fires after RebootFinish message is sent
        "reboot_finish",

        # Fires before Restart message is sent
        # @param msg: Restart message
        "before_restart",

        # Fires after Restart message is sent
        "restart",

        # Fires before Hello message is sent
        # @param msg
        "before_hello",

        # Fires after Hello message is sent
        "hello",

        # Fires before HostDown message is sent
        # @param msg
        "before_host_down",

        # Fires after HostDown message is sent
        "host_down",

        #
        # Service events
        #

        # Fires when behaviour is configured
        # @param service_name: Service name. Ex: mysql
        "service_configured")

    bus.on(init=self.on_init, start=self.on_start,
           reload=self.on_reload, shutdown=self.on_shutdown)
    self.on_reload()
def __init__(self):
    """Set up the PostgreSQL service wrapper and the operation API."""
    # NOTE(review): the original carried a bare '#?' on this line — its
    # intent is unconfirmed.
    self.postgresql = postgresql_svc.PostgreSql()
    self._op_api = operation.OperationAPI()
def __init__(self):
    """Bind the QueryEnv service from the bus and create the operation API."""
    self.queryenv = bus.queryenv_service
    self._op_api = operation.OperationAPI()
def __init__(self):
    """Set up the PostgreSQL service wrapper, its init script and the
    operation API."""
    # NOTE(review): the original carried a bare '#?' after the PostgreSql()
    # line — its intent is unconfirmed.
    self.postgresql = postgresql_svc.PostgreSql()
    self.service = postgresql_svc.PgSQLInitScript()
    self._op_api = operation.OperationAPI()
except (Exception, BaseException), e: if not raise_exc: self._logger.exception(e) self.send_message( Messages.DEPLOY_RESULT, dict(status='error', last_error=str(e), deploy_task_id=msg_body['deploy_task_id'])) if raise_exc: raise finally: self._logger.removeHandler(self._log_hdlr) if define_operation: op_api = operation.OperationAPI() op_api.run('deploy', handler) else: handler(bus.init_op) class DeployLogHandler(logging.Handler): def __init__(self, deploy_task_id=None): logging.Handler.__init__(self, logging.INFO) self.deploy_task_id = deploy_task_id self._msg_service = bus.messaging_service def emit(self, record): msg = self._msg_service.new_message( Messages.DEPLOY_LOG, body=dict(deploy_task_id=self.deploy_task_id,
def __init__(self):
    """Bind QueryEnv, create the operation API and discover the redis
    instances on this node."""
    self._queryenv = bus.queryenv_service
    self.redis_instances = redis_service.RedisInstances()
    self._op_api = operation.OperationAPI()