class ClientCommandContainer(CommandContainer):

    def __init__(self, conf, **kwargs):
        self.conf = PyConfigParser()
        self.conf.load_from_conf(conf)
        self.conf.load_from_dict(kwargs)

    def set_hub(self, username=None, password=None, auto_login=True, proxy_user=None):
        if username:
            if password is None:
                password = password_prompt(default_value=password)
            self.conf["AUTH_METHOD"] = "password"
            self.conf["USERNAME"] = username
            self.conf["PASSWORD"] = password
        if proxy_user:
            self.conf["PROXY_USER"] = proxy_user

        cacert = self.conf.get('CA_CERT')
        if cacert and not os.path.exists(cacert):
            self.error(
                'CA_CERT configuration points to non-existing file: %s' % cacert)

        self.hub = HubProxy(conf=self.conf, auto_login=auto_login)
def __init__(self, conf, client_type=None, logger=None, transport=None,
             auto_login=True, timeout=120, **kwargs):
    self._conf = PyConfigParser()
    self._hub = None

    # load default config
    default_config = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "default.conf"))
    self._conf.load_from_file(default_config)

    # update config with another one
    if conf is not None:
        self._conf.load_from_conf(conf)

    # update config with kwargs
    self._conf.load_from_dict(kwargs)

    # initialize properties
    self._client_type = client_type or "client"
    self._hub_url = self._conf["HUB_URL"]
    self._auth_method = self._conf["AUTH_METHOD"]
    self._logger = logger
    self._logged_in = False

    if transport is not None:
        self._transport = transport
    else:
        transport_args = {'timeout': timeout}
        if self._hub_url.startswith("https://"):
            TransportClass = retry_request_decorator(SafeCookieTransport)
            if hasattr(ssl, 'create_default_context') and self._conf.get(
                    'CA_CERT'):
                ssl_context = ssl.create_default_context()
                ssl_context.load_verify_locations(
                    cafile=self._conf['CA_CERT'])
                transport_args['context'] = ssl_context
            elif (hasattr(ssl, '_create_unverified_context')
                  and not self._conf.get('SSL_VERIFY', True)):
                # Python 2.6 doesn't have context argument for xmlrpclib.ServerProxy
                # therefore transport needs to be modified
                ssl_context = ssl._create_unverified_context()
                transport_args['context'] = ssl_context
        else:
            TransportClass = retry_request_decorator(CookieTransport)
        self._transport = TransportClass(**transport_args)

    self._hub = xmlrpc_client.ServerProxy(
        "%s/%s/" % (self._hub_url, self._client_type),
        allow_none=True, transport=self._transport,
        verbose=self._conf.get("DEBUG_XMLRPC"))
    if auto_login:
        self._login()
class BeakerDistro(object):
    def __init__(self, logger=None):
        self.conf = PyConfigParser()
        default_config = os.path.expanduser(BEAKER_CONF)
        self.conf.load_from_file(default_config)
        self.hub = HubProxy(logger=logger, conf=self.conf)

    def search(self, params):
        return self.hub.distros.filter(params)
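
# Illustrative usage sketch (not part of the original sources): driving
# BeakerDistro.search() with a criteria dict passed straight to the Beaker
# distros.filter XML-RPC call. It assumes BEAKER_CONF above points at a valid
# client.conf; the filter keys and values here are examples, not a definitive
# list of what the hub accepts.
if __name__ == "__main__":
    distro_client = BeakerDistro()
    matches = distro_client.search({
        "name": "Fedora%",   # SQL-style wildcard, as used by `bkr distros-list`
        "arch": "x86_64",
        "limit": 5,
    })
    for distro in matches:
        print(distro)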
class BkrConn(object):
    """
    Make connection to Beaker
    """
    enabled = True
    doc = xml.dom.minidom.Document()

    def __init__(self, logger=None, conf=None, **kwargs):
        self.conf = PyConfigParser()
        default_config = os.path.expanduser(BEAKER_CONF)
        self.conf.load_from_file(default_config)
        self.hub = HubProxy(logger=logger, conf=self.conf, **kwargs)
def __init__(self, params, logger=None):
    # params from AnsibleModule argument_spec below
    self.ids = params['ids']
    provision_params = params['provision_params']
    # Set wait methods from provision params, with reasonable defaults
    self.wait_time = provision_params.get('attempt_wait_time', 60)
    self.max_attempts = provision_params.get('max_attempts', 60)
    # set up beaker connection
    self.conf = PyConfigParser()
    default_config = os.path.expanduser(BEAKER_CONF)
    self.conf.load_from_file(default_config)
    self.hub = HubProxy(logger=logger, conf=self.conf)
class ClientCommandContainer(CommandContainer):

    def __init__(self, conf, **kwargs):
        self.conf = PyConfigParser()
        self.conf.load_from_conf(conf)
        self.conf.load_from_dict(kwargs)

    def set_hub(self, username=None, password=None, auto_login=True):
        if username:
            if password is None:
                password = password_prompt(default_value=password)
            self.conf["AUTH_METHOD"] = "password"
            self.conf["USERNAME"] = username
            self.conf["PASSWORD"] = password
        self.hub = HubProxy(conf=self.conf, auto_login=auto_login)
def __init__(self, conf, client_type=None, logger=None, transport=None,
             auto_login=True, auto_logout=True, **kwargs):
    self._conf = PyConfigParser()
    self._hub = None

    # load default config
    default_config = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "default.conf"))
    self._conf.load_from_file(default_config)

    # update config with another one
    if conf is not None:
        self._conf.load_from_conf(conf)

    # update config with kwargs
    self._conf.load_from_dict(kwargs)

    # initialize properties
    self._client_type = client_type or "client"
    self._hub_url = self._conf["HUB_URL"]
    self._auth_method = self._conf["AUTH_METHOD"]
    self._auto_logout = auto_logout
    self._logger = logger
    self._logged_in = False

    if transport is not None:
        self._transport = transport
    else:
        if self._hub_url.startswith("https://"):
            TransportClass = retry_request_decorator(SafeCookieTransport)
        else:
            TransportClass = retry_request_decorator(CookieTransport)
        self._transport = TransportClass()

    self._hub = xmlrpclib.ServerProxy(
        "%s/%s/" % (self._hub_url, self._client_type),
        allow_none=True, transport=self._transport,
        verbose=self._conf.get("DEBUG_XMLRPC"))
    if auto_login:
        self._login()
def __init__(self):
    """Object initialization."""
    self._name = PROVISIONER_KEY
    self.conf = PyConfigParser()
    self.poll_sleep = 30  # seconds
    self.keypair = None
    self.STATUS_MAP = {
        "Reserved": STATUS_ACTIVE,
        "New": STATUS_PROVISIONING,
        "Scheduled": STATUS_PROVISIONING,
        "Queued": STATUS_PROVISIONING,
        "Processed": STATUS_PROVISIONING,
        "Waiting": STATUS_PROVISIONING,
        "Installing": STATUS_PROVISIONING,
        "Running": STATUS_PROVISIONING,
        "Cancelled": STATUS_DELETED,
        "Aborted": STATUS_ERROR,
        "Completed": STATUS_OTHER,
    }
def __init__(self):
    """Object initialization."""
    self._name = PROVISIONER_KEY
    self.dsp_name = "Beaker"
    self.strategy = STRATEGY_ABORT
    self.conf = PyConfigParser()
    self.poll_sleep = 30  # seconds
    self.pubkey = None
    self.status_map = {
        "Reserved": STATUS_ACTIVE,
        "New": STATUS_PROVISIONING,
        "Scheduled": STATUS_PROVISIONING,
        "Queued": STATUS_PROVISIONING,
        "Processed": STATUS_PROVISIONING,
        "Waiting": STATUS_PROVISIONING,
        "Installing": STATUS_PROVISIONING,
        "Running": STATUS_PROVISIONING,
        "Cancelled": STATUS_DELETED,
        "Aborted": STATUS_ERROR,
        "Completed": STATUS_OTHER,
    }
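
# Illustrative sketch (not part of the original sources): translating a raw
# Beaker recipe status into the provider's generic status via the status_map
# built above. The STATUS_OTHER fallback for unknown states is an assumption
# made for this example, not something the original code specifies.
def map_status(provider, beaker_status):
    """Return the generic provisioning status for a Beaker recipe status string."""
    return provider.status_map.get(beaker_status, STATUS_OTHER)

# e.g. map_status(provider, "Installing") -> STATUS_PROVISIONING
#      map_status(provider, "Reserved")   -> STATUS_ACTIVE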
class ClientCommandContainer(CommandContainer):

    def __init__(self, conf, **kwargs):
        self.conf = PyConfigParser()
        self.conf.load_from_conf(conf)
        self.conf.load_from_dict(kwargs)

    def set_hub(self, username=None, password=None, auto_login=True, proxy_user=None):
        if username:
            if password is None:
                password = password_prompt(default_value=password)
            self.conf["AUTH_METHOD"] = "password"
            self.conf["USERNAME"] = username
            self.conf["PASSWORD"] = password
        if proxy_user:
            self.conf["PROXY_USER"] = proxy_user

        cacert = self.conf.get('CA_CERT')
        if cacert and not os.path.exists(cacert):
            raise BeakerClientConfigurationError(
                'CA_CERT configuration points to non-existing file: %s' % cacert)

        self.hub = HubProxy(conf=self.conf, auto_login=auto_login)
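
# Illustrative usage sketch (not part of the original sources): wiring a
# ClientCommandContainer up for password authentication. The config path and
# credentials are placeholders; set_hub() stores them in the config and then
# builds the HubProxy that the individual client commands use.
conf = PyConfigParser()
conf.load_from_file('/etc/beaker/client.conf')   # placeholder path
container = ClientCommandContainer(conf)
container.set_hub(username='jdoe', password='secret', auto_login=True)
# container.hub is now a HubProxy; XML-RPC namespaces such as taskactions
# are reachable through it, e.g. container.hub.taskactions.to_xml(...)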
def __init__(self, conf, client_type=None, logger=None, transport=None,
             auto_login=True, timeout=120, **kwargs):
    self._conf = PyConfigParser()
    self._hub = None

    # load default config
    default_config = os.path.abspath(os.path.join(os.path.dirname(__file__), "default.conf"))
    self._conf.load_from_file(default_config)

    # update config with another one
    if conf is not None:
        self._conf.load_from_conf(conf)

    # update config with kwargs
    self._conf.load_from_dict(kwargs)

    # initialize properties
    self._client_type = client_type or "client"
    self._hub_url = self._conf["HUB_URL"]
    self._auth_method = self._conf["AUTH_METHOD"]
    self._logger = logger
    self._logged_in = False

    if transport is not None:
        self._transport = transport
    else:
        transport_args = {'timeout': timeout}
        if self._hub_url.startswith("https://"):
            TransportClass = retry_request_decorator(SafeCookieTransport)
            if hasattr(ssl, 'create_default_context') and self._conf.get('CA_CERT'):
                ssl_context = ssl.create_default_context()
                ssl_context.load_verify_locations(cafile=self._conf['CA_CERT'])
                transport_args['context'] = ssl_context
            elif (hasattr(ssl, '_create_unverified_context')
                  and not self._conf.get('SSL_VERIFY', True)):
                # Python 2.6 doesn't have context argument for xmlrpclib.ServerProxy
                # therefore transport needs to be modified
                ssl_context = ssl._create_unverified_context()
                transport_args['context'] = ssl_context
        else:
            TransportClass = retry_request_decorator(CookieTransport)
        self._transport = TransportClass(**transport_args)

    self._hub = xmlrpclib.ServerProxy(
        "%s/%s/" % (self._hub_url, self._client_type),
        allow_none=True, transport=self._transport,
        verbose=self._conf.get("DEBUG_XMLRPC"))
    if auto_login:
        self._login()
def __init__(
    self,
    conf,
    client_type=None,
    logger=None,
    transport=None,
    auto_login=True,
    auto_logout=True,
    **kwargs
):
    self._conf = PyConfigParser()
    self._hub = None

    # load default config
    default_config = os.path.abspath(os.path.join(os.path.dirname(__file__), "default.conf"))
    self._conf.load_from_file(default_config)

    # update config with another one
    if conf is not None:
        self._conf.load_from_conf(conf)

    # update config with kwargs
    self._conf.load_from_dict(kwargs)

    # initialize properties
    self._client_type = client_type or "client"
    self._hub_url = self._conf["HUB_URL"]
    self._auth_method = self._conf["AUTH_METHOD"]
    self._auto_logout = auto_logout
    self._logger = logger
    self._logged_in = False

    if transport is not None:
        self._transport = transport
    else:
        transport_args = {}
        if self._hub_url.startswith("https://"):
            TransportClass = retry_request_decorator(SafeCookieTransport)
            if hasattr(ssl, "create_default_context") and self._conf.get("CA_CERT"):
                ssl_context = ssl.create_default_context()
                ssl_context.load_verify_locations(cafile=self._conf["CA_CERT"])
                transport_args["context"] = ssl_context
        else:
            TransportClass = retry_request_decorator(CookieTransport)
        self._transport = TransportClass(**transport_args)

    self._hub = xmlrpclib.ServerProxy(
        "%s/%s/" % (self._hub_url, self._client_type),
        allow_none=True,
        transport=self._transport,
        verbose=self._conf.get("DEBUG_XMLRPC"),
    )
    if auto_login:
        self._login()
def __init__(self, conf, **kwargs):
    self.conf = PyConfigParser()
    self.conf.load_from_conf(conf)
    self.conf.load_from_dict(kwargs)
class BeakerProvider(Provider):
    """Beaker Provider."""

    def __init__(self):
        """Object initialization."""
        self._name = PROVISIONER_KEY
        self.dsp_name = "Beaker"
        self.strategy = STRATEGY_ABORT
        self.conf = PyConfigParser()
        self.poll_sleep = 30  # seconds
        self.pubkey = None
        self.status_map = {
            "Reserved": STATUS_ACTIVE,
            "New": STATUS_PROVISIONING,
            "Scheduled": STATUS_PROVISIONING,
            "Queued": STATUS_PROVISIONING,
            "Processed": STATUS_PROVISIONING,
            "Waiting": STATUS_PROVISIONING,
            "Installing": STATUS_PROVISIONING,
            "Running": STATUS_PROVISIONING,
            "Cancelled": STATUS_DELETED,
            "Aborted": STATUS_ERROR,
            "Completed": STATUS_OTHER,
        }

    async def init(self, distros, max_attempts, reserve_duration, pubkey):
        """Initialize provider with data from Beaker configuration."""
        logger.info(f"{self.dsp_name}: Initializing provider")
        self.distros = distros
        # eg: 240 attempts * 30s timeout - 2h timeout for job to complete
        self.max_attempts = max_attempts
        self.reserve_duration = reserve_duration
        self.pubkey = pubkey
        login_start = datetime.now()
        default_config = os.path.expanduser(
            os.environ.get("BEAKER_CONF", "/etc/beaker/client.conf")  # TODO use provc
        )  # get the beaker config for initialization of hub
        self.conf.load_from_file(default_config)
        self.hub = HubProxy(logger=logger, conf=self.conf)
        login_end = datetime.now()
        login_duration = login_end - login_start
        logger.info(f"{self.dsp_name}: Init duration {login_duration}")

    async def validate_hosts(self, reqs):
        """Validate that host requirements are well specified."""
        for req in reqs:
            req_dstr = req.get("distro")
            if not req.get("meta_distro") and req_dstr not in self.distros:
                raise ValidationError(
                    f"{self.dsp_name} provider does not support "
                    f"'{req_dstr}' distro in provisioning config"
                )
        return

    async def prepare_provisioning(self, reqs):
        """Prepare provisioning."""
        pass

    async def can_provision(self, hosts):
        """Check that hosts can be provisioned."""
        return True

    def _allow_ssh_key(self, pubkey):
        with open(os.path.expanduser(pubkey), "r") as key_file:
            key_content = key_file.read()
        return [
            """%%post
mkdir -p /root/.ssh
cat >>/root/.ssh/authorized_keys << "__EOF__"
%s__EOF__
restorecon -R /root/.ssh
chmod go-w /root /root/.ssh /root/.ssh/authorized_keys
%%end"""
            % "".join(key_content)
        ]

    def _req_to_bkr_job(self, req):
        """Transform requirement to beaker job xml."""
        specs = deepcopy(req)  # work with own copy, do not modify the input

        # Job attributes:
        specs.update({"retention_tag": "audit"})
        specs.update({"product": "[internal]"})
        specs.update({"whiteboard": "This job has been created using mrack."})

        # RecipeSet attributes
        specs.update({"priority": "Normal"})

        # Add allowed keys
        specs.update({"ks_append": self._allow_ssh_key(self.pubkey)})

        # Use ks_meta
        specs.update({"ks_meta": "harness='restraint-rhts beakerlib-redhat'"})

        # Recipe task definition
        specs.update(
            {   # we use a dummy task because Beaker requires a task in the recipe
                "tasks": [{
                    "name": "/distribution/dummy",
                    "role": "STANDALONE"
                }]
            }
        )

        # Create recipe with the specifications
        recipe = BeakerRecipe(**specs)
        recipe.addBaseRequires(**specs)

        # Specify the architecture
        arch_node = xml_doc().createElement("distro_arch")
        arch_node.setAttribute("op", "=")
        arch_node.setAttribute("value", specs["arch"])
        recipe.addDistroRequires(arch_node)

        # Add ReserveSys element to reserve system after provisioning
        recipe.addReservesys(duration=str(self.reserve_duration))

        for task in specs["tasks"]:
            recipe.addTask(task=task["name"], role=task["role"])

        # Create RecipeSet and add our Recipe to it.
        recipe_set = BeakerRecipeSet(**specs)
        recipe_set.addRecipe(recipe)

        # Create job instance and inject created RecipeSet to it
        job = BeakerJob(**specs)
        job.addRecipeSet(recipe_set)

        return job

    async def create_server(self, req):
        """Issue creation of a server.

        req - dict of server requirements

        The req object can contain following additional attributes:
        * 'name': name for the VM
        * 'distro': beaker distribution to use
        * 'arch': architecture to request from beaker
        * 'variant': variant of the system
        """
        logger.info(f"{self.dsp_name}: Creating server")
        job = self._req_to_bkr_job(req)  # Generate the job
        job_id = self.hub.jobs.upload(job.toxml())  # schedule beaker job
        return (job_id, req["name"])

    def prov_result_to_host_data(self, prov_result):
        """Transform provisioning result to needed host data."""
        try:
            ip_address = socket.gethostbyname(prov_result["system"])
        except socket.gaierror:
            ip_address = None

        result = {
            "id": prov_result["JobID"],
            "name": prov_result["req_name"],
            "addresses": [ip_address],
            "status": prov_result["status"],
            "fault": prov_result["result"] if prov_result["result"] != "Pass" else None,
        }
        return result

    def _get_recipe_info(self, beaker_id):
        """Get info about the recipe for beaker job id."""
        bkr_job_xml = self.hub.taskactions.to_xml(beaker_id).encode("utf8")

        resources = []
        for recipe in eTree.fromstring(bkr_job_xml).iter("recipe"):
            resources.append({
                "system": recipe.get("system"),
                "status": recipe.get("status"),
                "result": recipe.get("result"),
                "rid": recipe.get("id"),
                "id": recipe.get("job_id"),
            })
        return resources[0] if len(resources) == 1 else []

    async def wait_till_provisioned(self, resource):
        """Wait for Beaker provisioning result."""
        beaker_id, req_name = resource
        resource = {}
        attempts = 0
        prev_status = ""

        while attempts < self.max_attempts:
            attempts += 1
            resource = self._get_recipe_info(beaker_id)
            status = resource["status"]

            if prev_status != status:
                logger.info(
                    f"{self.dsp_name}: Job {beaker_id} has changed "
                    f"status ({prev_status} -> {status})"
                )
                prev_status = status

            if self.status_map.get(status) == STATUS_PROVISIONING:
                await asyncio.sleep(self.poll_sleep)
            elif self.status_map.get(status) == STATUS_ACTIVE:
                break
            elif self.status_map.get(status) in [STATUS_ERROR, STATUS_DELETED]:
                logger.warning(
                    f"{self.dsp_name}: Job {beaker_id} has errored with status "
                    f"{status} and result {resource['result']}"
                )
                break
            else:
                logger.error(
                    f"{self.dsp_name}: Job {beaker_id} has switched to unexpected "
                    f"status {status} with result {resource['result']}"
                )
                break

        resource.update({"JobID": beaker_id, "req_name": req_name})
        return resource

    async def delete_host(self, host_id):
        """Delete provisioned hosts based on input from provision_hosts."""
        logger.info(
            f"{self.dsp_name}: Deleting host by cancelling Job {host_id}")
        return self.hub.taskactions.stop(
            host_id, "cancel", "Job has been stopped by mrack.")

    def to_host(self, provisioning_result, username=None):
        """Transform provisioning result into Host object."""
        return super().to_host(provisioning_result, username="******")
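
# Illustrative sketch (not part of the original sources): how the provider's
# coroutines fit together for a single host. The requirement keys mirror those
# consumed by _req_to_bkr_job(); the distro, arch, and pubkey values are
# placeholders chosen for this example.
async def provision_one_host(provider):
    await provider.init(
        distros=["Fedora-38"],        # distros allowed by the provisioning config
        max_attempts=240,             # 240 * 30s poll_sleep ~= 2h budget
        reserve_duration=86400,       # reserve the machine for one day
        pubkey="~/.ssh/id_rsa.pub",
    )
    req = {"name": "host-1", "distro": "Fedora-38",
           "arch": "x86_64", "variant": "Server"}
    await provider.validate_hosts([req])
    job_ref = await provider.create_server(req)          # (job_id, req name)
    result = await provider.wait_till_provisioned(job_ref)
    return provider.prov_result_to_host_data(result)

# e.g. asyncio.run(provision_one_host(BeakerProvider()))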
class HubProxy(object):
    """A Hub client (thin ServerProxy wrapper)."""

    def __init__(self, conf, client_type=None, logger=None, transport=None,
                 auto_login=True, timeout=120, **kwargs):
        self._conf = PyConfigParser()
        self._hub = None

        # load default config
        default_config = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "default.conf"))
        self._conf.load_from_file(default_config)

        # update config with another one
        if conf is not None:
            self._conf.load_from_conf(conf)

        # update config with kwargs
        self._conf.load_from_dict(kwargs)

        # initialize properties
        self._client_type = client_type or "client"
        self._hub_url = self._conf["HUB_URL"]
        self._auth_method = self._conf["AUTH_METHOD"]
        self._logger = logger
        self._logged_in = False

        if transport is not None:
            self._transport = transport
        else:
            transport_args = {'timeout': timeout}
            if self._hub_url.startswith("https://"):
                TransportClass = retry_request_decorator(SafeCookieTransport)
                if hasattr(ssl, 'create_default_context') and self._conf.get(
                        'CA_CERT'):
                    ssl_context = ssl.create_default_context()
                    ssl_context.load_verify_locations(
                        cafile=self._conf['CA_CERT'])
                    transport_args['context'] = ssl_context
                elif (hasattr(ssl, '_create_unverified_context')
                      and not self._conf.get('SSL_VERIFY', True)):
                    # Python 2.6 doesn't have context argument for xmlrpclib.ServerProxy
                    # therefore transport needs to be modified
                    ssl_context = ssl._create_unverified_context()
                    transport_args['context'] = ssl_context
            else:
                TransportClass = retry_request_decorator(CookieTransport)
            self._transport = TransportClass(**transport_args)

        self._hub = xmlrpc_client.ServerProxy(
            "%s/%s/" % (self._hub_url, self._client_type),
            allow_none=True, transport=self._transport,
            verbose=self._conf.get("DEBUG_XMLRPC"))
        if auto_login:
            self._login()

    def __del__(self):
        if hasattr(self._transport, "retry_count"):
            self._transport.retry_count = 0

    def __getattr__(self, name):
        try:
            return getattr(self._hub, name)
        except:
            raise AttributeError("'%s' object has no attribute '%s'"
                                 % (self.__class__.__name__, name))

    def _login(self, force=False):
        """Login to the hub.

        - self._hub instance is created in this method
        - session information is stored in a cookie in self._transport
        """
        if self._auth_method == "none" or not self._auth_method:
            return

        login_method_name = "_login_%s" % self._auth_method
        if not hasattr(self, login_method_name):
            raise ImproperlyConfigured(
                "Unknown authentication method: %s" % self._auth_method)

        self._logger and self._logger.info("Creating new session...")
        try:
            login_method = getattr(self, login_method_name)
            login_method()
            self._logged_in = True
        except KeyboardInterrupt:
            raise
        except Exception as ex:
            self._logger and self._logger.error(
                "Failed to create new session: %s" % ex)
            raise
        else:
            self._logger and self._logger.info("New session created.")

    def _logout(self):
        """No-op for backwards compatibility."""
        pass

    def _login_password(self):
        """Login using username and password."""
        username = self._conf.get("USERNAME")
        password = self._conf.get("PASSWORD")
        proxyuser = self._conf.get("PROXY_USER")
        if not username:
            raise AuthenticationError("USERNAME is not set")
        self._hub.auth.login_password(username, password, proxyuser)

    def _login_oauth2(self):
        """Login using OAuth2 access token."""
        access_token = self._conf.get("ACCESS_TOKEN")
        if not access_token:
            raise AuthenticationError("ACCESS_TOKEN is not set")
        self._hub.auth.login_oauth2(access_token)

    def _login_krbv(self):
        """
        Login using kerberos credentials (uses python-gssapi).
""" def get_server_principal(service=None, realm=None): """ Convert hub url to kerberos principal. """ hostname = urlparse.urlparse(self._hub_url)[1] # remove port from hostname hostname = hostname.split(":")[0] if realm is None: # guess realm: last two parts from hostname realm = ".".join(hostname.split(".")[-2:]).upper() if service is None: service = "HTTP" return '%s/%s@%s' % (service, hostname, realm) # read default values from settings principal = self._conf.get("KRB_PRINCIPAL") keytab = self._conf.get("KRB_KEYTAB") service = self._conf.get("KRB_SERVICE") realm = self._conf.get("KRB_REALM") ccache = self._conf.get("KRB_CCACHE") proxyuser = self._conf.get("PROXY_USER") krb5kdc_err_s_principal_unknown = 2529638919 # Server not found in Kerberos database name = None if principal: name = gssapi.Name(principal, gssapi.NameType.kerberos_principal) store = None # Default ccache if keytab: # Make sure we are using always APP ccache or user specified ccache # instead of MIT krb5 default one with keytabs. Default ccache can be occupied by # system application store = { 'client_keytab': keytab, 'ccache': ccache or tempfile.NamedTemporaryFile(prefix='krb5cc_bkr_').name } elif ccache: store = {'ccache': ccache} creds = gssapi.Credentials(name=name, store=store, usage='initiate') target_name = gssapi.Name(get_server_principal(service, realm)) try: res = gssapi.raw.init_sec_context( target_name, creds, flags=( gssapi.RequirementFlag.out_of_sequence_detection | gssapi.RequirementFlag.replay_detection | gssapi.RequirementFlag.mutual_authentication | # This is a hack which causes GSSAPI to give us back a raw # KRB_AP_REQ token value, without GSSAPI header wrapping, which # is what the Beaker server is expecting in auth.login_krbv. gssapi.RequirementFlag.dce_style)) except gssapi.raw.GSSError as ex: if ex.min_code == krb5kdc_err_s_principal_unknown: # pylint: disable=no-member ex.message += ". Make sure you correctly set KRB_REALM (current value: %s)." % realm ex.args = (ex.message, ) raise ex if six.PY2: req_enc = base64.encodestring(res.token) else: req_enc = base64.encodebytes(res.token) # pylint: disable=maybe-no-member try: req_enc = str(req_enc, 'utf-8') # bytes to string except TypeError: pass self._hub.auth.login_krbv(req_enc, proxyuser)
def __init__(self, params={}, logger=None):
    self.__dict__ = params.copy()
    self.conf = PyConfigParser()
    default_config = os.path.expanduser(BEAKER_CONF)
    self.conf.load_from_file(default_config)
    self.hub = HubProxy(logger=logger, conf=self.conf)
class HubProxy(object):
    """A Hub client (thin ServerProxy wrapper)."""

    def __init__(self, conf, client_type=None, logger=None, transport=None,
                 auto_login=True, timeout=120, **kwargs):
        self._conf = PyConfigParser()
        self._hub = None

        # load default config
        default_config = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "default.conf"))
        self._conf.load_from_file(default_config)

        # update config with another one
        if conf is not None:
            self._conf.load_from_conf(conf)

        # update config with kwargs
        self._conf.load_from_dict(kwargs)

        # initialize properties
        self._client_type = client_type or "client"
        self._hub_url = self._conf["HUB_URL"]
        self._auth_method = self._conf["AUTH_METHOD"]
        self._logger = logger
        self._logged_in = False

        if transport is not None:
            self._transport = transport
        else:
            transport_args = {'timeout': timeout}
            if self._hub_url.startswith("https://"):
                TransportClass = retry_request_decorator(SafeCookieTransport)
                if hasattr(ssl, 'create_default_context') and self._conf.get(
                        'CA_CERT'):
                    ssl_context = ssl.create_default_context()
                    ssl_context.load_verify_locations(
                        cafile=self._conf['CA_CERT'])
                    transport_args['context'] = ssl_context
            else:
                TransportClass = retry_request_decorator(CookieTransport)
            self._transport = TransportClass(**transport_args)

        self._hub = xmlrpclib.ServerProxy(
            "%s/%s/" % (self._hub_url, self._client_type),
            allow_none=True, transport=self._transport,
            verbose=self._conf.get("DEBUG_XMLRPC"))
        if auto_login:
            self._login()

    def __del__(self):
        if hasattr(self._transport, "retry_count"):
            self._transport.retry_count = 0

    def __getattr__(self, name):
        try:
            return getattr(self._hub, name)
        except:
            raise AttributeError("'%s' object has no attribute '%s'"
                                 % (self.__class__.__name__, name))

    def _login(self, force=False):
        """Login to the hub.

        - self._hub instance is created in this method
        - session information is stored in a cookie in self._transport
        """
        if self._auth_method == "none" or not self._auth_method:
            return

        login_method_name = "_login_%s" % self._auth_method
        if not hasattr(self, login_method_name):
            raise ImproperlyConfigured(
                "Unknown authentication method: %s" % self._auth_method)

        self._logger and self._logger.info("Creating new session...")
        try:
            login_method = getattr(self, login_method_name)
            login_method()
            self._logged_in = True
        except KeyboardInterrupt:
            raise
        except Exception, ex:
            self._logger and self._logger.error(
                "Failed to create new session: %s" % ex)
            raise
        else:
            self._logger and self._logger.info("New session created.")
config_file = os.environ.get("BEAKER_CLIENT_CONF", None)
if not config_file:
    user_conf = os.path.expanduser('~/.beaker_client/config')
    old_conf = os.path.expanduser('~/.beaker')
    if os.path.exists(user_conf):
        config_file = user_conf
    elif os.path.exists(old_conf):
        config_file = old_conf
        sys.stderr.write("%s is deprecated for config, please use %s instead\n"
                         % (old_conf, user_conf))
    elif os.path.exists('/etc/beaker/client.conf'):
        config_file = "/etc/beaker/client.conf"
    else:
        pass

conf = PyConfigParser()
if config_file:
    conf.load_from_file(config_file)

_host_filter_presets = None


def host_filter_presets():
    global _host_filter_presets
    if _host_filter_presets is not None:
        return _host_filter_presets
    _host_filter_presets = {}
    config_files = \
        sorted(glob.glob(pkg_resources.resource_filename('bkr.client', 'host-filters/*.conf'))) + \
        sorted(glob.glob('/etc/beaker/host-filters/*.conf'))
    user_config_file = os.path.expanduser('~/.beaker_client/host-filter')
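
# Illustrative sketch (not part of the original sources): the module-level
# `conf` resolved above is what client code hands to HubProxy. Exporting
# BEAKER_CLIENT_CONF before this lookup runs points it at a different file;
# the path below is a placeholder.
#
#   export BEAKER_CLIENT_CONF=/tmp/my-client.conf
#
hub = HubProxy(conf=conf)   # conf as resolved by the lookup above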
class BeakerTargets(object):
    def __init__(self, params, logger=None):
        # params from AnsibleModule argument_spec below
        self.ids = params['ids']
        provision_params = params['provision_params']
        # Set wait methods from provision params, with reasonable defaults
        self.wait_time = provision_params.get('attempt_wait_time', 60)
        self.max_attempts = provision_params.get('max_attempts', 60)
        # set up beaker connection
        self.conf = PyConfigParser()
        default_config = os.path.expanduser(BEAKER_CONF)
        self.conf.load_from_file(default_config)
        self.hub = HubProxy(logger=logger, conf=self.conf)

    def get_system_statuses(self):
        """
        Checks on the status of a set of Beaker jobs (ids) and returns their
        hostname once the jobs have reached their defined status.
        """
        attempts = 0
        while attempts < self.max_attempts:
            job_results, all_count = self._check_jobs()
            pass_count = 0
            for resource in job_results:
                result = resource['result']
                status = resource['status']
                print >> stderr, "status: %s, result: %s" % (status, result)
                if status not in ['Cancelled', 'Aborted']:
                    if result == 'Pass':
                        pass_count += 1
                    elif result in ['Fail', 'Warn', 'Panic', 'Completed']:
                        raise Exception("System failed with state"
                                        " '{0}'".format(result))
                elif status == 'Aborted':
                    raise Exception("System aborted")
                elif status == 'Cancelled':
                    raise Exception("System canceled")
            attempts += 1
            if pass_count == all_count:
                return job_results
            sleep(self.wait_time)
        # max attempts exceeded, cancel jobs and fail
        for job_id in _jprefix(self.ids):
            self.hub.taskactions.stop(job_id, 'cancel',
                                      'Provision request timed out')
        # Fail with error msg, include results from last attempt to include
        # in topology outputs even if provisioning failed so a destroy still
        # cancels jobs
        msg = ("{0} system(s) never completed in {1} polling attempts, jobs "
               "have been cancelled: {2}".format(all_count - pass_count,
                                                 attempts, ', '.join(self.ids)))
        raise Exception(msg, job_results)

    def _check_jobs(self):
        """
        Get state of a job in Beaker
        """
        jobs = _jprefix(self.ids)
        resources = []
        bkrcmd = BeakerCommand('BeakerCommand')
        bkrcmd.check_taskspec_args(jobs)
        for task in jobs:
            myxml = self.hub.taskactions.to_xml(task)
            myxml = myxml.encode('utf8')
            root = eT.fromstring(myxml)
            # Using getiterator() since its backward compatible with py26
            for recipe in root.getiterator('recipe'):
                resources.append({
                    'family': recipe.get('family'),
                    'distro': recipe.get('distro'),
                    'arch': recipe.get('arch'),
                    'variant': recipe.get('variant'),
                    'system': recipe.get('system'),
                    'status': recipe.get('status'),
                    'result': recipe.get('result'),
                    'id': recipe.get('job_id')
                })
        return resources, len(resources)
class BeakerTargets(object):
    def __init__(self, params={}, logger=None):
        self.__dict__ = params.copy()
        self.conf = PyConfigParser()
        default_config = os.path.expanduser(BEAKER_CONF)
        self.conf.load_from_file(default_config)
        self.hub = HubProxy(logger=logger, conf=self.conf)

    def _get_url(self, bkr_id):
        """
        Constructs the Beaker URL for the job related to the provided Beaker
        ID. That ID should be all numeric, unless the structure of Beaker
        changes in the future. If that's the case, then the ID should be
        appropriately URL encoded to be appended to the end of a URL properly.
        """
        base = self.conf.get('HUB_URL', '')
        if base == '':
            raise Exception("Unable to construct URL")
        if base[-1] != '/':
            base += '/'
        return base + 'jobs/' + bkr_id

    def get_system_statuses(self):
        """
        Checks on the status of a set of Beaker jobs (ids) and returns their
        hostname once the jobs have reached their defined status.
        """
        attempts = 0
        pass_count = 0
        all_count = len(self.ids)
        while attempts < self.max_attempts:
            job_results = self._check_jobs(self.ids)
            pass_count = 0
            for resource in job_results['resources']:
                result = resource['result']
                status = resource['status']
                print >> stderr, "status: %s, result: %s" % (status, result)
                if status not in ['Cancelled', 'Aborted']:
                    if (result == 'Pass'
                            or (result == 'Warn' and self.skip_no_system)):
                        pass_count += 1
                    elif result in ['Fail', 'Warn', 'Panic', 'Completed']:
                        raise Exception("System failed with state"
                                        " '{0}'".format(result))
                elif status == 'Aborted':
                    if result == 'Warn' and self.skip_no_system:
                        pass_count += 1
                    else:
                        raise Exception("System aborted")
                elif status == 'Cancelled':
                    raise Exception("System canceled")
            attempts += 1
            if pass_count == all_count:
                return job_results['resources']
            sleep(WAIT_TIME)
        raise Exception("{0} system(s) never completed in {1}"
                        " polling attempts. {2}".format(
                            all_count - pass_count, attempts,
                            dumps(job_results)))

    def _check_jobs(self, ids):
        """
        Get state of a job in Beaker
        """
        jobs = ["J:" + _id for _id in ids]
        results = {}
        resources = []
        bkrcmd = BeakerCommand('BeakerCommand')
        bkrcmd.check_taskspec_args(jobs)
        for task in jobs:
            myxml = self.hub.taskactions.to_xml(task)
            myxml = myxml.encode('utf8')
            root = eT.fromstring(myxml)
            # TODO: Using getiterator() since it's backward compatible
            # with Python 2.6
            # This is deprecated in 2.7 and we should be using iter()
            for job in root.getiterator('job'):
                results.update({
                    'job_id': job.get('id'),
                    'results': job.get('result')
                })
            for recipe in root.getiterator('recipe'):
                resources.append({
                    'family': recipe.get('family'),
                    'distro': recipe.get('distro'),
                    'arch': recipe.get('arch'),
                    'variant': recipe.get('variant'),
                    'system': recipe.get('system'),
                    'status': recipe.get('status'),
                    'result': recipe.get('result'),
                    'id': recipe.get('job_id')
                })
        results.update({'resources': resources})
        return results
class BeakerTargets(object):
    def __init__(self, params, logger=None):
        # params from AnsibleModule argument_spec below
        self.ids = params['ids']
        provision_params = params['provision_params']
        # Set wait methods from provision params, with reasonable defaults
        self.wait_time = provision_params.get('attempt_wait_time', 60)
        self.max_attempts = provision_params.get('max_attempts', 60)
        # set up beaker connection
        self.conf = PyConfigParser()
        default_config = os.path.expanduser(BEAKER_CONF)
        self.conf.load_from_file(default_config)
        self.hub = HubProxy(logger=logger, conf=self.conf)

    def get_system_statuses(self):
        """
        Checks on the status of a set of Beaker jobs (ids) and returns their
        hostname once the jobs have reached their defined status.
        """
        attempts = 0
        while attempts < self.max_attempts:
            job_results, all_count = self._check_jobs()
            pass_count = 0
            for resource in job_results:
                result = resource['result']
                status = resource['status']
                print >> stderr, "status: %s, result: %s" % (status, result)
                if status not in ['Cancelled', 'Aborted']:
                    if result == 'Pass':
                        pass_count += 1
                    elif result in ['Fail', 'Warn', 'Panic', 'Completed']:
                        raise Exception("System failed with state"
                                        " '{0}'".format(result))
                elif status == 'Aborted':
                    raise Exception("System aborted")
                elif status == 'Cancelled':
                    raise Exception("System canceled")
            attempts += 1
            if pass_count == all_count:
                return job_results
            sleep(self.wait_time)
        # max attempts exceeded, cancel jobs and fail
        for job_id in _jprefix(self.ids):
            self.hub.taskactions.stop(job_id, 'cancel',
                                      'Provision request timed out')
        # Fail with error msg, include results from last attempt to include
        # in topology outputs even if provisioning failed so a destroy still
        # cancels jobs
        msg = ("{0} system(s) never completed in {1} polling attempts, jobs "
               "have been cancelled: {2}".format(
                   all_count - pass_count, attempts, ', '.join(self.ids)))
        raise Exception(msg, job_results)

    def _check_jobs(self):
        """
        Get state of a job in Beaker
        """
        jobs = _jprefix(self.ids)
        resources = []
        bkrcmd = BeakerCommand('BeakerCommand')
        bkrcmd.check_taskspec_args(jobs)
        for task in jobs:
            myxml = self.hub.taskactions.to_xml(task)
            myxml = myxml.encode('utf8')
            root = eT.fromstring(myxml)
            # Using getiterator() since its backward compatible with py26
            for recipe in root.getiterator('recipe'):
                resources.append({'family': recipe.get('family'),
                                  'distro': recipe.get('distro'),
                                  'arch': recipe.get('arch'),
                                  'variant': recipe.get('variant'),
                                  'system': recipe.get('system'),
                                  'status': recipe.get('status'),
                                  'result': recipe.get('result'),
                                  'rid': recipe.get('id'),
                                  'id': recipe.get('job_id')})
        return resources, len(resources)
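
# Illustrative usage sketch (not part of the original sources): how the module
# code above might be exercised. The job ID is a placeholder and BEAKER_CONF is
# whatever client.conf path the surrounding module defines; get_system_statuses()
# blocks until all recipes pass, or cancels the jobs and raises on timeout.
params = {
    'ids': ['1234567'],              # hypothetical Beaker job ID(s)
    'provision_params': {
        'attempt_wait_time': 60,     # seconds between polls
        'max_attempts': 60,          # give up (and cancel) after about an hour
    },
}
targets = BeakerTargets(params)
results = targets.get_system_statuses()
for resource in results:
    print("%s %s %s" % (resource['system'], resource['status'], resource['result']))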
class HubProxy(object):
    """A Hub client (thin ServerProxy wrapper)."""

    def __init__(self, conf, client_type=None, logger=None, transport=None,
                 auto_login=True, timeout=120, **kwargs):
        self._conf = PyConfigParser()
        self._hub = None

        # load default config
        default_config = os.path.abspath(os.path.join(os.path.dirname(__file__), "default.conf"))
        self._conf.load_from_file(default_config)

        # update config with another one
        if conf is not None:
            self._conf.load_from_conf(conf)

        # update config with kwargs
        self._conf.load_from_dict(kwargs)

        # initialize properties
        self._client_type = client_type or "client"
        self._hub_url = self._conf["HUB_URL"]
        self._auth_method = self._conf["AUTH_METHOD"]
        self._logger = logger
        self._logged_in = False

        if transport is not None:
            self._transport = transport
        else:
            transport_args = {'timeout': timeout}
            if self._hub_url.startswith("https://"):
                TransportClass = retry_request_decorator(SafeCookieTransport)
                if hasattr(ssl, 'create_default_context') and self._conf.get('CA_CERT'):
                    ssl_context = ssl.create_default_context()
                    ssl_context.load_verify_locations(cafile=self._conf['CA_CERT'])
                    transport_args['context'] = ssl_context
            else:
                TransportClass = retry_request_decorator(CookieTransport)
            self._transport = TransportClass(**transport_args)

        self._hub = xmlrpclib.ServerProxy(
            "%s/%s/" % (self._hub_url, self._client_type),
            allow_none=True, transport=self._transport,
            verbose=self._conf.get("DEBUG_XMLRPC"))
        if auto_login:
            self._login()

    def __del__(self):
        if hasattr(self._transport, "retry_count"):
            self._transport.retry_count = 0

    def __getattr__(self, name):
        try:
            return getattr(self._hub, name)
        except:
            raise AttributeError("'%s' object has no attribute '%s'"
                                 % (self.__class__.__name__, name))

    def _login(self, force=False):
        """Login to the hub.

        - self._hub instance is created in this method
        - session information is stored in a cookie in self._transport
        """
        if self._auth_method == "none" or not self._auth_method:
            return

        login_method_name = "_login_%s" % self._auth_method
        if not hasattr(self, login_method_name):
            raise ImproperlyConfigured("Unknown authentication method: %s" % self._auth_method)

        self._logger and self._logger.info("Creating new session...")
        try:
            login_method = getattr(self, login_method_name)
            login_method()
            self._logged_in = True
        except KeyboardInterrupt:
            raise
        except Exception, ex:
            self._logger and self._logger.error("Failed to create new session: %s" % ex)
            raise
        else:
            self._logger and self._logger.info("New session created.")
if not user_config_file:
    user_conf = os.path.expanduser('~/.beaker_client/config')
    old_conf = os.path.expanduser('~/.beaker')
    if os.path.exists(user_conf):
        user_config_file = user_conf
    elif os.path.exists(old_conf):
        user_config_file = old_conf
        sys.stderr.write("%s is deprecated for config, please use %s instead\n"
                         % (old_conf, user_conf))
    else:
        pass

system_config_file = None
if os.path.exists('/etc/beaker/client.conf'):
    system_config_file = '/etc/beaker/client.conf'

conf = PyConfigParser()
if system_config_file:
    conf.load_from_file(system_config_file)
if user_config_file:
    conf.load_from_file(user_config_file)

_host_filter_presets = None


def host_filter_presets():
    global _host_filter_presets
    if _host_filter_presets is not None:
        return _host_filter_presets
    _host_filter_presets = {}
    config_files = \
        sorted(glob.glob(pkg_resources.resource_filename('bkr.client', 'host-filters/*.conf'))) + \
    old_conf = os.path.expanduser('~/.beaker')
    if os.path.exists(user_conf):
        user_config_file = user_conf
    elif os.path.exists(old_conf):
        user_config_file = old_conf
        sys.stderr.write(
            "%s is deprecated for config, please use %s instead\n"
            % (old_conf, user_conf))
    else:
        pass

system_config_file = None
if os.path.exists('/etc/beaker/client.conf'):
    system_config_file = '/etc/beaker/client.conf'

conf = PyConfigParser()
if system_config_file:
    conf.load_from_file(system_config_file)
if user_config_file:
    conf.load_from_file(user_config_file)

_host_filter_presets = None


def host_filter_presets():
    global _host_filter_presets
    if _host_filter_presets is not None:
        return _host_filter_presets
    _host_filter_presets = {}
    config_files = (sorted(
def __init__(self, logger=None, conf=None, **kwargs):
    self.conf = PyConfigParser()
    default_config = os.path.expanduser(BEAKER_CONF)
    self.conf.load_from_file(default_config)
    self.hub = HubProxy(logger=logger, conf=self.conf, **kwargs)
if not config_file:
    user_conf = os.path.expanduser('~/.beaker_client/config')
    old_conf = os.path.expanduser('~/.beaker')
    if os.path.exists(user_conf):
        config_file = user_conf
    elif os.path.exists(old_conf):
        config_file = old_conf
        sys.stderr.write(
            "%s is deprecated for config, please use %s instead\n"
            % (old_conf, user_conf))
    elif os.path.exists('/etc/beaker/client.conf'):
        config_file = "/etc/beaker/client.conf"
    else:
        pass

conf = PyConfigParser()
if config_file:
    conf.load_from_file(config_file)

_host_filter_presets = None


def host_filter_presets():
    global _host_filter_presets
    if _host_filter_presets is not None:
        return _host_filter_presets
    _host_filter_presets = {}
    config_files = \
        sorted(glob.glob(pkg_resources.resource_filename('bkr.client', 'host-filters/*.conf'))) + \
        sorted(glob.glob('/etc/beaker/host-filters/*.conf'))
class BeakerTargets(object):
    def __init__(self, params={}, logger=None):
        self.__dict__ = params.copy()
        self.conf = PyConfigParser()
        default_config = os.path.expanduser(BEAKER_CONF)
        self.conf.load_from_file(default_config)
        self.hub = HubProxy(logger=logger, conf=self.conf)

    def _get_url(self, bkr_id):
        """
        Constructs the Beaker URL for the job related to the provided Beaker
        ID. That ID should be all numeric, unless the structure of Beaker
        changes in the future. If that's the case, then the ID should be
        appropriately URL encoded to be appended to the end of a URL properly.
        """
        base = self.conf.get('HUB_URL', '')
        if base == '':
            raise Exception("Unable to construct URL")
        if base[-1] != '/':
            base += '/'
        return base + 'jobs/' + bkr_id

    def get_system_statuses(self):
        """
        Checks on the status of a set of Beaker jobs (ids) and returns their
        hostname once the jobs have reached their defined status.
        """
        attempts = 0
        pass_count = 0
        all_count = len(self.ids)
        while attempts < self.max_attempts:
            job_results = self._check_jobs(self.ids)
            pass_count = 0
            for resource in job_results['resources']:
                result = resource['result']
                status = resource['status']
                print >> stderr, "status: %s, result: %s" % (status, result)
                if status not in ['Cancelled', 'Aborted']:
                    if result == 'Pass' or (result == 'Warn' and self.skip_no_system):
                        pass_count += 1
                    elif result in ['Fail', 'Warn', 'Panic', 'Completed']:
                        raise Exception("System failed with state '{0}'"\
                                        .format(result))
                elif status == 'Aborted':
                    if result == 'Warn' and self.skip_no_system:
                        pass_count += 1
                    else:
                        raise Exception("System aborted")
                elif status == 'Cancelled':
                    raise Exception("System canceled")
            attempts += 1
            if pass_count == all_count:
                return job_results['resources']
            sleep(WAIT_TIME)
        raise Exception("{0} system(s) never completed in {1} polling attempts. {2}"\
                        .format(all_count - pass_count, attempts, dumps(job_results)))

    def _check_jobs(self, ids):
        """
        Get state of a job in Beaker
        """
        jobs = ["J:" + _id for _id in ids]
        results = {}
        resources = []
        bkrcmd = BeakerCommand('BeakerCommand')
        bkrcmd.check_taskspec_args(jobs)
        for task in jobs:
            myxml = self.hub.taskactions.to_xml(task)
            myxml = myxml.encode('utf8')
            root = eT.fromstring(myxml)
            # TODO: Using getiterator() since it's backward compatible
            # with Python 2.6
            # This is deprecated in 2.7 and we should be using iter()
            for job in root.getiterator('job'):
                results.update({'job_id': job.get('id'),
                                'results': job.get('result')})
            for recipe in root.getiterator('recipe'):
                resources.append({'family': recipe.get('family'),
                                  'distro': recipe.get('distro'),
                                  'arch': recipe.get('arch'),
                                  'variant': recipe.get('variant'),
                                  'system': recipe.get('system'),
                                  'status': recipe.get('status'),
                                  'result': recipe.get('result'),
                                  'id': recipe.get('job_id')})
        results.update({'resources': resources})
        return results
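
# Illustrative sketch (not part of the original sources): _get_url() simply
# joins HUB_URL with the jobs/ path, so a numeric job ID from the results can be
# turned into a browsable link. The ID and attribute values are placeholders;
# note that constructing BeakerTargets also logs into the hub via HubProxy.
targets = BeakerTargets({'ids': ['1234567'],
                         'max_attempts': 60,
                         'skip_no_system': False})
print(targets._get_url('1234567'))
# e.g. https://beaker.example.com/jobs/1234567 when HUB_URL is set accordingly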