Example #1
0
    def __init__(self, conf=None, hub=None, **kwargs):
        """Merge configuration sources, connect the hub, and set up log storage.

        Config precedence (lowest to highest): get_conf() defaults, *conf*,
        the file named by $BEAKER_PROXY_CONFIG_FILE, then **kwargs.

        :param conf: optional config object merged over the defaults
        :param hub: optional pre-built hub connection; a new HubProxy is
            created when None
        :param kwargs: extra config values; also forwarded to HubProxy
        """
        self.conf = get_conf()

        # update data from another config
        if conf is not None:
            self.conf.load_from_conf(conf)

        # update data from config specified in os.environ
        conf_environ_key = "BEAKER_PROXY_CONFIG_FILE"
        if conf_environ_key in os.environ:
            self.conf.load_from_file(os.environ[conf_environ_key])

        # keyword arguments take precedence over every other config source
        self.conf.load_from_dict(kwargs)

        # self.hub is created here
        self.hub = hub
        if self.hub is None:
            self.hub = HubProxy(
                logger=logging.getLogger('bkr.common.hub.HubProxy'),
                conf=self.conf,
                **kwargs)
        # Log base URL scheme defaults to http unless URL_SCHEME is configured.
        self.log_storage = LogStorage(
            self.conf.get("CACHEPATH"), "%s://%s/beaker/logs" %
            (self.conf.get('URL_SCHEME', 'http'), self.conf.get_url_domain()),
            self.hub)
Example #2
0
def return_reservation(rid):
    """Give the reservation identified by *rid* back to Beaker.

    Configures password authentication against the configured Beaker server,
    then asks the hub to extend the recipe by zero seconds (i.e. release it).
    """
    for key, value in (
            ('AUTH_METHOD', "password"),
            ('HUB_URL', 'https://%s' % settings.BEAKER_SERVER),
            ('USERNAME', settings.BEAKER_OWNER),
            ('PASSWORD', settings.BEAKER_PASS)):
        conf[key] = value
    beaker_hub = HubProxy(conf=conf)
    return beaker_hub.recipes.extend(rid, 0)
Example #3
0
    def __init__(self, params, logger=None):
        """Store job id and task from module params and open a Beaker hub
        connection."""
        # params from AnsibleModule argument_spec below
        self.jid = params['job_id']
        self.task = params['task']
        # set up beaker connection
        self.hub = HubProxy(conf=conf, logger=logger)
Example #4
0
    def set_hub(self, username=None, password=None, auto_login=True):
        """Create ``self.hub``, switching the config to password auth when a
        username is given (prompting when no password was supplied)."""
        if username:
            supplied = password
            if supplied is None:
                supplied = password_prompt(default_value=password)
            self.conf["AUTH_METHOD"] = "password"
            self.conf["USERNAME"] = username
            self.conf["PASSWORD"] = supplied

        self.hub = HubProxy(conf=self.conf, auto_login=auto_login)
Example #5
0
    def __init__(self, params, logger=None):
        """Read provisioning params and connect to Beaker using the default
        client configuration file."""
        # params from AnsibleModule argument_spec below
        self.ids = params['ids']
        provision_params = params['provision_params']

        # Wait behaviour comes from provision params, with sane fallbacks.
        self.wait_time = provision_params.get('attempt_wait_time', 60)
        self.max_attempts = provision_params.get('max_attempts', 60)

        # set up beaker connection
        self.conf = PyConfigParser()
        self.conf.load_from_file(os.path.expanduser(BEAKER_CONF))
        self.hub = HubProxy(logger=logger, conf=self.conf)
Example #6
0
    def set_hub(self, username=None, password=None, auto_login=True, proxy_user=None):
        """Build ``self.hub`` after folding credentials and proxy user into
        the config and validating the configured CA certificate path.

        Raises BeakerClientConfigurationError when CA_CERT points at a
        non-existing file.
        """
        if username:
            if password is None:
                password = password_prompt(default_value=password)
            for key, value in (("AUTH_METHOD", "password"),
                               ("USERNAME", username),
                               ("PASSWORD", password)):
                self.conf[key] = value
        if proxy_user:
            self.conf["PROXY_USER"] = proxy_user

        # Fail early when the configured CA certificate file is missing.
        cacert = self.conf.get('CA_CERT')
        if cacert and not os.path.exists(cacert):
            raise BeakerClientConfigurationError(
                'CA_CERT configuration points to non-existing file: %s' % cacert)

        self.hub = HubProxy(conf=self.conf, auto_login=auto_login)
Example #7
0
 async def init(self, max_attempts, reserve_duration, keypair):
     """Initialize provider with data from Beaker configuration."""
     logger.info("Initializing Beaker provider")
     # eg: 240 attempts * 30s timeout - 2h timeout for job to complete
     self.max_attempts = max_attempts
     self.reserve_duration = reserve_duration
     self.keypair = keypair
     login_start = datetime.now()
     # get the beaker config for initialization of hub
     config_file = os.environ.get("BEAKER_CONF", "/etc/beaker/client.conf")  # TODO use provc
     self.conf.load_from_file(os.path.expanduser(config_file))
     self.hub = HubProxy(logger=logger, conf=self.conf)
     login_duration = datetime.now() - login_start
     logger.info(f"Login duration {login_duration}")
Example #8
0
    def __init__(self, conf=None, hub=None, **kwargs):
        """Merge configuration sources, connect the hub, and set up log storage.

        Config precedence (lowest to highest): get_conf() defaults, *conf*,
        the file named by $BEAKER_PROXY_CONFIG_FILE, then **kwargs.

        :param conf: optional config object merged over the defaults
        :param hub: optional pre-built hub connection; a new HubProxy is
            created when None
        :param kwargs: extra config values; also forwarded to HubProxy
        """
        self.conf = get_conf()

        # update data from another config
        if conf is not None:
            self.conf.load_from_conf(conf)

        # update data from config specified in os.environ
        conf_environ_key = "BEAKER_PROXY_CONFIG_FILE"
        if conf_environ_key in os.environ:
            self.conf.load_from_file(os.environ[conf_environ_key])

        # keyword arguments take precedence over every other config source
        self.conf.load_from_dict(kwargs)

        # self.hub is created here
        self.hub = hub
        if self.hub is None:
            self.hub = HubProxy(logger=logging.getLogger('bkr.common.hub.HubProxy'), conf=self.conf,
                    **kwargs)
        # Log base URL scheme defaults to http unless URL_SCHEME is configured.
        self.log_storage = LogStorage(self.conf.get("CACHEPATH"),
                "%s://%s/beaker/logs" % (self.conf.get('URL_SCHEME',
                'http'), self.conf.get_url_domain()),
                self.hub)
Example #9
0
 def __init__(self, params=None, logger=None):
     """Copy *params* onto the instance and connect to the Beaker hub.

     :param params: optional dict of attributes to set on this instance;
         a fresh empty dict is used when omitted
     :param logger: optional logger forwarded to HubProxy
     """
     # Fix: the original used a mutable default argument (params={}),
     # a classic shared-state pitfall; use a None sentinel instead.
     self.__dict__ = {} if params is None else params.copy()
     self.conf = PyConfigParser()
     default_config = os.path.expanduser(BEAKER_CONF)
     self.conf.load_from_file(default_config)
     self.hub = HubProxy(logger=logger, conf=self.conf)
Example #10
0
class ProxyHelper(object):
    """Common base for Beaker proxy services.

    Owns the merged configuration, the XML-RPC hub connection, and the
    log storage used for recipe log uploads.
    """

    def __init__(self, conf=None, hub=None, **kwargs):
        """Merge config sources, connect the hub, and set up log storage.

        Config precedence (lowest to highest): get_conf() defaults, *conf*,
        the file named by $BEAKER_PROXY_CONFIG_FILE, then **kwargs.

        :param conf: optional config object merged over the defaults
        :param hub: optional pre-built hub; a new HubProxy is created when None
        :param kwargs: extra config values; also forwarded to HubProxy
        """
        self.conf = get_conf()

        # update data from another config
        if conf is not None:
            self.conf.load_from_conf(conf)

        # update data from config specified in os.environ
        conf_environ_key = "BEAKER_PROXY_CONFIG_FILE"
        if conf_environ_key in os.environ:
            self.conf.load_from_file(os.environ[conf_environ_key])

        # keyword arguments win over every other config source
        self.conf.load_from_dict(kwargs)

        # self.hub is created here
        self.hub = hub
        if self.hub is None:
            self.hub = HubProxy(
                logger=logging.getLogger('bkr.common.hub.HubProxy'),
                conf=self.conf,
                **kwargs)
        self.log_storage = LogStorage(
            self.conf.get("CACHEPATH"), "%s://%s/beaker/logs" %
            (self.conf.get('URL_SCHEME', 'http'), self.conf.get_url_domain()),
            self.hub)

    def close(self):
        """Close the hub's underlying transport (not available pre-2.7)."""
        if sys.version_info >= (2, 7):
            self.hub._hub('close')()

    def recipe_upload_file(self, recipe_id, path, name, size, md5sum, offset,
                           data):
        """ Upload a file in chunks
             path: the relative path to upload to
             name: the name of the file
             size: size of the contents (bytes)
             md5: md5sum (hex digest) of contents
             data: base64 encoded file contents
             offset: the offset of the chunk
            Files can be uploaded in chunks, if so the md5 and the size
            describe the chunk rather than the whole file.  The offset
            indicates where the chunk belongs
        """
        # Originally offset=-1 had special meaning, but that was unused
        logger.debug(
            "recipe_upload_file recipe_id:%s name:%s offset:%s size:%s",
            recipe_id, name, offset, size)
        with self.log_storage.recipe(str(recipe_id),
                                     os.path.join(path, name)) as log_file:
            # Fix: base64.decodestring() was removed in Python 3.9;
            # base64.b64decode() is the portable equivalent.
            log_file.update_chunk(base64.b64decode(data), int(offset or 0))
        return True

    def task_result(self,
                    task_id,
                    result_type,
                    result_path=None,
                    result_score=None,
                    result_summary=None):
        """ report a result to the scheduler """
        logger.debug("task_result %s", task_id)
        return self.hub.recipes.tasks.result(task_id, result_type, result_path,
                                             result_score, result_summary)

    def task_info(self, qtask_id):
        """ accepts qualified task_id J:213 RS:1234 R:312 T:1234 etc.. Returns dict with status """
        logger.debug("task_info %s", qtask_id)
        return self.hub.taskactions.task_info(qtask_id)

    def recipe_stop(self, recipe_id, stop_type, msg=None):
        """ tell the scheduler that we are stopping this recipe
            stop_type = ['abort', 'cancel']
            msg to record
        """
        logger.debug("recipe_stop %s", recipe_id)
        return self.hub.recipes.stop(recipe_id, stop_type, msg)

    def recipeset_stop(self, recipeset_id, stop_type, msg=None):
        """ tell the scheduler that we are stopping this recipeset
            stop_type = ['abort', 'cancel']
            msg to record
        """
        logger.debug("recipeset_stop %s", recipeset_id)
        return self.hub.recipesets.stop(recipeset_id, stop_type, msg)

    def job_stop(self, job_id, stop_type, msg=None):
        """ tell the scheduler that we are stopping this job
            stop_type = ['abort', 'cancel']
            msg to record
        """
        logger.debug("job_stop %s", job_id)
        return self.hub.jobs.stop(job_id, stop_type, msg)

    def get_my_recipe(self, request):
        """
        Accepts a dict with key 'recipe_id'. Returns an XML document for the
        recipe with that id.
        """
        # NOTE: returns None when 'recipe_id' is absent, matching the
        # original behaviour.
        if 'recipe_id' in request:
            logger.debug("get_recipe recipe_id:%s", request['recipe_id'])
            return self.hub.recipes.to_xml(request['recipe_id'])

    def get_peer_roles(self, task_id):
        """Return the peer-role mapping for the given task from the hub."""
        logger.debug('get_peer_roles %s', task_id)
        return self.hub.recipes.tasks.peer_roles(task_id)

    def extend_watchdog(self, task_id, kill_time):
        """ tell the scheduler to extend the watchdog by kill_time seconds
        """
        logger.debug("extend_watchdog %s %s", task_id, kill_time)
        return self.hub.recipes.tasks.extend(task_id, kill_time)

    def task_to_dict(self, task_name):
        """ returns metadata about task_name from the TaskLibrary
        """
        return self.hub.tasks.to_dict(task_name)

    def get_console_log(self, recipe_id, length=None):
        """
        Get console log from the OpenStack instance
        """
        return self.hub.recipes.console_output(recipe_id, length)
Example #11
0
 def __init__(self, logger=None, conf=None, **kwargs):
     """Load the default Beaker client config and open a hub connection.

     NOTE(review): the *conf* argument is accepted but ignored, exactly as
     in the original implementation — the default config file always wins.
     """
     parser = PyConfigParser()
     parser.load_from_file(os.path.expanduser(BEAKER_CONF))
     self.conf = parser
     self.hub = HubProxy(logger=logger, conf=self.conf, **kwargs)
Example #12
0
class ProxyHelper(object):
    """Common base for Beaker proxy services.

    Owns the merged configuration, the XML-RPC hub connection, and the
    log storage used for recipe log uploads.
    """

    def __init__(self, conf=None, hub=None, **kwargs):
        """Merge config sources, connect the hub, and set up log storage.

        Config precedence (lowest to highest): get_conf() defaults, *conf*,
        the file named by $BEAKER_PROXY_CONFIG_FILE, then **kwargs.

        :param conf: optional config object merged over the defaults
        :param hub: optional pre-built hub; a new HubProxy is created when None
        :param kwargs: extra config values; also forwarded to HubProxy
        """
        self.conf = get_conf()

        # update data from another config
        if conf is not None:
            self.conf.load_from_conf(conf)

        # update data from config specified in os.environ
        conf_environ_key = "BEAKER_PROXY_CONFIG_FILE"
        if conf_environ_key in os.environ:
            self.conf.load_from_file(os.environ[conf_environ_key])

        # keyword arguments win over every other config source
        self.conf.load_from_dict(kwargs)

        # self.hub is created here
        self.hub = hub
        if self.hub is None:
            self.hub = HubProxy(logger=logging.getLogger('bkr.common.hub.HubProxy'), conf=self.conf,
                    **kwargs)
        self.log_storage = LogStorage(self.conf.get("CACHEPATH"),
                "%s://%s/beaker/logs" % (self.conf.get('URL_SCHEME',
                'http'), self.conf.get_url_domain()),
                self.hub)

    def close(self):
        """Close the hub's underlying transport (not available pre-2.7)."""
        if sys.version_info >= (2, 7):
            self.hub._hub('close')()

    def recipe_upload_file(self, recipe_id, path, name, size, md5sum, offset,
                           data):
        """ Upload a file in chunks
             path: the relative path to upload to
             name: the name of the file
             size: size of the contents (bytes)
             md5: md5sum (hex digest) of contents
             data: base64 encoded file contents
             offset: the offset of the chunk
            Files can be uploaded in chunks, if so the md5 and the size
            describe the chunk rather than the whole file.  The offset
            indicates where the chunk belongs
        """
        # Originally offset=-1 had special meaning, but that was unused
        logger.debug("recipe_upload_file recipe_id:%s name:%s offset:%s size:%s",
                recipe_id, name, offset, size)
        with self.log_storage.recipe(str(recipe_id), os.path.join(path, name)) as log_file:
            # Fix: base64.decodestring() was removed in Python 3.9;
            # base64.b64decode() is the portable equivalent.
            log_file.update_chunk(base64.b64decode(data), int(offset or 0))
        return True

    def task_result(self,
                    task_id,
                    result_type,
                    result_path=None,
                    result_score=None,
                    result_summary=None):
        """ report a result to the scheduler """
        logger.debug("task_result %s", task_id)
        return self.hub.recipes.tasks.result(task_id,
                                             result_type,
                                             result_path,
                                             result_score,
                                             result_summary)

    def task_info(self, qtask_id):
        """ accepts qualified task_id J:213 RS:1234 R:312 T:1234 etc.. Returns dict with status """
        logger.debug("task_info %s", qtask_id)
        return self.hub.taskactions.task_info(qtask_id)

    def recipe_stop(self, recipe_id, stop_type, msg=None):
        """ tell the scheduler that we are stopping this recipe
            stop_type = ['abort', 'cancel']
            msg to record
        """
        logger.debug("recipe_stop %s", recipe_id)
        return self.hub.recipes.stop(recipe_id, stop_type, msg)

    def recipeset_stop(self, recipeset_id, stop_type, msg=None):
        """ tell the scheduler that we are stopping this recipeset
            stop_type = ['abort', 'cancel']
            msg to record
        """
        logger.debug("recipeset_stop %s", recipeset_id)
        return self.hub.recipesets.stop(recipeset_id, stop_type, msg)

    def job_stop(self, job_id, stop_type, msg=None):
        """ tell the scheduler that we are stopping this job
            stop_type = ['abort', 'cancel']
            msg to record
        """
        logger.debug("job_stop %s", job_id)
        return self.hub.jobs.stop(job_id, stop_type, msg)

    def get_my_recipe(self, request):
        """
        Accepts a dict with key 'recipe_id'. Returns an XML document for the
        recipe with that id.
        """
        # NOTE: returns None when 'recipe_id' is absent, matching the
        # original behaviour.
        if 'recipe_id' in request:
            logger.debug("get_recipe recipe_id:%s", request['recipe_id'])
            return self.hub.recipes.to_xml(request['recipe_id'])

    def get_peer_roles(self, task_id):
        """Return the peer-role mapping for the given task from the hub."""
        logger.debug('get_peer_roles %s', task_id)
        return self.hub.recipes.tasks.peer_roles(task_id)

    def extend_watchdog(self, task_id, kill_time):
        """ tell the scheduler to extend the watchdog by kill_time seconds
        """
        logger.debug("extend_watchdog %s %s", task_id, kill_time)
        return self.hub.recipes.tasks.extend(task_id, kill_time)

    def task_to_dict(self, task_name):
        """ returns metadata about task_name from the TaskLibrary
        """
        return self.hub.tasks.to_dict(task_name)

    def get_console_log(self, recipe_id, length=None):
        """
        Get console log from the OpenStack instance
        """
        return self.hub.recipes.console_output(recipe_id, length)