Example #1
0
 def __init__(self, dist_config):
     """Record the dist config and declare arch-specific resources.

     Builds resource names for flume and zookeeper suffixed with the
     unit's CPU architecture, then precomputes the verification result
     for those resources via utils.verify_resources.
     """
     self.dist_config = dist_config
     # Each resource name is '<name>-<cpu_arch>'.
     self.resources = {
         name: '%s-%s' % (name, host.cpu_arch())
         for name in ('flume', 'zookeeper')
     }
     self.verify_resources = utils.verify_resources(*self.resources.values())
Example #2
0
 def __init__(self, dist_config):
     """Keep the dist config and set up flume/zookeeper resource names.

     Resource names carry the unit's CPU architecture as a suffix; the
     verification outcome is computed up front via utils.verify_resources.
     """
     self.dist_config = dist_config
     flume_res = 'flume-%s' % host.cpu_arch()
     zk_res = 'zookeeper-%s' % host.cpu_arch()
     self.resources = {
         'flume': flume_res,
         'zookeeper': zk_res,
     }
     self.verify_resources = utils.verify_resources(
         *self.resources.values())
Example #3
0
 def __init__(self, dist_config):
     """Store the dist config and register the arch-specific kafka resource."""
     self.dist_config = dist_config
     # Single resource: 'kafka-<cpu_arch>'.
     kafka_resource = 'kafka-%s' % host.cpu_arch()
     self.resources = {'kafka': kafka_resource}
     self.verify_resources = utils.verify_resources(
         *self.resources.values())
Example #4
0
 def __init__(self, dist_config):
     """Load dist.yaml and register the arch-specific spark resource.

     NOTE(review): the dist_config argument is never used; the config is
     rebuilt from dist.yaml here instead — confirm this is intentional.
     """
     self.dist_config = utils.DistConfig(
         filename='dist.yaml',
         required_keys=['vendor', 'packages', 'dirs', 'ports'])
     # Single resource: 'spark-<cpu_arch>'.
     self.resources = {'spark': 'spark-%s' % host.cpu_arch()}
     self.verify_resources = utils.verify_resources(*self.resources.values())
Example #5
0
    def __init__(self, dist_config):
        """Validate the dist config's dirs and resolve hadoop resources.

        Raises:
            ValueError: if any required entry is missing from the dirs
                option of dist.yaml.
        """
        self.dist_config = dist_config
        self.charm_config = hookenv.config()
        self.cpu_arch = host.cpu_arch()
        self.client_spec = {
            'hadoop': self.dist_config.hadoop_version,
        }

        # dist_config only shallow-checks the primary keys of dist.yaml;
        # the nested dirs entries are verified here.
        required_dirs = {'hadoop', 'hadoop_conf', 'hdfs_log_dir',
                         'yarn_log_dir'}
        missing_dirs = required_dirs - set(self.dist_config.dirs.keys())
        if missing_dirs:
            raise ValueError('dirs option in {} is missing required entr{}: {}'.format(
                self.dist_config.yaml_file,
                'ies' if len(missing_dirs) > 1 else 'y',
                ', '.join(missing_dirs)))

        # Collect the hadoop resources declared in resources.yaml,
        # preferring the version-specific name when it is defined.
        hadoop_version = self.dist_config.hadoop_version
        versioned = 'hadoop-%s-%s' % (hadoop_version, self.cpu_arch)
        fallback = 'hadoop-%s' % (self.cpu_arch)
        hadoop_resources = []
        try:
            jujuresources.resource_path(versioned)
        except KeyError:
            hadoop_resources.append(fallback)
        else:
            hadoop_resources.append(versioned)

        # LZO compression for hadoop ships separately; include it only
        # when resources.yaml defines it.
        lzo = 'hadoop-lzo-%s' % self.cpu_arch
        try:
            jujuresources.resource_path(lzo)
        except KeyError:
            pass
        else:
            hadoop_resources.append(lzo)

        # Verify and fetch everything we decided we need.
        self.verify_conditional_resources = utils.verify_resources(*hadoop_resources)
Example #6
0
 def __init__(self, dist_config):
     """Remember the dist config and wire up kafka resource verification."""
     self.dist_config = dist_config
     arch = host.cpu_arch()
     # Single resource: 'kafka-<cpu_arch>'.
     self.resources = {'kafka': 'kafka-%s' % arch}
     self.verify_resources = utils.verify_resources(*self.resources.values())