Example #1
# Assumed import (not shown in this snippet): the `utils` helpers used below
# match the jujubigdata-style API, e.g. `from jujubigdata import utils`.
def configure_hdfs_base(self, clustername, namenodes, port, webhdfs_port):
    dc = self.hadoop_base.dist_config
    core_site = dc.path('hadoop_conf') / 'core-site.xml'
    with utils.xmlpropmap_edit_in_place(core_site) as props:
        # Let the hue and oozie service users impersonate any user from any host.
        props['hadoop.proxyuser.hue.hosts'] = '*'
        props['hadoop.proxyuser.hue.groups'] = '*'
        props['hadoop.proxyuser.oozie.groups'] = '*'
        props['hadoop.proxyuser.oozie.hosts'] = '*'
        # Register the LZO codecs only when the lzo resource is available.
        if 'lzo' in self.hadoop_base.resources:
            props['io.compression.codecs'] = (
                'org.apache.hadoop.io.compress.GzipCodec, '
                'org.apache.hadoop.io.compress.DefaultCodec, '
                'org.apache.hadoop.io.compress.BZip2Codec, '
                'org.apache.hadoop.io.compress.SnappyCodec, '
                'com.hadoop.compression.lzo.LzoCodec, '
                'com.hadoop.compression.lzo.LzopCodec')
            props['io.compression.codec.lzo.class'] = 'com.hadoop.compression.lzo.LzoCodec'
        else:
            props['io.compression.codecs'] = (
                'org.apache.hadoop.io.compress.GzipCodec, '
                'org.apache.hadoop.io.compress.DefaultCodec, '
                'org.apache.hadoop.io.compress.BZip2Codec, '
                'org.apache.hadoop.io.compress.SnappyCodec')
        # For an HA nameservice the default FS URI is just the logical cluster
        # name; clients resolve the active NameNode through the HA settings
        # below, so no port appears here (the original passed an unused
        # `port` kwarg to format()).
        props['fs.defaultFS'] = 'hdfs://{clustername}'.format(clustername=clustername)
    hdfs_site = dc.path('hadoop_conf') / 'hdfs-site.xml'
    with utils.xmlpropmap_edit_in_place(hdfs_site) as props:
        props['dfs.webhdfs.enabled'] = 'true'
        props['dfs.namenode.name.dir'] = dc.path('hdfs_dir_base') / 'cache/hadoop/dfs/name'
        # Bug fix: the original pointed the DataNode data dir at the NameNode
        # name dir ('dfs/name'); the two stores must be distinct.
        props['dfs.datanode.data.dir'] = dc.path('hdfs_dir_base') / 'cache/hadoop/dfs/data'
        props['dfs.permissions'] = 'false'  # TODO - secure this hadoop installation!
        props['dfs.nameservices'] = clustername
        props['dfs.client.failover.proxy.provider.%s' % clustername] = \
            'org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider'
        # Try sshfence first, then fall back to a no-op shell fence so that
        # failover still proceeds if SSH fencing is unavailable.
        props['dfs.ha.fencing.methods'] = 'sshfence\nshell(/bin/true)'
        props['dfs.ha.fencing.ssh.private-key-files'] = utils.ssh_priv_key('hdfs')
        props['dfs.ha.namenodes.%s' % clustername] = ','.join(namenodes)
        # Register the RPC and HTTP (WebHDFS) endpoints of every NameNode in
        # the HA pair under the logical nameservice.
        for node in namenodes:
            props['dfs.namenode.rpc-address.%s.%s' % (clustername, node)] = '%s:%s' % (node, port)
            props['dfs.namenode.http-address.%s.%s' % (clustername, node)] = '%s:%s' % (node, webhdfs_port)
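A minimal usage sketch, assuming a hypothetical `hdfs` object that exposes this method; the cluster name, hostnames, and port numbers below are illustrative defaults, not values from the source:

hdfs.configure_hdfs_base(
    clustername='demo',           # logical nameservice ID (dfs.nameservices)
    namenodes=['nn-0', 'nn-1'],   # hostnames of the HA NameNode pair
    port=8020,                    # NameNode RPC port
    webhdfs_port=50070,           # NameNode HTTP/WebHDFS port
)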
Example #2
# Assumed imports (not shown): `utils` as above, plus a leadership helper
# exposing leader_set(), e.g. the charms.leadership layer.
def generate_ssh_key():
    # Create a passwordless SSH keypair for the 'hdfs' user (used by the
    # sshfence method configured above) and publish both halves through
    # Juju leader settings so every unit can install the same keypair.
    utils.generate_ssh_key('hdfs')
    leadership.leader_set({
        'ssh-key-priv': utils.ssh_priv_key('hdfs').text(),
        'ssh-key-pub': utils.ssh_pub_key('hdfs').text(),
    })
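A hedged sketch of the consuming side, assuming the same leadership helper also exposes leader_get() and using a hypothetical write_ssh_keypair() stand-in for whatever this codebase actually uses to place keys under the hdfs user's ~/.ssh:

def install_leader_ssh_key():
    # Non-leader units read the keypair the leader published above.
    priv = leadership.leader_get('ssh-key-priv')
    pub = leadership.leader_get('ssh-key-pub')
    if priv and pub:
        write_ssh_keypair('hdfs', priv, pub)  # hypothetical helper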