def setHdfsFilev2(interval):
    #file = xmlparsing('/H2/etc/hadoop/hdfs-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/hdfs-site.xml')
    # setting datanode directory
    file.appendProperty('dfs.data.dir', 'file:/data/datanodeData', 'Directory for saving the datanode data')
    file.appendProperty('dfs.heartbeat.interval', interval, 'heartbeat time interval')
    file.saveFile()
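
All of these examples go through an xmlparsing helper that is not shown on this page. A minimal sketch of the assumed interface, reconstructed from the calls above (the implementation details are an assumption; only the names xmlparsing, appendProperty, and saveFile come from the examples):

import xml.etree.ElementTree as ET

class xmlparsing:
    """Assumed helper: appends properties to a Hadoop *-site.xml file.

    Hadoop site files use this layout:
        <configuration>
          <property>
            <name>dfs.replication</name>
            <value>3</value>
            <description>Number of replications</description>
          </property>
        </configuration>
    """
    def __init__(self, filename):
        self.filename = filename
        self.tree = ET.parse(filename)   # expects an existing <configuration> root
        self.root = self.tree.getroot()

    def appendProperty(self, name, value, description=''):
        prop = ET.SubElement(self.root, 'property')
        ET.SubElement(prop, 'name').text = name
        ET.SubElement(prop, 'value').text = str(value)
        if description:
            ET.SubElement(prop, 'description').text = description

    def saveFile(self):
        self.tree.write(self.filename)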
Example #2
def setHdfsFilev1(nnip, nnwebport):
    file = xmlparsing('/etc/hadoop/hdfs-site.xml')
    # setting namenode directory
    file.appendProperty('dfs.name.dir', '/data/namenodeData,/data/backup')
    # setting web(http) management portal
    file.appendProperty('dfs.http.address', nnip + ':' + nnwebport, 'web management port.')
    file.saveFile()
Example #3
def setCoreFilev2(nnip, nnport):
    file = xmlparsing('/H2/etc/hadoop/core-site.xml')
    #file = xmlparsing(os.getenv('HADOOP_HOME')+'/etc/hadoop/core-site.xml')
    file.appendProperty('fs.default.name', 'hdfs://' + nnip + ':' + nnport,
                        'namenode address')
    # file.appendProperty('hadoop.tmp.dir','/tmp/hadoop',' temporary data')
    file.saveFile()
Example #4
def setHdfsFilev2(nnip, nnwebport):
    #file = xmlparsing('/H2/etc/hadoop/hdfs-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/hdfs-site.xml')
    # setting namenode directory
    file.appendProperty('dfs.name.dir', 'file:/data/namenodeData,file:/data/backup')
    # setting web(http) management portal
    file.appendProperty('dfs.http.address', nnip + ':' + nnwebport, 'web management port.')
    file.saveFile()
def setYarnFile(rmip):
    #file = xmlparsing('/H2/etc/hadoop/yarn-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/yarn-site.xml')
    file.appendProperty('yarn.resourcemanager.resource-tracker.address',
                        rmip + ':8025')
    file.appendProperty('yarn.resourcemanager.scheduler.address',
                        rmip + ':8030')
    file.saveFile()
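
Taken together, the version-2 setters above cover the Hadoop 2 site files. A hypothetical driver showing how they might be wired up (the call order, sample address, and port values are assumptions, not from the original project):

# Hypothetical single-node Hadoop 2 setup; address and ports are placeholders.
def configureHadoop2(nnip='192.168.1.10'):
    setCoreFilev2(nnip, '9000')     # fs.default.name -> hdfs://nnip:9000
    setHdfsFilev2(nnip, '50070')    # namenode dirs + web management address
    setYarnFile(nnip)               # resourcemanager tracker/scheduler addresses
    setMapredFilev2('yarn')         # run MapReduce on YARN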
Example #6
def setHdfsFilev1(nnip, nnwebport):
    file = xmlparsing('/etc/hadoop/hdfs-site.xml')
    # setting namenode directory
    file.appendProperty('dfs.name.dir', '/data/namenodeData,/data/backup')
    # setting web(http) management portal
    file.appendProperty('dfs.http.address', nnip + ":" + nnwebport,
                        'web management port.')
    file.saveFile()
Example #7
def setHdfsFilev1(checkperiod):
    file = xmlparsing('/etc/hadoop/hdfs-site.xml')
    file.appendProperty('dfs.name.dir', '/data/namenodeData')
    # file.appendProperty('dfs.http.address',nnip+':'+nnwebport)
    # file.appendProperty('dfs.secondary.http.address',snnip+':'+snnport)
    file.appendProperty('fs.checkpoint.dir', '/data/check')
    file.appendProperty('fs.checkpoint.edits.dir', '/data/edits')
    file.appendProperty('fs.checkpoint.period', checkperiod)
    file.saveFile()
Example #9
def setHdfsFilev2(nnip, nnwebport):
    #file = xmlparsing('/H2/etc/hadoop/hdfs-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/hdfs-site.xml')
    # setting namenode directory
    file.appendProperty('dfs.name.dir',
                        'file:/data/namenodeData,file:/data/backup')
    # setting web(http) management portal
    file.appendProperty('dfs.http.address', nnip + ":" + nnwebport,
                        'web management port.')
    file.saveFile()
def extrainstall(rmip):
    os.chdir('/root/HadoopInstall/')
    print commands.getoutput('cp -f ' + os.getenv('HADOOP_HOME') + '/etc/hadoop/mapred-site.xml.template '
                             + os.getenv('HADOOP_HOME') + '/etc/hadoop/mapred-site.xml')
    # Installing Splunk
    commands.getoutput('rpm -i ' + packagename('splunk'))
    status, output = commands.getstatusoutput('/opt/splunk/bin/splunk | grep "Splunk web interface"')
    splunkaddress = None
    if status == 0:
        # take the last whitespace-separated token (the reported web interface URL)
        splunkaddress = output[output.rfind(' ') + 1:]

    # Installing pig framework
    commands.getoutput('tar -xzf ' + packagename('pig'))
    print commands.getoutput('mv -f pig-0.12.1 /pig')
    if commands.getoutput('cat /root/.bashrc | grep PIG_HOME') == '':
        file = open('/root/.bashrc', 'a')
        file.write('export PIG_HOME=/pig\n')
        file.write('export PATH=$PIG_HOME/bin:$PATH\n')
        file.close()
        commands.getoutput('chcon --reference /root/.bash_profile /root/.bashrc')
    file = None
    if hadoopversion == '1':
        file = xmlparsing('/etc/hadoop/mapred-site.xml')
    elif hadoopversion == '2':
        file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/mapred-site.xml')
    file.appendProperty('mapreduce.jobhistory.address', rmip + ':10020')
    file.saveFile()

    # Installing Hive framework
    commands.getoutput('tar -xzf ' + packagename('hive'))
    print commands.getoutput('mv -f apache-hive-1.2.1-bin /hive')
    if commands.getoutput('cat /root/.bashrc | grep HIVE_HOME') == '':
        file = open('/root/.bashrc', 'a')
        file.write('export HIVE_HOME=/hive\n')
        file.write('export PATH=$HIVE_HOME/bin:$PATH\n')
        if hadoopversion == '2':
            file.write('export HADOOP_USER_CLASSPATH_FIRST=true\n')
        file.close()
        # note: 'source' here runs in a child shell and cannot alter this process's environment
        commands.getoutput('chmod +x /root/.bashrc; source /root/.bashrc')
        commands.getoutput('chcon --reference /root/.bash_profile /root/.bashrc')
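
extrainstall leans on pieces not shown on this page: the module-level hadoopversion flag, the Python 2-only commands module (subprocess.getoutput and subprocess.getstatusoutput are the Python 3 equivalents), and a packagename helper. A minimal sketch of what packagename might do, assuming it resolves a package prefix to the matching archive in the install directory (the directory layout and glob pattern are assumptions):

import glob

def packagename(prefix):
    # Hypothetical resolver: return the first file in the install directory
    # whose name starts with the given prefix,
    # e.g. packagename('pig') -> '/root/HadoopInstall/pig-0.12.1.tar.gz'
    matches = glob.glob('/root/HadoopInstall/' + prefix + '*')
    return matches[0] if matches else ''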
Example #11
def setCoreFilev1(nnip, nnport):
    file = xmlparsing('/etc/hadoop/core-site.xml')
    file.appendProperty('fs.default.name', 'hdfs://' + nnip + ':' + nnport, 'namenode address')
    #file.appendProperty('hadoop.tmp.dir','/usr/local/hadoop/tmp',' temporary data')
    file.saveFile()
def setMapredFilev2(framework):
    #commands.getoutput('mv  '+os.getenv('HADOOP_HOME')+'/etc/hadoop/mapred-site.xml.template  '  \
    #                   + os.getenv('HADOOP_HOME')+'/etc/hadoop/mapred-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/mapred-site.xml')
    file.appendProperty('mapreduce.framework.name', framework)
    file.saveFile()
Example #14
def setYarnFile(rmip):
    #file = xmlparsing('/H2/etc/hadoop/yarn-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/yarn-site.xml')
    file.appendProperty('yarn.resourcemanager.resource-tracker.address', rmip + ':8025')
    file.appendProperty('yarn.resourcemanager.scheduler.address', rmip + ':8030')
    file.saveFile()
Example #15
def setMapredFilev1(jbip, jbport):
    file = xmlparsing('/etc/hadoop/mapred-site.xml')
    # setting job tracker ip
    file.appendProperty('mapred.job.tracker', jbip + ':' + jbport)
    file.saveFile()
def setHdfsFilev1(interval):
    file = xmlparsing('/etc/hadoop/hdfs-site.xml')
    # setting datanode directory
    file.appendProperty('dfs.data.dir', '/data/datanodeData', 'Directory for saving the datanode data')
    file.appendProperty('dfs.heartbeat.interval', interval, 'heartbeat time interval')
    file.saveFile()
Example #17
def setYarnFile(rmip):
    #file = xmlparsing('/H2/etc/hadoop/yarn-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/yarn-site.xml')
    file.appendProperty('yarn.resourcemanager.resource-tracker.address', rmip + ':8025')
    file.appendProperty('yarn.nodemanager.aux-services', 'mapreduce_shuffle')
    file.saveFile()
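
To confirm the properties actually landed in yarn-site.xml, a quick check with the standard library works (a sketch; it assumes the stock <configuration><property> layout shown in the xmlparsing sketch earlier):

import os
import xml.etree.ElementTree as ET

def listProperties(sitefile):
    # Print every name/value pair found in a Hadoop site file.
    root = ET.parse(sitefile).getroot()
    for prop in root.findall('property'):
        print((prop.findtext('name') or '') + ' = ' + (prop.findtext('value') or ''))

# listProperties(os.getenv('HADOOP_HOME') + '/etc/hadoop/yarn-site.xml')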
Example #18
def setCoreFilev2(nnip, nnport):
    #file = xmlparsing('/H2/etc/hadoop/core-site.xml')
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/core-site.xml')
    file.appendProperty('fs.default.name', 'hdfs://' + nnip + ':' + nnport, 'namenode address')
    file.saveFile()
def setHdfsFilev1(replica, size):
    file = xmlparsing('/etc/hadoop/hdfs-site.xml')
    # setting replication factor and block size
    file.appendProperty('dfs.replication', replica, 'Number of replications')
    file.appendProperty('dfs.block.size', size, 'Block size in bytes')
    file.saveFile()
Example #20
def setCoreFilev1(nnip, nnport):
    file = xmlparsing(filename='/etc/hadoop/core-site.xml')
    file.appendProperty('fs.default.name', 'hdfs://' + nnip + ':' + nnport, 'namenode address')
    #file.appendProperty('hadoop.tmp.dir','/usr/local/hadoop/tmp',' temporary data')
    file.saveFile()
def setMapredFilev1(jbip, jbport):
    file = xmlparsing('/etc/hadoop/mapred-site.xml')
    # setting job tracker ip
    file.appendProperty('mapred.job.tracker', jbip + ':' + jbport)
    file.saveFile()
def setHdfsFilev2(replica, size):
    file = xmlparsing(os.getenv('HADOOP_HOME') + '/etc/hadoop/hdfs-site.xml')
    file.appendProperty('dfs.replication', replica, 'Number of replications')
    file.appendProperty('dfs.block.size', size, 'Block size in bytes')
    file.saveFile()
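
Hadoop site files store every value as text, so callers would pass strings; for example (the values are illustrative, not from the original project):

# Illustrative values: 3 replicas, 128 MB blocks (134217728 bytes).
setHdfsFilev2('3', '134217728')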