Example #1
    def update_bashrc(self):
        """
        Add the following lines to the bottom of the ~/.bashrc file
        :return:
        """
        banner("Updating ~/.bashrc file")
        script = textwrap.dedent(self.script["update.bashrc"])
        Installer.add_script("~/.bashrc", script)

    def hadoop_env(self, filename="/opt/hadoop/etc/hadoop/hadoop-env.sh"):
        # set up hadoop env file
        name = "Hadoop"
        banner(name)
        script = textwrap.dedent("""
            export JAVA_HOME=$(dirname $(dirname $(readlink -f $(which javac))))
        """)
        Installer.add_script(filename, script)

    def update_slaves(self):
        """
        Add new worker name to bottom of slaves file on master
        :return:
        """
        banner("Updating $SPARK_HOME/conf/slaves file")
        script = textwrap.dedent("""
            {user}@{worker}
        """)
        Installer.add_script("$SPARK_HOME/conf/slaves", script)

    def update_bashrc(self):
        """
        Add the following lines to the bottom of the ~/.bashrc file
        :return:
        """
        banner("Updating ~/.bashrc file")
        script = textwrap.dedent("""
            export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-armhf
            export HADOOP_HOME=/opt/hadoop
            # export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
            export PATH=/home/pi/ENV3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/games:/usr/games:/opt/hadoop/bin:/opt/hadoop/sbin:
        """)
        Installer.add_script("~/.bashrc", script)
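All three helpers delegate the actual file change to Installer.add_script. As a minimal sketch, assuming add_script simply appends a dedented block to the named file and expands paths such as "~/.bashrc" or "$SPARK_HOME/conf/slaves" (the real cloudmesh Installer may behave differently), the helper could look like this:

# Minimal sketch, not the actual cloudmesh implementation: append a dedented
# script block to the given file, expanding "~" and environment variables.
import os
import textwrap

def add_script(filename, script):
    path = os.path.expandvars(os.path.expanduser(filename))
    with open(path, "a") as f:
        f.write(textwrap.dedent(script))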
Example #5
    def create_spark_bashrc_txt(self):
        """
        Text to add at the bottom of ~/.bashrc. The file is created on the master and copied to the worker
        :return:
        """
        script = self.script["update.bashrc"]

        if self.dryrun:
            print(script)
        else:
            # create (or truncate) /home/pi/spark-bashrc.txt before the script is added to it
            f = open("/home/pi/spark-bashrc.txt", "w+")
            f.close()
            Installer.add_script("~/spark-bashrc.txt", script)
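The spark-bashrc.txt file is only staged here; per the docstring it is copied to the worker, where its contents are meant to end up at the bottom of the worker's ~/.bashrc. A sketch of that final step on the worker, under the assumption that the merge is a plain append of the copied file (the real setup presumably does this in the worker script):

# Sketch (assumption): on the worker, append the copied spark-bashrc.txt
# to the worker's own ~/.bashrc.
import os

def append_spark_bashrc(src="~/spark-bashrc.txt", dst="~/.bashrc"):
    with open(os.path.expanduser(src)) as s, open(os.path.expanduser(dst), "a") as d:
        d.write(s.read())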
Example #6
    def create_spark_setup_worker(self):
        """
        This file is created on master and copied to worker, then executed on worker from master
        :return:
        """
        banner("Creating the spark.setup.worker.sh file")
        script = self.script["spark.setup.worker.sh"]

        if self.dryrun:
            print(script)
        else:
            # create (or truncate) /home/pi/spark-setup-worker.sh before the script is added to it
            f = open("/home/pi/spark-setup-worker.sh", "w+")
            f.close()
            Installer.add_script("~/spark-setup-worker.sh", script)
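The docstring describes a push-and-run pattern: the script is written on the master, copied to the worker, and then executed on the worker from the master. In the setup() method below this is handled by the "copy.spark.to.worker" script; as an illustrative sketch only, assuming password-less ssh and a hypothetical worker name such as red001, the same flow could be expressed as:

# Illustrative sketch, not the project's copy.spark.to.worker script:
# copy the generated file to the worker's home directory and run it there.
import subprocess

def push_and_run(worker="pi@red001", script="/home/pi/spark-setup-worker.sh"):
    subprocess.run(["scp", script, f"{worker}:spark-setup-worker.sh"], check=True)
    subprocess.run(["ssh", worker, "sh spark-setup-worker.sh"], check=True)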
Example #7
    def setup(self, master=None, workers=None):

        #
        # SETUP THE MASTER
        #
        banner(f"Setup Master: {master}")
        self.run_script(name="sparksetup", hosts=self.master)
        #os.system("sudo apt-get update")

        if "SPARK_HOME" not in os.environ:
            Console.error("$SPARK_HOME is not set")
            return ""

        spark_home = os.environ["SPARK_HOME"]
        filename = f"{spark_home}/conf/slaves"
        banner(f"Updating file: {filename}")
        if not self.dryrun:
            Installer.add_script(filename, "{user}@{worker}")

        banner(f"Setup bashrc: {master}")
        print(self.update_bashrc())

        #
        # SETUP THE WORKER. STEP 1: GET THE FILES FROM MASTER
        #
        banner(f"Get files from {master}")
        print(self.create_spark_setup_worker())
        self.run_script(name="copy.spark.to.worker", hosts=self.workers)

        #
        # SETUP THE WORKER. SETUP BASHRC ON WORKERS
        #

        print(self.create_spark_bashrc_txt())

        print(self.update_slaves())
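A hypothetical driver for the whole sequence; the class name, host names, and constructor arguments below are assumptions for illustration, since only the methods themselves appear above:

# Hypothetical usage (class name, hosts, and constructor are illustrative):
spark = Spark(master="red", workers=["red001", "red002"], dryrun=False)
spark.setup(master="red", workers=["red001", "red002"])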