Example #1
executor_memory = utils.get_os_env('EXECUTOR_MEMORY')
if executor_memory != "":
    # Rewrite spark-defaults.conf in place: write the new
    # spark.executor.memory setting first, then copy every other
    # line back, dropping any stale spark.executor.memory entry.
    with open(spark_dir + '/conf/spark-defaults.conf', "r+") as f:
        lines = f.read().splitlines()
        f.seek(0)
        f.truncate()
        f.write('spark.executor.memory    ' + executor_memory + '\n')
        for line in lines:
            if not line.startswith("spark.executor.memory"):
                f.write(line + '\n')

log_watchers = {}

if utils.get_os_env('START_MASTER').lower() == 'true':
    os.environ['SPARK_MASTER_IP'] = utils.get_private_ip()
    if utils.get_os_env('DEFAULT_CORES') != "":
        try:
            default_cores = int(utils.get_os_env('DEFAULT_CORES'))
            os.environ['SPARK_MASTER_OPTS'] = "-Dspark.deploy.defaultCores=" + str(default_cores)
        except ValueError:
            logging.warning("Invalid format of DEFAULT_CORES env variable!")
    # start-master.sh ends its output with the path of the master log
    # file ("... logging to <logfile>"); tail that file in the background.
    master_log = subprocess.check_output([spark_dir + "/sbin/start-master.sh"], universal_newlines=True)
    log_watchers['Master'] = subprocess.Popen(["tail", "-f", master_log.rsplit(None, 1)[-1]])

master_stack_name = utils.get_os_env('MASTER_STACK_NAME')
master_uri = ""
master_ip = ""

if zk_conn_str != "":
    master_uri = utils.generate_master_uri()
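
The snippet above compares utils.get_os_env(...) against the empty string. For context, a minimal sketch of such a helper, assuming it merely wraps os.environ with an empty-string default (the real helper lives in the appliance's utils module and may do more):

import os

def get_os_env(name):
    # Return the named environment variable, or "" when it is unset,
    # so callers can test against the empty string as above.
    return os.environ.get(name, "")
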
Example #2
executor_memory = utils.get_os_env('EXECUTOR_MEMORY')
if executor_memory != "":
    # Rewrite spark-defaults.conf in place: write the new
    # spark.executor.memory setting first, then copy every other
    # line back, dropping any stale spark.executor.memory entry.
    with open(spark_dir + '/conf/spark-defaults.conf', "r+") as f:
        lines = f.read().splitlines()
        f.seek(0)
        f.truncate()
        f.write('spark.executor.memory    ' + executor_memory + '\n')
        for line in lines:
            if not line.startswith("spark.executor.memory"):
                f.write(line + '\n')

log_watchers = {}

if utils.get_os_env('START_MASTER').lower() == 'true':
    os.environ['SPARK_MASTER_IP'] = utils.get_private_ip()
    if utils.get_os_env('DEFAULT_CORES') != "":
        try:
            default_cores = int(utils.get_os_env('DEFAULT_CORES'))
            os.environ['SPARK_MASTER_OPTS'] = "-Dspark.deploy.defaultCores=" + str(default_cores)
        except ValueError:
            logging.warning("Invalid format of DEFAULT_CORES env variable!")
    # start-master.sh ends its output with the path of the master log
    # file ("... logging to <logfile>"); tail that file in the background.
    master_log = subprocess.check_output([spark_dir + "/sbin/start-master.sh"], universal_newlines=True)
    log_watchers['Master'] = subprocess.Popen(["tail", "-f", master_log.rsplit(None, 1)[-1]])

master_stack_name = utils.get_os_env('MASTER_STACK_NAME')
master_uri = ""
Example #3
#!/usr/bin/env python3

import logging
import connexion
from flask import request, jsonify
import utils
import json

utils.set_ec2_identities()
private_ip = utils.get_private_ip()
job_watchers = {}


def get_dummy():
    return "It works."


def get_twintip():
    return "{}"


def get_master_uri():
    master_uri = utils.generate_master_uri()
    if master_uri != "":
        return master_uri
    else:
        # generate_master_uri() returned ""; fall back to a standalone
        # master URI on this host's private IP.
        return "spark://" + private_ip + ":7077"


def get_master_ip():
    master_ip = utils.get_alive_master_ip()
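
The functions in Example #3 are handlers for a connexion app (hence the import connexion at the top). A minimal sketch of how such handlers are typically served; the spec file name swagger.yaml and port 8080 are assumptions, not taken from the snippet:

import connexion

# The OpenAPI/Swagger spec must map each operationId to one of the
# handler functions above (e.g. get_master_uri).
app = connexion.App(__name__)
app.add_api('swagger.yaml')

if __name__ == '__main__':
    app.run(port=8080)
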
Example #4
#!/usr/bin/env python3

import logging
import connexion
from flask import request
import utils

utils.set_ec2_identities()
private_ip = utils.get_private_ip()
job_watchers = {}


def get_dummy():
    return "It works."


def get_twintip():
    return "{}"


def get_master_uri():
    master_uri = utils.generate_master_uri()
    if master_uri != "":
        return master_uri
    else:
        # generate_master_uri() returned ""; fall back to a standalone
        # master URI on this host's private IP.
        return "spark://" + private_ip + ":7077"


def get_master_ip():
    master_ip = utils.get_alive_master_ip()
    if master_ip != "":