Example 1
def main():
    global coordinator_db, log_directory

    parser = argparse.ArgumentParser(
        description="provides a GUI for information on job history")
    utils.add_redis_params(parser)
    parser.add_argument("--port",
                        "-p",
                        help="port on which the GUI accepts HTTP connections",
                        type=int,
                        default=4280)
    parser.add_argument("log_directory",
                        help="base log directory for the "
                        "coordinator")
    args = parser.parse_args()

    coordinator_db = redis_utils.CoordinatorDB(args.redis_host,
                                               args.redis_port, args.redis_db)

    log_directory = args.log_directory

    try:
        bottle.run(host='0.0.0.0', port=args.port)
    except socket.error as e:
        print(e)
        # Return error 42 to indicate that we can't bind, so that scripts
        # calling this one can handle that case specially
        return constants.CANNOT_BIND
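
Every example on this page calls utils.add_redis_params(parser) and later reads args.redis_host, args.redis_port, and args.redis_db. The helper itself is not shown here; the following is only a minimal sketch of what it presumably registers, with flag names, defaults, and help text given as assumptions rather than the project's actual values:

def add_redis_params(parser):
    # Hypothetical sketch only: register the Redis connection options that the
    # examples read back as args.redis_host, args.redis_port, and args.redis_db.
    # The defaults below are assumptions, not the project's real defaults.
    parser.add_argument("--redis_host", default="localhost",
                        help="host of the coordinator's redis server")
    parser.add_argument("--redis_port", type=int, default=6379,
                        help="port on which redis accepts connections")
    parser.add_argument("--redis_db", type=int, default=0,
                        help="redis database holding coordinator state")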
Example 2
def main():
    global coordinator_db, username

    parser = argparse.ArgumentParser(
        description="Run a web-interface for monitoring the cluster.")
    utils.add_redis_params(parser)
    parser.add_argument("--port",
                        "-p",
                        help="port on which the GUI accepts HTTP connections",
                        type=int,
                        default=4280)
    parser.add_argument("--user",
                        help="the username to run under "
                        "(default: %(default)s)",
                        default=getpass.getuser())
    args = parser.parse_args()

    coordinator_db = redis_utils.CoordinatorDB(args.redis_host,
                                               args.redis_port, args.redis_db)
    username = args.user

    try:
        bottle.run(host='0.0.0.0', port=args.port)
    except socket.error as e:
        print(e)
        # Return error 42 to indicate that we can't bind, so that scripts
        # calling this one can handle that case specially
        return constants.CANNOT_BIND
Example 3
    def __init__(self, redis_host, redis_port, redis_db, config, themis_binary,
                 log_directory, keepalive_refresh, keepalive_timeout, profiler,
                 profiler_options, ld_preload, interfaces):
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.redis_db = redis_db
        self.config_file = config
        with open(config, 'r') as fp:
            self.config = yaml.load(fp)
        self.themis_binary = themis_binary
        self.log_directory = log_directory
        self.keepalive_refresh = keepalive_refresh
        self.keepalive_timeout = keepalive_timeout
        self.profiler = profiler
        self.profiler_options = profiler_options
        self.ld_preload = ld_preload
        self.batch_nonce = random.randint(0, 1000000000)
        self.batch_phase_info = {}
        self.interfaces = interfaces

        self.node_coordinator_log_dir = os.path.join(log_directory,
                                                     "node_coordinators")

        self.coordinator_db = redis_utils.CoordinatorDB(
            redis_host, redis_port, redis_db)

        self.ssh_command = utils.ssh_command()
Example 4
def reload_read_request_queues(job_description_file, job_ids, redis_host,
                               redis_port, redis_db, skip_phase_zero,
                               skip_phase_one, phase_zero_sample_size):

    with open(job_description_file, 'r') as fp:
        job_description = json.load(fp)

    coordinator_db = redis_utils.CoordinatorDB(redis_host, redis_port,
                                               redis_db)

    input_files = input_file_utils.gather_input_file_paths(
        coordinator_db, job_description["input_directory"])

    phases = []

    if not skip_phase_zero:
        phases.append(0)

    if not skip_phase_one:
        phases.append(1)

    read_requests = input_file_utils.generate_read_requests(
        input_files, phase_zero_sample_size, job_ids, phases)

    input_file_utils.load_read_requests(coordinator_db, read_requests)
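
Example 4 shows reload_read_request_queues without the code that calls it. A hedged sketch of a command-line driver in the same argparse style as the other examples follows; every flag name, default, and help string here is an assumption:

def main():
    # Hypothetical driver for reload_read_request_queues, mirroring the
    # argparse pattern used elsewhere on this page. Flag names and defaults
    # are assumptions, not taken from the project.
    parser = argparse.ArgumentParser(
        description="reload read request queues for a set of jobs")
    utils.add_redis_params(parser)
    parser.add_argument("job_description_file",
                        help="JSON file describing the job")
    parser.add_argument("job_ids", type=int, nargs="+",
                        help="IDs of the jobs whose queues should be reloaded")
    parser.add_argument("--skip_phase_zero", action="store_true",
                        help="don't generate phase zero read requests")
    parser.add_argument("--skip_phase_one", action="store_true",
                        help="don't generate phase one read requests")
    parser.add_argument("--phase_zero_sample_size", type=int, default=100000,
                        help="sample size used for phase zero")
    args = parser.parse_args()

    reload_read_request_queues(
        args.job_description_file, args.job_ids, args.redis_host,
        args.redis_port, args.redis_db, args.skip_phase_zero,
        args.skip_phase_one, args.phase_zero_sample_size)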
Example 5
    def __init__(self, hostname, keepalive_refresh, keepalive_timeout,
                 redis_host, redis_port, redis_db, pid):

        super(KeepaliveThread, self).__init__(iter_sleep=keepalive_refresh)

        self.coordinator_db = redis_utils.CoordinatorDB(
            redis_host, redis_port, redis_db)

        self.timeout = keepalive_timeout
        self.hostname = hostname

        self.coordinator_db.update_pid(self.hostname, pid)
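
Example 7 below constructs and starts this KeepaliveThread from inside a node coordinator. As a standalone illustration, here is a hedged usage sketch; the hostname, refresh and timeout values, and Redis settings are all placeholders, with only the parameter names taken from the constructor signature above:

# Hypothetical standalone usage of the KeepaliveThread from Example 5.
# All argument values are placeholders.
keepalive = KeepaliveThread(socket.getfqdn(),
                            keepalive_refresh=2,
                            keepalive_timeout=10,
                            redis_host="localhost",
                            redis_port=6379,
                            redis_db=0,
                            pid=os.getpid())
keepalive.start()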
Example 6
def main():

    parser = argparse.ArgumentParser(
        description="Cluster utility program for getting IP addresses")
    utils.add_redis_params(parser)
    parser.add_argument("command",
                        help="Utility command. Valid commands: all, live")
    args = parser.parse_args()

    coordinator_db = redis_utils.CoordinatorDB(args.redis_host,
                                               args.redis_port, args.redis_db)

    assert args.command in ["all", "live"]

    return cluster_utils(args.command, coordinator_db)
Example 7
    def __init__(self, redis_host, redis_port, redis_db, config, themis_binary,
                 log_directory, batch_nonce, keepalive_refresh,
                 keepalive_timeout, profiler, profiler_options, ld_preload,
                 interfaces):

        self.redis_host = redis_host
        self.redis_port = redis_port
        self.redis_db = redis_db

        self.coordinator_db = redis_utils.CoordinatorDB(
            redis_host, redis_port, redis_db)

        self.config_file = config
        with open(config, 'r') as fp:
            self.config = yaml.load(fp)
        # Use a default config called defaults.yaml that is stored in the same
        # directory as the binary
        self.default_config = os.path.join(os.path.dirname(themis_binary),
                                           "defaults.yaml")
        self.themis_binary = themis_binary
        self.log_directory = log_directory
        self.batch_nonce = batch_nonce
        self.profiler = profiler
        self.profiler_options = profiler_options
        self.ld_preload = ld_preload

        self.current_batch = None
        self.ip_address = None
        self.num_interfaces = self.coordinator_db.num_interfaces
        self.interfaces = interfaces

        # Extract this node's IP address from `ip addr show <interface>` and
        # resolve it to a fully-qualified hostname.
        ip = os.popen('ip addr show %s' %
                      (interfaces, )).read().split("inet ")[1].split("/")[0]
        self.hostname = socket.getfqdn(ip)
        self.username = getpass.getuser()

        self.keepalive_process = KeepaliveThread(self.hostname,
                                                 keepalive_refresh,
                                                 keepalive_timeout, redis_host,
                                                 redis_port, redis_db,
                                                 os.getpid())

        self.keepalive_process.start()