Example #1
import re

import rem.nanny


def from_description(descr):
    # nanny://<token>@<host>/prod_rem_cloud_tags_proxy
    if descr.startswith('nanny://'):
        m = re.match(r'^nanny://([a-zA-Z0-9-]+)@([^/]+)/([\w-]+)$', descr)
        if not m:
            raise ValueError("Malformed nanny description '%s'" % descr)
        token, host, service = m.groups()
        nanny = rem.nanny.Nanny(host, token)
        return nanny_service(nanny, service)

    # host0:port0,host1:port1
    else:
        addrs = [parse_network_address(addr) for addr in descr.split(',')]
        return fixed_plain_addr_list(addrs)
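
A self-contained sketch of the nanny:// parsing above, for reference only; the token, host, and service in the demo call are placeholders rather than values from a real deployment.

import re

NANNY_DESCR_RE = re.compile(r'^nanny://([a-zA-Z0-9-]+)@([^/]+)/([\w-]+)$')

def split_nanny_descr(descr):
    # Returns (token, host, service) or raises on a malformed description.
    m = NANNY_DESCR_RE.match(descr)
    if not m:
        raise ValueError("Malformed nanny description '%s'" % descr)
    return m.groups()

print(split_nanny_descr('nanny://my-token@nanny.example.net/prod_rem_cloud_tags_proxy'))
# -> ('my-token', 'nanny.example.net', 'prod_rem_cloud_tags_proxy')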
Example #2
            else:
                # FIXME: if isinstance(e, xmlrpclib.Fault), the error is not
                #        retriable, but it is also not fatal like WrongTaskIdError
                raise RemNotifier.RetriableError(str(e))

    rem_notifier = RemNotifier(send_update)

    #rem_notifier.send_update(pck.produce_rem_update_message()) # FIXME

    # TODO _create_rpc_server may throw errno.EADDRINUSE
    rpc_server = _create_rpc_server(pck, opts)

    try_log_descriptors()
    logging.debug('rpc_server.server_address = %s' % (rpc_server.server_address,))

    my_host = try_guess_my_host(parse_network_address(opts.rem_server_addr), timeout=3.0)
    logging.debug('guessed host = %s' % my_host)

    rpc_server_addr = (
        my_host or os.uname()[1],
        rpc_server.server_address[1]
    )

    reset_tries = False

    if opts.resume_params:
        resume_params = json.loads(opts.resume_params)

        if resume_params.get('use_dummy_jobs', False):
            import rem.job
            rem.job.DUMMY_COMMAND_CREATOR = lambda job: ['true'] # TODO sleep
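
The TODO above notes that _create_rpc_server may fail with EADDRINUSE. A possible retry wrapper, sketched under the assumption that the failure surfaces as a socket.error/OSError carrying errno (the excerpt itself does not show how the error is raised):

import errno
import socket
import time

def create_rpc_server_with_retries(pck, opts, attempts=5, delay=1.0):
    # Retry only while the listen address is temporarily busy; any other
    # error, or running out of attempts, propagates to the caller.
    for attempt in range(attempts):
        try:
            return _create_rpc_server(pck, opts)
        except socket.error as e:
            if e.errno != errno.EADDRINUSE or attempt == attempts - 1:
                raise
            time.sleep(delay)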
Example #3
def single_addr(addr):
    host, port = parse_network_address(addr)
    return single_host_port(host, port)
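
parse_network_address itself is not part of these examples; the sketch below only illustrates the contract single_addr appears to rely on (splitting 'host:port' into a (host, int(port)) pair) and is an assumption, not the real implementation.

def parse_network_address(addr):
    # Hypothetical helper: split on the last colon so hosts that themselves
    # contain colons still keep the port intact.
    host, _, port = addr.rpartition(':')
    if not host or not port.isdigit():
        raise ValueError("Malformed network address '%s'" % addr)
    return host, int(port)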
Example #4
    def _init_config_options(self, config):
        self.log_directory = self.prep_dir(config.get("log", "dir"))
        self.log_filename = config.get("log", "filename")
        self.log_backup_count = config.getint("log", "rollcount")
        self.log_warn_level = config.get("log", "warnlevel")
        self.log_to_stderr = False

        self.packets_directory = self.prep_dir(config.get("store", "pck_dir"))
        self.backup_directory = self.prep_dir(config.get("store", "backup_dir"))
        self.backup_period = config.getint("store", "backup_period")
        self.backup_count = config.getint("store", "backup_count")
        self.backup_in_child = config.safe_getboolean("store", "backup_in_child", False)
        self.backup_child_max_working_time = config.getint("store", "backup_child_max_working_time")
        self.backup_fork_lock_friendly_timeout = config.safe_getint("store", "backup_fork_lock_friendly_timeout", None)
        self.backups_enabled = config.safe_getboolean("store", "backups_enabled", True)
        self.journal_lifetime = config.getint("store", "journal_lifetime")
        self.binary_directory = self.prep_dir(config.get("store", "binary_dir"))
        self.binary_lifetime = config.getint("store", "binary_lifetime")
        self.errored_packet_lifetime = config.getint("store", "error_packet_lifetime")
        self.successfull_packet_lifetime = config.getint("store", "success_packet_lifetime")
        self.suspended_packet_lifetime = config.safe_getint("store", "suspended_packet_lifetime", None)
        self.tags_db_file = config.get("store", "tags_db_file")
        self.recent_tags_file = config.get("store", "recent_tags_file")
        self.fix_bin_links_at_startup = config.safe_getboolean("store", "fix_bin_links_at_startup", True)

        self.server_process_title = config.safe_get("run", "server_process_title", None)
        self.use_ekrokhalev_server_process_title = config.safe_getboolean("run", "use_ekrokhalev_server_process_title", True)
        self.working_job_max_count = config.getint("run", "poolsize")
        self.subprocsrv_runner_count = config.safe_getint("run", "subprocsrv_runner_count", 0)
        self.xmlrpc_pool_size = config.safe_getint("run", "xmlrpc_poolsize", 1)
        self.readonly_xmlrpc_pool_size = config.safe_getint("run", "readonly_xmlrpc_pool_size", 1)
        self.pgrpguard_binary = config.safe_get("run", "pgrpguard_binary", None)
        self.pgrpguard_binary = config.safe_get("run", "process_wrapper", self.pgrpguard_binary)
        self.sandbox_api_url = config.safe_get("run", "sandbox_api_url", None)

        self.sandbox_api_token = config.safe_get("run", "sandbox_api_token", None)
        sandbox_api_token_file = config.safe_get("run", "sandbox_api_token_file", None)
        if sandbox_api_token_file is not None:
            with open(sandbox_api_token_file) as token_file:
                self.sandbox_api_token = token_file.read().strip()
            if not self.sandbox_api_token:
                raise ValueError("Empty token in %s" % sandbox_api_token_file)

        self.sandbox_api_timeout = config.safe_getint("run", "sandbox_api_timeout", 15)
        self.sandbox_task_owner = config.safe_get("run", "sandbox_task_owner", None)
        self.sandbox_task_priority = config.safe_get("run", "sandbox_task_priority", None)
        self.sandbox_task_max_count = config.safe_getint("run", "sandbox_task_max_count", 50)
        self.sandbox_rpc_listen_addr = config.safe_get("run", "sandbox_rpc_listen_addr", None)
        self.sandbox_python_resource_id = config.safe_getint("run", "sandbox_python_resource_id", None)

        self.sandbox_task_kill_timeout = 14 * 86400 # 14 days, in seconds
        self.sandbox_rpc_invoker_thread_pool_size = 10
        self.sandbox_invoker_thread_pool_size = 10
        self.sandbox_rpc_server_thread_pool_size = 10
        self.sandbox_executor_resource_ttl = 180 # days
        self.sandbox_executor_resource_id = config.safe_getint("run", "sandbox_executor_resource_id", None)

        if self.sandbox_rpc_listen_addr:
            self.sandbox_rpc_listen_addr = parse_network_address(self.sandbox_rpc_listen_addr)

        if self.sandbox_task_priority is not None:
            self.sandbox_task_priority = \
                rem.sandbox.TaskPriority.from_string(self.sandbox_task_priority)

        if self.sandbox_api_url and not (
            self.sandbox_task_owner
            and self.sandbox_task_priority
            and self.sandbox_task_max_count
            and self.sandbox_python_resource_id
            and self.sandbox_rpc_listen_addr):
            raise ValueError("Sandbox setup is incomplete")

        self.allow_files_auto_sharing = config.safe_getboolean("run", "allow_files_auto_sharing", False)
        self.all_packets_in_sandbox = config.safe_getboolean("run", "all_packets_in_sandbox", False)
        self.random_packet_sandboxness = config.safe_getboolean("run", "random_packet_sandboxness", False)

        self.cloud_tags_server = config.safe_get("store", "cloud_tags_server", None)
        self.cloud_tags_masks = config.safe_get("store", "cloud_tags_masks", None)
        self.cloud_tags_masks_reload_interval = config.safe_getint("store", "cloud_tags_masks_reload_interval", 300)
        self.cloud_tags_release_delay = 7200

        cloud_tags_nanny_token_file = config.safe_get('store', 'cloud_tags_nanny_token_file', None)
        if cloud_tags_nanny_token_file:
            with open(cloud_tags_nanny_token_file) as token_file:
                self.cloud_tags_nanny_token = token_file.read().strip()
            if not self.cloud_tags_nanny_token:
                raise ValueError("Empty token in %s" % cloud_tags_nanny_token_file)
            self.cloud_tags_server = self.cloud_tags_server.format(token=self.cloud_tags_nanny_token) # hack?
        else:
            if self.cloud_tags_server and '{token}' in self.cloud_tags_server:
                raise RuntimeError("No store.cloud_tags_nanny_token_file for cloud_tags_server")
            self.cloud_tags_nanny_token = None

        self.tags_random_cloudiness = config.safe_getboolean("store", "tags_random_cloudiness", False)
        self.all_tags_in_cloud = config.safe_getboolean("store", "all_tags_in_cloud", False)
        self.allow_startup_tags_conversion = config.safe_getboolean("store", "allow_startup_tags_conversion", True)

        self.manager_port = config.getint("server", "port")
        self.manager_readonly_port = config.safe_getint("server", "readonly_port")

        self.disable_remote_tags = config.safe_getboolean("server", "disable_remote_tags", False)

        self.system_port = None if self.disable_remote_tags \
            else config.safe_getint("server", "system_port")

        self.remote_tags_db_file = None if self.disable_remote_tags \
            else config.safe_get("store", "remote_tags_db_file")

        self.network_topology = None if self.disable_remote_tags \
            else config.safe_get("server", "network_topology")

        # network_name is used by: 1. ConnectionManager, 2. email subjects
        self.network_name = config.safe_get("server", "network_hostname", None)

        if not self.disable_remote_tags and not (
            self.system_port
            and self.remote_tags_db_file
            and self.network_topology
            and self.network_name):
            raise ValueError("Incomplete setup for remote tags")

        self.send_emails = config.getboolean("server", "send_emails")
        self.send_emergency_emails = config.safe_getboolean("server", "send_emergency_emails")
        self.mailer_thread_count = config.safe_getint("server", "mailer_thread_count", 1)
        self.use_memory_profiler = config.getboolean("server", "use_memory_profiler")
        self.max_remotetags_resend_delay = config.safe_getint("server", "max_remotetags_resend_delay", 300)
        self.allow_debug_rpc_methods = config.safe_getboolean("server", "allow_debug_rpc_methods", False)
        self.allow_python_resource_id_update = config.safe_getboolean("server", "allow_python_resource_id_update", False)
        self.server_oauth_application_id = config.safe_get('server', 'oauth_application_id', None)
        self.register_objects_creation = False
        self.child_processes_oom_adj = None
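
The safe_get* accessors used throughout _init_config_options are not defined in this excerpt. The sketch below shows the pattern they appear to follow (return a supplied default instead of raising when a section or option is missing), assuming a ConfigParser-style backend; the names and structure are illustrative only.

import ConfigParser  # configparser on Python 3

class SafeConfig(object):
    # Hypothetical wrapper; the actual config object passed to
    # _init_config_options is not shown in this excerpt.
    def __init__(self, parser):
        self._parser = parser

    def _safe(self, getter, section, option, default):
        try:
            return getter(section, option)
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            return default

    def safe_get(self, section, option, default=None):
        return self._safe(self._parser.get, section, option, default)

    def safe_getint(self, section, option, default=None):
        return self._safe(self._parser.getint, section, option, default)

    def safe_getboolean(self, section, option, default=None):
        return self._safe(self._parser.getboolean, section, option, default)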