Exemple #1
0
        def vnc_start_fun():
            """Daemonize, then launch the VNC server and (optionally) websockify.

            Runs in a forked child: closes the inherited DB connection,
            detaches via DaemonContext, spawns the VNC process (which writes
            its own pid file) and, if a websockify command line is set,
            starts websockify and writes its pid file by hand.  Blocks on
            websockify until it exits; never returns to the caller.
            Reads ``cmd_line``, ``vnc_env``, ``preexec``, ``proc_stdout``,
            ``proc_stderr`` and ``websockify_cmd_line`` from the enclosing
            scope.
            """
            # make sure not to interfere with db (db should actually be already closed at this point but be really sure)
            db_tools.close_connection()

            # turn process into daemon
            with daemon.DaemonContext(detach_process=True,
                                      stdout=sys.stdout,
                                      stderr=sys.stderr):
                # execute vnc start script in daemon (writes pid file automatically)
                subprocess.Popen(cmd_line.strip().split(),
                                 env=vnc_env,
                                 preexec_fn=preexec,
                                 stdout=proc_stdout,
                                 stderr=proc_stderr)

                # run websockify
                if websockify_cmd_line:
                    # websockify needs to write somewhere
                    sub = subprocess.Popen(websockify_cmd_line.strip().split(),
                                           env=vnc_env,
                                           preexec_fn=preexec,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                    # write pid file manually
                    with open(self.websockify_pid_file, "w") as f:
                        f.write(str(sub.pid))
                    sub.wait()  # terminate when child does
Exemple #2
0
 def __init__(self):
     """Set up the grapher-server process pool.

     Order matters: the DB connection is closed before the pool
     daemonizes, graph-root paths are read from the DB, the rrdcached
     socket path is fixed up, signal handlers are registered and the
     graph/stale worker processes are started before the network
     sockets are opened.
     """
     threading_tools.process_pool.__init__(self, "main", zmq=True)
     self.CC.init(icswServiceEnum.grapher_server, global_config)
     self.CC.check_config()
     # close connection (daemonizing)
     db_tools.close_connection()
     # GRAPH_ROOT / GRAPH_ROOT_DEBUG point below STATIC_ROOT*/graphs
     self.CC.read_config_from_db([
         ("GRAPH_ROOT_DEBUG",
          configfile.str_c_var(os.path.abspath(
              os.path.join(settings.STATIC_ROOT_DEBUG, "graphs")),
                               database=True)),
         ("GRAPH_ROOT",
          configfile.str_c_var(os.path.abspath(
              os.path.join(
                  settings.STATIC_ROOT_DEBUG if global_config["DEBUG"] else
                  settings.STATIC_ROOT, "graphs")),
                               database=True)),
     ])
     # replace the stock rrdcached socket path with the configured one
     if global_config["RRD_CACHED_SOCKET"] == "/var/run/rrdcached.sock":
         global_config["RRD_CACHED_SOCKET"] = os.path.join(
             global_config["RRD_CACHED_DIR"], "rrdcached.sock")
     self.CC.log_config()
     # re-insert config
     self.CC.re_insert_config()
     self.register_exception("int_error", self._int_error)
     self.register_exception("term_error", self._int_error)
     self.register_exception("hup_error", self._hup_error)
     self.add_process(GraphProcess("graph"), start=True)
     self.add_process(GraphStaleProcess("stale"), start=True)
     # close again: the add_process() calls may have touched the DB
     db_tools.close_connection()
     self._init_network_sockets()
     DataStore.setup(self)
Exemple #3
0
 def process_init(self):
     """Initialize this worker process after the fork.

     Sets up logging, drops the DB connection inherited from the
     parent, registers the command dispatch table and resolves the
     server/config DB records for this instance.
     """
     # hm ...
     self.global_config = global_config
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context)
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.register_func("fetch_partition_info", self._fetch_partition_info)
     self.register_func("scan_network_info", self._scan_network_info)
     self.register_func("scan_system_info", self._scan_system_info)
     self.register_func("snmp_basic_scan", self._snmp_basic_scan)
     self.register_func("snmp_result", self._snmp_result)
     self.register_func("base_scan", self._base_scan)
     self.register_func("wmi_scan", self._wmi_scan)
     self.register_func("ext_con_result", self._ext_con_result)
     self.register_func("host_monitor_result", self._host_monitor_result)
     self.EC.init(global_config)
     # resolve our own device / config rows from the indices in the config
     self._server = device.objects.get(Q(pk=global_config["SERVER_IDX"]))
     self._config = config.objects.get(Q(pk=global_config["CONFIG_IDX"]))
     self.__run_idx = 0
     # global job list
     self.__job_list = []
     self.__pending_commands = {}
     self._init_subsys()
Exemple #4
0
 def process_init(self):
     """Post-fork setup for the RRD housekeeping process.

     Configures logging, reads rrdcached socket / RRD directory
     settings, schedules the periodic size check and — when rsync is
     available — the periodic RAM-to-disk cache sync.
     """
     global_config.close()
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         zmq=True,
         context=self.zmq_context,
         init_logger=True)
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.rrd_cache_socket = global_config["RRD_CACHED_SOCKET"]
     self.rrd_root = global_config["RRD_DIR"]
     # collect all RRD_COVERAGE* config keys (patterns, presumably)
     cov_keys = [
         _key for _key in global_config.keys()
         if _key.startswith("RRD_COVERAGE")
     ]
     self.rrd_coverage = [global_config[_key] for _key in cov_keys]
     self.log("RRD coverage: {}".format(", ".join(self.rrd_coverage)))
     # check disk usage every 6 hours, first run almost immediately
     self.register_timer(self.check_size, 6 * 3600, first_timeout=1)
     self.__verbose = global_config["VERBOSE"]
     self._setup_rsync()
     if self.do_rsync:
         self.log(
             "enabling periodic RAM-to-disk sync from {} to {} every {}".
             format(
                 global_config["RRD_DIR"],
                 global_config["RRD_DISK_CACHE"],
                 logging_tools.get_diff_time_str(
                     global_config["RRD_DISK_CACHE_SYNC"]),
             ))
         self.register_timer(self.sync_from_ram_to_disk,
                             global_config["RRD_DISK_CACHE_SYNC"])
Exemple #5
0
 def process_init(self):
     """Post-fork setup for the aggregation process.

     Configures logging, records the memcache address, initializes the
     aggregation XML structure and schedules the 30-second aggregate
     timer.
     """
     global_config.close()
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         zmq=True,
         context=self.zmq_context,
         init_logger=True
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.__debug = global_config["DEBUG"]
     # cache address; host part of MEMCACHE_ADDRESS plus MEMCACHE_PORT
     self.__memcache_address = [
         "{}:{:d}".format(
             global_config["MEMCACHE_ADDRESS"].split(":")[0],
             global_config["MEMCACHE_PORT"],
         )
     ]
     # last update of aggregation structure
     self.__struct_update = None
     # cache for filtered values
     self.__vector_filter_cache = {}
     self.init_sockets()
     self.init_ag_xml()
     # aggregate every 30s, first run after 1s
     self.register_timer(self.aggregate, 30, instant=False, first_timeout=1)
Exemple #6
0
 def bg_check_notify(self):
     """Expire stale background jobs, then dispatch the pending ones.

     Step 1: every job initiated by this device that is still in
     ``pre-init``/``pending`` past its ``valid_until`` timestamp is moved
     to the ``timeout`` state.
     Step 2: all remaining ``pre-init`` jobs are handed to
     ``_handle_bgj()`` in pk order.  A DB error during step 2 is logged
     and the connection is closed so the next call gets a fresh one.
     """
     self.srv_routing.update()
     # step 1: expire pending jobs which are too old
     _timeout = background_job.objects.filter(
         Q(initiator=self.srv_routing.local_device.pk)
         & Q(state__in=["pre-init", "pending"])
         & Q(valid_until__lte=cluster_timezone.localize(
             datetime.datetime.now())))
     # evaluate COUNT once instead of issuing three identical queries
     _to_count = _timeout.count()
     if _to_count:
         self.log(
             "{} timeout".format(
                 logging_tools.get_plural("background job", _to_count)),
             logging_tools.LOG_LEVEL_WARN)
         for _to in _timeout:
             _to.set_state("timeout")
     try:
         _pending = background_job.objects.filter(
             Q(initiator=self.srv_routing.local_device.pk)
             & Q(state="pre-init")).order_by("pk")
         # force evaluation
         _pc = _pending.count()
     except Exception:
         # narrowed from a bare except: never swallow SystemExit /
         # KeyboardInterrupt, only genuine runtime (DB) errors
         self.log(
             "error accessing DB: {}".format(
                 process_tools.get_except_info()),
             logging_tools.LOG_LEVEL_CRITICAL)
         # close connection so a reconnect happens on the next access
         db_tools.close_connection()
     else:
         if _pc:
             self.log("pending background jobs: {:d}".format(_pc))
             for _cur_bg in _pending:
                 self._handle_bgj(_cur_bg)
Exemple #7
0
 def process_init(self):
     """Post-fork setup for the RMS/SGE monitor process.

     Configures logging and caches, prepares default node/job display
     options, brings up the network and SGE info layers, registers the
     command dispatch table and the periodic node update timer.
     """
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self._init_cache()
     self.__node_options = sge_tools.get_empty_node_options()
     self.__run_options = sge_tools.get_empty_job_options(
         suppress_times=True,
         suppress_nodelist=True,
         show_stdoutstderr=False,
     )
     self._init_network()
     self._init_sge_info()
     # job content dict
     self.__job_content_dict = {}
     # pinning dict
     self.__job_pinning_dict = {}
     self.register_func("get_config", self._get_config)
     self.register_func("job_control", self._job_control)
     self.register_func("queue_control", self._queue_control)
     self.register_func("file_watch_content", self._file_watch_content)
     self.register_func("affinity_info", self._affinity_info)
     self.register_func("job_ended", self._job_ended)
     self.register_func("full_reload", self._full_reload)
     # job stop/start info
     self.register_timer(self._update_nodes, 30, first_timeout=5)
     if global_config["TRACE_FAIRSHARE"]:
         self.log("register fairshare tracer")
         self.register_timer(self._update_fairshare, 60, instant=True)
Exemple #8
0
 def __init__(self):
     """Set up the package-server process pool.

     Registers signal handlers, initializes client handling and network
     sockets, starts the repository worker process and triggers an
     initial repository rescan.
     """
     threading_tools.icswProcessPool.__init__(
         self,
         "main",
     )
     self.CC.init(icswServiceEnum.package_server, global_config)
     self.CC.check_config()
     # command port of the package-client instances we talk to
     self.__pc_port = InstanceXML(quiet=True).get_port_dict(
         icswServiceEnum.package_client,
         command=True
     )
     self.register_exception("int_error", self._int_error)
     self.register_exception("term_error", self._int_error)
     self.register_exception("hup_error", self._hup_error)
     # close connection before daemonizing / forking workers
     db_tools.close_connection()
     self.CC.log_config()
     self.CC.re_insert_config()
     self.EC.init(global_config)
     self._init_clients()
     self._init_network_sockets()
     self.add_process(RepoProcess("repo"), start=True)
     # close DB connection
     db_tools.close_connection()
     # not needed, 0MQ is smart enough to keep the connections alive
     # self.reconnect_to_clients()
     self.send_to_process("repo", "rescan_repos")
Exemple #9
0
 def process_init(self):
     """Post-fork setup for the repository worker process.

     Configures logging, registers repo-related commands and picks the
     repository backend (yum / apt / zypper) from the distribution's
     release files, defaulting to zypper.
     """
     global_config.close()
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         zmq=True,
         context=self.zmq_context,
         init_logger=True
     )
     # close database connection
     db_tools.close_connection()
     self.register_func("rescan_repos", self._rescan_repos)
     self.register_func("reload_searches", self._reload_searches)
     self.register_func("clear_cache", self._clear_cache)
     self.register_func("search", self._search)
     self._correct_search_states()
     self.__background_commands = []
     # poll delayed background commands once per second
     self.register_timer(self._check_delayed, 1)
     # set repository type
     if os.path.isfile("/etc/centos-release") or os.path.isfile("/etc/redhat-release"):
         self.repo_type = RepoTypeRpmYum(self)
     elif os.path.isfile("/etc/debian_version"):
         self.repo_type = RepoTypeDebDebian(self)
     else:
         self.repo_type = RepoTypeRpmZypper(self)
Exemple #10
0
 def process_init(self):
     """Post-fork setup for the backup worker: logging plus the single
     'start_backup' command hook."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context)
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.register_func("start_backup", self._start_backup)
Exemple #11
0
 def process_init(self):
     """Post-fork setup: logging and the option/command message hooks."""
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context)
     self.register_func("set_option_dict", self._set_option_dict)
     self.register_func("set_srv_com", self._set_srv_com)
     self.register_func("start_command", self._start_command)
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
Exemple #12
0
 def process_init(self):
     """Post-fork setup for the compression worker: logging plus the
     'compress' command hook."""
     self.__verbose = global_config["VERBOSE"]
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.register_func("compress", self._compress)
Exemple #13
0
 def process_init(self):
     """Post-fork setup: logging plus periodic graph / RRD housekeeping
     timers (old-graph cleanup every minute, stale-RRD check hourly)."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.register_timer(self._clear_old_graphs, 60, instant=True)
     self.register_timer(self._check_for_stale_rrds, 3600, instant=True)
Exemple #14
0
 def _op_error(self, info):
     """Recover from a database OperationalError by dropping the
     (presumably stale) connection so the next query reconnects.

     :param info: error description, informational only — not inspected
     """
     from initat.cluster.backbone import db_tools
     try:
         from django.db import connection
     except ImportError:
         # narrowed from a bare except: only a missing module is expected
         self.log("cannot import connection from django.db", logging_tools.LOG_LEVEL_ERROR)
     else:
         self.log("operational error, closing db connection", logging_tools.LOG_LEVEL_ERROR)
         try:
             db_tools.close_connection()
         except Exception:
             # best-effort close; the connection may already be gone.
             # narrowed from a bare except so SystemExit / KeyboardInterrupt
             # still propagate
             pass
Exemple #15
0
 def __init__(self):
     """Set up the discovery-server process pool.

     Registers signal handlers, reads SNMP process limits from the DB,
     starts the discovery / eventlog-poller / asset-generation worker
     processes, opens the network sockets, wires up the command
     dispatch table and clears any scans left pending by a previous
     run.
     """
     threading_tools.process_pool.__init__(self, "main", zmq=True)
     self.register_exception("int_error", self._int_error)
     self.register_exception("term_error", self._int_error)
     self.CC.init(icswServiceEnum.discovery_server, global_config)
     self.CC.check_config()
     # close connection (daemonize)
     db_tools.close_connection()
     self.CC.read_config_from_db([
         ("SNMP_PROCESSES",
          configfile.int_c_var(
              4,
              help_string="number of SNMP processes [%(default)d]",
              short_options="n")),
         ("MAX_CALLS",
          configfile.int_c_var(
              100,
              help_string="number of calls per helper process [%(default)d]"
          )),
     ])
     self.CC.re_insert_config()
     self.CC.log_config()
     self.add_process(DiscoveryProcess("discovery"), start=True)
     self.add_process(EventLogPollerProcess(
         EventLogPollerProcess.PROCESS_NAME),
                      start=True)
     self.add_process(GenerateAssetsProcess("generate_assets"), start=True)
     self._init_network_sockets()
     self.register_func("snmp_run", self._snmp_run)
     self.register_func("generate_assets", self._generate_assets)
     self.register_func(
         "process_assets_finished",
         self._process_assets_finished,
     )
     self.register_func(
         "process_batch_assets_finished",
         self._process_batch_assets_finished,
     )
     self.register_func("send_msg", self.send_msg)
     # close again: the add_process() calls may have touched the DB
     db_tools.close_connection()
     # lower the per-helper call limit in debug mode
     self.__max_calls = global_config[
         "MAX_CALLS"] if not global_config["DEBUG"] else 5
     self.__snmp_running = True
     self._init_processes()
     # not really necessary
     self.install_remote_call_handlers()
     # clear pending scans
     self.clear_pending_scans()
     self.__run_idx = 0
     self.__pending_commands = {}
     # developer hook: run self-test only on one specific debug host
     if process_tools.get_machine_name(
     ) == "eddiex" and global_config["DEBUG"]:
         self._test()
Exemple #16
0
 def send_to_remote_server(self, srv_type_enum, send_obj):
     """Send *send_obj* to the server of type *srv_type_enum*.

     Lazily builds the routing table and per-type address cache on
     first use (closing any inherited DB connection first), resolves /
     refreshes the target address and delegates the actual send to
     ``send_to_remote_server_int``.

     :param srv_type_enum: service-type enum identifying the target server
     :param send_obj: payload to transmit (opaque here)
     :return: whatever ``send_to_remote_server_int`` returns
     """
     from initat.cluster.backbone import routing
     if self.__target_dict is None:
         # first call: set up router and address cache
         from initat.cluster.backbone import db_tools
         db_tools.close_connection()
         self.__target_dict = {}
         self.__strs_router = routing.SrvTypeRouting(log_com=self.log)
     if srv_type_enum not in self.__target_dict:
         self.__target_dict[srv_type_enum] = RemoteServerAddress(self, srv_type_enum)
     _rsa = self.__target_dict[srv_type_enum]
     _rsa.check_for_address(self.__strs_router)
     return self.send_to_remote_server_int(_rsa, send_obj)
Exemple #17
0
 def process_init(self):
     """Post-fork setup for the kernel-sync worker: logging, the
     'rescan_kernels' command hook and the kernel-server device check."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # close database connection
     db_tools.close_connection()
     self.register_func("rescan_kernels", self._rescan_kernels)
     # resolve the device acting as kernel server (may be None — TODO confirm)
     self.kernel_dev = config_tools.icswServerCheck(
         service_type_enum=icswServiceEnum.kernel_server).get_result()
 def process_init(self):
     """Post-fork setup for the asset-processing worker: logging plus the
     asset batch command hooks."""
     global_config.close()
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         zmq=True,
         context=self.zmq_context,
     )
     # self.add_process(BuildProcess("build"), start=True)
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.register_func("process_assets", self._process_assets)
     self.register_func("process_batch_assets", self._process_batch_assets)
Exemple #19
0
 def process_init(self):
     """Post-fork setup for the monitoring-result worker: logging plus
     the passive-check result hooks."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.register_func("monitoring_info", self._monitoring_info)
     self.register_func("passive_check_result", self._pcr)
     self.register_func("passive_check_results_as_chunk",
                        self._pcrs_as_chunk)
Exemple #20
0
 def process_init(self):
     """Post-fork setup for the config-generator worker: logging, the
     'generate' command hook and empty job bookkeeping structures."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.register_func("generate", self._generate)
     self.__run_idx = 0
     # global job list
     self.__job_list = []
     self.__pending_commands = {}
Exemple #21
0
 def _update(self):
     """Periodic tick: start a DB backup when the scheduled time arrives.

     Timestamps are truncated to whole hours outside DEBUG mode so the
     comparison with the precomputed next-backup time matches once per
     hour; a backup also starts on the very first tick (next time still
     None).
     """
     cur_dt = datetime.datetime.now().replace(microsecond=0)
     if not global_config["DEBUG"]:
         cur_dt = cur_dt.replace(minute=0, second=0)
     if cur_dt == self.__next_backup_dt or self.__next_backup_dt is None:
         self._set_next_backup_time()
         self.log("start DB-backup")
         self.add_process(backup_process("backup_process"), start=True)
         self.send_to_process(
             "backup_process",
             "start_backup",
         )
         # the freshly forked backup process must get its own connection
         db_tools.close_connection()
Exemple #22
0
    def process_init(self):
        """Post-fork setup: logging plus the periodic update timer
        (30s in DEBUG mode, 300s otherwise)."""
        global_config.close()
        self.__log_template = logging_tools.get_logger(
            global_config["LOG_NAME"],
            global_config["LOG_DESTINATION"],
            zmq=True,
            context=self.zmq_context,
            init_logger=True)
        # forked process must not share the parent's DB connection
        db_tools.close_connection()

        self.register_timer(self.update,
                            30 if global_config["DEBUG"] else 300,
                            instant=False)
Exemple #23
0
 def process_init(self):
     """Post-fork setup for the vector/perfdata worker: logging, the
     mvector/perfdata hooks and the UUID / machine-vector caches."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self.__debug = global_config["DEBUG"]
     self.register_func("mvector", self._mvector)
     self.register_func("perfdata", self._perfdata)
     self._uuid_cache = UUIDCache(self.log)
     # self._name_cache = NameCache(self.log)
     self._mvector_cache = MachineVectorCache(self.log)
Exemple #24
0
    def process_init(self):
        """Post-fork setup for the KPI worker: logging, the periodic
        update timer (60s in DEBUG mode, 300s otherwise) and the KPI
        command hooks."""
        global_config.enable_pm(self)
        self.__log_template = logging_tools.get_logger(
            global_config["LOG_NAME"],
            global_config["LOG_DESTINATION"],
            context=self.zmq_context,
        )
        # forked process must not share the parent's DB connection
        db_tools.close_connection()

        self.register_timer(self.periodic_update, 60 if global_config["DEBUG"] else 300, instant=True)

        self.register_func('get_kpi_source_data', self._get_kpi_source_data)
        self.register_func('calculate_kpi_preview', self._calculate_kpi_preview)
        self.register_func('calculate_kpi_db', self._calculate_kpi_db)
Exemple #25
0
 def process_init(self):
     """Post-fork setup for the license-usage worker: logging, SGE info
     and network layers, the 30s update timer and the license-usage
     command hook."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self._init_sge_info()
     self._init_network()
     # job stop/start info
     self.__elo_obj = None
     self.register_timer(self._update, 30, instant=True)
     self.register_func("get_license_usage", self.get_license_usage)
Exemple #26
0
 def __init__(self):
     """Set up the image-server process pool (Python 2 code: uses
     ``unicode`` / ``xrange``).

     Validates the local host as image server and checks for the
     ``xmllint`` binary; handles the CLEAR_LOCK / SET_LOCK one-shot
     modes (toggle the image build lock, then exit), otherwise starts
     BUILDERS builder processes and kicks off the build unless an exit
     was already requested.
     """
     self.__start_time = time.time()
     self.__verbose = global_config["VERBOSE"]
     self.__log_cache, self.__log_template = ([], None)
     threading_tools.process_pool.__init__(
         self,
         "main",
         zmq=True,
     )
     self.register_exception("int_error", self._int_error)
     self.register_exception("term_error", self._int_error)
     self.register_func("compress_done", self._compress_done)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         zmq=True,
         context=self.zmq_context
     )
     # log config
     self._log_config()
     self.device = config_tools.server_check(service_type_enum=icswServiceEnum.image_server).effective_device
     if not self.device:
         self.log("not an image server", logging_tools.LOG_LEVEL_ERROR)
         self._int_error("not an image server")
     elif not process_tools.find_file("xmllint"):
         self.log("xmllint not found", logging_tools.LOG_LEVEL_ERROR)
         self._int_error("xmllint not found")
     elif global_config["CLEAR_LOCK"] or global_config["SET_LOCK"]:
         # one-shot lock maintenance: flip the build lock and stop
         cur_img = self._get_image()
         if global_config["CLEAR_LOCK"]:
             _info_str = "lock cleared"
             cur_img.build_lock = False
         else:
             _info_str = "lock set"
             cur_img.build_lock = True
         cur_img.save()
         self._int_error("{} on image {}".format(_info_str, unicode(cur_img)))
     else:
         self.log("image server is '{}'".format(unicode(self.device) if self.device else "---"))
         self.__builder_names = []
         # spawn the configured number of builder worker processes
         for cur_num in xrange(global_config["BUILDERS"]):
             builder_name = "builder_{:d}".format(cur_num)
             self.__builder_names.append(builder_name)
             self.add_process(BuildProcess(builder_name), start=True)
     # close before the workers start using their own connections
     db_tools.close_connection()
     self.__build_lock = False
     if not self["exit_requested"]:
         self.init_build()
Exemple #27
0
 def process_init(self):
     """Post-fork setup for the config-build worker: logging, router
     object, log-source record and the config-generation command hooks."""
     global_config.enable_pm(self)
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         context=self.zmq_context,
     )
     # close database connection
     db_tools.close_connection()
     self.router_obj = config_tools.RouterObject(self.log)
     # resolve the LogSource row configured for this server
     self.config_src = LogSource.objects.get(
         Q(pk=global_config["LOG_SOURCE_IDX"]))
     self.register_func("generate_config", self._generate_config)
     # for requests from config_control
     self.register_func("complex_request", self._complex_request)
     build_client.init(self)
Exemple #28
0
    def process_init(self):
        """Post-fork setup for the license checker: logging, the
        warning-collection flag, the 30-minute update interval and the
        network layer."""
        global_config.enable_pm(self)
        self.__log_template = logging_tools.get_logger(
            global_config["LOG_NAME"],
            global_config["LOG_DESTINATION"],
            context=self.zmq_context,
        )
        # forked process must not share the parent's DB connection
        db_tools.close_connection()

        # can be triggered
        # self.register_func("check_license_violations", self._check_from_command)

        # and is run periodically
        self._update_interval = 30 * 60

        self._init_network()
Exemple #29
0
 def process_init(self):
     """Post-fork setup for the capability scanner: logging, instance
     XML, network/capability layers and the periodic update timer
     (2s in DEBUG mode, 30s otherwise)."""
     global_config.close()
     self.__log_template = logging_tools.get_logger(
         global_config["LOG_NAME"],
         global_config["LOG_DESTINATION"],
         zmq=True,
         context=self.zmq_context)
     # forked process must not share the parent's DB connection
     db_tools.close_connection()
     self._instance = InstanceXML(log_com=self.log)
     self._init_network()
     self._init_capabilities()
     self.__last_user_scan = None
     self.__scan_running = False
     self.register_timer(self._update,
                         2 if global_config["DEBUG"] else 30,
                         instant=True)
Exemple #30
0
    def process_init(self):
        """Post-fork setup: logging, the warning-collection flag and the
        periodic update timer (30s in DEBUG mode, 300s otherwise; first
        run after 5s)."""
        global_config.enable_pm(self)
        self.__log_template = logging_tools.get_logger(
            global_config["LOG_NAME"],
            global_config["LOG_DESTINATION"],
            context=self.zmq_context,
        )
        # some global flags
        self.always_collect_warnings = True
        # forked process must not share the parent's DB connection
        db_tools.close_connection()

        self.register_timer(
            self.update,
            30 if global_config["DEBUG"] else 300,
            instant=False,
            first_timeout=5,
        )