def _validate_request(self, req):
    """Validate an incoming REP-server request.

    @req: expected to be a dict carrying at least the required fields
        ("hostname", "ip", "appname", "progname", "timestamp") and a
        supported "action".
    @return: (0, None) when the request is valid, otherwise
        (-1, results) where results echoes the required fields and adds
        "timestamp" and "returncode" = -1.
    """
    results = {}
    bad_request = False
    if not isinstance(req, dict):
        _LOGGER.error("Bad request %s. Request is required as json dict "
                      "format", str(req))
        bad_request = True
    else:
        requires = ("hostname", "ip", "appname", "progname", "timestamp")
        for required in requires:
            # Echo whatever the client sent (None when absent) so the
            # error reply is self-describing.
            results[required] = req.get(required)
            if required not in req:
                _LOGGER.error("Bad request. Required field=%s not in "
                              "request", required)
                bad_request = True
        # Check the action exactly once. The original nested this check in
        # the per-field loop's else branch, logging it repeatedly.
        # Typo fixed: "unexpetected" -> "unexpected".
        if req.get("action") not in self.supported_actions:
            _LOGGER.error("Rep server got unexpected msg: %s", req)
            bad_request = True

    if bad_request:
        results["timestamp"] = utils.datetime_to_seconds(datetime.utcnow())
        results["returncode"] = -1
        return -1, results
    return 0, None
def _build_lookup_tables(apps, existing_tasks):
    """Build the forwarder-load table and app->forwarders lookup.

    @apps: iterable of app registration dicts with at least "ip",
        "hostname", "appname", "cpu_count" and "timestamp" keys.
    @existing_tasks: iterable of task dicts, each carrying an "ip" key
        identifying the forwarder currently hosting the task.
    @return: (forwarders_load, app_2_forwarders) where forwarders_load is
        a list of per-forwarder dicts ("ip", "hostname", "cpu_count",
        "tasks") and app_2_forwarders maps appname -> set of forwarder IPs.
    """
    now = utils.datetime_to_seconds(datetime.utcnow())
    forwarders_load, forwarders, app_2_forwarders = [], set(), {}
    for app in apps:
        # Skip apps whose registration heartbeat has gone stale.
        if now - app["timestamp"] >= TAConfClient._threshhold_time:
            # Logger.warn() is a deprecated alias of warning().
            _LOGGER.warning("App=%s on forwarder=%s lost heartbeat",
                            app["appname"], app["ip"])
            continue

        # One load record per distinct forwarder IP.
        if app["ip"] not in forwarders:
            forwarders.add(app["ip"])
            forwarders_load.append({
                "ip": app["ip"],
                "hostname": app["hostname"],
                "cpu_count": int(app["cpu_count"]),
                "tasks": [],
            })

        # FIXME progname
        app_2_forwarders.setdefault(app["appname"], set()).add(app["ip"])

    # Attach each known task to the forwarder currently hosting it.
    for task in existing_tasks:
        for forwarder in forwarders_load:
            if forwarder["ip"] == task["ip"]:
                forwarder["tasks"].append(task)
                break

    return forwarders_load, app_2_forwarders
def handle_state_update(self, data):
    """
    @data: {
        "action": "APP_STATE_UPDATE",
        "host": hostname,
        "ip": ip_address,
        "appname": appname,
        "progname": program_name,
        "remote_server": remote_server,
        "state": json_states,
        "timestamp": utcnow,
    }
    """
    # Stamp the record with the server-side receive time.
    data["timestamp"] = utils.datetime_to_seconds(datetime.utcnow())

    collection = self._get_collection_name(data, "state")
    _LOGGER.info("Update state for %s", collection)

    # Lazily create the backing collection on first use.
    if collection not in self._app_state_collections:
        self._create_collection(collection)
        self._app_state_collections.add(collection)

    state_key = self._get_key(
        data, ("appname", "progname", "remote_server"))
    if state_key in self._app_state_keys:
        # Known state record: update it in place.
        self._kv.update_collection_data(
            collection, self._app_state_keys[state_key], data, self.appname)
    else:
        # First report for this key: insert and remember the KV key.
        record = self._kv.insert_collection_data(
            collection, data, self.appname)
        self._app_state_keys[state_key] = record["_key"]
def handle_task_config_delete(self, data):
    """Remove a task's configuration record from the config collection.

    @data: {
        "action": "APP_TASK_CONFIG_DELETE",
        "hostname": hostname,       # forwarder hostname
        "ip": ip_address,           # forwarder ip
        "appname": appname,         # TA on the forwarder
        "progname": program_name,   # program name in the TA
        "task_id": task_id,
        "task_name": task_name,
        "remote_server": remote_server,
        "timestamp": utcnow,
    }
    (The original docstring said "APP_TASK_CONFIG_UPDATE"; the client-side
    delete_task_config sends at.APP_TASK_CONFIG_DELETE.)
    """
    # Stamp with server-side receive time (mutates the caller's dict).
    utcnow = datetime.utcnow()
    data["timestamp"] = utils.datetime_to_seconds(utcnow)
    _LOGGER.info("Delete task config for %s", self._config_collection)
    # The task identity key deliberately excludes "task_id".
    keys = ("appname", "progname", "task_name", "remote_server")
    task_key = self._get_key(data, keys)
    if task_key in self._app_task_keys:
        _LOGGER.info("Delete task=%s", task_key)
        self._kv.delete_collection_data(self._config_collection,
                                        self._app_task_keys[task_key],
                                        self.appname)
        # Drop the cache entry so a later update re-inserts.
        del self._app_task_keys[task_key]
    else:
        # Unknown task: log only; deleting a missing record is not fatal.
        _LOGGER.error("task=%s is not found.", task_key)
def handle_task_config_update(self, data):
    """
    @data: {
        "action": "APP_TASK_CONFIG_UPDATE",
        "hostname": hostname,       # forwarder hostname
        "ip": ip_address,           # forwarder ip
        "appname": appname,         # TA on the forwarder
        "progname": program_name,   # program name in the TA
        "task_id": task_id,
        "task_name": task_name,
        "remote_server": remote_server,
        "task_config": config_info,
        "timestamp": utcnow,
    }
    """
    # Stamp the record with the server-side receive time.
    data["timestamp"] = utils.datetime_to_seconds(datetime.utcnow())
    _LOGGER.info("Update task config for %s", self._config_collection)

    task_key = self._get_key(
        data, ("appname", "progname", "task_name", "remote_server"))
    if task_key in self._app_task_keys:
        # Existing task config: overwrite in place.
        self._kv.update_collection_data(
            self._config_collection, self._app_task_keys[task_key],
            data, self.appname)
    else:
        # New task config: insert and cache the KV key for later updates.
        inserted = self._kv.insert_collection_data(
            self._config_collection, data, self.appname)
        self._app_task_keys[task_key] = inserted["_key"]
def delete_task_config(self, configs):
    """Post a task-config delete request to the REP server.

    @configs: a dict which shall contain at least the following fields
    {
        "hostname": hostname,        # forwarder hostname
        "ip": ip_address,            # forwarder ip address
        "appname": appname,          # TA on forwarder
        "progname": program_name,    # program name in the TA
        "task_name": task_name,      # task name
        "task_id": task_id,          # task id
        "remote_server": remote_server,
    }
    The configs will be dispatched to this "appname" on the forwarder.
    @return: 0 if it is successful otherwise -1
    @raise ValueError: when a required field is missing or empty.
    """
    # `assert` is stripped under `python -O`, so validate explicitly.
    for field in ("hostname", "ip", "appname", "task_name", "remote_server"):
        if not configs.get(field):
            raise ValueError("Missing required field=%s" % field)
    # progname only has to be present; the original allowed falsy values.
    if "progname" not in configs:
        raise ValueError("Missing required field=progname")

    configs["action"] = at.APP_TASK_CONFIG_DELETE
    configs["timestamp"] = utils.datetime_to_seconds(datetime.utcnow())
    resp = self._req_and_rep(configs)
    return self._log_response_status(resp, configs)
def _get_common_req(self, action, appname, progname):
    """Build the header fields shared by every REP-server request.

    @action: action constant for the request.
    @appname / @progname: identity of the sender; a falsy progname is
        normalized to None.
    @return: dict with action, host identity and the current timestamp.
    """
    common = {
        "action": action,
        "hostname": _HostnameIp.hostname,
        "ip": _HostnameIp.ips[0],
        "appname": appname,
        # `x or None` is equivalent to `x if x else None`.
        "progname": progname or None,
        "timestamp": utils.datetime_to_seconds(datetime.utcnow()),
    }
    return common
def add_event(short_name='', name='', secret_key='', url='', start_time=None, end_time=None): """ Get event information and add it to Redis :param short_name: Id/short name :param name: Display name :param secret_key: Secret key for removing :param url: URL for redirection :param start_time: Python datetime object of event start time :param end_time: Python datetime object of event end time """ # Generate new secret key if it's not provided if not secret_key: secret_key = generate_secret_key(allowed_characters=app.config['SECRET_KEY_CHARACTERS']) # Convert datetime objects to timestamp seconds start_seconds = datetime_to_seconds(start_time) end_seconds = datetime_to_seconds(end_time) # Setup dictionary of event information to save event_info = { 'short_name': short_name, 'name': name, 'url': url, 'secret_key': secret_key, 'start_time': start_seconds, 'end_time': end_seconds } redis_instance = app.extensions['redis']['REDIS'] # Setup Redis transaction pipe = redis_instance.pipeline() # Add commands to transaction pipe.hmset(app.config['REDIS_KEY_EVENT_INFO'] % short_name, event_info) pipe.expireat(app.config['REDIS_KEY_EVENT_INFO'] % short_name, end_time) pipe.zadd(app.config['REDIS_KEY_EVENT_START'], short_name, start_seconds) pipe.zadd(app.config['REDIS_KEY_EVENT_END'], short_name, end_seconds) # Execute and return True if no Falses in Redis multi bulk reply return False not in pipe.execute()
def _log_heartbeat_loss(self):
    """Log a live/lost heartbeat line for every registered app."""
    now = utils.datetime_to_seconds(datetime.utcnow())
    for info in self._req_client.get_app_register_info():
        prefix = "appname={},ip={},host={},timestamp={}".format(
            info["appname"], info["ip"], info["hostname"],
            info["timestamp"])
        # An app is considered lost once its last report is older than
        # the configured threshold.
        stale = now - info["timestamp"] >= TAConfClient._threshhold_time
        if stale:
            self._heartbeat_logger.warn("%s,heartbeat=lost", prefix)
        else:
            self._heartbeat_logger.warn("%s,heartbeat=live", prefix)
def _check_forwarder_availabilities(self):
    """Detect tasks on forwarders that lost heartbeat and reassign them.

    Early-returns when there is nothing to do at any stage.
    """
    now = utils.datetime_to_seconds(datetime.utcnow())

    tasks_lookup = self._build_tasks_lookup()
    if not tasks_lookup:
        return

    registered = self._req_client.get_app_register_info()
    if not registered:
        return

    # Split tasks by whether their forwarder is still heartbeating.
    bad, good = self._separate_tasks(registered, tasks_lookup, now)
    bad = self._merge_bad_tasks(bad)
    if not bad:
        return

    _LOGGER.info("Found %d tasks which lost heartbeat in %d seconds",
                 len(bad), self._threshhold_time)
    reassigned = self._reassign_tasks_to_forwarders(bad, good, registered)
    self._do_send_tasks(reassigned)
def tear_down(self):
    """Signal the REP server thread to stop and wait for it to exit.

    Idempotent: subsequent calls are no-ops once stopped.
    """
    if self._stopped:
        return
    self._stopped = True

    sock = self.context.socket(self.zmq.REQ)
    try:
        # Connect once — the original connected the same socket twice.
        sock.connect(self.ip_port)
        msg = {
            "action": at.APP_TEAR_DOWN,
            "hostname": self.hostname,
            "appname": "",
            "progname": None,
            "ip": self.ips[0],
            "timestamp": utils.datetime_to_seconds(datetime.utcnow()),
        }
        sock.send_json(msg)
        # Wait for the ack; its content is unused.
        sock.recv_json()
    finally:
        # Close the socket — the original leaked it.
        sock.close()

    self._thr.join()
    _LOGGER.info("RepServer stopped.")
def _handle_request(self, req):
    """Run the registered callback on req and build the reply dict.

    The reply echoes the request's identity fields and carries
    "returncode" 0 (plus any callback results) on success, -1 when the
    callback raised or returned None.
    """
    try:
        outcome = self.callback(req)
    except Exception:
        # Boundary handler: log the traceback and report failure.
        _LOGGER.error("Failed to call callback error=%s",
                      traceback.format_exc())
        outcome = None

    reply = {field: req.get(field)
             for field in ("hostname", "ip", "appname", "progname")}
    reply["timestamp"] = utils.datetime_to_seconds(datetime.utcnow())

    if outcome is None:
        reply["returncode"] = -1
    else:
        reply["returncode"] = 0
        reply.update(outcome)
    return reply
def handle_registration_update(self, data):
    """
    @data: {
        "action": "APP_REGIST_INFO_UPDATE",
        "host": hostname,
        "ip": ip_address,
        "appname": appname,
        "progname": progname,
        "platform": platform_info,
        "cpu_count": cpu_count,
        "timestamp": utcnow,
    }
    """
    # Stamp the record with the server-side receive time.
    data["timestamp"] = utils.datetime_to_seconds(datetime.utcnow())
    app_key = self._get_key(data)
    _LOGGER.info("Update registration for %s", self._app_collection)

    if app_key in self._apps:
        # Known app: push the update and refresh the local cache.
        cached = self._apps[app_key]
        self._kv.update_collection_data(self._app_collection,
                                        cached["_key"], data, self.appname)
        cached.update(data)
    else:
        # First sighting: insert, then read back the stored record so the
        # cache carries the KV-assigned "_key".
        inserted = self._kv.insert_collection_data(self._app_collection,
                                                   data, self.appname)
        self._apps[app_key] = self._kv.get_collection_data(
            self._app_collection, inserted["_key"], self.appname)
def test_datetime_to_seconds(self):
    """2018-09-01T00:00:00+00:00 is 1535760000 seconds past the epoch."""
    expected = 1535760000
    actual = datetime_to_seconds(datetime(2018, 9, 1, tzinfo=UTC))
    assert actual == expected
def datetime_compare_to_secs(value1, value2):
    """Return True when both datetimes map to the same epoch second."""
    return (utils.datetime_to_seconds(value1)
            == utils.datetime_to_seconds(value2))