def close_project(self, resp=None):
    """Close the currently selected project and reload the default config.

    When *resp* (an HTTP response object) is supplied, its cookies are
    reset as well.
    """
    self.logger.info('Closing project')
    project = self.selected_project
    project.close_project()
    self.selected_project = None
    # drop back to the project-less configuration
    self.config = Config_parser(easyFlow_conf).get_config()
    if resp is None:
        return
    self.reset_cookies(resp)  # set cookies to 'null'
def select_project(self, projectUUID):
    """Open the project identified by *projectUUID* and return its summary.

    Any project already open is closed first, then the project-scoped
    configuration is loaded and the process manager is started.
    """
    self.logger.info('Selected project %s', projectUUID)
    if self.is_project_open():
        # only one project may be open at a time
        self.close_project()
    opened = Project(projectUUID)
    self.selected_project = opened
    self.config = Config_parser(easyFlow_conf, projectUUID).get_config()
    opened.setup_process_manager()
    return opened.get_project_summary()
def __init__(self, projectUUID):
    """Set up the process manager for the project *projectUUID*.

    Loads the project-scoped configuration, attaches a file log handler,
    and opens two Redis connections: one for project/process data and one
    for buffers. Each connection tries a unix socket first and falls back
    to TCP on any failure.
    """
    self.config = Config_parser(easyFlow_conf, projectUUID).get_config()
    logging.basicConfig(format='%(levelname)s[%(asctime)s]: %(message)s')
    self.logger = logging.getLogger(__name__)
    # log level name comes from the per-project process_manager config
    self.logger.setLevel(
        getattr(logging, self.config.default_project.process_manager.log_level))
    formatter = logging.Formatter(
        '%(levelname)s[%(asctime)s]: %(message)s')
    self.log_handler = logging.FileHandler(
        os.path.join(os.environ['FLOW_LOGS'], 'project.log'))
    self.log_handler.setLevel(
        getattr(logging, self.config.default_project.process_manager.log_level))
    self.log_handler.setFormatter(formatter)
    self.logger.addHandler(self.log_handler)
    # connection for project/process data
    try:
        self._serv = redis.Redis(
            unix_socket_path=self.config.redis.project.unix_socket_path,
            decode_responses=True)
    except:  # fallback using TCP instead of unix_socket
        self.logger.warning(
            'Redis unix_socket not used for projects, falling back to TCP')
        self._serv = redis.StrictRedis(self.config.redis.project.host,
                                       self.config.redis.project.port,
                                       self.config.redis.project.db,
                                       charset="utf-8",
                                       decode_responses=True)
    # connection for buffers; the socket path comes from redis.buffers
    try:
        self._serv_buffers = redis.Redis(
            unix_socket_path=self.config.redis.buffers.unix_socket_path,
            decode_responses=True)
    except:  # fallback using TCP instead of unix_socket
        # NOTE(review): this fallback uses the *project* redis host/port/db,
        # not redis.buffers — looks intentional (same in Link_manager) but
        # worth confirming.
        self.logger.warning(
            'Redis unix_socket not used fo buffering, falling back to TCP')
        self._serv_buffers = redis.StrictRedis(
            self.config.redis.project.host,
            self.config.redis.project.port,
            self.config.redis.project.db,
            charset="utf-8",
            decode_responses=True)
    self._metadata_interface = Process_metadata_interface()
    self._buffer_metadata_interface = Buffer_metadata_interface()
    self._alert_manager = Alert_manager()
    self._being_restarted = []  # puuids currently mid-restart
    # bookkeeping for managed processes and buffers
    self.processes = {}
    self.processes_uuid = set()
    self.processes_uuid_with_signal = set()
    self.buffers = {}
    self.buffers_uuid = set()
    self.projectUUID = projectUUID
    self.puuid_to_be_restarted = {}
def __init__(self):
    """Load the global configuration and connect to the project Redis.

    A unix-socket connection is attempted first; any failure triggers a
    TCP fallback.
    """
    self.config = Config_parser(easyFlow_conf).get_config()
    try:
        socket_path = self.config.redis.project.unix_socket_path
        self._serv = redis.Redis(unix_socket_path=socket_path,
                                 decode_responses=True)
    except: # fallback using TCP instead of unix_socket
        project_redis = self.config.redis.project
        self._serv = redis.StrictRedis(project_redis.host,
                                       project_redis.port,
                                       project_redis.db,
                                       charset="utf-8",
                                       decode_responses=True)
def __init__(self, projectUUID):
    """Load an existing project from Redis.

    Parameters
    ----------
    projectUUID : str
        Redis key under which the project JSON document is stored.

    Raises
    ------
    ProjectNotFound
        If no project is stored under *projectUUID*.
    """
    self.config = Config_parser(easyFlow_conf, projectUUID).get_config()
    logging.basicConfig(format='%(levelname)s[%(asctime)s]: %(message)s')
    self.logger = logging.getLogger(__name__)
    self.logger.setLevel(getattr(logging, self.config.default_project.project_manager.log_level))
    formatter = logging.Formatter('%(levelname)s[%(asctime)s]: %(message)s')
    self.log_handler = logging.FileHandler(join(os.environ['FLOW_LOGS'], 'project.log'))
    self.log_handler.setLevel(getattr(logging, self.config.default_project.project_manager.log_level))
    self.log_handler.setFormatter(formatter)
    self.logger.addHandler(self.log_handler)
    try:
        self._serv = redis.Redis(unix_socket_path=self.config.redis.project.unix_socket_path, decode_responses=True)
    except: # fallback using TCP instead of unix_socket
        self.logger.warning('Redis unix_socket not used, falling back to TCP')
        self._serv = redis.StrictRedis(
            self.config.redis.project.host,
            self.config.redis.project.port,
            self.config.redis.project.db,
            charset="utf-8", decode_responses=True)
    # get project from redis
    rawJSONProject = self._serv.get(projectUUID)
    # FIX: test for a missing key BEFORE json.loads. Previously
    # json.loads(None) raised a TypeError first, which made the
    # ProjectNotFound branch below unreachable.
    if rawJSONProject is None:
        self.logger.error('Project not found', exc_info=True)
        raise ProjectNotFound("The provided projectUUID does not match any known project")
    jProject = json.loads(rawJSONProject)
    self.jProject = jProject
    if jProject is None:  # stored value was the JSON literal 'null'
        self.logger.error('Project not found', exc_info=True)
        raise ProjectNotFound("The provided projectUUID does not match any known project")
    self.projectUUID = projectUUID
    self.projectName = jProject.get('projectName', 'No project name')
    self.projectInfo = jProject.get('projectInfo', '')
    self.creationTimestamp = jProject.get('creationTimestamp', 0)
    self.processNum = jProject.get('processNum', 0)
    self._start_command_already_called = False
    self.processes = {}
    for puuid, p in self.jProject.get('processes', {}).items():
        # normalise each stored process dict to the fields expected downstream
        self.processes[puuid] = self.filter_correct_init_fields(p)
    self.buffers = {}
    for buuid, b in self.jProject.get('buffers', {}).items():
        self.buffers[buuid] = b
def __init__(self, projectUUID, puuid, custom_config, logger):
    """Create a link manager for process *puuid* of project *projectUUID*.

    Opens two Redis connections (process config and buffers, each with a
    unix-socket → TCP fallback) and resolves this process's ingress and
    egress buffers from *custom_config* via ``update_connections``.
    """
    self.logger = logger
    self.config = Config_parser(easyFlow_conf, projectUUID).get_config()
    # from puuid, get buuid
    self.projectUUID = projectUUID
    self.puuid = puuid
    try:
        self._serv_config = redis.Redis(
            unix_socket_path=self.config.redis.project.unix_socket_path,
            decode_responses=True)
    except:  # fallback using TCP instead of unix_socket
        self._serv_config = redis.StrictRedis(
            self.config.redis.project.host,
            self.config.redis.project.port,
            self.config.redis.project.db,
            charset="utf-8",
            decode_responses=True)
    try:
        self._serv_buffers = redis.Redis(
            unix_socket_path=self.config.redis.buffers.unix_socket_path,
            decode_responses=True)
    except:  # fallback using TCP instead of unix_socket
        # NOTE(review): TCP fallback reuses the *project* redis settings,
        # not redis.buffers — same pattern as the process manager; confirm
        # this is intentional.
        self._serv_buffers = redis.StrictRedis(
            self.config.redis.project.host,
            self.config.redis.project.port,
            self.config.redis.project.db,
            charset="utf-8",
            decode_responses=True)
    self.custom_config = custom_config
    # ONLY 1 ingress and 1 egress
    self.ingress = None
    self.egress = None
    self.update_connections(custom_config)
    self.partOfTheFlow = False
    self._buffer_metadata_interface = Buffer_metadata_interface()
def __init__(self):
    """Initialise the project manager: configuration, logging and Redis.

    Connects to the project Redis over a unix socket, falling back to TCP
    on failure.
    """
    self.selected_project = None
    self.config = Config_parser(easyFlow_conf).get_config()
    logging.basicConfig(format='%(levelname)s[%(asctime)s]: %(message)s')
    self.logger = logging.getLogger(__name__)
    self.logger.setLevel(getattr(logging, self.config.default_project.project_manager.log_level))
    formatter = logging.Formatter('%(levelname)s[%(asctime)s]: %(message)s')
    self.log_handler = logging.FileHandler(join(os.environ['FLOW_LOGS'], 'project.log'))
    self.log_handler.setLevel(getattr(logging, self.config.default_project.project_manager.log_level))
    self.log_handler.setFormatter(formatter)
    self.logger.addHandler(self.log_handler)
    try:
        self.serv = redis.Redis(unix_socket_path=self.config.redis.project.unix_socket_path, decode_responses=True)
    except: # fallback using TCP instead of unix_socket
        self.logger.warning('Redis unix_socket not used, falling back to TCP')
        # FIX: use config.redis.project.* like every other fallback in this
        # file; previously this read config.redis.host/port/db, which no
        # other code path uses.
        self.serv = redis.StrictRedis(
            self.config.redis.project.host,
            self.config.redis.project.port,
            self.config.redis.project.db,
            charset="utf-8", decode_responses=True)
def update_config(self):
    """Pull this process's pending configuration from Redis and apply it.

    Reads (and then deletes) the ``config_<puuid>`` key, refreshes the
    descriptive attributes, propagates the bulletin level to the logger
    and its handlers, and reloads the project-scoped configuration.
    Returns immediately if no pending configuration exists.
    """
    config_key = 'config_' + self.puuid
    configData = self._serv_config.get(config_key)
    if configData is None:  # already updated. Should not happen
        return
    configData = json.loads(configData)
    self.custom_config = configData['custom_config']
    self._serv_config.delete(config_key)
    self.state_refresh_rate = self.config.web.refresh_metadata_interval_in_sec
    self.projectUUID = configData.get('projectUUID', 'No projectUUID')
    self.name = configData.get('name', 'No name')
    self.type = configData.get('type', None)
    self.description = configData.get('description', '')
    self.bulletin_level = configData.get('bulletin_level', 'WARNING')
    # Propagate the bulletin level to the logger and both handlers.
    # Only the four levels the original chain handled are applied; any
    # other value leaves the levels untouched, as before.
    if self.logger and self.bulletin_level in ('DEBUG', 'INFO', 'WARNING', 'ERROR'):
        level = getattr(logging, self.bulletin_level)
        self.logger.setLevel(level)
        self._log_handler.setLevel(level)
        self._pubhandler.setLevel(level)
    self.x = configData.get('x', 0)
    self.y = configData.get('y', 0)
    # FIX: derive the config path from FLOW_CONFIG as the rest of the file
    # does; the previous hard-coded relative path 'config/easyFlow_conf.json'
    # broke whenever the process was started from another working directory.
    self.config = Config_parser(
        os.path.join(os.environ['FLOW_CONFIG'], 'easyFlow_conf.json'),
        self.projectUUID).get_config()
def __init__(self, puuid):
    """Bootstrap a flow process identified by *puuid*.

    Loads the global configuration, connects to the config Redis (unix
    socket with TCP fallback), installs a SIGUSR1 handler, sets up file
    and ZMQ PUB logging, pulls the per-process configuration, chooses a
    link manager based on the configured process type, and finally enters
    the run loop (this constructor does not return until ``run`` does).
    """
    self.config = Config_parser(
        os.path.join(os.environ['FLOW_CONFIG'], 'easyFlow_conf.json')).get_config()
    try:
        self._serv_config = redis.Redis(
            unix_socket_path=self.config.redis.project.unix_socket_path,
            decode_responses=True)
    except:  # fallback using TCP instead of unix_socket
        self._serv_config = redis.StrictRedis(
            self.config.redis.project.host,
            self.config.redis.project.port,
            self.config.redis.project.db,
            charset="utf-8",
            decode_responses=True)
    signal.signal(signal.SIGUSR1, self.sig_handler)
    self._alert_manager = Alert_manager()
    self.puuid = puuid
    self.pid = os.getpid()
    self._p = psutil.Process()  # handle on our own OS process for stats
    self.custom_message = ""
    self._keyCommands = 'command_' + self.puuid  # redis key for commands
    self.state = 'running'
    self.logger = None
    logging.basicConfig(format='%(levelname)s[%(asctime)s]: %(message)s')
    self.logger = logging.getLogger(__name__)
    self.logger.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '%(levelname)s[%(asctime)s]: %(message)s')
    # per-process log file named after the puuid
    self._log_handler = logging.FileHandler(
        os.path.join(os.environ['FLOW_LOGS'], '{}.log'.format(self.puuid)))
    self._log_handler.setLevel(logging.INFO)
    self._log_handler.setFormatter(formatter)
    self.logger.addHandler(self._log_handler)
    # publish log records over ZMQ, topic = puuid
    pub = zmqContext().socket(zmqPUB)
    pub.connect('tcp://{}:{}'.format(self.config.server.host,
                                     self.config.zmq.port))
    self._pubhandler = PUBHandler(pub)
    self._pubhandler.root_topic = self.puuid
    self._pubhandler.setLevel(logging.INFO)
    self.logger.addHandler(self._pubhandler)
    # update_config() must run before state_refresh_rate is used below:
    # it sets self.state_refresh_rate and self.type among others
    self.update_config()
    self._metadata_interface = Process_metadata_interface()
    self._buffer_metadata_interface = Buffer_metadata_interface()
    self.last_refresh = time.time(
    ) - self.state_refresh_rate  # ensure a refresh
    self.last_reload = time.time(
    ) - self.state_refresh_rate  # ensure a reload
    self._processStat = ProcessStat(
        self.config.default_project.process.buffer_time_resolution_in_sec,
        self.config.default_project.process.buffer_time_spanned_in_min)
    self.push_p_info()
    # pick the link manager matching the configured process type
    if self.type == 'multiplexer_in':
        self.logger.debug('Using multiplexer_in link manager')
        self._link_manager = Multiple_link_manager(self.projectUUID,
                                                   self.puuid,
                                                   self.custom_config,
                                                   self.logger,
                                                   multi_in=True)
    elif self.type == 'multiplexer_out':
        self.logger.debug('Using multiplexer_out link manager')
        self._link_manager = Multiple_link_manager(self.projectUUID,
                                                   self.puuid,
                                                   self.custom_config,
                                                   self.logger,
                                                   multi_in=False)
    elif self.type == 'switch':
        self.logger.debug('Using switch link manager')
        self._link_manager = Multiple_link_manager(self.projectUUID,
                                                   self.puuid,
                                                   self.custom_config,
                                                   self.logger,
                                                   multi_in=False,
                                                   is_switch=True)
    else:
        self._link_manager = Link_manager(self.projectUUID,
                                          self.puuid,
                                          self.custom_config,
                                          self.logger)
    # do not log to zmq by default
    self.log_to_zmq(False)
    self.pre_run()
    self.run()
import zmq from zmq.log.handlers import PUBHandler import json from io import BytesIO import random, math import configparser from time import sleep, strftime, time import datetime import os from util import genUUID, objToDictionnary, Config_parser from alerts_manager import Alert_manager from flow_project_manager import ProjectNotFound, Flow_project_manager easyFlow_conf = os.path.join(os.environ['FLOW_CONFIG'], 'easyFlow_conf.json') config = Config_parser(easyFlow_conf).get_config() app = Flask(__name__) app.config['SECRET_KEY'] = config.server.SECRET_KEY app.config['UPLOAD_FOLDER'] = config.server.upload_folder flow_project_manager = Flow_project_manager() try: redis_pmanager = redis.Redis( unix_socket_path=config.redis.project.unix_socket_path, decode_responses=True) except: # fallback using TCP instead of unix_socket redis_pmanager = redis.StrictRedis(config.redis.project.host, config.redis.project.port, config.redis.project.db, charset="utf-8",