class DataProcessor(object):
    """Consume JSON payloads from a queue in a background thread and
    dispatch them to live plotting and/or per-target local CSV storage.

    The processing thread is launched from the constructor.
    """

    def __init__(self, queue, headers, targets):
        self.log = Logger(PROC_CLIENT_LOG_FILE, level=D_VERB)
        # Main thread communication
        self.keep_running = True
        self.transmit = queue
        self.headers = headers
        self.targets = targets
        # print data
        self.printing = False
        self.base_data = None
        self.fig = ()
        self.ax = ()
        # store data
        self.local_store = False
        self.files = {}
        # Launching Thread
        self.thr = threading.Thread(target=self.process, args=(),
                                    name='process_thread')
        self.start()

    ###
    ### Process Thread
    ###

    def start(self):
        """Start the background processing thread."""
        self.log.info('[MAIN THREAD] Starting process thread')
        self.thr.start()
        self.log.debug('[MAIN THREAD] Process thread started')

    def stop(self):
        """Ask the processing thread to exit (it polls every second)."""
        self.keep_running = False
        self.log.info('[MAIN THREAD] Asked processing thread end')

    def process(self):
        """Thread body: pop JSON payloads off the queue and fan them out
        to the active consumers (printing and/or local storage)."""
        while self.keep_running:
            self.log.debug('[PROCESS THREAD] Getting data')
            try:
                data = self.transmit.get(timeout=1)
                data = json.loads(data)
                self.log.debug('[PROCESS THREAD] Got data {}'.format(data))
                if self.printing:
                    # build_print_data mutates self.base_data in place
                    # (it returns None, so nothing to keep).
                    self.build_print_data(data)
                    self.log.debug('[PROCESS THREAD] Printing')
                    multi_print_dic(self.base_data, self.print_data)
                    self.log.debug('[PROCESS THREAD] Printed')
                if self.local_store:
                    self.process_store(data)
            except Empty:
                self.log.debug('[PROCESS THREAD] No data')
        self.log.info('[PROCESS THREAD] End of thread')

    ###
    ### Print utilities
    ###

    def start_print(self):
        """Initialise the plotting structures and enable printing."""
        self.log.info('[MAIN THREAD] Start printing')
        self.build_print_headers()
        self.log.debug('[MAIN THREAD] Built headers')
        self.print_data = multi_init_print(self.base_data)
        self.log.debug('[MAIN THREAD] Graphics initiated')
        self.printing = True

    def stop_print(self):
        """Disable printing and clear the display."""
        self.log.info('[MAIN THREAD] Stop printing')
        self.printing = False
        clear_print()

    def build_print_headers(self):
        """Build the empty {instance: {data_field: []}} accumulator used
        by the plotting code and store it in self.base_data."""
        ret = {}
        for types in self.targets:
            for instance in self.targets[types]:
                ret[instance] = {}
                for data_field in self.headers[types]:
                    ret[instance][data_field] = []
        self.base_data = ret
        self.log.debug('[DATA THREAD] Header: {}'.format(self.base_data))

    def build_print_data(self, dico):
        """Append every value of *dico* to the matching series in
        self.base_data (mutates in place, returns None)."""
        for target in dico:
            for data_field in dico[target]:
                self.base_data[target][data_field].append(
                    dico[target][data_field])

    ####
    #### Storage utilities
    ####

    def process_store(self, dico):
        """Write one CSV line per open record file from payload *dico*."""
        for target in self.files:
            try:
                if target == 'system':
                    res = [
                        dico[target][data_field]
                        for data_field in self.headers['system']
                    ]
                else:
                    res = [
                        dico[target][data_field]
                        for data_field in self.headers['process']
                    ]
            except (AttributeError, KeyError):
                # A missing target raises KeyError (the original only
                # caught AttributeError and would kill the thread);
                # store placeholder values instead.
                res = range(len(dico))
            print >> self.files[target], list_to_csv(res)
            self.log.debug('[PROCESS THREAD] Stored {}'.format(
                list_to_csv(res)))

    def start_store(self, dirname=None):
        """Create the record directory, open one file per target
        instance, write the CSV headers and enable storage.

        Returns the list of opened file paths.
        """
        # Make record dir
        if not dirname:
            # time.time() returns a float; os.path.join needs a string.
            dirname = str(time.time())
        directory = os.path.join(DATA_DIR, dirname)
        self.log.info(
            '[MAIN THREAD] Starting local storage in {}'.format(directory))
        if os.path.isdir(directory):
            shutil.rmtree(directory)
        os.makedirs(directory)
        self.log.debug('[MAIN THREAD] Made local record dir')
        # Open files
        for types in self.targets:
            for instance in self.targets[types]:
                filename = os.path.join(directory, instance)
                self.files[instance] = open(filename, 'w')
                self.log.debug('[MAIN THREAD] Opened {}'.format(filename))
        # Write headers
        for key in self.files:
            if key == 'system':
                print >> self.files[key], list_to_csv(self.headers['system'])
                self.log.debug('[MAIN THREAD] wrote {} in file {}'.format(
                    list_to_csv(self.headers['system']), key))
            else:
                print >> self.files[key], list_to_csv(self.headers['process'])
                self.log.debug('[MAIN THREAD] wrote {} in file {}'.format(
                    list_to_csv(self.headers['process']), key))
        # Ask start storing and return store file paths
        self.local_store = True
        self.log.debug('[MAIN THREAD] End start local')
        return [os.path.join(directory, instance) for instance in self.files]

    def stop_store(self):
        """Disable storage and close every open record file."""
        self.log.info('[MAIN THREAD] Stopping storage')
        self.local_store = False
        for key in self.files:
            self.files[key].close()
            self.log.debug('closed {}'.format(key))

    def reset_processor(self):
        """Forget previously opened record files (does not close them)."""
        self.files = {}
class LightClient(object): def __init__(self, ip): self.log = Logger(MAIN_CLIENT_LOG_FILE, D_VERB) self.log.info('[MAIN THREAD] Instantiated client') self.receiving = False self.define_headers() self.targets = {} self.transmit = Queue.Queue() self.data_client = DataClient(self.transmit, ip) self.data_processor = DataProcessor(self.transmit, self.headers, self.targets) self.connect(ip) def connect(self, ip): self.soc_ctrl = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.soc_ctrl.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) my_ip = socket.gethostbyname('') self.log.debug('[MAIN THREAD] connecting...') self.soc_ctrl.connect((ip, SOC_PORT_CTRL)) self.log.info('[MAIN THREAD] Client connected to server') def disconnect(self): ### data processor should not be here self.data_processor.stop() self.soc_ctrl.close() def define_headers(self): head = {} head['process'] = PROC_CPU_DATA + PROC_MEM_DATA + TIMESTAMPS head[ 'system'] = SYS_CPU_OTHER + LOAD_AVG + SYS_CPU_DATA + SYS_MEM_DATA + TIMESTAMPS self.headers = head def add_target(self, target, name): if target in self.targets: self.targets[target].append(name) else: self.targets[target] = [name] def remove_target(self, target, name): if target in self.targets: if name in self.targets[target]: self.targets[target].remove(name) self.log.info('[MAIN THREAD] Removed {} named {}'.format( target, name)) else: self.log.error( '[MAIN THREAD] Asked to remove {} named {} while not recorded' .format(target, name)) else: self.log.error( '[MAIN THREAD] Asked to remove {} named {} while not recorded'. 
format(target, name)) def start_record(self, target, name): self.log.debug('[MAIN THREAD] Asking server to start recording') msg = MSG_SEP.join([START_RECORD, target, name]) answer = send_data(self.soc_ctrl, msg) self.log.info('[MAIN THREAD] Server asked to start recording') if answer == SYNC: self.add_target(target, name) self.log.info('[MAIN THREAD] Added {} named {}'.format( target, name)) else: self.log.warn( '[MAIN THREAD] Could not add {} named {} because of server answer' .format(target, name)) def stop_record(self, target, name): self.log.debug('[MAIN THREAD] Asking server to stop recording') msg = MSG_SEP.join([STOP_RECORD, target, name]) answer = send_data(self.soc_ctrl, msg) self.log.info( '[MAIN THREAD] Server asked to stop recording {}'.format(name)) if answer == SYNC: self.remove_target(target, name) else: self.log.warn( '[MAIN THREAD] Could not remove {} named {} because of server answer' .format(target, name)) def start_receive(self): if not self.receiving: self.receiving = True self.log.debug('[MAIN THREAD] Asking server to start sending') status = send_data(self.soc_ctrl, START_SEND) self.log.info('[MAIN THREAD] Server asked to start sending') if status == FAIL: self.log.error( '[MAIN THREAD] Client tried to receive but server denied it' ) else: print status self.data_client.start() self.log.info('[MAIN THREAD] Client is receiving') self.log.debug("[MAIN THREAD] DATA THREAD started") else: self.log.warn( "[MAIN THREAD] Asked to start receiving while already receiving" ) def stop_receive(self): if self.receiving: self.log.debug( '[MAIN THREAD] Closing data channel. 
Exiting data client thread' ) self.data_client.stop() self.log.info("[MAIN THREAD] Asked server to stop receiving") self.receiving = False send_data(self.soc_ctrl, STOP_SEND) else: self.log.warn( "[MAIN THREAD] Asked to stop receiving while already receiving" ) def start_store(self, dirname='easy_client'): return self.data_processor.start_store(dirname) def stop_store(self): self.data_processor.stop_store() def start_print(self): self.data_processor.start_print() def stop_print(self): self.printing = self.data_processor.stop_print() def stop_process(self): self.stop_print() self.stop_store() self.data_processor.stop() self.stop_receive() self.soc_ctrl.close() def stop_all(self): self.stop_process() send_data(self.soc_ctrl, STOP_ALL)
class DataManager(object):
    """Broadcasts queued measurement payloads to every connected
    receiver over per-client data sockets."""

    def __init__(self, headers, transmit, connection_table):
        self.step = D_STEP
        self.timeout = int(D_TIMEOUT / self.step)
        self.log = Logger(DATA_SERVER_LOG_FILE, D_VERB)
        self.run = True
        self.receivers = []
        self.transmit = transmit
        self.connection_table = connection_table
        self.sys_headers = headers['system']
        self.proc_headers = headers['process']
        self.data_thread = threading.Thread(target=self.process_loop,
                                            name='data managing', args=())
        self.log.info('Starting DATA THREAD')
        self.data_thread.start()
        self.log.debug('DATA THREAD Started')

    def process_loop(self):
        """Thread body: forward each queued payload to every receiver.

        Uses a 1-second queue timeout so quit() takes effect even when
        no data arrives (the original blocked forever on get())."""
        while self.run:
            self.log.debug('[DATA THREAD] Waiting for queue')
            try:
                data = self.transmit.get(timeout=1)
            except Empty:
                continue
            self.log.debug('[DATA THREAD] Got {}'.format(data))
            # Iterate over a snapshot: process_send may remove dead
            # connections from self.receivers while we loop.  Also avoid
            # shadowing the socket module with the loop variable.
            for conn in list(self.receivers):
                self.process_send(conn, data)

    def quit(self):
        """Ask the data thread to exit."""
        self.run = False

    def start_send(self):
        """Spawn a thread that accepts one incoming data connection."""
        self.init_thread = threading.Thread(target=self.init_connection,
                                            name='init_send_connection',
                                            args=())
        self.log.info('[MAIN THREAD] Starting INIT THREAD')
        self.init_thread.start()
        self.log.debug('[MAIN THREAD] INIT THREAD Started')

    def init_connection(self):
        """Accept a single client on the data port and register it."""
        soc_data = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        soc_data.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        soc_data.bind(('', SOC_PORT_DATA))
        soc_data.listen(1)
        self.log.info('[INIT THREAD] Waiting for a connection')
        connection, client_address = soc_data.accept()
        self.log.info('[INIT THREAD] Connection accepted from {}'.format(
            client_address))
        self.receivers.append(connection)

    def process_send(self, connection, data):
        """Send the client-relevant subset of *data* on *connection*;
        unregister the connection when send_data reports it closed."""
        targets = self.get_client_targets(connection)
        self.log.debug('[DATA THREAD] targets are {}'.format(targets))
        sub_data = self.get_sub_dict(data, targets)
        self.log.debug('[DATA THREAD] sub_data is {}'.format(sub_data))
        mess = json.dumps(sub_data)
        self.log.debug('[DATA THREAD] Sending data {}'.format(mess))
        status = send_data(connection, mess)
        if status == '':
            self.receivers.remove(connection)
            self.log.info('[DATA THREAD] connection removed')
        self.log.debug('[DATA THREAD] Data sent')

    def get_sub_dict(self, data, targets):
        """Return the sub-dict of *data* restricted to *targets* keys."""
        return dict([(key, data[key]) for key in targets if key in data])

    def get_client_targets(self, connection):
        """Look up the target list recorded for *connection*'s peer IP.

        Returns [] (and logs an error) when the client is unknown."""
        client_address = connection.getpeername()[0]
        targets = None
        for client in self.connection_table:
            self.log.debug(
                '[DATA THREAD] Checking with potential address {} '.format(
                    client.getpeername()))
            if client.getpeername()[0] == client_address:
                targets = self.connection_table[client]
        if targets is not None:
            return targets
        else:
            self.log.error(
                '[DATA THREAD] Could not find client {} in connection table'
                .format(client_address))
            return []

    def stop_send(self):
        """Close and forget every receiver socket."""
        self.log.info('[MAIN THREAD] Stopping DATA THREAD')
        tmp = self.receivers
        self.receivers = []
        for elem in tmp:
            elem.close()
            self.log.debug('[MAIN THREAD] Closed data socket')

    def is_sending(self):
        """True when at least one receiver is connected."""
        return len(self.receivers) > 0
class LightClient(object): def __init__(self, ip): self.log = Logger(MAIN_CLIENT_LOG_FILE, D_VERB) self.log.info('[MAIN THREAD] Instantiated client') self.receiving = False self.define_headers() self.targets = {} self.transmit = Queue.Queue() self.data_client = DataClient(self.transmit, ip) self.data_processor = DataProcessor(self.transmit, self.headers, self.targets) self.connect(ip) def connect(self, ip): self.soc_ctrl = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.soc_ctrl.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) my_ip = socket.gethostbyname('') self.log.debug('[MAIN THREAD] connecting...') self.soc_ctrl.connect((ip,SOC_PORT_CTRL)) self.log.info('[MAIN THREAD] Client connected to server') def disconnect(self): ### data processor should not be here self.data_processor.stop() self.soc_ctrl.close() def define_headers(self): head = {} head['process'] = PROC_CPU_DATA + PROC_MEM_DATA + TIMESTAMPS head['system'] = SYS_CPU_OTHER + LOAD_AVG + SYS_CPU_DATA + SYS_MEM_DATA + TIMESTAMPS self.headers = head def add_target(self, target, name): if target in self.targets: self.targets[target].append(name) else: self.targets[target]=[name] def remove_target(self, target, name): if target in self.targets: if name in self.targets[target]: self.targets[target].remove(name) self.log.info('[MAIN THREAD] Removed {} named {}'.format(target, name)) else: self.log.error('[MAIN THREAD] Asked to remove {} named {} while not recorded'.format(target, name)) else: self.log.error('[MAIN THREAD] Asked to remove {} named {} while not recorded'.format(target, name)) def start_record(self, target, name): self.log.debug('[MAIN THREAD] Asking server to start recording') msg = MSG_SEP.join([START_RECORD, target, name]) answer = send_data(self.soc_ctrl,msg) self.log.info('[MAIN THREAD] Server asked to start recording') if answer == SYNC: self.add_target(target, name) self.log.info('[MAIN THREAD] Added {} named {}'.format(target, name)) else: self.log.warn('[MAIN THREAD] Could not 
add {} named {} because of server answer'.format(target, name)) def stop_record(self, target, name): self.log.debug('[MAIN THREAD] Asking server to stop recording') msg = MSG_SEP.join([STOP_RECORD, target, name]) answer = send_data(self.soc_ctrl,msg) self.log.info('[MAIN THREAD] Server asked to stop recording {}'.format(name)) if answer == SYNC: self.remove_target(target, name) else: self.log.warn('[MAIN THREAD] Could not remove {} named {} because of server answer'.format(target, name)) def start_receive(self): if not self.receiving: self.receiving = True self.log.debug('[MAIN THREAD] Asking server to start sending') status = send_data(self.soc_ctrl,START_SEND) self.log.info('[MAIN THREAD] Server asked to start sending') if status == FAIL: self.log.error('[MAIN THREAD] Client tried to receive but server denied it') else: print status self.data_client.start() self.log.info('[MAIN THREAD] Client is receiving') self.log.debug("[MAIN THREAD] DATA THREAD started") else: self.log.warn("[MAIN THREAD] Asked to start receiving while already receiving") def stop_receive(self): if self.receiving: self.log.debug('[MAIN THREAD] Closing data channel. Exiting data client thread') self.data_client.stop() self.log.info("[MAIN THREAD] Asked server to stop receiving") self.receiving = False send_data(self.soc_ctrl,STOP_SEND) else: self.log.warn("[MAIN THREAD] Asked to stop receiving while already receiving") def start_store(self, dirname = 'easy_client'): return self.data_processor.start_store(dirname) def stop_store(self): self.data_processor.stop_store() def start_print(self): self.data_processor.start_print() def stop_print(self): self.printing = self.data_processor.stop_print() def stop_process(self): self.stop_print() self.stop_store() self.data_processor.stop() self.stop_receive() self.soc_ctrl.close() def stop_all(self): self.stop_process() send_data(self.soc_ctrl, STOP_ALL)
class DataProcessor(object):
    """Consume JSON payloads from a queue in a background thread and
    dispatch them to live plotting and/or per-target local CSV storage.

    The processing thread is launched from the constructor.
    """

    def __init__(self, queue, headers, targets):
        self.log = Logger(PROC_CLIENT_LOG_FILE, level=D_VERB)
        # Main thread communication
        self.keep_running = True
        self.transmit = queue
        self.headers = headers
        self.targets = targets
        # print data
        self.printing = False
        self.base_data = None
        self.fig = ()
        self.ax = ()
        # store data
        self.local_store = False
        self.files = {}
        # Launching Thread
        self.thr = threading.Thread(target=self.process, args=(),
                                    name='process_thread')
        self.start()

    ###
    ### Process Thread
    ###

    def start(self):
        """Start the background processing thread."""
        self.log.info('[MAIN THREAD] Starting process thread')
        self.thr.start()
        self.log.debug('[MAIN THREAD] Process thread started')

    def stop(self):
        """Ask the processing thread to exit (it polls every second)."""
        self.keep_running = False
        self.log.info('[MAIN THREAD] Asked processing thread end')

    def process(self):
        """Thread body: pop JSON payloads off the queue and fan them out
        to the active consumers (printing and/or local storage)."""
        while self.keep_running:
            self.log.debug('[PROCESS THREAD] Getting data')
            try:
                data = self.transmit.get(timeout=1)
                data = json.loads(data)
                self.log.debug('[PROCESS THREAD] Got data {}'.format(data))
                if self.printing:
                    # build_print_data mutates self.base_data in place
                    # (it returns None, so nothing to keep).
                    self.build_print_data(data)
                    self.log.debug('[PROCESS THREAD] Printing')
                    multi_print_dic(self.base_data, self.print_data)
                    self.log.debug('[PROCESS THREAD] Printed')
                if self.local_store:
                    self.process_store(data)
            except Empty:
                self.log.debug('[PROCESS THREAD] No data')
        self.log.info('[PROCESS THREAD] End of thread')

    ###
    ### Print utilities
    ###

    def start_print(self):
        """Initialise the plotting structures and enable printing."""
        self.log.info('[MAIN THREAD] Start printing')
        self.build_print_headers()
        self.log.debug('[MAIN THREAD] Built headers')
        self.print_data = multi_init_print(self.base_data)
        self.log.debug('[MAIN THREAD] Graphics initiated')
        self.printing = True

    def stop_print(self):
        """Disable printing and clear the display."""
        self.log.info('[MAIN THREAD] Stop printing')
        self.printing = False
        clear_print()

    def build_print_headers(self):
        """Build the empty {instance: {data_field: []}} accumulator used
        by the plotting code and store it in self.base_data."""
        ret = {}
        for types in self.targets:
            for instance in self.targets[types]:
                ret[instance] = {}
                for data_field in self.headers[types]:
                    ret[instance][data_field] = []
        self.base_data = ret
        self.log.debug('[DATA THREAD] Header: {}'.format(self.base_data))

    def build_print_data(self, dico):
        """Append every value of *dico* to the matching series in
        self.base_data (mutates in place, returns None)."""
        for target in dico:
            for data_field in dico[target]:
                self.base_data[target][data_field].append(
                    dico[target][data_field])

    ####
    #### Storage utilities
    ####

    def process_store(self, dico):
        """Write one CSV line per open record file from payload *dico*."""
        for target in self.files:
            try:
                if target == 'system':
                    res = [
                        dico[target][data_field]
                        for data_field in self.headers['system']
                    ]
                else:
                    res = [
                        dico[target][data_field]
                        for data_field in self.headers['process']
                    ]
            except (AttributeError, KeyError):
                # A missing target raises KeyError (the original only
                # caught AttributeError and would kill the thread);
                # store placeholder values instead.
                res = range(len(dico))
            print >> self.files[target], list_to_csv(res)
            self.log.debug('[PROCESS THREAD] Stored {}'.format(
                list_to_csv(res)))

    def start_store(self, dirname=None):
        """Create the record directory, open one file per target
        instance, write the CSV headers and enable storage.

        Returns the list of opened file paths.
        """
        # Make record dir
        if not dirname:
            # time.time() returns a float; os.path.join needs a string.
            dirname = str(time.time())
        directory = os.path.join(DATA_DIR, dirname)
        self.log.info(
            '[MAIN THREAD] Starting local storage in {}'.format(directory))
        if os.path.isdir(directory):
            shutil.rmtree(directory)
        os.makedirs(directory)
        self.log.debug('[MAIN THREAD] Made local record dir')
        # Open files
        for types in self.targets:
            for instance in self.targets[types]:
                filename = os.path.join(directory, instance)
                self.files[instance] = open(filename, 'w')
                self.log.debug('[MAIN THREAD] Opened {}'.format(filename))
        # Write headers
        for key in self.files:
            if key == 'system':
                print >> self.files[key], list_to_csv(self.headers['system'])
                self.log.debug('[MAIN THREAD] wrote {} in file {}'.format(
                    list_to_csv(self.headers['system']), key))
            else:
                print >> self.files[key], list_to_csv(self.headers['process'])
                self.log.debug('[MAIN THREAD] wrote {} in file {}'.format(
                    list_to_csv(self.headers['process']), key))
        # Ask start storing and return store file paths
        self.local_store = True
        self.log.debug('[MAIN THREAD] End start local')
        return [os.path.join(directory, instance) for instance in self.files]

    def stop_store(self):
        """Disable storage and close every open record file."""
        self.log.info('[MAIN THREAD] Stopping storage')
        self.local_store = False
        for key in self.files:
            self.files[key].close()
            self.log.debug('closed {}'.format(key))

    def reset_processor(self):
        """Forget previously opened record files (does not close them)."""
        self.files = {}
class CPUWatcher(object):
    """Samples /proc CPU and memory statistics for the system and for
    watched processes, pushing one payload per step onto a queue from a
    dedicated thread (started by the constructor)."""
    # Could easily add irq, frag, pgfault, and vmem from bench/cpuload.
    # Which are worth watching?

    def __init__(self, headers, data):
        self.log = Logger(CPU_SERVER_LOG_FILE, D_VERB)
        self.step = D_STEP
        self.timeout = int(D_TIMEOUT / self.step)
        # Sync variables
        self.transmit = data
        self.run = True
        # Record var
        self.sys_prev_cpu = {key: 0 for key in SYS_CPU_DATA}
        self.time = 0
        self.load = 0
        self.proc = dict()
        self.thr_start = threading.Thread(target=self.record_process,
                                          name='cpu_thread', args=(),
                                          kwargs={})
        self.log.info('[MAIN THREAD] starting CPU Thread')
        self.thr_start.start()
        self.log.debug('[MAIN THREAD] CPU Thread started')

    def quit(self):
        """Ask the CPU thread to exit."""
        #self.stop()
        self.run = False

    def start(self, target):
        """Start watching *target* ('system' or a process name).

        Returns True on success, False when the process pid cannot be
        uniquely resolved."""
        if target == 'system':
            self.proc['system'] = None
            self.log.info('[MAIN THREAD] Start watching system')
            return True
        pid = self._get_pid(target)
        if pid:
            self.proc[target] = {
                'pid': pid,
                'prev_cpu': {key: 0 for key in PROC_CPU_DATA},
            }
            return True
        self.log.error(
            "Non valid process {}. Skipping this process".format(target))
        return False

    def stop(self, target):
        """Stop watching *target*. Returns True when it was watched."""
        if target in self.proc:
            del self.proc[target]
            self.log.info('[MAIN THREAD] Has stopped {}'.format(target))
            return_val = True
        else:
            self.log.error(
                '[MAIN THREAD] Has been asked to stop {} while not recording'
                .format(target))
            # Original had `reurn_val = False` (typo) which raised
            # NameError at the return below.
            return_val = False
        time.sleep(self.step)
        return return_val

    # init helpers
    def _get_pid(self, process):
        """Return the pid (string) of the unique `ps aux` line matching
        *process*, or 0 when zero or several candidates remain."""
        cmd = 'ps aux | grep {}'.format(process)
        self.log.debug(cmd)
        ps_result = os.popen(cmd).readlines()
        self.log.debug(ps_result)
        # Drop our own grep and '--proc' invocations in a single pass.
        # The original collected matches then called remove() per match,
        # raising ValueError when a line matched both filters.
        ps_result = [res for res in ps_result
                     if '--proc' not in res and 'grep' not in res]
        if len(ps_result) != 1:
            return 0
        return ps_result[0].split()[1]

    # record methods
    # refactor files should be handled by server data thread
    # only use transmission queue here
    def _record(self, tmp):
        """Push one sample payload onto the transmit queue."""
        self.transmit.put(tmp)
        self.log.debug('[CPU Thread] Has put to queue {}'.format(tmp))

    def record_process(self):
        """Thread body: sample every watched target each step until
        quit() is called or the configured timeout elapses."""
        self.log.debug('[CPU THREAD] In thread')
        count = 0  # record loop var init
        while self.run:
            # Timeout + stop() message
            if count < self.timeout:
                self.log.debug('[CPU THREAD] Processing')
                tmp = {}
                tme = self._get_time()  # sys time is used for several measures
                # Snapshot keys: stop() may delete entries concurrently.
                keys = self.proc.keys()
                for key in keys:
                    if key == 'system':
                        tmp[key] = self.get_sys_data(tme)
                    else:
                        tmp[key] = self.get_proc_data(tme, key)
                if tmp:
                    self._record(tmp)
                count += self.step
                time.sleep(self.step)
            else:
                self.log.warn('[CPU THREAD] Timeout happened, should we change code to stop process?')
                time.sleep(1)
        # Was a bare print; use the logger like the rest of the class.
        self.log.info('[CPU THREAD] End of thread record')

    # record helpers
    def get_sys_data(self, tme):
        """Assemble one system-wide sample over elapsed time *tme*."""
        tmp_sys = self.get_sys_cpu_stat(tme)      # SYS_CPU_DATA
        tmp_sys.update(self.get_sys_mem_stat())   # SYS_MEM_DATA
        tmp_sys.update(self.get_load_avg())       # LOAD_AVG
        tmp_sys['time'] = tme
        tmp_sys['load'] = 100 * (1 - tmp_sys['idle_time'])  # SYS_CPU_OTHER
        # add frag, pgfault, any other?
        tmp_sys['timestamp'] = time.time()
        return tmp_sys

    def get_proc_data(self, tme, key):
        """Assemble one per-process sample for watched target *key*."""
        tmp_proc = self.get_proc_cpu_stat(key, tme)   # PROC_CPU_DATA
        tmp_proc.update(self.get_proc_mem_stat(key))  # PROC_MEM_DATA
        tmp_proc['timestamp'] = time.time()
        return tmp_proc

    def _get_time(self):
        """Return total jiffies elapsed since the previous call, read
        from the aggregate cpu line of /proc/stat."""
        with open('/proc/stat') as cpu_stat:
            cpu_line = cpu_stat.readline()
        tmp = cpu_line.split()
        tmp = tmp[1:]
        tmp = map(float, tmp)
        now_time = sum(tmp)
        res = now_time - self.time
        self.time = now_time
        return res

    def get_load_avg(self):
        """Return the three load averages from /proc/loadavg keyed by
        LOAD_AVG."""
        with open('/proc/loadavg', 'r') as load_file:
            line = load_file.readline()
        res = line.split()
        return {LOAD_AVG[i]: float(res[i]) for i in range(3)}

    def get_sys_cpu_stat(self, tme):
        """Return per-category CPU usage ratios over elapsed time *tme*,
        updating the previous-sample cache."""
        res = dict()
        with open('/proc/stat') as cpu_stat:
            cpu_line = cpu_stat.readline()
        tmp = cpu_line.split()
        tmp = tmp[1:]
        tmp = map(float, tmp)
        tmp_sys_cpu = {
            'usr_time': tmp[0],
            'nice_time': tmp[1],
            'sys_time': tmp[2],
            'idle_time': tmp[3],
            'io_time': tmp[4],
            'irq_time': tmp[5],
            'softirq_time': tmp[6],
        }
        try:
            for key in SYS_CPU_DATA:
                res[key] = (tmp_sys_cpu[key] - self.sys_prev_cpu[key]) / tme
                self.sys_prev_cpu[key] = tmp_sys_cpu[key]
        except KeyError as e:
            # Was a bare print; use the logger like the rest of the class.
            self.log.error("key error {}".format(e.message))
        return res

    def get_proc_cpu_stat(self, process, sys_time):
        """Return per-process CPU counter deltas since the previous
        sample, read from /proc/<pid>/stat; 'time' is the process share
        of *sys_time*."""
        pid = self.proc[process]['pid']
        res = dict()
        with open("/proc/" + str(pid) + "/stat") as cpuinfo:
            line = cpuinfo.read()
        tmp = line.split()
        tmp = tmp[11:17]
        tmp = map(int, tmp)
        tmp_proc_cpu = {
            "utime": tmp[2],
            "cutime": tmp[4],
            "stime": tmp[3],
            "cstime": tmp[5],
            "majflt": tmp[0],
            "majcfault": tmp[1],
            #"time": sum(tmp[2:])
        }
        try:
            for key in PROC_CPU_DATA:
                if key != 'time':
                    # divide by proc time?
                    res[key] = (tmp_proc_cpu[key]
                                - self.proc[process]['prev_cpu'][key])
                    self.proc[process]['prev_cpu'][key] = tmp_proc_cpu[key]
            res['time'] = (res['utime'] + res['stime']) / sys_time
        except KeyError as e:
            # Was a bare print; use the logger like the rest of the class.
            self.log.error("key error {}".format(e.message))
        return res

    def get_sys_mem_stat(self):
        """Returns a dict containing infos from /proc/meminfo
        - MemAvailable
        - MemFree
        - Buffers
        - Cached
        Values are ints (kB). Raises Exception when a SYS_MEM_DATA field
        is missing (parsing error)."""
        res = dict()
        with open('/proc/meminfo') as meminfo:
            mem_list = meminfo.readlines()  # Optimize if it takes too long
        for line in mem_list:
            tmp = line.split()
            tmp[0] = tmp[0].replace(':', '')
            if tmp[0] in SYS_MEM_DATA:
                res[tmp[0]] = int(tmp[1])
        if len(res) != len(SYS_MEM_DATA):
            raise Exception("Error: wrong parsing of /proc/meminfo")
        return res

    def get_proc_mem_stat(self, process):
        """Return the PROC_MEM_DATA fields from /proc/<pid>/status.

        NOTE(review): values stay strings here, unlike the int values of
        get_sys_mem_stat — kept as-is since downstream consumers may
        rely on it."""
        pid = self.proc[process]['pid']
        # Optimize if it takes too long
        res = dict()
        with open("/proc/" + str(pid) + "/status") as meminfo:
            mem_list = meminfo.readlines()
        for line in mem_list:
            tmp = line.split()
            tmp[0] = tmp[0].replace(':', '')
            if tmp[0] in PROC_MEM_DATA:
                res[tmp[0]] = tmp[1]
        return res
class DataClient(object):
    """Receives payloads from the server's data channel in a dedicated
    thread and forwards them to a queue."""

    def __init__(self, queue, ip):
        self.log = Logger(DATA_CLIENT_LOG_FILE, D_VERB)
        self.log.info('[MAIN THREAD] Instantiatie data_client')
        self.transmit = queue
        self.receiving = False
        self.remote_ip = ip
        self.my_ip = socket.gethostbyname(socket.gethostname())

    def start(self):
        """Connect the data socket and launch the receiving thread."""
        self.soc_data = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.soc_data.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.log.debug('[MAIN THREAD] Connecting to server data channel')
        self.soc_data.connect((self.remote_ip, SOC_PORT_DATA))
        self.log.info('[MAIN THREAD] Data Channel Connected')
        self.data_receive = threading.Thread(target=self.receive, args=())
        self.log.info('[MAIN THREAD] Starting DATA THREAD')
        self.receiving = True
        self.data_receive.start()
        self.log.debug('[MAIN THREAD] DATA THREAD started')

    def stop(self):
        """Flag the receiving thread to stop at its next iteration."""
        self.log.debug("[MAIN THREAD] Stop command sent")
        self.receiving = False
        self.log.info("[MAIN THREAD] Asked DATA THREAD to stop receiving")

    def receive(self):
        """Thread body: forward payloads from the data socket to the
        queue until stopped or the server closes the channel."""
        # FIXME_1 : recv_data is blocking. If nothing is sent and asked
        # to stop, it will block program exit
        while self.receiving:
            self.log.debug('[DATA THREAD] waiting for data from server')
            payload = recv_data(self.soc_data)
            self.log.debug('[DATA THREAD] Received data {}\n'.format(payload))
            if not payload:
                # Not sure this should exist
                self.log.info('[DATA THREAD] Empty data received. Closing socket ')
                self.soc_data.close()
                break
            self.transmit.put(payload)
            self.log.debug('[DATA THREAD] Transmitted data ')
        if not self.receiving:
            self.log.info('[DATA THREAD] self.receiving is False. Closing socket ')
            self.soc_data.close()
        self.log.info('[DATA THREAD] Exiting thread \n')