def __init__(self, udp_port, use_thread=False):
    """Start the UDP worker, either as an in-process thread or a child process.

    Args:
        udp_port: port the worker listens on (forwarded to ``thread_function``).
        use_thread: when True run the worker as a ``Thread`` with thread-safe
            queues; otherwise as a ``Process`` with multiprocessing queues.
    """
    # Pick the queue flavour that matches the concurrency model.
    make_queue = tQueue if use_thread else pQueue
    self.data_queue = make_queue()
    self.to_thread = make_queue()
    worker_args = (udp_port, self.data_queue, self.to_thread)
    if use_thread:
        self.thread = Thread(None, target=thread_function, args=worker_args)
    else:
        self.thread = Process(target=thread_function, args=worker_args)
    self.thread.start()
def dissect_file(file_path, timeout=60, options=None):
    """Dissect *file_path* in a child process and return the sorted results.

    Args:
        file_path: path of the file handed to the ``run`` worker.
        timeout: seconds to wait both for the worker and for its result queue.
        options: optional list of dissection options (defaults to no options).

    Returns:
        The sorted result list produced by the worker, or None if no result
        arrived before the timeout.
    """
    # Avoid the mutable-default-argument trap: a fresh list per call.
    if options is None:
        options = []
    shared_queue = pQueue()
    sorted_rtn_list = None
    try:
        # Carry out dissections in a daemon child so a hung worker cannot
        # keep the interpreter alive.
        p = Process(target=run, args=(shared_queue, file_path, timeout, options))
        p.daemon = True
        p.start()
    except Exception as e:
        print(e, e.args)
        raise  # bare raise preserves the original traceback
    try:
        sorted_rtn_list = shared_queue.get(timeout=timeout)
    except Exception as e:
        # Best-effort: a timeout or broken queue yields None rather than a crash.
        print(e)
    finally:
        shared_queue.close()
    return sorted_rtn_list
def __init__(self, detectionLoader, queueSize=1024):
    """Wrap *detectionLoader* and allocate the output queue.

    Args:
        detectionLoader: upstream loader; its ``datalen`` is mirrored here.
        queueSize: maximum number of items buffered in the queue.
    """
    self.detectionLoader = detectionLoader
    self.stopped = False
    self.datalen = self.detectionLoader.datalen
    # Single-process mode (opt.sp) gets a thread-safe queue; otherwise a
    # multiprocessing queue so data can cross process boundaries.
    queue_cls = Queue if opt.sp else pQueue
    self.Q = queue_cls(maxsize=queueSize)
def __init__(self, detectionLoader, queueSize=1024):
    """Initialize the stream wrapper around *detectionLoader*.

    Keeps a stop flag for the reader loop and allocates the buffer queue.

    Args:
        detectionLoader: upstream detection loader feeding this stage.
        queueSize: maximum number of buffered items.
    """
    self.detectionLoader = detectionLoader
    self.stopped = False
    # Thread-safe queue in single-process mode (opt.sp), multiprocessing
    # queue when stages run in separate processes.
    if opt.sp:
        self.Q = Queue(maxsize=queueSize)
        return
    self.Q = pQueue(maxsize=queueSize)
def dissect_wire(interface, options=None, timeout=None):
    """Start live dissection of *interface* in a daemon child process.

    Args:
        interface: network interface name handed to the ``run`` worker.
        options: optional list of dissection options (defaults to no options).
        timeout: optional capture timeout forwarded to the worker.

    Returns:
        Tuple ``(process, exit_event, shared_queue)`` — set ``exit_event`` to
        stop the worker, read results from ``shared_queue``.
    """
    # Avoid the mutable-default-argument trap: a fresh list per call.
    if options is None:
        options = []
    shared_queue = pQueue()
    exit_event = Event()
    p = None
    try:
        p = Process(target=run,
                    args=(exit_event, shared_queue, interface, timeout, options))
        p.daemon = True
        p.start()
    except Exception as e:
        print(e, e.args)
        raise  # bare raise preserves the original traceback
    return (p, exit_event, shared_queue)
def init_queues(self):
    """Allocate the input, output, and output-sync multiprocessing queues."""
    # All three endpoints use process-safe queues so workers in other
    # processes can produce/consume.
    for attr in ("_qin", "_qout", "_qout_sync"):
        setattr(self, attr, pQueue())
idxconn.add_field_action('thread', xappy.FieldActions.COLLAPSE) #idxconn.add_field_action('thread', xappy.FieldActions.SORTABLE, type='string') idxconn.set_max_mem_use(max_mem=256*1024*1024) return idxconn if __name__ == '__main__': from functools import partial from change_scan import update_maildir_cache, scan_mail, created_listener,\ destroy_listener, File, Directory, indexer_listener from sqlobject.events import listen, RowDestroySignal, RowCreatedSignal msg_indexer_queue = pQueue() ps = [] p = Process(target=IndexerProcess, args=('%s%i' % (xapidx, 1), msg_indexer_queue)) ps.append(p) p.start() p = Process(target=IndexerProcess, args=('%s%i' % (xapidx, 2), msg_indexer_queue)) ps.append(p) p.start() p = Process(target=IndexerProcess, args=('%s%i' % (xapidx, 3), msg_indexer_queue)) ps.append(p) p.start()
from multiprocessing import Value, Array, Lock

# Process-shared instrumentation counters ('i' = C int), guarded by `lock`.
slept = Value('i', 0)          # total sleep events recorded by workers
starts = Value('i', 0)         # worker start count
results_count = Value('i', 0)  # results produced so far
result_loop = Value('i', 0)    # result-collection loop iterations
worker_count = Value('i', 0)   # currently active workers
lock = Lock()

from multiprocessing import Queue as pQueue
from queue import Queue as tQueue

# Executor/queue bundles keyed by concurrency mode: 'p' = process-based,
# 't' = thread-based. 'q' is the work queue, 'r' the result queue;
# 'mp_type' selects the active mode.
holder = {
    'mp_type': 'p',
    'p': {
        'Ex': concurrent.futures.ProcessPoolExecutor,
        'q': pQueue(),
        'r': pQueue()
    },
    't': {
        'Ex': concurrent.futures.ThreadPoolExecutor,
        'q': tQueue(),
        'r': tQueue()
    }
}


def start_log(enable=True, lvl='WARNING', mp=True):
    """Initialize logging via `setup_logging` and announce startup.

    Args:
        enable: whether logging is enabled at all.
        lvl: log-level name passed through to `setup_logging`.
        mp: when True, enable loguru's enqueue mode (presumably for safe
            multiprocess logging — TODO confirm against setup_logging).

    Returns:
        The configured logger object.
    """
    log = setup_logging(enable, lvl, loguru_enqueue=mp)
    log.info('examinator logging started')
    return log