def __init__(self, num_of_processes=2):
    super(MultiprocessTool, self).__init__()
    # Shared integer counter visible to every worker process.
    global global_last_id
    global_last_id = mp.Value('i', 0)
    self.num_of_processes = num_of_processes
    # Lock guarding updates to the shared counter.
    self.lock = mp.Lock()
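
# A minimal hedged sketch (not in the original source) of how a hypothetical
# MultiprocessTool method could use the state set up above: increment
# `global_last_id` under `self.lock` so concurrent processes never hand out
# the same ID twice.
def _next_id(self):
    with self.lock:
        global_last_id.value += 1
        return global_last_id.value
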
def test():
    manager = processing.Manager()
    gc.disable()

    print('\n\t######## testing Queue.Queue\n')
    test_queuespeed(threading.Thread, Queue.Queue(), threading.Condition())
    print('\n\t######## testing processing.Queue\n')
    test_queuespeed(processing.Process, processing.Queue(), processing.Condition())
    print('\n\t######## testing Queue managed by server process\n')
    test_queuespeed(processing.Process, manager.Queue(), manager.Condition())
    print('\n\t######## testing processing.Pipe\n')
    test_pipespeed()

    print()
    print('\n\t######## testing list\n')
    test_seqspeed(range(10))
    print('\n\t######## testing list managed by server process\n')
    test_seqspeed(manager.list(range(10)))
    print('\n\t######## testing Array("i", ..., lock=False)\n')
    test_seqspeed(processing.Array('i', range(10), lock=False))
    print('\n\t######## testing Array("i", ..., lock=True)\n')
    test_seqspeed(processing.Array('i', range(10), lock=True))

    print()
    print('\n\t######## testing threading.Lock\n')
    test_lockspeed(threading.Lock())
    print('\n\t######## testing threading.RLock\n')
    test_lockspeed(threading.RLock())
    print('\n\t######## testing processing.Lock\n')
    test_lockspeed(processing.Lock())
    print('\n\t######## testing processing.RLock\n')
    test_lockspeed(processing.RLock())
    print('\n\t######## testing lock managed by server process\n')
    test_lockspeed(manager.Lock())
    print('\n\t######## testing rlock managed by server process\n')
    test_lockspeed(manager.RLock())

    print()
    print('\n\t######## testing threading.Condition\n')
    test_conditionspeed(threading.Thread, threading.Condition())
    print('\n\t######## testing processing.Condition\n')
    test_conditionspeed(processing.Process, processing.Condition())
    print('\n\t######## testing condition managed by a server process\n')
    test_conditionspeed(processing.Process, manager.Condition())

    gc.enable()
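
# Hedged sketch of one of the helpers the benchmark above calls;
# `test_lockspeed` is only referenced in the original, so the body below is
# an assumption: acquire and release the given lock in a tight loop and
# report the elapsed wall-clock time.
import time

def test_lockspeed(l, iterations=100000):
    start = time.time()
    for _ in range(iterations):
        l.acquire()
        l.release()
    elapsed = time.time() - start
    print('%d acquire/release pairs took %.3f s' % (iterations, elapsed))
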
def main(args):
    lines = []
    word2int = {}
    int2word = {}
    count = 0
    line_count = 0
    pid = 0

    readFile = open(args.FILE_PATH, 'r')
    writeFile = open(args.OUTPUT_FILE_PATH, 'w')

    start = time()
    cpu_count = mp.cpu_count()
    # The pool is only used by the commented-out pool.map call further down.
    pool = Pool(cpu_count - 1)
    processes = []
    print('Starting everything...')
    lock = mp.Lock()

    for line in readFile:
        print('line count: {}'.format(line_count))
        word1 = line.split('\n')[0].split('\t')[0].split('/')[0]
        word2 = line.split('\n')[0].split('\t')[1].split('/')[1]
        weight = line.split('\n')[0].split('\t')[-1]
        lines.append([word1, word2, weight])
        for word in [word1, word2]:
            if word not in word2int:
                word2int[word] = count
                int2word[count] = word
                count += 1
        line_count += 1
        if line_count % 8000000 == 0:
            # Send the accumulated batch of lines to a worker process to be
            # written to the new file.
            p = Process(target=assignNumbers,
                        args=(pid, args, word2int, writeFile, lock, lines, len(lines)))
            processes.append(p)
            p.start()
            pid += 1
            lines = []

    # Flush any remaining lines that did not fill a complete batch.
    if lines:
        p = Process(target=assignNumbers,
                    args=(pid, args, word2int, writeFile, lock, lines, len(lines)))
        processes.append(p)
        p.start()

    for process in processes:
        process.join()

    end = time()
    print('Total time for the whole process: {} seconds'.format(end - start))
    print('proceeding with writing mappings')

    # pool.map(writeFileModule, [(word2int, 'word2int.eng'), (int2word, 'int2word.eng')])
    P = Process(target=writeFileModule, args=(word2int, 'word2int.eng'))
    Q = Process(target=writeFileModule, args=(int2word, 'int2word.eng'))
    P.start()
    Q.start()
    P.join()
    Q.join()

    readFile.close()
    writeFile.close()
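
# Hedged sketch of `writeFileModule`, which is only referenced above; the
# original body is not shown, so this assumes it simply dumps a mapping as
# tab-separated "key<TAB>value" lines into the given file.
def writeFileModule(mapping, file_path):
    with open(file_path, 'w') as out:
        for key, value in mapping.items():
            out.write('{}\t{}\n'.format(key, value))
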
def test_value():
    TASKS = 10
    running = processing.Value('i', TASKS)
    mutex = processing.Lock()

    for i in range(TASKS):
        processing.Process(target=value_func, args=(running, mutex)).start()

    while running.value > 0:
        time.sleep(0.08)
        mutex.acquire()
        print(running.value, end=' ')
        sys.stdout.flush()
        mutex.release()

    print()
    print('No more running processes')
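
# `value_func` is referenced but not defined in this snippet; a minimal
# hedged sketch consistent with the polling loop above would decrement the
# shared counter under the mutex once the worker finishes its work.
import random
import time

def value_func(running, mutex):
    # Pretend to do some work, then mark this worker as done.
    time.sleep(random.random())
    mutex.acquire()
    running.value -= 1
    mutex.release()
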
def kline_data(self, pair_list, interval, **kwargs):
    start_date = kwargs.get('start_date', '')
    end_date = kwargs.get('end_date', '')
    storage = kwargs.get('storage', '')
    output = kwargs.get('output', '')
    progress_statements = kwargs.get('progress_statements', '')

    if start_date:
        start_date = datetime.datetime.strptime(start_date, '%m/%d/%Y')
    if end_date:
        end_date = datetime.datetime.strptime(end_date, '%m/%d/%Y')

    valid_kline_intervals = ['1m', '3m', '5m', '15m', '30m',
                             '1h', '2h', '4h', '6h', '8h', '12h']
    if interval not in set(valid_kline_intervals):
        raise ValueError(
            'Invalid Interval: Kline interval should be one of the following - {}'.format(
                ','.join(valid_kline_intervals)))

    output = self.process_kline_output(output)

    if not storage:
        storage = ['csv', None]
    try:
        storage_method, intended_dir = storage
    except ValueError:
        storage_method = storage[0]
        intended_dir = None

    if progress_statements:
        self.progress_statements = progress_statements

    if storage_method.lower() == 'csv':
        kline_interval_directory = self.create_csv_directories(
            pair_list, interval, intended_dir)
        csv_file_info = mp.Manager().list()
        pair = [currency_pair for i, currency_pair in enumerate(pair_list)]
        lock = mp.Lock()
        # Note: without an `initializer` argument, mp.Pool ignores `initargs`,
        # so the lock is never actually passed to the workers here.
        pool = mp.Pool(processes=3, initargs=(lock, ))
        # data = pool.starmap(self.kline_to_csv, zip(pair, re(start_date), re(end_date), re(kline_interval_directory), re(interval), re(titles), re(fields), re(csv_file_info)))
        # `re` is presumably an alias for itertools.repeat, broadcasting the
        # scalar arguments across every currency pair.
        data = pool.starmap(
            self.kline_to_csv,
            zip(pair, re(start_date), re(end_date), re(kline_interval_directory),
                re(interval), re(csv_file_info)))
        pool.close()
        pool.join()
        self.concatenate_csvs(set(list(csv_file_info)))
    else:
        raise ValueError('Invalid Storage Type: Currently only csv storage supported')
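
# A minimal hedged sketch (not part of the original class) of how the lock
# above is usually handed to pool workers: pair `initargs` with an
# `initializer` that stashes the lock in a module-level global. `_init_worker`
# and `worker_lock` are hypothetical names introduced only for illustration.
def _init_worker(shared_lock):
    global worker_lock
    worker_lock = shared_lock   # every worker process gets a handle to the same lock

# pool = mp.Pool(processes=3, initializer=_init_worker, initargs=(lock,))
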
def __init__(self, thread=None, *args, **kwargs):
    super(IProcess, self).__init__(*args, **kwargs)
    self._thread = thread or 1
    self._pool = multiprocessing.Queue(self._thread)
    self._threads = []
    self._requests = []
    self._states = {}

    # One request/response queue pair plus a worker thread per slot.
    for i in range(self._thread):
        request, response = multiprocessing.Queue(), multiprocessing.Queue()
        t = threading.Thread(target=self.loop, args=(request, response),
                             name='request-%s' % i)
        self._threads.append(t)
        self._requests.append((request, response))
        self._pool.put(i)
        self._states[t.name] = None

    self._reader_mutex = multiprocessing.Lock()
    self._reader = multiprocessing.Queue(), multiprocessing.Queue()
    self._threads.append(
        threading.Thread(target=self.loop, args=self._reader, name='reader'))
    self._states['reader'] = None

    self._collection = multiprocessing.Queue()
    self._received = multiprocessing.Event()

    # Scan class attributes whose docstrings mark them as child properties
    # or periodic timers.
    self._properties = {}
    for key in dir(self.__class__):
        value = getattr(self.__class__, key, None)
        value = getattr(value, '__doc__', None)
        if str(value).startswith('child_property.'):
            self._properties[key] = None
        if str(value).startswith('child_timer.'):
            delta = int(value.split('.')[-1])
            self._threads.append(
                threading.Thread(target=self.timentry, args=(key, delta),
                                 name='timer.%s' % key))

    self._wrap_run()
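
# Hedged sketch of the docstring-marker convention the constructor scans for.
# `child_property` and `child_timer` are hypothetical decorators, not shown in
# the original source; they only tag a method's __doc__ so the loop above can
# discover it.
def child_property(fn):
    fn.__doc__ = 'child_property.'               # collected into self._properties
    return fn

def child_timer(seconds):
    def decorator(fn):
        fn.__doc__ = 'child_timer.%d' % seconds  # spawns a timer thread firing every `seconds`
        return fn
    return decorator
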
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import math, time
import serial
import numpy as np
import multiprocess as mp
from picamera import PiCamera
import picamera.array

queueLock = mp.Lock()

# Get serial device and config it
ser = serial.Serial("/dev/ttyAMA0", 115200)

# Mapping:
#   F:0x46  B:0x42  R:0x52  L:0x49
def sendCommand(x):
    # Frame the command byte with a 0x80 header and two trailing 0x81 bytes.
    ser.write(bytes([0x80]))
    ser.write(bytes([x]))
    ser.write(bytes([0x81]))
    ser.write(bytes([0x81]))

def receive():