def getThread(threadID):
    """Return a Thread object for the thread with ID *threadID*.

    Looks the ID up in the module-level COMMENTS_BY_THREAD mapping and
    wraps the stored comment list in a threads.Thread. Returns None when
    the ID is unknown (or maps to an empty/falsy comment list).
    """
    raw_comments = COMMENTS_BY_THREAD.get(threadID)
    if not raw_comments:
        return None
    return threads.Thread(threadID, raw_comments)
def run(self):
    """
    - create 'concurrency' number of threads
    - per thread call pre()
    - sync threads, start timer
    - per thread call core() 'iteration' number of times', tic()
    - stop timer
    - per thread, call post, close threads
    - eval once
    """
    # NOTE(review): each worker thread is coordinated through five rut.Event
    # barriers (event_1..event_5); the exact set()/wait() ordering below is
    # the synchronization protocol — presumably mirrored inside self._thread.
    # Confirm against the _thread implementation before changing anything.
    threads = []
    concurrency = int(self.bench_cfg['concurrency'])
    self._start()
    # Set up per-thread event handshakes and timing slots, then create
    # (but do not yet start) one rut.Thread per worker.
    for tid in range(0, concurrency):
        self.events[tid] = {}
        self.events[tid]['event_1'] = rut.Event()
        self.events[tid]['event_2'] = rut.Event()
        self.events[tid]['event_3'] = rut.Event()
        self.events[tid]['event_4'] = rut.Event()
        self.events[tid]['event_5'] = rut.Event()
        self.start[tid] = time.time()
        self.times[tid] = list()
        t = rut.Thread(self._thread, tid)
        threads.append(t)
    for t in threads:
        t.start()
    # wait for all threads to start up and initialize
    self.t_init = time.time()
    rut.lout("\n> " + "=" * concurrency)
    rut.lout("\n> ")
    for tid in range(0, concurrency):
        self.events[tid]['event_1'].wait()
    # start workload in all threads
    self.t_start = time.time()
    for tid in range(0, concurrency):
        self.events[tid]['event_2'].set()
    # wait for all threads to finish core test
    for tid in range(0, concurrency):
        self.events[tid]['event_3'].wait()
    self.t_stop = time.time()
    # start shut down
    rut.lout("\n< " + "-" * concurrency)
    rut.lout("\n< ")
    for tid in range(0, concurrency):
        self.events[tid]['event_4'].set()
    # wait for all threads to finish shut down
    for tid in range(0, concurrency):
        self.events[tid]['event_5'].wait()
def getThread(threadID):
    """Return a Thread object for the thread with ID *threadID*.

    The comment list for the ID is fetched from the module-level
    COMMENTS_BY_THREAD store; a falsy result (missing ID or empty list)
    yields None instead of a Thread.
    """
    comment_list = COMMENTS_BY_THREAD.get(threadID)
    # Distinct local names ("comment_list") avoid juggling three spellings
    # of "thread" in one function.
    return threads.Thread(threadID, comment_list) if comment_list else None
def cutThread(threadID, date_time):
    """Return a Thread truncated at *date_time*.

    *date_time* is a datetime(year, month, day, hour, minute, second,
    microsecond, timezone). Only top-level comments whose parsed time is
    <= date_time are kept; the slice is returned as a new Thread object
    the caller can apply further functions to.

    Raises KeyError if *threadID* is not in COMMENTS_BY_THREAD.
    """
    kept = [
        comment
        for comment in COMMENTS_BY_THREAD[threadID]
        if threads.Thread.getTime(comment['time']) <= date_time
    ]
    return threads.Thread(threadID, kept)
def cutThread(threadID, date_time):
    """Return a Thread truncated at *date_time*, replies included.

    *date_time* is a datetime(year, month, day, hour, minute, second,
    microsecond, timezone). Top-level comments made after date_time are
    dropped, and each kept comment's 'replies' list is likewise trimmed
    to replies made at or before date_time. The slice is returned as a
    new Thread object the caller can apply further functions to.

    Fix: the previous version assigned the trimmed reply list back onto
    the comment dicts held by the shared COMMENTS_BY_THREAD store, so a
    single cut permanently discarded later replies for every subsequent
    caller. Kept comments are now shallow-copied before their 'replies'
    key is rewritten, leaving the store untouched.

    Raises KeyError if *threadID* is not in COMMENTS_BY_THREAD.
    """
    THREAD = COMMENTS_BY_THREAD[threadID]
    CUT_THREAD = []
    for comment in THREAD:
        # only want comments made at or before date_time
        if threads.Thread.getTime(comment['time']) > date_time:
            continue
        # shallow copy so trimming replies does not mutate the shared store
        kept = dict(comment)
        if kept.get('replies'):
            kept['replies'] = [
                reply
                for reply in kept['replies']
                if threads.Thread.getTime(reply['time']) <= date_time
            ]
        CUT_THREAD.append(kept)
    return threads.Thread(threadID, CUT_THREAD)
# NOTE(review): the line below is the tail of a handler-constructor call
# (presumably logging.handlers.RotatingFileHandler) whose opening line is
# outside this chunk — rotate at 5 MiB, keep 5 backups, write UTF-8.
                                             maxBytes=1024 * 1024 * 5, backupCount=5, encoding='utf-8')
fileHandler.setFormatter(logFormatter)
rootLogger.addHandler(fileHandler)

# Set configured log level
rootLogger.setLevel(conf.settings['loglevel'])
# Load config file
conf.load()
# Scan logger
logger = rootLogger.getChild("AUTOSCAN")
# Multiprocessing
thread = threads.Thread()
scan_lock = threads.PriorityLock()
# paths queued for a delayed re-scan; consumed elsewhere in the file
resleep_paths = []

# local imports
# NOTE(review): these imports are deliberately placed after conf.load() —
# presumably the modules read config at import time; confirm before hoisting.
import db
import plex
import utils
from google import GoogleDrive, GoogleDriveManager

# initialized lazily later in the file
google = None
manager = None

############################################################
# QUEUE PROCESSOR
############################################################
"""Export a group's threads and members to threads.json / members.json.

Fix: the original rebound the names `threads` and `members` to API-client
instances, shadowing the imported modules of the same names. The clients
now get distinct names so the modules stay accessible; runtime behavior
and output files are unchanged.
"""
import threads
import members
import json

# NOTE(review): credentials belong in config/env, not source — the
# placeholders are kept verbatim here.
cookieText = "{cookie here}"
groupId = "{groupId here}"

# Pull thread data and serialize it.
thread_client = threads.Thread(groupId, cookieText)
threadJson = json.dumps(thread_client.getDict(), indent=4, sort_keys=True)

# Pull member data and serialize it.
member_client = members.Member(groupId, cookieText)
memberJson = json.dumps(member_client.getDict(), indent=4, sort_keys=True)

with open('members.json', 'w') as outfile:
    outfile.write(memberJson)

with open('threads.json', 'w') as outfile:
    outfile.write(threadJson)