def list_files_multithread(root, outfilename):
	"""Walk *root* and gather file info concurrently.

	Spawns one daemon consumer thread that writes results to *outfilename*,
	plus one producer thread (running ``getinfo``) per batch of more than
	MAX_NUM_OF_FILES collected paths.  Joins every producer, then the
	consumer, before returning.
	"""
	# Consumer thread drains results into the output file; daemon so the
	# process can still exit if it never finishes.
	consumerthread = threading.Thread(target=consumer, args=(outfilename,))
	consumerthread.daemon = True	# else cannot exit!
	consumerthread.start()

	# Producer threads: batch paths and hand each batch to getinfo.
	threads = []
	paths = []
	for thisdir, subshere, fileshere in os.walk(root):
		paths.extend(os.path.join(thisdir, fname) for fname in fileshere)
		if len(paths) > MAX_NUM_OF_FILES:
			# Shallow copy suffices: the paths are immutable strings.
			batch = list(paths)
			thread = threading.Thread(target=getinfo, args=(batch,))
			threads.append(thread)
			thread.start()
			paths.clear()	# reset for the next batch

	# BUG FIX: flush the final partial batch — the original dropped any
	# paths left over when the walk ended with len(paths) <= MAX_NUM_OF_FILES.
	if paths:
		thread = threading.Thread(target=getinfo, args=(list(paths),))
		threads.append(thread)
		thread.start()

	for thread in threads:
		thread.join()
	consumerthread.join()
# Exemple #2
# 0
def peerscan():
    """Discover peers, scan them concurrently, then reschedule itself.

    Merges federated, known, LAN-discovered, and manually-configured peers,
    splits the combined list across PEERSCAN_THREADS scanner threads, waits
    for all of them, and re-arms ``scan_timer`` to run this function again
    after SCAN_INTERVAL seconds.
    """
    start = time.time()
    global peers
    global fed_peers
    global scan_timer

    # Discover peers.
    # BUG FIX: copy fed_peers instead of aliasing it — the original bound
    # tmp_peers to the fed_peers list itself, so the "+=" below mutated the
    # global in place before it was reset.
    tmp_peers = list(fed_peers)
    tmp_peers += peers
    tmp_peers += expand_lan()
    for peer in man_peers:
        # NOTE(review): dedup checks `peers`, not `tmp_peers` — a manual peer
        # already found via fed/LAN discovery can still end up listed twice;
        # confirm whether that is intended.
        if peer not in peers:
            tmp_peers.append(peer)
    fed_peers = []

    # Connect to discovered nodes.
    # BUG FIX: with an empty peer list chunk_size was 0 and range() raised
    # "range() arg 3 must not be zero"; clamp the chunk size to at least 1
    # (an empty list then simply produces no scanner threads).
    chunk_size = max(1, ceil(len(tmp_peers) / PEERSCAN_THREADS))
    ranges = [
        tmp_peers[i:i + chunk_size]
        for i in range(0, len(tmp_peers), chunk_size)
    ]
    lthreads = []
    for peer_chunk in ranges:
        peer_range = [str(ip) for ip in peer_chunk]
        worker = threading.Thread(target=scan_range, args=(peer_range, ))
        lthreads.append(worker)
        worker.start()

    log.ok(
        "Network",
        "Scanning network with %d threads, waiting until finished..." %
        len(lthreads))
    for thread in lthreads:
        thread.join()

    # Re-arm the periodic rescan.
    scan_timer = threading.Timer(SCAN_INTERVAL, peerscan)
    scan_timer.start()
    log.ok(
        "Network",
        "Finished LAN scan in %.2f seconds, rescaning in %d seconds." %
        (time.time() - start, SCAN_INTERVAL))
# Exemple #3
# 0
        threading.Thread.__init__(self)

    def run(self):                             # run provides thread logic
        """Print self.count numbered lines, serializing stdout via self.mutex."""
        for step in range(self.count):
            # Hold the shared lock while printing so lines never interleave.
            with self.mutex:
                print('[%s] => %s' % (self.myId, step))

stdoutmutex = threading.Lock()                 # shared lock keeps prints from interleaving
threads = []
for i in range(10):
    # Build all ten workers, launching each as soon as it is created.
    thread = Mythread(i, 100, stdoutmutex)
    threads.append(thread)
    thread.start()                             # invokes run() in a new thread

# Block until every worker has finished before the final message.
for thread in threads:
    thread.join()
print('Main thread exiting.')
#************************************
"""
four different ways to run an action in a thread; all print 4294967296,
but prints should be synchronized with a mutex here to avoid overlap
"""

import threading, _thread
def action(i):
    """Print *i* raised to the 32nd power."""
    result = i ** 32
    print(result)

# subclass with state
class Mythread(threading.Thread):
    """Thread subclass carrying per-thread state ``i``."""

    def __init__(self, i):
        # BUG FIX: Thread.__init__ was never called, so start() would raise
        # "RuntimeError: thread.__init__() not called".
        super().__init__()
        self.i = i  # per-thread payload
def train(continu=True,
          depth=1,
          rand_min=0.2,
          rand_max=1.0,
          lr_max=8e-5,
          lr_min=1e-6,
          reg=0.005,
          num_threads=4):  # add more parameters probably.
    """Train the chess neural net via parallel self-play games.

    Each outer iteration launches ``num_threads`` games concurrently,
    sums their weighted gradients, and applies one network update.
    Training continues until the GUI shutdown thread flips the shared flag.

    Args:
        continu: if True, resume from the saved "latest"/"best" nets and
            the saved_nns/progress.txt schedule state; otherwise a fresh
            network is initialized (re-rolled until its initial prediction
            on the start position is small).
        depth: search depth passed to each self-play game.
        rand_min / rand_max: bounds of the move-randomness schedule.
        lr_max / lr_min: bounds of the learning-rate schedule.
        reg: regularization factor passed to each game.
        num_threads: number of concurrent self-play games per update.

    Side effects: appends to logs/, saves networks under saved_nns/,
    rewrites saved_nns/progress.txt on exit, and spawns a GUI thread.
    """
    if continu:
        nn = load_nn("latest")
        best_nn = load_nn("best")
    else:
        nn = NeuralNet()
        nn.init_net(input_size=386,
                    output_size=1,
                    hidden_size=1000,
                    number_of_hidden=8)
        resets = 0
        # Re-roll the random init until the untrained net scores the start
        # position close to 0 (i.e. near-neutral evaluation).
        while abs(nn.predict(chess_ai.chess_to_nn_input(Chess()))) > .1:
            nn = NeuralNet()
            nn.init_net(input_size=386,
                        output_size=1,
                        hidden_size=1000,
                        number_of_hidden=8)
            resets += 1
        print("nn started. Start predict: ",
              nn.predict(chess_ai.chess_to_nn_input(Chess())), "after", resets,
              "resets")
        best_nn = copy.deepcopy(nn)
    # best_nn = load_nn("best") # crashes if no such nn is saved

    with open("logs/log.txt", "a") as log_file:
        log_file.write("\n=========== New Training Run ================\n")
    # Shared mutable flag: the GUI thread sets bool_dict["bool"] = False to
    # request a graceful stop of the training loop below.
    bool_dict = {"bool": True}
    ui_thread = threading.Thread(target=shutdown_gui_thread,
                                 args=(bool_dict, ))
    ui_thread.start()
    # thread.start_new_thread(shutdown_gui_thread, (bool_dict, )) # starts thread, which displays gui to allow for easy graceful shutdown.
    lr = lr_max

    # result_counts: [black wins, ties, white wins] — seeded at 1 each so the
    # lr_factor ratio below never divides by zero.
    result_counts = [1, 1, 1]
    count = 0
    randomness = rand_max

    if continu:
        # Restore the schedule state (counts, lr, randomness) from disk so a
        # resumed run picks up exactly where the previous one stopped.
        with open("saved_nns/progress.txt") as f:

            settings_dict = json.loads(f.read())
            result_counts = settings_dict["result_counts"]
            count = settings_dict["count"]
            lr = settings_dict["lr"]
            lr_min = settings_dict["lr_min"]
            randomness = settings_dict["rand"]
            rand_min = settings_dict["rand_min"]

    # else:
    #     result_counts = [1, 1, 1]  # index 0: counts of black wins, index 1: Ties, index 2: white wins
    #     count = 0

    while bool_dict[
            "bool"]:  # This dict is given to the UI thread. This allows for stopping the training via UI

        # One slot per worker; each game writes its (db, dw, res, ...) tuple
        # into result_list at its own index.
        result_list = [None] * num_threads
        threads = []

        for i in range(num_threads):
            # print("Starting thread", i)
            thread = threading.Thread(target=run_one_game,
                                      args=(nn, randomness, depth, reg,
                                            result_counts, count + i, i,
                                            result_list, lr))
            threads.append(thread)
            thread.daemon = True
            thread.start()

        for thread in threads:
            thread.join()

        # print("All threads done")
        # print("TODO: merge thread results and update")  # TODO!

        avg_db = [
        ]  # Not actually taking the average. Just sum up (maybe reduce learning rate)
        avg_dw = []
        for db, dw, res, print_string, cost_string in result_list:
            # Down-weight gradients from the over-represented outcome class:
            # the more often this result (loss/draw/win) has occurred, the
            # smaller its contribution.
            lr_factor = 1 - result_counts[res + 1] / sum(result_counts)
            # print(res, lr_factor, result_counts)
            # NOTE(review): the first game's gradients are taken verbatim
            # (no lr_factor applied); only subsequent games are weighted —
            # confirm this asymmetry is intended.
            if not avg_db:
                avg_db = db
                avg_dw = dw
            else:
                for i in range(len(avg_db)):
                    avg_db[i] += db[i] * lr_factor
                    avg_dw[i] += dw[i] * lr_factor

            result_counts[res + 1] += 1
            # Route the per-game cost series into a CSV by outcome.
            if res == 1:
                cost_file_name = "logs/win_costs.csv"
            elif res == 0:
                cost_file_name = "logs/draw_costs.csv"
            else:
                cost_file_name = "logs/loss_costs.csv"

            print_string += ", lr_factor: " + str("{:.3f}".format(lr_factor))

            with open("logs/detailed_log.txt", "a") as log_file:
                log_file.write(print_string + "\n")
            with open(cost_file_name, "a") as cost_file:
                cost_file.write(cost_string)
            print(print_string)

        # Scale the summed gradients by 1/num_threads before the update.
        for i in range(len(avg_db)):
            avg_db[i] = avg_db[i] * (1 / num_threads)
            avg_dw[i] = avg_dw[i] * (1 / num_threads)

        nn.update_from_gradients(avg_db, avg_dw, lr=lr)

        count += num_threads
        # Schedules: decay randomness after 200 games, lr after 400, each
        # clamped to its configured minimum.
        if count > 200:
            randomness = max(
                rand_min, randomness * 0.995
            )  # Reduce randomness a little each run, until it becomes less than rand_min
        if count > 400:
            lr = max(lr_min, lr * 0.995)
        # sum_val = np.sum(chess.board)
        # avg_cost = total_cost / chess.turn_num if count >= 200 else total_cost
        # print_string = ("Game " + str(count)
        #                  + ", Avg Cost: " + str("{:.3f}".format(avg_cost[0]))
        #                  + ", Last cost: " + str("{:.3f}".format(last_cost[0]))
        #                  + ", Turns: " + str(chess.turn_num)
        #                  + ", Randomness:" + str("{:.2f}".format(randomness))
        #                  + ", lr:" + str("{:.5f}".format(lr))
        #                  + ", lr factor:" + str("{:.3f}".format(lr_factor))
        #                  + ", Win:" + str(result)
        #                  + ", Sum: " + str(sum_val)
        #                  + ", Val:" + str("{:.4f}".format(val)) )

        # print(print_string)
        # with open("logs/detailed_log.txt", "a") as log_file:
        #     log_file.write(print_string + "\n")

        # if result == 1:
        #     cost_file_name = "logs/win_costs.csv"
        # elif result == 0:
        #     cost_file_name = "logs/draw_costs.csv"
        # else:
        #     cost_file_name = "logs/loss_costs.csv"

        # with open(cost_file_name, "a") as cost_file:
        #     cost_string = ( str(count) + ","
        #                     + str(chess.turn_num) + ","
        #                     + str(total_cost[0]) +  ","
        #                     + str(avg_cost[0]) + ","
        #                     + str(last_cost[0]) + "\n" )
        #     cost_file.write(cost_string)

        # TODO SAVE COSTS FOR GRAPHING LATER
        # print("Game", count, ", Total Cost:", "{:.2f}".format(total_cost[0]), "Last cost: ", cost, "Turns: ", chess.turn_num, ", Randomness:", "{:.2f}".format(randomness), ", lr:", "{:.4f}".format(lr), "lr factor:", "{:.3f}".format(lr_factor), ", Winner:", chess.winner, ", Val:", "{:.5f}".format(val))
        # NOTE(review): count advances by num_threads, so this only fires when
        # 200 is a multiple of num_threads — verify for other thread counts.
        if count % 200 == 0:  # every X games test and backup
            best_nn = test_and_backup(nn, best_nn, count)

    # Graceful shutdown: persist the latest net and the schedule state.
    # test_and_backup(nn, best_nn, count)
    save_nn("latest", nn)
    with open("logs/log.txt", "a") as log_file:
        log_file.write("\n=========== Training Run Ended ================\n")
    settings = {
        "count": count,
        "result_counts": result_counts,
        "lr": lr,
        "lr_min": lr_min,
        "rand": randomness,
        "rand_min": rand_min
    }
    with open("saved_nns/progress.txt", "w") as f:
        f.write(json.dumps(settings))
# Exemple #5
# 0
def main():
    """Run fun() in a worker thread and wait for it to finish."""
    # BUG FIX: the low-level thread/_thread module has no join() (the
    # original `thread.join()` raised AttributeError); use threading.Thread,
    # which supports joining.
    worker = threading.Thread(target=fun)
    worker.start()
    worker.join()
    print("-----over---")

def consumer(idnum, dataqueue):
    """Forever poll *dataqueue*, printing each item under the safeprint lock.

    Uses a non-blocking get every 0.1s so the loop never blocks
    indefinitely on an empty queue.  Never returns.
    """
    while True:
        time.sleep(0.1)
        try:
            item = dataqueue.get(block=False)
        except queue.Empty:
            continue  # nothing ready yet -- poll again next tick
        with safeprint:
            print('Consumer', idnum, 'got =>', item)


if __name__ == '__main__':
    # Consumers run as daemons: they loop forever, and would otherwise
    # keep the interpreter alive.
    for i in range(numconsumers):
        worker = threading.Thread(target=consumer, args=(i, dataQueue))
        worker.daemon = True
        worker.start()

    waitfor = []
    for i in range(numproducers):
        worker = threading.Thread(target=producer, args=(i, dataQueue))
        worker.start()
        waitfor.append(worker)

    # Wait only for the producers; daemon consumers die with the process.
    for worker in waitfor:
        worker.join()
    print('Main thread exit.')
# Exemple #7
# 0
if __name__ == '__main__':
    # BUG FIX: waitfor was re-initialized inside the producer loop, so only
    # the LAST producer thread was ever recorded and joined; build the list
    # once, before the loop.
    waitfor = []
    for i in range(numproducers):
        thread = threading.Thread(target=producer, args=(i, dataQueue))
        waitfor.append(thread)
        thread.start()
        # thread.start_new_thread(producer, (i, dataQueue))
        # time.sleep(((numproducers-1) * nummessages) + 1)  # ensure the main thread exits last
    # NOTE(review): consumers are non-daemon and loop forever, so the process
    # will not exit after the producers finish — confirm this is intended.
    for i in range(numconsumers):
        thread = threading.Thread(target=consumer, args=(i, dataQueue))
        thread.daemon = False
        thread.start()

    for thread in waitfor:
        thread.join()
    print('Main thread exit.')

# import threading
#
# import queue
# class Producer(threading.Thread):
#    def __init__(self, in_queue, out_queue):
#        threading.Thread.__init__(self)
#        self.in_queue = in_queue
#        self.out_queue = out_queue
#    def run(self):
#        while True:
#            item = self.in_queue.get()
#            result = item
#            self.out_queue.put(result)
	for msgnum in range(nummessages):
		time.sleep(idnum)
		dataqueue.put('[producer id=%d, count=%d]' % (idnum, msgnum))

def consumer(idnum, dataqueue):
	"""Forever poll *dataqueue*, printing each item under the safeprint lock.

	Polls every 0.1s with a non-blocking get, so an empty queue never
	blocks the loop.  Never returns.
	"""
	while True:
		time.sleep(0.1)
		try:
			item = dataqueue.get(block=False)
		except queue.Empty:
			continue		# queue empty right now -- poll again
		with safeprint:
			print('Consumer', idnum, 'got =>', item)

if __name__ == '__main__':
	# Daemon consumers die automatically when the main thread exits,
	# so only the producers need to be joined.
	for i in range(numconsumers):
		thread = threading.Thread(target=consumer, args=(i, dataQueue))
		thread.daemon = True
		thread.start()

	waitfor = []
	for i in range(numproducers):
		thread = threading.Thread(target=producer, args=(i, dataQueue))
		thread.start()
		waitfor.append(thread)

	for thread in waitfor:
		thread.join()		# alternatively: a sufficiently long time.sleep()
	print('Main thread exit.')