import threading

# Db is the project's database wrapper, exposing connect(), disconnect()
# and add_keys(); it is defined elsewhere in the project.


class QueueProcessThread(threading.Thread):
    def __init__(self, args=()):
        super(QueueProcessThread, self).__init__()
        self.queue = args[0]            # shared queue of keys waiting to be persisted
        self.queue_max_size = args[1]   # maximum queue size chosen by the caller
        self.db = None
        self._stop_event = threading.Event()

    def run(self):
        """
        Continuously drain the queue and add its keys to the KEYS table,
        until a stop is requested.
        """
        self.db = Db()
        self.db.connect()
        while True:
            if self.stopped():
                # Flush whatever is still queued before shutting down.
                self.empty_queue()
                self.db.disconnect()
                break
            self.empty_queue()

    def empty_queue(self):
        keys = []
        while not self.queue.empty():
            keys.append(self.queue.get())
            self.queue.task_done()
        if keys:  # avoid issuing empty writes while the run() loop spins
            self.db.add_keys(keys)

    def stop(self):
        self._stop_event.set()

    def stopped(self):
        return self._stop_event.is_set()
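
A minimal usage sketch for this worker thread, assuming a standard queue.Queue as
the shared queue and a working Db backend; the queue size and key values below are
placeholders:

import queue

key_queue = queue.Queue(maxsize=1000)
worker = QueueProcessThread(args=(key_queue, 1000))
worker.start()

for key in ("key1", "key2", "key3"):    # placeholder keys
    key_queue.put(key)

worker.stop()    # sets the stop event; run() flushes the queue and disconnects
worker.join()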
Example 2
import json
import logging
import threading
from time import sleep

# Db, BlockThreadFile, show_speed and success are project-local helpers defined
# elsewhere; Blockchain is assumed to come from the blockchain_parser package.
from blockchain_parser.blockchain import Blockchain


class App:
    user = None
    cfg = None
    db = None

    def __init__(self, cfg):
        self.cfg = cfg
        self.db = Db()
        if self.db:
            self.db.connect(cfg.getDbPath())
    
    def routing(self):
        return { 
            '/echo(/.*)?': self.echo
            }

    def echo(self, vars, params):
        params[u'path_vars'] = vars
        return self.to_json_response(params)

    def to_json_response(self, data):
        s = json.dumps(data, indent=2)
        return ('text/json', s)
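
    # Usage note (assumed dispatcher): a caller matching the routing() patterns
    # invokes echo() with the captured path variables and the request params;
    # echo() copies the variables into params and returns a (content_type, body)
    # tuple, e.g.
    #   app.echo([u'/hello'], {u'q': u'1'})
    #   -> ('text/json', json.dumps({u'q': u'1', u'path_vars': [u'/hello']}, indent=2))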

    def run(self):
        """
        Start the threads:
            BlockThreadFile    - retrieves keys from a block
            QueueProcessThread - processes data on the queue when it is full
        """
        db = Db()
        db.connect()

        last_block_db = db.get_last_block()

        # If nothing is in the database yet, start at block 1.
        if last_block_db is None:
            last_block_db = 1
        logging.info("%s last block in db", last_block_db)

        # The db connection is not needed any more.
        db.disconnect()

        # Create the thread that processes queued data.
        bdthread = QueueProcessThread(args=(self.queue, self.queue_max_size))
        bdthread.name = "bdthread"
        bdthread.start()

        # Create the threads that retrieve keys from blocks.
        threads = []
        for i in range(self.nthreads):
            # args must be a one-element tuple; (self.queue) is just the queue itself.
            t = BlockThreadFile(args=(self.queue,))
            t.start()
            threads.append(t)

        blockchain = Blockchain(self.path)

        speed = show_speed(last_block_db)
        for block in blockchain.get_ordered_blocks(self.path + '/index',
                                                   start=last_block_db,
                                                   cache='index_cache.pickle'):
            # self.stopped is a flag set elsewhere (e.g. by a signal handler)
            # to request a clean shutdown.
            if self.stopped:
                success("Stopping...")
                for t in threads:
                    if t == threading.current_thread():
                        continue  # TODO: do we need this anymore?
                    elif t.name == bdthread.name:
                        continue
                    t.stop()
                for t in threads:
                    t.join()
                # Stop the queue-processing thread last so it can flush the queue.
                bdthread.stop()
                bdthread.join()
                success("bye")
                break  # TODO: ugly
            # Hand the block to the first idle worker thread.
            distributed = False
            while not distributed:
                sleep(0.1 * self.nthreads)
                for t in threads:
                    if not t.is_working():
                        t.set_block(block)
                        distributed = True
                        logging.info("{}\tretrieving block {}...{}".format(
                            t.name, block.height, speed))
                        break

        return
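
A minimal driver sketch for App, assuming a hypothetical Config object that
provides getDbPath(); stopped, queue, queue_max_size, nthreads and path are not
initialised anywhere in this excerpt, so the caller has to set them before run():

import queue
import signal

cfg = Config("settings.ini")            # hypothetical config wrapper with getDbPath()
app = App(cfg)

app.path = "/path/to/bitcoin/blocks"    # directory holding the blk*.dat files and index/
app.nthreads = 4
app.queue = queue.Queue()
app.queue_max_size = 1000
app.stopped = False

# Let Ctrl-C request a clean shutdown: run() checks app.stopped for every block.
signal.signal(signal.SIGINT, lambda *_: setattr(app, "stopped", True))

app.run()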