def submit_block_orig(block_id, worker_name, date_found, block_hash, block_height, difficulty, value):
    """Complete the current block template with the details of a found block.

    A fresh template row for the next (not yet found) block is created
    first; then the existing row identified by ``block_id`` is filled in
    and marked with ``is_mature = -2``.

    SQL is built by string interpolation — the in-file convention is that
    all of these values come from an internal, trusted source.
    """
    # Create a new template for the following (not yet found) block.
    _create_new_block_orig(date_found)
    fields = {
        'worker_name': worker_name,
        'date_found': date_found,
        'hash': block_hash,
        'difficulty': difficulty,
        'block_height': block_height,
        # NOTE(review): value appears to be in base units scaled down by 1e8
        # (cf. submit_block_new doing the same) — confirm the unit convention.
        'value': Decimal(value) / 10**8,
        'block_id': block_id,
    }
    # Fill-in block information and make it complete.
    stmt = '''UPDATE mining_block SET is_mature = -2, founder = '%(worker_name)s', date_found = '%(date_found)s', hash = '%(hash)s', difficulty = %(difficulty)s, value = %(value)s, workers = 0, blocknum = %(block_height)s WHERE id = %(block_id)s''' % fields
    dbsink.execute('submit_block_orig', stmt)
def _drop_block_template():
    """Delete the newest mining_block row if it is still an unfound template.

    The row with the highest id is a template only while its date_found
    equals config.NULL_DATE; the extra predicate in the DELETE makes this
    a no-op for rows describing already-found blocks.

    Generator-style coroutine: yields the dbsink.direct_query result
    (presumably a Deferred-driven protocol — confirm against dbsink).
    """
    query = '''select max(id) from mining_block'''
    max_id = (yield dbsink.direct_query('drop_block_template--q', query))[0][0]
    # Robustness fix: on an empty table, SELECT max(id) returns NULL so
    # max_id is None, and the %(max_id)d interpolation below would raise
    # TypeError. There is nothing to drop in that case, so bail out early.
    if max_id is None:
        return
    stmt = '''DELETE FROM mining_block WHERE id = %(max_id)d and date_found = '%(null_date)s' ''' % \
        {'max_id': max_id, 'null_date': config.NULL_DATE}
    dbsink.execute('drop_block_template', stmt)
def submit_share_stats(time_window, push_secs, shares, slot_num, slot_score, block_id):
    """Push one share-statistics record via the 'stats_body' SQL template.

    The template is filled with this stratum instance's unique id, the
    supplied window/slot figures, and the configured stats push delay
    group size.
    """
    fields = {
        'name': config.STRATUM_UNIQUE_ID,
        'time_window': time_window,
        'shares': shares,
        'block_id': block_id,
        'push_secs': push_secs,
        'slot_num': slot_num,
        'slot_score': slot_score,
        'delay_size': config.STATS_PUSH_DELAY_GROUP_SIZE,
    }
    dbsink.execute('submit_share_stats', PUSH_TEMPLATES['stats_body'] % fields)
def _create_new_block_orig(date_started=None):
    """Insert a fresh, empty mining_block template row.

    date_started defaults to the current time when falsy/None; date_found
    is set to the sentinel config.NULL_DATE, which marks the row as a
    not-yet-found template (see _drop_block_template).
    """
    # Let's create new one!
    log.info("Creating new mining_block record")
    started = date_started if date_started else datetime.datetime.now()
    values = {'null_date': config.NULL_DATE, 'date_started': started}
    stmt = '''INSERT INTO mining_block (founder, date_started, date_found, total_shares, blocknum, is_mature) VALUES ('', '%(date_started)s', '%(null_date)s', 0, 0, -2)''' % values
    dbsink.execute('_create_new_block_orig', stmt)
def submit_block_new(block_id, worker_name, date_found, block_hash, block_height, difficulty, value):
    """Insert a completed-block row derived from the current newest row.

    The INSERT..SELECT copies the max-id row's date_found as the new row's
    date_started and assigns it id max(id)+1, filling in the found block's
    founder, hash, height, difficulty and value.

    NOTE(review): the ``block_id`` parameter is accepted but never used in
    the SQL — confirm whether that is intentional.
    """
    fields = {
        'worker_name': worker_name,
        'date_found': date_found,
        'block_hash': block_hash,
        'difficulty': difficulty,
        'block_height': block_height,
        # NOTE(review): value appears to be in base units scaled down by 1e8
        # (cf. submit_block_orig doing the same) — confirm the unit convention.
        'value': Decimal(value) / 10**8,
    }
    stmt = ''' INSERT INTO mining_block (id, founder, date_started, date_found, value, total_shares, total_score, difficulty, blocknum, is_mature, hash, workers) SELECT b.id + 1, '%(worker_name)s', b.date_found, '%(date_found)s', %(value)s, 0, 0, %(difficulty)s, %(block_height)s, -2, '%(block_hash)s', 0 FROM mining_block b WHERE b.id = (select max(id) from mining_block); ''' % fields
    # NOTE(review): the statement label here is 'submit_block', unlike the
    # sibling helpers which label with their own function name — confirm
    # this is intentional before renaming it.
    dbsink.execute('submit_block', stmt)
def submit_shares(shares_iterator):
    """Batch submit worker shares to the shares and/or slots sinks.

    Which sink(s) receive the batch is controlled by the two
    config.PUSH_SHARES__* flags; when shares-pushing is disabled AND
    slots-pushing is disabled, the shares path still runs (the
    ``or not`` fallback below).
    """
    push_shares = config.PUSH_SHARES__MINING_SHARES
    push_slots = config.PUSH_SHARES__MINING_SLOTS
    if push_shares and push_slots:
        # Both passes below consume the iterable, so a one-shot generator
        # must be materialized before the first pass reads it.
        shares_iterator = list(shares_iterator)
    if push_shares or not push_slots:
        # This is unsafe way to build SQL, but we know all these
        # values are coming from internal source
        rows = [PUSH_TEMPLATES['sh_values'] % worker_shares
                for worker_shares in shares_iterator]
        dbsink.execute('submit_shares',
                       PUSH_TEMPLATES['sh_body'] % (', \n'.join(rows)))
    if push_slots:
        rows = [PUSH_TEMPLATES['sh_values_slots'] % worker_shares
                for worker_shares in shares_iterator]
        dbsink.execute('submit_shares_slots',
                       PUSH_TEMPLATES['sh_body_slots'] % (', \n'.join(rows)))
def increase_worker_blocks(worker_id):
    """Increment the found-blocks counter of one mining_worker row.

    worker_id is interpolated directly into the SQL, following the
    in-file convention that these values come from an internal source.
    """
    dbsink.execute('increase_worker_blocks',
                   '''UPDATE mining_worker SET blocks=blocks+1 WHERE id=%s''' % worker_id)