Ejemplo n.º 1
0
        def _(share):
            """Handle a share received from a peer: if its proof-of-work also
            satisfies the bitcoin network target, reassemble the full block,
            submit it to bitcoind, and optionally send a Pushover notification.
            """
            # Only shares whose PoW hash meets the parent-chain target are
            # candidate blocks; all other shares are ignored here.
            if not (share.pow_hash <= share.header['bits'].target):
                return

            # Reassemble the full block from the share plus locally known
            # transactions; returns None when some transactions are missing.
            block = share.as_block(self.tracker, self.known_txs_var.value)
            if block is None:
                print >> sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s bitcoin: %s%064x' % (
                    p2pool_data.format_hash(
                        share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                    share.header_hash)
                return
            helper.submit_block(block, True, self.factory, self.bitcoind,
                                self.bitcoind_work, self.net)
            print
            print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (
                p2pool_data.format_hash(share.hash),
                self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
            print
            # Code to send pushover and notification when a block is found by a peer (ikolubr - Mar 31st, 2018)
            if self.net.USE_PUSHOVER_BLOCK:
                conn = httplib.HTTPSConnection("api.pushover.net:443")
                conn.request(
                    "POST", "/1/messages.json",
                    urllib.urlencode({
                        "token":
                        self.net.PUSHOVER_APP_TOKEN,
                        "user":
                        self.net.PUSHOVER_USER_KEY,
                        "message":
                        'FOUND BLOCK! %s%064x' %
                        (self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                         share.header_hash),
                    }), {"Content-type": "application/x-www-form-urlencoded"})
                # NOTE(review): the response body is discarded; notification
                # failures are silent.
                conn.getresponse()
Ejemplo n.º 2
0
        def download_shares():
            while True:
                desired = yield self.node.desired_var.get_when_satisfies(
                    lambda val: len(val) != 0)
                peer2, share_hash = random.choice(desired)

                if len(self.peers) == 0:
                    yield deferral.sleep(1)
                    continue
                peer = random.choice(self.peers.values())

                print 'Requesting parent share %s from %s' % (
                    p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                try:
                    shares = yield peer.get_shares(
                        hashes=[share_hash],
                        parents=500,
                        stops=[],
                    )
                except:
                    log.err(None, 'in download_shares:')
                    continue

                if not shares:
                    yield deferral.sleep(
                        1
                    )  # sleep so we don't keep rerequesting the same share nobody has
                    continue
                self.handle_shares(shares, peer)
Ejemplo n.º 3
0
 def download_shares():
     """Share download loop (twisted inlineCallbacks-style generator):
     wait until some share is desired, then request it (with up to 500
     parents) from a randomly chosen connected peer and hand the results
     to p2p_node.handle_shares().
     """
     while True:
         # Block until the desired-shares list is non-empty.
         desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0)
         # NOTE(review): peer2 from the desired entry is unused; a random
         # connected peer is queried instead.
         peer2, share_hash = random.choice(desired)
         
         if len(p2p_node.peers) == 0:
             # No peers connected -- back off briefly and retry.
             yield deferral.sleep(1)
             continue
         peer = random.choice(p2p_node.peers.values())
         
         print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
         try:
             shares = yield peer.get_shares(
                 hashes=[share_hash],
                 parents=500,
                 stops=[],
             )
         except:
             # NOTE(review): bare except also swallows non-Exception throws.
             log.err(None, 'in download_shares:')
             continue
         
         if not shares:
             yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
             continue
         p2p_node.handle_shares(shares, peer)
Ejemplo n.º 4
0
 def download_shares():
     """Share download loop (twisted inlineCallbacks-style generator):
     fetch desired shares from a randomly chosen peer, using a randomized
     parent count and stop hashes derived from the local tracker heads.
     """
     while True:
         desired = yield self.node.desired_var.get_when_satisfies(lambda val: len(val) != 0)
         # NOTE(review): peer_addr from the desired entry is unused; a
         # random connected peer is queried instead.
         peer_addr, share_hash = random.choice(desired)
         
         if len(self.peers) == 0:
             # No peers connected -- back off briefly and retry.
             yield deferral.sleep(1)
             continue
         peer = random.choice(self.peers.values())
         
         print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
         try:
             shares = yield peer.get_shares(
                 hashes=[share_hash],
                 parents=random.randrange(500), # randomize parents so that we eventually get past a too large block of shares
                 # Stop at known heads (and points slightly below them, up
                 # to 10 back) so the peer doesn't resend shares we have.
                 stops=list(set(self.node.tracker.heads) | set(
                     self.node.tracker.get_nth_parent_hash(head, min(max(0, self.node.tracker.get_height_and_last(head)[0] - 1), 10)) for head in self.node.tracker.heads
                 ))[:100],
             )
         except defer.TimeoutError:
             print 'Share request timed out!'
             continue
         except:
             # NOTE(review): bare except also swallows non-Exception throws.
             log.err(None, 'in download_shares:')
             continue
         
         if not shares:
             yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
             continue
         # Shares are delivered with an empty new-transactions list.
         self.handle_shares([(share, []) for share in shares], peer)
Ejemplo n.º 5
0
 def render_GET(self, request):
     """Long-polling getwork handler (twisted inlineCallbacks-style):
     wait until current work differs from what this worker last saw,
     then write a JSON-RPC getwork response to the request. JSON-RPC
     errors are serialized back to the client instead of raised.
     """
     try:
         try:
             request.setHeader('X-Long-Polling', '/long-polling')
             request.setHeader('Content-Type', 'application/json')
             
             # Random id used only to correlate debug log lines.
             id = random.randrange(10000)
             if p2pool.DEBUG:
                 print 'POLL %i START' % (id,)
             
             request_id = get_id(request)
             memory = get_memory(request)
             
             if request_id not in last_cache_invalidation:
                 last_cache_invalidation[request_id] = variable.Variable((None, None))
             
             # Wait until the current work differs from the last work this
             # worker was shown (thought_work[-1]).
             while True:
                 yield wait_hold(request_id)
                 work = self.work.value
                 thought_work = last_cache_invalidation[request_id].value
                 if work != thought_work[-1]:
                     break
                 if p2pool.DEBUG:
                     print 'POLL %i WAITING' % (id,)
                 yield defer.DeferredList([self.work.changed.get_deferred(), last_cache_invalidation[request_id].changed.get_deferred()], fireOnOneCallback=True)
             
             if thought_work[-1] is not None and work != thought_work[-1] and any(x is None or work['previous_block'] == x['previous_block'] for x in thought_work[-memory or len(thought_work):]):
                 # clients won't believe the update
                 # Fake previous_block so the client treats the work as new.
                 work = work.copy()
                 work['previous_block'] = random.randrange(2**256)
                 if p2pool.DEBUG:
                     print 'POLL %i FAKED' % (id,)
                 set_hold(request_id, .01)
             res = self.compute(work, get_payout_script(request, self.net))
             
             last_cache_invalidation[request_id].set((thought_work[-1], work))
             if p2pool.DEBUG:
                 print 'POLL %i END %s' % (id, p2pool_data.format_hash(work['best_share_hash']))
             
             # Cap the target for clients that didn't opt in to arbitrary
             # targets via the X-All-Targets header.
             if request.getHeader('X-All-Targets') is None and res.target2 > 2**256//2**32 - 1:
                 res = res.update(target2=2**256//2**32 - 1)
             
             request.write(json.dumps({
                 'jsonrpc': '2.0',
                 'id': 0,
                 'result': res.getwork(identifier=str(work['best_share_hash'])),
                 'error': None,
             }))
         except jsonrpc.Error:
             raise
         except Exception:
             log.err(None, 'Squelched long polling error:')
             raise jsonrpc.Error(-32099, u'Unknown error')
     except jsonrpc.Error, e:
         # Python 2 except syntax; send the JSON-RPC error to the client.
         request.write(json.dumps({
             'jsonrpc': '2.0',
             'id': 0,
             'result': None,
             'error': e._to_obj(),
         }))
Ejemplo n.º 6
0
 def getwork(self, request, long_poll=False):
     """Build a getwork response for one worker (twisted
     inlineCallbacks-style). With long_poll=True, waits until new work
     appears before responding; may fake previous_block so clients
     accept the update.
     """
     request_id = get_id(request)
     memory = get_memory(request)
     
     # Random id used only to correlate debug log lines.
     id = random.randrange(10000)
     if p2pool.DEBUG:
         print 'POLL %i START long_poll=%r user_agent=%r x-work-identifier=%r user=%r' % (id, long_poll, request.getHeader('User-Agent'), request.getHeader('X-Work-Identifier'), get_username(request))
     
     if request_id not in self.worker_views:
         self.worker_views[request_id] = variable.Variable((0, (None, None))) # times, (previous_block/-1, previous_block/-2)
     
     thought_times, thought_work = self.worker_views[request_id].value
     
     # Long poll: wait for either new work or a change in this worker's view.
     if long_poll and thought_times == self.new_work_event.times:
         if p2pool.DEBUG:
             print 'POLL %i WAITING user=%r' % (id, get_username(request))
         yield defer.DeferredList([self.new_work_event.get_deferred(), self.worker_views[request_id].changed.get_deferred()], fireOnOneCallback=True)
     
     yield self.holds.wait_hold(request_id)
     
     res, identifier = self.compute(request)
     
     if thought_work[-1] is not None and self.new_work_event.times != thought_times and any(x is None or res.previous_block == x for x in thought_work[-memory or len(thought_work):]):
         # clients won't believe the update
         # Fake previous_block so the client treats the work as new.
         res = res.update(previous_block=random.randrange(2**256))
         if p2pool.DEBUG:
             print 'POLL %i FAKED user=%r' % (id, get_username(request))
         self.holds.set_hold(request_id, .01)
     
     self.worker_views[request_id].set((self.new_work_event.times if long_poll else thought_times, (thought_work[-1], res.previous_block)))
     if p2pool.DEBUG:
         print 'POLL %i END %s user=%r' % (id, p2pool_data.format_hash(identifier), get_username(request)) # XXX identifier is hack
     
     defer.returnValue(res.getwork(identifier=str(identifier)))
Ejemplo n.º 7
0
Archivo: main.py Proyecto: gyver/p2pool
 def got_response(data):
     """Process work returned by a miner: reconstruct the block, submit
     it to bitcoind if it solves a block, and record/broadcast the share.
     Returns True when the share builds on the current best share,
     False otherwise (or on any error).
     """
     try:
         # match up with transactions
         header = bitcoin.getwork.decode_data(data)
         transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
         if transactions is None:
             print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
             return False
         block = dict(header=header, txs=transactions)
         hash_ = bitcoin.data.block_header_type.hash256(block['header'])
         # Block solved (or DEBUG forces submission) -- pass to bitcoind.
         if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
             print
             print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
             print
             if factory.conn.value is not None:
                 factory.conn.value.send_block(block=block)
             else:
                 print 'No bitcoind connection! Erp!'
         # Share target is embedded in the coinbase script of the first tx.
         target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
         if hash_ > target:
             print 'Worker submitted share with hash (%x) > target (%x)' % (hash_, target)
             return False
         share = p2pool.Share.from_block(block)
         my_shares.add(share.hash)
         print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
         good = share.previous_hash == current_work.value['best_share_hash']
         # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
         p2p_shares([share])
         # eg. good = share.hash == current_work.value['best_share_hash'] here
         return good
     except:
         # NOTE(review): bare except -- all worker-data errors are logged
         # and reported as a rejected share.
         log.err(None, 'Error processing data received from worker:')
         return False
Ejemplo n.º 8
0
Archivo: main.py Proyecto: gyver/p2pool
 def set_real_work2():
     """Recompute the best share via tracker.think(), publish it into
     current_work, and request any desired (missing) parent shares from
     peers, with per-hash exponential-backoff throttling via `requested`.
     """
     best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
     
     t = dict(current_work.value)
     t['best_share_hash'] = best
     current_work.set(t)
     
     t = time.time()
     for peer2, share_hash in desired:
         if share_hash not in tracker.tails: # was received in the time tracker.think was running
             continue
         # Throttle: skip if requested recently (backoff grows 1.5x per try).
         last_request_time, count = requested.get(share_hash, (None, 0))
         if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
             continue
         # Candidate peers: those whose advertised heads descend from this tail.
         potential_peers = set()
         for head in tracker.tails[share_hash]:
             potential_peers.update(peer_heads.get(head, set()))
         potential_peers = [peer for peer in potential_peers if peer.connected2]
         if count == 0 and peer2 is not None and peer2.connected2:
             # First attempt: prefer the peer that introduced the share.
             peer = peer2
         else:
             peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
             if peer is None:
                 continue
         
         print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
         peer.send_getshares(
             hashes=[share_hash],
             parents=2000,
             # Stop at known heads (and points slightly below them, up to
             # 10 back) so the peer doesn't resend shares we already have.
             stops=list(set(tracker.heads) | set(
                 tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
             ))[:100],
         )
         requested[share_hash] = t, count + 1
Ejemplo n.º 9
0
Archivo: main.py Proyecto: gyver/p2pool
 def p2p_shares(shares, peer=None):
     """Ingest shares received over p2p: add new ones to the tracker,
     forward any that also solve a bitcoin block to bitcoind, record the
     sending peer's head, and recompute best work if anything was new.
     """
     if len(shares) > 5:
         print 'Processing %i shares...' % (len(shares),)
     
     some_new = False
     for share in shares:
         if share.hash in tracker.shares:
             #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
             continue
         some_new = True
         
         #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
         
         tracker.add(share)
         #for peer2, share_hash in desired:
         #    print 'Requesting parent share %x' % (share_hash,)
         #    peer2.send_getshares(hashes=[share_hash], parents=2000)
         
         # Share that also meets the bitcoin target is a solved block.
         if share.bitcoin_hash <= share.header['target']:
             print
             print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
             print
             if factory.conn.value is not None:
                 factory.conn.value.send_block(block=share.as_block(tracker, args.net))
             else:
                 print 'No bitcoind connection! Erp!'
     
     # Remember which peer advertised this chain head, for later requests.
     if shares and peer is not None:
         peer_heads.setdefault(shares[0].hash, set()).add(peer)
     
     if some_new:
         set_real_work2()
     
     if len(shares) > 5:
         print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
Ejemplo n.º 10
0
 def download_shares():
     """Share download loop (twisted inlineCallbacks-style generator):
     fetch desired shares from a randomly chosen peer, using a randomized
     parent count and stop hashes derived from the local tracker heads.
     """
     while True:
         desired = yield self.node.desired_var.get_when_satisfies(lambda val: len(val) != 0)
         # NOTE(review): peer_addr from the desired entry is unused; a
         # random connected peer is queried instead.
         peer_addr, share_hash = random.choice(desired)
         
         if len(self.peers) == 0:
             # No peers connected -- back off briefly and retry.
             yield deferral.sleep(1)
             continue
         peer = random.choice(self.peers.values())
         
         print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
         try:
             shares = yield peer.get_shares(
                 hashes=[share_hash],
                 parents=random.randrange(500), # randomize parents so that we eventually get past a too large block of shares
                 # Stop at known heads (and points slightly below them, up
                 # to 10 back) so the peer doesn't resend shares we have.
                 stops=list(set(self.node.tracker.heads) | set(
                     self.node.tracker.get_nth_parent_hash(head, min(max(0, self.node.tracker.get_height_and_last(head)[0] - 1), 10)) for head in self.node.tracker.heads
                 ))[:100],
             )
         except defer.TimeoutError:
             print 'Share request timed out!'
             continue
         except:
             # NOTE(review): bare except also swallows non-Exception throws.
             log.err(None, 'in download_shares:')
             continue
         
         if not shares:
             yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
             continue
         # Shares are delivered with an empty new-transactions list.
         self.handle_shares([(share, []) for share in shares], peer)
Ejemplo n.º 11
0
 def rpc_getwork(self, request, data=None):
     """JSON-RPC getwork endpoint (twisted inlineCallbacks-style). With
     data, treat it as a submitted solution and return the response
     callback result; otherwise compute fresh work for this worker,
     possibly faking previous_block so the client accepts the update.
     """
     request.setHeader('X-Long-Polling', '/long-polling')
     
     if data is not None:
         # Miner submitted a solution -- hand it off and return the result.
         defer.returnValue(self.response_callback(data))
     
     request_id = get_id(request)
     memory = get_memory(request)
     
     if request_id not in last_cache_invalidation:
         last_cache_invalidation[request_id] = variable.Variable((None, None))
     
     yield wait_hold(request_id)
     work = self.work.value
     thought_work = last_cache_invalidation[request_id].value
     
     if thought_work[-1] is not None and work != thought_work[-1] and any(x is None or work['previous_block'] == x['previous_block'] for x in thought_work[-memory or len(thought_work):]):
         # clients won't believe the update
         # Fake previous_block so the client treats the work as new.
         work = work.copy()
         work['previous_block'] = random.randrange(2**256)
         if p2pool.DEBUG:
             print 'GETWORK FAKED'
         set_hold(request_id, .01) # guarantee ordering
     res = self.compute(work, get_payout_script(request, self.net))
     
     last_cache_invalidation[request_id].set((thought_work[-1], work))
     if p2pool.DEBUG:
         print 'GETWORK END %s' % (p2pool_data.format_hash(work['best_share_hash']),)
     
     # Cap the target for clients that didn't opt in via X-All-Targets.
     if request.getHeader('X-All-Targets') is None and res.target2 > 2**256//2**32 - 1:
         res = res.update(target2=2**256//2**32 - 1)
     
     defer.returnValue(res.getwork(identifier=str(work['best_share_hash'])))
Ejemplo n.º 12
0
        def _(share):
            """Handle a share received from a peer: if its proof-of-work also
            satisfies the decred network target, reassemble the full block
            and submit it to dcrd.
            """
            # Only shares whose PoW hash meets the parent-chain target are
            # candidate blocks.
            if not (share.pow_hash <= share.header['bits'].target):
                return

            # Reassemble the block; returns None when transactions are missing.
            block = share.as_block(self.tracker, self.known_txs_var.value)
            if block is None:
                print >> sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s decred: %s%064x' % (
                    p2pool_data.format_hash(
                        share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                    share.header_hash)
                return
            helper.submit_block(block, True, self.factory, self.dcrd,
                                self.dcrd_work, self.net)
            print
            print 'GOT BLOCK FROM PEER! Passing to dcrd! %s decred: %s%064x' % (
                p2pool_data.format_hash(share.hash),
                self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
            print
Ejemplo n.º 13
0
 def _(share):
     if share.pow_hash <= share.header['target']:
         if factory.conn.value is not None:
             factory.conn.value.send_block(block=share.as_block(tracker))
         else:
             print 'No bitcoind connection! Erp!'
         print
         print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool_data.format_hash(share.hash), share.header_hash,)
         print
Ejemplo n.º 14
0
        def _(share):
            """Handle a share received from a peer: if its proof-of-work also
            satisfies the bitcoin network target, reassemble the full block
            and submit it to bitcoind.
            """
            # Only shares whose PoW hash meets the parent-chain target are
            # candidate blocks.
            if not (share.pow_hash <= share.header["bits"].target):
                return

            # Reassemble the block; returns None when transactions are missing.
            block = share.as_block(self.tracker, self.known_txs_var.value)
            if block is None:
                print >> sys.stderr, "GOT INCOMPLETE BLOCK FROM PEER! %s bitcoin: %s%064x" % (
                    p2pool_data.format_hash(share.hash),
                    self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                    share.header_hash,
                )
                return
            helper.submit_block(block, True, self.factory, self.bitcoind, self.bitcoind_work, self.net)
            print
            print "GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x" % (
                p2pool_data.format_hash(share.hash),
                self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                share.header_hash,
            )
            print
Ejemplo n.º 15
0
 def _(share):
     if share.pow_hash <= share.header['target']:
         if factory.conn.value is not None:
             factory.conn.value.send_block(
                 block=share.as_block(tracker))
         else:
             print 'No bitcoind connection! Erp!'
         print
         print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (
             p2pool_data.format_hash(share.hash),
             share.header_hash,
         )
         print
Ejemplo n.º 16
0
    def handle_shares(self, shares, peer):
        """Ingest (share, new_txs) pairs received from a peer: register any
        newly seen transactions, add new shares to the tracker, and
        recompute the best share if anything was added.
        """
        print 'init handle_shares \n'
        if len(shares) > 5:
            print 'Processing %i shares from %s...' % (
                len(shares), '%s:%i' % peer.addr if peer is not None else None)

        new_count = 0
        all_new_txs = {}
        for share, new_txs in shares:
            if new_txs is not None:
                # Index the accompanying transactions by their tx hash.
                all_new_txs.update(
                    (bitcoin_data.hash256(bitcoin_data.tx_type.pack(new_tx)),
                     new_tx) for new_tx in new_txs)

            if share.hash in self.node.tracker.items:
                print 'Got duplicate share, ignoring. Hash: %s' % (
                    p2pool_data.format_hash(share.hash), )
                continue

            new_count += 1

            # Received a share from another node.
            print 'Received share %s from %r' % (p2pool_data.format_hash(
                share.hash), share.peer_addr)
            print 'tracker.add by Received share!'
            self.node.tracker.add(share)

        # Merge the new transactions into the node's known-txs variable.
        new_known_txs = dict(self.node.known_txs_var.value)
        new_known_txs.update(all_new_txs)
        self.node.known_txs_var.set(new_known_txs)

        if new_count:
            print 'set_best_share : by new_count!!'
            self.node.set_best_share()

        if len(shares) > 5:
            print '... done processing %i shares. New: %i Have: %i/~%i' % (
                len(shares), new_count, len(
                    self.node.tracker.items), 2 * self.node.net.CHAIN_LENGTH)
Ejemplo n.º 17
0
        def set_real_work2():
            """Recompute the best share via tracker.think(), publish it (plus
            merged-mining work) into current_work, and request any desired
            missing parent shares from peers, throttled per hash with
            exponential backoff via `requested`.
            """
            best, desired = tracker.think(
                ht, pre_current_work.value['previous_block'],
                time.time() - pre_current_work2.value['clock_offset'])

            current_work2.set(pre_current_work2.value)
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['aux_work'] = pre_merged_work.value
            current_work.set(t)

            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails:  # was received in the time tracker.think was running
                    continue
                # Throttle: skip if requested recently (backoff grows 1.5x/try).
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                # Candidate peers: those advertising heads above this tail.
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [
                    peer for peer in potential_peers if peer.connected2
                ]
                if count == 0 and peer2 is not None and peer2.connected2:
                    # First attempt: prefer the peer that introduced the share.
                    peer = peer2
                else:
                    peer = random.choice(
                        potential_peers
                    ) if potential_peers and random.random() > .2 else peer2
                    if peer is None:
                        continue

                print 'Requesting parent share %s from %s' % (
                    p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    # Stop at known heads (and points slightly below them,
                    # up to 10 back) to avoid resending shares we have.
                    stops=list(
                        set(tracker.heads) | set(
                            tracker.get_nth_parent_hash(
                                head,
                                min(
                                    max(
                                        0,
                                        tracker.get_height_and_last(head)[0] -
                                        1), 10))
                            for head in tracker.heads))[:100],
                )
                requested[share_hash] = t, count + 1
Ejemplo n.º 18
0
        def _(share):
            """Handle a share received from a peer: if its proof-of-work also
            satisfies the dash network target, reassemble the full block,
            submit it to dashd, and fire the new-block event.
            """
            # Only shares whose PoW hash meets the parent-chain target are
            # candidate blocks.
            if not (share.pow_hash <= share.header['bits'].target):
                return
            
            # Reassemble the block; returns None when transactions are missing.
            block = share.as_block(self.tracker, self.known_txs_var.value)

            if block is None:
                print >>sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s dash: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                return
            helper.submit_block(block, True, self.factory, self.dashd, self.dashd_work, self.net)
            print
            print 'GOT BLOCK FROM PEER! Passing to dashd! %s dash: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
            print
            # Notify listeners that a new block was found.
            self.factory.new_block.happened(share.hash)
Ejemplo n.º 19
0
        def _(share):
            """Handle a share received from a peer: if its proof-of-work also
            satisfies the quark network target, reassemble the full block
            (new or legacy format depending on masternode payments) and
            submit it to quarkd.
            """
            # Only shares whose PoW hash meets the parent-chain target are
            # candidate blocks.
            if not (share.pow_hash <= share.header['bits'].target):
                return

            # Block format depends on whether masternode payments are active.
            if self.quarkd_work.value['masternode_payments']:
                block = share.as_block(self.tracker, self.known_txs_var.value)
            else:
                block = share.as_block_old(self.tracker,
                                           self.known_txs_var.value)

            if block is None:
                print >> sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s quark: %s%064x' % (
                    p2pool_data.format_hash(
                        share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                    share.header_hash)
                return
            helper.submit_block(block, True, self.factory, self.quarkd,
                                self.quarkd_work, self.net)
            print
            print 'GOT BLOCK FROM PEER! Passing to quarkd! %s quark: %s%064x' % (
                p2pool_data.format_hash(share.hash),
                self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
            print
Ejemplo n.º 20
0
    def getwork(self, request, long_poll=False):
        """Build a getwork response for one worker (twisted
        inlineCallbacks-style). With long_poll=True, waits until new work
        appears before responding; may fake previous_block so clients
        accept the update.
        """
        request_id = get_id(request)
        memory = get_memory(request)

        # Random id used only to correlate debug log lines.
        id = random.randrange(10000)
        if p2pool.DEBUG:
            print 'POLL %i START long_poll=%r user_agent=%r x-work-identifier=%r user=%r' % (
                id, long_poll, request.getHeader('User-Agent'),
                request.getHeader('X-Work-Identifier'), get_username(request))

        if request_id not in self.worker_views:
            self.worker_views[request_id] = variable.Variable(
                (0, (None,
                     None)))  # times, (previous_block/-1, previous_block/-2)

        thought_times, thought_work = self.worker_views[request_id].value

        # Long poll: wait for new work or a change in this worker's view.
        if long_poll and thought_times == self.new_work_event.times:
            if p2pool.DEBUG:
                print 'POLL %i WAITING user=%r' % (id, get_username(request))
            yield defer.DeferredList([
                self.new_work_event.get_deferred(),
                self.worker_views[request_id].changed.get_deferred()
            ],
                                     fireOnOneCallback=True)

        yield self.holds.wait_hold(request_id)

        res, identifier = self.compute(request)

        if thought_work[
                -1] is not None and self.new_work_event.times != thought_times and any(
                    x is None or res.previous_block == x
                    for x in thought_work[-memory or len(thought_work):]):
            # clients won't believe the update
            # Fake previous_block so the client treats the work as new.
            res = res.update(previous_block=random.randrange(2**256))
            if p2pool.DEBUG:
                print 'POLL %i FAKED user=%r' % (id, get_username(request))
            self.holds.set_hold(request_id, .01)

        self.worker_views[request_id].set(
            (self.new_work_event.times if long_poll else thought_times,
             (thought_work[-1], res.previous_block)))
        if p2pool.DEBUG:
            print 'POLL %i END %s user=%r' % (
                id, p2pool_data.format_hash(identifier), get_username(request)
            )  # XXX identifier is hack

        defer.returnValue(res.getwork(identifier=str(identifier)))
Ejemplo n.º 21
0
        def _(share):
            """Handle a share received from a peer: if its proof-of-work also
            satisfies the network target, reassemble the full block (new or
            legacy format depending on masternode payments) and submit it
            to bitcoind.
            """
            # Only shares whose PoW hash meets the parent-chain target are
            # candidate blocks.
            if not (share.pow_hash <= share.header['bits'].target):
                return
            
            # Block format depends on whether masternode payments are active;
            # the new format also carries the current masternode votes.
            if self.bitcoind_work.value['masternode_payments']:
                block = share.as_block(self.tracker, self.known_txs_var.value, self.bitcoind_work.value['votes'])
            else:
                block = share.as_block_old(self.tracker, self.known_txs_var.value)

            if block is None:
                print >>sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                return
            helper.submit_block(block, True, self.factory, self.bitcoind, self.bitcoind_work, self.net)
            print
            print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
            print
Ejemplo n.º 22
0
 def _(share):
     """Handle a share received from a peer: if its proof-of-work also
     satisfies the bitcoin network target, reassemble the full block,
     submit it to bitcoind, and record the pool reward in the p2pmining
     database.
     """
     # Only shares whose PoW hash meets the parent-chain target are
     # candidate blocks.
     if not (share.pow_hash <= share.header['bits'].target):
         return
     
     # Reassemble the block; returns None when transactions are missing.
     block = share.as_block(self.tracker, self.known_txs_var.value)
     if block is None:
         print >>sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
         return
     helper.submit_block(block, True, self.factory, self.bitcoind, self.bitcoind_work, self.net)
     print
     print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
     print
     #p2pmining
     # Record the found block's header hash as a pool reward, then close
     # the database handle.
     p2pm_data = p2pm_database.P2PminingData()
     p2pm_data.record_pool_rewards('%064x' % share.header_hash)
     p2pm_data.close()
Ejemplo n.º 23
0
Archivo: main.py Proyecto: gyver/p2pool
 def p2p_share_hashes(share_hashes, peer):
     """Handle advertised share hashes from a peer: request the ones we
     don't have (throttled per hash with exponential backoff) and record
     the peer as a holder of the first advertised head.
     """
     t = time.time()
     get_hashes = []
     for share_hash in share_hashes:
         if share_hash in tracker.shares:
             continue
         # Throttle: skip if requested recently (backoff grows 1.5x/try).
         last_request_time, count = requested.get(share_hash, (None, 0))
         if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
             continue
         print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
         get_hashes.append(share_hash)
         requested[share_hash] = t, count + 1
     
     if share_hashes and peer is not None:
         peer_heads.setdefault(share_hashes[0], set()).add(peer)
     if get_hashes:
         peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
Ejemplo n.º 24
0
        def p2p_share_hashes(share_hashes, peer):
            """Handle advertised share hashes from a peer: request the ones
            we don't have (throttled per hash with exponential backoff) and
            record the peer as a holder of the first advertised head.
            """
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                # Throttle: skip if requested recently (backoff grows 1.5x/try).
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (
                    p2pool_data.format_hash(share_hash), )
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1

            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
Ejemplo n.º 25
0
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                submit_block(share.as_block(tracker), ignore_failure=True)
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (
                    p2pool_data.format_hash(share.hash),
                    net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                print

                def spread():
                    if (get_height_rel_highest(share.header['previous_block'])
                            > -5 or bitcoind_work.value['previous_block'] in [
                                share.header['previous_block'],
                                share.header_hash
                            ]):
                        broadcast_share(share.hash)

                spread()
                reactor.callLater(
                    5, spread)  # so get_height_rel_highest can update
Ejemplo n.º 26
0
        def got_response(header, user, coinbase_nonce):
            """Handle a solved work unit submitted by a miner (dash variant).

            Rebuilds the generation tx with the miner-supplied coinbase
            nonce, submits a full block to dashd when the hash meets the
            block target, forwards merged-mining proofs, records any
            resulting p2pool share, and tracks pseudoshare statistics.

            Returns True when the submission was on time (i.e. for the
            current work unit).
            """
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            # Splice the nonce into the packed gentx just before the 4-byte
            # suffix; an all-zero nonce means "leave the gentx unchanged".
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[
                -4:] if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = dash_data.tx_type.unpack(
                new_packed_gentx
            ) if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else gentx

            header_hash = self.node.net.PARENT.BLOCKHASH_FUNC(
                dash_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(
                dash_data.block_header_type.pack(header))
            try:
                # Block-level solution: submit to dashd (always in DEBUG).
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header,
                             txs=[new_gentx] + other_transactions), False,
                        self.node.factory, self.node.dashd,
                        self.node.dashd_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to dashd! %s%064x' % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash)
                        print
                        # New block found
                        self.node.factory.new_block.happened(header_hash)
            except:
                log.err(None, 'Error while processing potential block:')

            user, _, _, _ = self.get_user_details(user)
            # Sanity: the returned header must match the work we handed out.
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == dash_data.check_merkle_link(
                dash_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']

            # On time iff no new work was announced since this job was issued.
            on_time = self.new_work_event.times == lp_count

            # Submit to any merged-mining chains whose target is also met.
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry(
                            'Error submitting merged block: (will retry)', 10,
                            10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                pack.IntType(256, 'big').pack(
                                    aux_work['hash']).encode('hex'),
                                dash_data.aux_pow_type.pack(
                                    dict(
                                        merkle_tx=dict(
                                            tx=new_gentx,
                                            block_hash=header_hash,
                                            merkle_link=merkle_link,
                                        ),
                                        merkle_link=dash_data.
                                        calculate_merkle_link(hashes, index),
                                        parent_block_header=header,
                                    )).encode('hex'),
                            )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >> sys.stderr, 'Merged block submittal result: %s Expected: %s' % (
                                    result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (
                                    result, )

                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')

            # Share-level solution: record it unless this header was already
            # seen (duplicate submission).
            if pow_hash <= share_info[
                    'bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(
                    8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header['bits'].target or
                            p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')

                self.share_received.happened(
                    dash_data.target_to_average_attempts(share.target),
                    not on_time, share.hash)

            # Pseudoshare accounting / rejection reporting.
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (
                    user, )
                print '    Hash:   %56x' % (pow_hash, )
                print '    Target: %56x' % (target, )
            elif header_hash in received_header_hashes:
                print >> sys.stderr, 'Worker %s submitted share more than once!' % (
                    user, )
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(
                    dash_data.target_to_average_attempts(target), not on_time,
                    user)
                self.recent_shares_ts_work.append(
                    (time.time(),
                     dash_data.target_to_average_attempts(target)))
                # Keep only the 50 most recent datapoints for rate estimation.
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=dash_data.target_to_average_attempts(target),
                         dead=not on_time,
                         user=user,
                         share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(
                    dict(work=dash_data.target_to_average_attempts(target),
                         pubkey_hash=pubkey_hash))

            return on_time
Ejemplo n.º 27
0
 def got_response(header, request):
     """Handle a getwork solution submitted by a worker (legacy variant).

     Matches the returned header's merkle root back to the transaction set
     it was issued with, submits a full block to bitcoind when the
     proof-of-work meets the block target, attempts merged-mining
     submission, and records the share locally.

     Returns True when the share built on the current best share, False on
     any rejection or error.
     """
     try:
         user = worker_interface.get_username(request)
         # match up with transactions
         xxx = merkle_root_to_transactions.get(header['merkle_root'], None)
         if xxx is None:
             print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
             return False
         share_info, transactions, getwork_time = xxx
         
         hash_ = bitcoin_data.block_header_type.hash256(header)
         
         pow_hash = net.BITCOIN_POW_FUNC(header)
         
         # Block-level solution: hand the full block to bitcoind.
         if pow_hash <= header['target'] or p2pool.DEBUG:
             if factory.conn.value is not None:
                 factory.conn.value.send_block(block=dict(header=header, txs=transactions))
             else:
                 print 'No bitcoind connection! Erp!'
             if pow_hash <= header['target']:
                 print
                 print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                 print
         
         # Merged-mining: submit aux proof-of-work if its target is met too.
         if current_work.value['aux_work'] is not None and pow_hash <= current_work.value['aux_work']['target']:
             try:
                 aux_pow = dict(
                     merkle_tx=dict(
                         tx=transactions[0],
                         block_hash=hash_,
                         merkle_branch=[x['hash'] for x in p2pool_data.calculate_merkle_branch(transactions, 0)],
                         index=0,
                     ),
                     merkle_branch=[],
                     index=0,
                     parent_block_header=header,
                 )
                 
                 # NOTE(review): presumably the 32-byte aux hash sits at
                 # offset [-40:-8] of the coinbase script — confirm against
                 # the generation-tx builder.
                 a, b = transactions[0]['tx_ins'][0]['script'][-32-8:-8].encode('hex'), bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
                 #print a, b
                 merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
                 def _(res):
                     print "MERGED RESULT:", res
                 merged.rpc_getauxblock(a, b).addBoth(_)
             except:
                 log.err(None, 'Error while processing merged mining POW:')
         
         target = share_info['target']
         if pow_hash > target:
             print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (pow_hash, target)
             return False
         share = p2pool_data.Share(net, header, share_info, other_txs=transactions[1:])
         my_shares.add(share.hash)
         # Dead-on-arrival: best share changed while this one was mined.
         if share.previous_hash != current_work.value['best_share_hash']:
             doa_shares.add(share.hash)
         print 'GOT SHARE! %s %s prev %s age %.2fs' % (user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
         good = share.previous_hash == current_work.value['best_share_hash']
         # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
         p2p_shares([share])
         # eg. good = share.hash == current_work.value['best_share_hash'] here
         return good
     except:
         log.err(None, 'Error processing data received from worker:')
         return False
Ejemplo n.º 28
0
        def got_response(header, request):
            """Handle a getwork solution from a worker (bitcoin variant).

            Submits a full block to bitcoind when the proof-of-work meets
            the block target, forwards merged-mining proofs, records any
            resulting p2pool share, and tracks pseudoshare statistics.

            Returns True when the submission was on time (i.e. for the
            current work unit).
            """
            header_hash = bitcoin_data.hash256(
                bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(
                bitcoin_data.block_header_type.pack(header))
            try:
                # Block-level solution: submit to bitcoind (always in DEBUG).
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header,
                                             txs=transactions), False,
                                        self.node.factory, self.node.bitcoind,
                                        self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')

            user, _, _, _ = self.get_user_details(request)
            # Sanity: the returned header must match the work we handed out.
            assert header['previous_block'] == ba.previous_block
            assert header['merkle_root'] == ba.merkle_root
            assert header['bits'] == ba.bits

            # On time iff no new work was announced since this job was issued.
            on_time = self.new_work_event.times == lp_count

            # Submit to any merged-mining chains whose target is also met.
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry(
                            'Error submitting merged block: (will retry)', 10,
                            10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                pack.IntType(256, 'big').pack(
                                    aux_work['hash']).encode('hex'),
                                bitcoin_data.aux_pow_type.pack(
                                    dict(
                                        merkle_tx=dict(
                                            tx=transactions[0],
                                            block_hash=header_hash,
                                            merkle_link=merkle_link,
                                        ),
                                        merkle_link=bitcoin_data.
                                        calculate_merkle_link(hashes, index),
                                        parent_block_header=header,
                                    )).encode('hex'),
                            )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >> sys.stderr, 'Merged block submittal result: %s Expected: %s' % (
                                    result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (
                                    result, )

                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')

            # Share-level solution: record it unless this header was already
            # seen (duplicate submission).
            if pow_hash <= share_info[
                    'bits'].target and header_hash not in received_header_hashes:
                share = get_share(header, transactions)

                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    request.getUser(),
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                if not p2pool.DEBUG:
                    self.node.tracker.verified.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header['bits'].target or
                            p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')

                self.share_received.happened(
                    bitcoin_data.target_to_average_attempts(share.target),
                    not on_time)

            # Pseudoshare accounting / rejection reporting.
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (
                    request.getUser(), )
                print '    Hash:   %56x' % (pow_hash, )
                print '    Target: %56x' % (target, )
            elif header_hash in received_header_hashes:
                print >> sys.stderr, 'Worker %s @ %s submitted share more than once!' % (
                    request.getUser(), request.getClientIP())
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(
                    bitcoin_data.target_to_average_attempts(target),
                    not on_time, user)
                self.recent_shares_ts_work.append(
                    (time.time(),
                     bitcoin_data.target_to_average_attempts(target)))
                # Keep only the 50 most recent datapoints for rate estimation.
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target),
                         dead=not on_time,
                         user=user))

            return on_time
Ejemplo n.º 29
0
 def _(share):
     if share.pow_hash <= share.header['bits'].target:
         submit_block(share.as_block(tracker), ignore_failure=True)
         print
         print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
         print
         def spread():
             if (get_height_rel_highest(share.header['previous_block']) > -5 or
                 bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
                 broadcast_share(share.hash)
         spread()
         reactor.callLater(5, spread) # so get_height_rel_highest can update
Ejemplo n.º 30
0
        def got_response(header, user, coinbase_nonce):
            t0 = time.time()
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[
                -4:] if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(
                new_packed_gentx
            ) if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else gentx
            if bitcoin_data.is_segwit_tx(
                    gentx
            ):  # reintroduce witness data to the gentx produced by stratum miners
                new_gentx['marker'] = 0
                new_gentx['flag'] = gentx['flag']
                new_gentx['witness'] = gentx['witness']

            header_hash = bitcoin_data.hash256(
                bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(
                bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header,
                             txs=[new_gentx] + other_transactions), False,
                        self.node.factory, self.node.bitcoind,
                        self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')

            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']

            on_time = self.new_work_event.times == lp_count

            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry(
                            'Error submitting merged block: (will retry)', 10,
                            10)(aux_work['merged_proxy'].rpc_getauxblock)(
                                pack.IntType(256, 'big').pack(
                                    aux_work['hash']).encode('hex'),
                                bitcoin_data.aux_pow_type.pack(
                                    dict(
                                        merkle_tx=dict(
                                            tx=new_gentx,
                                            block_hash=header_hash,
                                            merkle_link=merkle_link,
                                        ),
                                        merkle_link=bitcoin_data.
                                        calculate_merkle_link(hashes, index),
                                        parent_block_header=header,
                                    )).encode('hex'),
                            )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >> sys.stderr, 'Merged block submittal result: %s Expected: %s' % (
                                    result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (
                                    result, )

                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')

            if pow_hash <= share_info[
                    'bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(
                    8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )

                # node.py will sometimes forget transactions if bitcoind's work has changed since this stratum
                # job was assigned. Fortunately, the tx_map is still in in our scope from this job, so we can use that
                # to refill it if needed.

                known_txs = self.node.known_txs_var.value
                missing = {
                    hsh: val
                    for (hsh, val) in tx_map.iteritems()
                    if not hsh in known_txs
                }
                if missing:
                    print "Warning: %i transactions were erroneously evicted from known_txs_var. Refilling now." % len(
                        missing)
                    self.node.known_txs_var.add(missing)

                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header['bits'].target or
                            p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')

                self.share_received.happened(
                    bitcoin_data.target_to_average_attempts(share.target),
                    not on_time, share.hash)

            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (
                    user, )
                print '    Hash:   %56x' % (pow_hash, )
                print '    Target: %56x' % (target, )
            elif header_hash in received_header_hashes:
                print >> sys.stderr, 'Worker %s submitted share more than once!' % (
                    user, )
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(
                    bitcoin_data.target_to_average_attempts(target),
                    not on_time, user)
                self.recent_shares_ts_work.append(
                    (time.time(),
                     bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target),
                         dead=not on_time,
                         user=user,
                         share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target),
                         pubkey_hash=pubkey_hash))
            t1 = time.time()
            if p2pool.BENCH and (t1 - t1) > .01:
                print "%8.3f ms for work.py:got_response()" % (
                    (t1 - t0) * 1000.)

            return on_time
Ejemplo n.º 31
0
        def got_response(header, user, coinbase_nonce):
            """Handle a solved work unit from a miner (momentum-PoW variant).

            Rebuilds the generation tx with the miner-supplied coinbase
            nonce, validates the momentum birthday proof, submits a full
            block to bitcoind when the hash meets the block target, forwards
            merged-mining proofs, records any resulting p2pool share, and
            tracks pseudoshare statistics.

            Returns True when the submission was on time, False when the
            momentum check fails.
            """
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            # Splice the nonce into the packed gentx just before the 4-byte
            # suffix; an all-zero nonce means "leave the gentx unchanged".
            new_packed_gentx = (
                packed_gentx[: -self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[-4:]
                if coinbase_nonce != "\0" * self.COINBASE_NONCE_LENGTH
                else packed_gentx
            )
            new_gentx = (
                bitcoin_data.tx_type.unpack(new_packed_gentx)
                if coinbase_nonce != "\0" * self.COINBASE_NONCE_LENGTH
                else gentx
            )

            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                # Block-level solution: submit to bitcoind (always in DEBUG).
                if pow_hash <= header["bits"].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header, txs=[new_gentx] + other_transactions),
                        False,
                        self.node.factory,
                        self.node.bitcoind,
                        self.node.bitcoind_work,
                        self.node.net,
                    )
                    if pow_hash <= header["bits"].target:
                        print
                        print "GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x" % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash,
                        )
                        print
            except:
                log.err(None, "Error while processing potential block:")

            user, _, _, _ = self.get_user_details(user)
            # Sanity: the returned header must match the work we handed out.
            assert header["previous_block"] == ba["previous_block"]
            assert header["merkle_root"] == bitcoin_data.check_merkle_link(
                bitcoin_data.hash256(new_packed_gentx), merkle_link
            )
            assert header["bits"] == ba["bits"]
            # Check momentum using midhash (who knows why it is called that) and birthday values
            midhash = hashlib.sha256(hashlib.sha256(bitcoin_data.block_header_type.pack(header)[:80]).digest()).digest()
            # print 'MIDHASH: {0}'.format(midhash.encode('hex'))
            # print 'A: {0}'.format(header['birthdayA'])
            # print 'B: {0}'.format(header['birthdayB'])
            momentumc = rewardcoin_momentum.checkMomentum(midhash, header["birthdayA"], header["birthdayB"])
            # print momentumc
            # NOTE(review): '== False' presumably guards against a non-bool
            # return from checkMomentum — confirm before simplifying.
            if momentumc == False:
                print "Invalid Momentum from Client!"
                return False

            # On time iff no new work was announced since this job was issued.
            on_time = self.new_work_event.times == lp_count

            # Submit to any merged-mining chains whose target is also met.
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work["target"] or p2pool.DEBUG:
                        df = deferral.retry("Error submitting merged block: (will retry)", 10, 10)(
                            aux_work["merged_proxy"].rpc_getauxblock
                        )(
                            pack.IntType(256, "big").pack(aux_work["hash"]).encode("hex"),
                            bitcoin_data.aux_pow_type.pack(
                                dict(
                                    merkle_tx=dict(tx=new_gentx, block_hash=header_hash, merkle_link=merkle_link),
                                    merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                    parent_block_header=header,
                                )
                            ).encode("hex"),
                        )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work["target"]):
                                print >> sys.stderr, "Merged block submittal result: %s Expected: %s" % (
                                    result,
                                    pow_hash <= aux_work["target"],
                                )
                            else:
                                print "Merged block submittal result: %s" % (result,)

                        @df.addErrback
                        def _(err):
                            log.err(err, "Error submitting merged block:")

                except:
                    log.err(None, "Error while processing merged mining POW:")

            # Share-level solution: record it unless this header was already
            # seen (duplicate submission).
            if pow_hash <= share_info["bits"].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print "GOT SHARE! %s %s prev %s age %.2fs%s" % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    " DEAD ON ARRIVAL" if not on_time else "",
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header["bits"].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, "Error forwarding block solution:")

                self.share_received.happened(
                    bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash
                )

            # Pseudoshare accounting / rejection reporting.
            if pow_hash > target:
                print "Worker %s submitted share with hash > target:" % (user,)
                print "    Hash:   %56x" % (pow_hash,)
                print "    Target: %56x" % (target,)
            elif header_hash in received_header_hashes:
                print >> sys.stderr, "Worker %s submitted share more than once!" % (user,)
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                # Keep only the 50 most recent datapoints for rate estimation.
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(
                        work=bitcoin_data.target_to_average_attempts(target),
                        dead=not on_time,
                        user=user,
                        share_target=share_info["bits"].target,
                    )
                )
                self.local_addr_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash)
                )

            return on_time
Ejemplo n.º 32
0
        def got_response(header, request):
            # Handle a solved block header returned by a worker: forward any
            # full block solution to bitcoind, submit merged-mining (aux) work
            # if satisfied, and record the p2pool share.
            # Returns True iff the share was accepted and built on the current
            # best share ("good"); False on any validation failure or error.
            try:
                user = worker_interface.get_username(request)
                # match up with transactions
                xxx = merkle_root_to_transactions.get(header['merkle_root'],
                                                      None)
                if xxx is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions, getwork_time = xxx

                # Block-header hash (identity) vs. proof-of-work hash
                # (compared against targets) — these differ for non-SHA256d
                # POW functions.
                hash_ = bitcoin_data.block_header_type.hash256(header)

                pow_hash = net.BITCOIN_POW_FUNC(header)

                # A POW hash at or below the network target is a full block
                # solution; in DEBUG mode every submission is sent upstream.
                if pow_hash <= header['target'] or p2pool.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(
                            block=dict(header=header, txs=transactions))
                    else:
                        print 'No bitcoind connection! Erp!'
                    if pow_hash <= header['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (
                            hash_, )
                        print

                # Merged mining: if the hash also satisfies the aux chain's
                # target, build an aux-POW proof and submit it via RPC.
                if current_work.value[
                        'aux_work'] is not None and pow_hash <= current_work.value[
                            'aux_work']['target']:
                    try:
                        aux_pow = dict(
                            merkle_tx=dict(
                                tx=transactions[0],
                                block_hash=hash_,
                                merkle_branch=[
                                    x['hash'] for x in p2pool_data.
                                    calculate_merkle_branch(transactions, 0)
                                ],
                                index=0,
                            ),
                            merkle_branch=[],
                            index=0,
                            parent_block_header=header,
                        )

                        # a: aux block hash embedded in the coinbase script;
                        # b: hex-packed aux-POW structure.
                        a, b = transactions[0]['tx_ins'][0]['script'][
                            -32 - 8:-8].encode(
                                'hex'), bitcoin_data.aux_pow_type.pack(
                                    aux_pow).encode('hex')
                        #print a, b
                        merged = jsonrpc.Proxy(args.merged_url,
                                               (args.merged_userpass, ))

                        def _(res):
                            print "MERGED RESULT:", res

                        merged.rpc_getauxblock(a, b).addBoth(_)
                    except:
                        log.err(None,
                                'Error while processing merged mining POW:')

                # Share-level validation: the POW hash must meet the (easier)
                # share target.
                target = share_info['target']
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (
                        pow_hash, target)
                    return False
                share = p2pool_data.Share(net,
                                          header,
                                          share_info,
                                          other_txs=transactions[1:])
                my_shares.add(share.hash)
                # A share not building on the current best is dead-on-arrival.
                if share.previous_hash != current_work.value['best_share_hash']:
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (
                    user, p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash), time.time() -
                    getwork_time) + (' DEAD ON ARRIVAL' if share.previous_hash
                                     != current_work.value['best_share_hash']
                                     else '')
                good = share.previous_hash == current_work.value[
                    'best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
Ejemplo n.º 33
0
        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH - 4] + coinbase_nonce + packed_gentx[
                -4:] if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = decred_data.tx_type.unpack(
                new_packed_gentx
            ) if coinbase_nonce != '\0' * self.COINBASE_NONCE_LENGTH else gentx

            header_hash = decred_data.hash256(
                decred_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(
                decred_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header,
                             txs=[new_gentx] + other_transactions), False,
                        self.node.factory, self.node.dcrd, self.node.dcrd_work,
                        self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to dcrd! %s%064x' % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')

            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == decred_data.check_merkle_link(
                decred_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']

            on_time = self.new_work_event.times == lp_count

            if pow_hash <= share_info[
                    'bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(
                    8 * self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)

                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                self.node.set_best_share()

                try:
                    if (pow_hash <= header['bits'].target or
                            p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')

                self.share_received.happened(
                    decred_data.target_to_average_attempts(share.target),
                    not on_time, share.hash)

            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (
                    user, )
                print '    Hash:   %56x' % (pow_hash, )
                print '    Target: %56x' % (target, )
            elif header_hash in received_header_hashes:
                print >> sys.stderr, 'Worker %s submitted share more than once!' % (
                    user, )
            else:
                received_header_hashes.add(header_hash)

                self.pseudoshare_received.happened(
                    decred_data.target_to_average_attempts(target),
                    not on_time, user)
                self.recent_shares_ts_work.append(
                    (time.time(),
                     decred_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=decred_data.target_to_average_attempts(target),
                         dead=not on_time,
                         user=user,
                         share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(
                    dict(work=decred_data.target_to_average_attempts(target),
                         pubkey_hash=pubkey_hash))

            return on_time
Ejemplo n.º 34
0
        def got_response(header, user, coinbase_nonce):
            assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
            new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
            new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
            
           
            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                if pow_hash <= header['bits'].target or p2pool.DEBUG:
                    helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
                    if pow_hash <= header['bits'].target:
                        print
                        print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
                        print
            except:
                log.err(None, 'Error while processing potential block:')
            
            user, _, _, _ = self.get_user_details(user)
            assert header['previous_block'] == ba['previous_block']
            assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
            assert header['bits'] == ba['bits']
            
            on_time = self.new_work_event.times == lp_count
            
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work['target'] or p2pool.DEBUG:
                        df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
                            pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
                           # neoscrypt uses little endian only
                           # pack.IntType(256, 'little').pack(aux_work['hash']).encode('hex'),
                            bitcoin_data.aux_pow_type.pack(dict(
                                merkle_tx=dict(
                                    tx=new_gentx,
                                    block_hash=header_hash,
                                    merkle_link=merkle_link,
                                ),
                                merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                parent_block_header=header,
                            )).encode('hex'),
                        )
                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work['target']):
                                print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
                            else:
                                print 'Merged block submittal result: %s' % (result,)
                        @df.addErrback
                        def _(err):
                            log.err(err, 'Error submitting merged block:')
                except:
                    log.err(None, 'Error while processing merged mining POW:')
            
            if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
                last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
                share = get_share(header, last_txout_nonce)
                
                print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
                    user,
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    ' DEAD ON ARRIVAL' if not on_time else '',
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)
                
                self.node.tracker.add(share)
                self.node.set_best_share()
                
                try:
                    if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, 'Error forwarding block solution:')
                
                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash)
	    if p2pool.DEBUG:
		print 'Hash:   %X' % (pow_hash)
		print 'Target: %X' % (target)
            if pow_hash > target:
                print 'Worker %s submitted share with hash > target:' % (user,)
            elif header_hash in received_header_hashes:
                print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
            else:
                received_header_hashes.add(header_hash)
                
                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
                self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))
            
            return on_time
Ejemplo n.º 35
0
        def got_response(header, request):
            # Handle a solved block header returned by a worker: forward any
            # full block solution to bitcoind, submit merged-mining proofs,
            # record the p2pool share, and track pseudoshare statistics.
            # Returns True iff the submission was built against current work
            # (arrived before the latest long-poll).
            header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
            pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
            try:
                # A POW hash at or below the network target is a full block
                # solution; DEBUG mode submits everything for testing.
                if pow_hash <= header["bits"].target or p2pool.DEBUG:
                    helper.submit_block(
                        dict(header=header, txs=transactions),
                        False,
                        self.node.factory,
                        self.node.bitcoind,
                        self.node.bitcoind_work,
                        self.node.net,
                    )
                    if pow_hash <= header["bits"].target:
                        print
                        print "GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x" % (
                            self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                            header_hash,
                        )
                        print
            except:
                log.err(None, "Error while processing potential block:")

            user, _, _, _ = self.get_user_details(request)
            # The returned header must derive from the work we handed out.
            assert header["previous_block"] == ba.previous_block
            assert header["merkle_root"] == ba.merkle_root
            assert header["bits"] == ba.bits

            on_time = self.new_work_event.times == lp_count

            # Submit merged-mining (aux-POW) proofs to every merged daemon
            # whose target this hash satisfies.
            for aux_work, index, hashes in mm_later:
                try:
                    if pow_hash <= aux_work["target"] or p2pool.DEBUG:
                        df = deferral.retry("Error submitting merged block: (will retry)", 10, 10)(
                            aux_work["merged_proxy"].rpc_getauxblock
                        )(
                            pack.IntType(256, "big").pack(aux_work["hash"]).encode("hex"),
                            bitcoin_data.aux_pow_type.pack(
                                dict(
                                    merkle_tx=dict(tx=transactions[0], block_hash=header_hash, merkle_link=merkle_link),
                                    merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
                                    parent_block_header=header,
                                )
                            ).encode("hex"),
                        )

                        @df.addCallback
                        def _(result, aux_work=aux_work):
                            if result != (pow_hash <= aux_work["target"]):
                                print >> sys.stderr, "Merged block submittal result: %s Expected: %s" % (
                                    result,
                                    pow_hash <= aux_work["target"],
                                )
                            else:
                                print "Merged block submittal result: %s" % (result,)

                        @df.addErrback
                        def _(err):
                            log.err(err, "Error submitting merged block:")

                except:
                    log.err(None, "Error while processing merged mining POW:")

            # At or below the share target and not a duplicate: record it.
            if pow_hash <= share_info["bits"].target and header_hash not in received_header_hashes:
                share = get_share(header, transactions)

                print "GOT SHARE! %s %s prev %s age %.2fs%s" % (
                    request.getUser(),
                    p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                    " DEAD ON ARRIVAL" if not on_time else "",
                )
                self.my_share_hashes.add(share.hash)
                if not on_time:
                    self.my_doa_share_hashes.add(share.hash)

                self.node.tracker.add(share)
                if not p2pool.DEBUG:
                    self.node.tracker.verified.add(share)
                self.node.set_best_share()

                try:
                    # Also a block solution? Broadcast the share to peers.
                    if (pow_hash <= header["bits"].target or p2pool.DEBUG) and self.node.p2p_node is not None:
                        self.node.p2p_node.broadcast_share(share.hash)
                except:
                    log.err(None, "Error forwarding block solution:")

                self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time)

            if pow_hash > target:
                print "Worker %s submitted share with hash > target:" % (request.getUser(),)
                print "    Hash:   %56x" % (pow_hash,)
                print "    Target: %56x" % (target,)
            elif header_hash in received_header_hashes:
                print >> sys.stderr, "Worker %s @ %s submitted share more than once!" % (
                    request.getUser(),
                    request.getClientIP(),
                )
            else:
                received_header_hashes.add(header_hash)

                # Account the pseudoshare for rate monitoring.
                self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
                self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
                while len(self.recent_shares_ts_work) > 50:
                    self.recent_shares_ts_work.pop(0)
                self.local_rate_monitor.add_datum(
                    dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user)
                )

            return on_time