コード例 #1
0
    def rpc_getblocktemplate(self, param):
        """Mock dashd getblocktemplate RPC used by the test node.

        'template' mode falls through to return a canned block template;
        'submit' mode validates a hex-encoded serialized block, extends
        the in-memory chain on success (returns True), or returns False
        for a stale or insufficient-work block.
        """
        if param['mode'] == 'template':
            pass
        elif param['mode'] == 'submit':
            result = param['data']  # hex-encoded serialized block
            block = dash_data.block_type.unpack(result.decode('hex'))
            # Coinbase outputs must total the fees (one output per
            # non-coinbase tx here) plus the fixed 5000000000 subsidy.
            # NOTE: a mismatch only prints a warning; it does not reject.
            if sum(tx_out['value']
                   for tx_out in block['txs'][0]['tx_outs']) != sum(
                       tx['tx_outs'][0]['value']
                       for tx in block['txs'][1:]) + 5000000000:
                print 'invalid fee'
            # Reject blocks that do not build on our current tip.
            if block['header']['previous_block'] != self.blocks[-1]:
                return False
            # Reject blocks whose hash does not meet the claimed target.
            if dash_data.hash256(
                    result.decode('hex')) > block['header']['bits'].target:
                return False
            header_hash = dash_data.hash256(
                dash_data.block_header_type.pack(block['header']))
            self.blocks.append(header_hash)
            self.headers[header_hash] = block['header']
            # Fire the new-block event on the next reactor turn.
            reactor.callLater(0, self.new_block.happened)
            return True
        else:
            raise jsonrpc.Error_for_code(-1)('invalid request')

        # 'template' mode: build 100 dummy transactions with fee == index.
        txs = []
        for i in xrange(100):
            fee = i
            txs.append(
                dict(
                    data=dash_data.tx_type.pack(
                        dict(version=1,
                             type=0,
                             tx_ins=[],
                             tx_outs=[dict(value=fee, script='hello!' * 100)],
                             lock_time=0,
                             payload=None)).encode('hex'),
                    fee=fee,
                ))
        return {
            "version": 3,
            "previousblockhash": '%064x' % (self.blocks[-1], ),
            "transactions": txs,
            "coinbaseaux": {
                "flags": "062f503253482f"
            },
            "coinbasevalue": 5000000000 + sum(tx['fee'] for tx in txs),
            "target":
            "00000000000044b9f20000000000000000000000000000000000000000000000",
            "mintime": 1351655621,
            "mutable": ["time", "transactions", "prevblock"],
            "noncerange": "00000000ffffffff",
            "sigoplimit": 20000,
            "sizelimit": 1000000,
            "curtime": 1351659940,
            "bits": "21008000",
            "height": len(self.blocks),
        }
コード例 #2
0
ファイル: test_node.py プロジェクト: DiCE1904/p2pool-drk
    def rpc_getblocktemplate(self, param):
        """Mock getblocktemplate RPC: serves a canned template ('template'
        mode) or validates a submitted serialized block ('submit' mode),
        returning True on acceptance and False on stale/low-work blocks."""
        if param["mode"] == "template":
            pass
        elif param["mode"] == "submit":
            result = param["data"]  # hex-encoded serialized block
            block = dash_data.block_type.unpack(result.decode("hex"))
            # Coinbase output total must equal per-tx fees plus the fixed
            # 5000000000 subsidy.  NOTE: a mismatch only warns, never rejects.
            if (
                sum(tx_out["value"] for tx_out in block["txs"][0]["tx_outs"])
                != sum(tx["tx_outs"][0]["value"] for tx in block["txs"][1:]) + 5000000000
            ):
                print "invalid fee"
            # Stale: not built on our current tip.
            if block["header"]["previous_block"] != self.blocks[-1]:
                return False
            # Proof of work does not meet the claimed target.
            if dash_data.hash256(result.decode("hex")) > block["header"]["bits"].target:
                return False
            header_hash = dash_data.hash256(dash_data.block_header_type.pack(block["header"]))
            self.blocks.append(header_hash)
            self.headers[header_hash] = block["header"]
            # Fire the new-block event on the next reactor iteration.
            reactor.callLater(0, self.new_block.happened)
            return True
        else:
            raise jsonrpc.Error_for_code(-1)("invalid request")

        # 'template' mode: 100 dummy transactions with fee == index.
        txs = []
        for i in xrange(100):
            fee = i
            txs.append(
                dict(
                    data=dash_data.tx_type.pack(
                        dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script="hello!" * 100)], lock_time=0)
                    ).encode("hex"),
                    fee=fee,
                )
            )
        return {
            "version": 2,
            "previousblockhash": "%064x" % (self.blocks[-1],),
            "transactions": txs,
            "coinbaseaux": {"flags": "062f503253482f"},
            "coinbasevalue": 5000000000 + sum(tx["fee"] for tx in txs),
            "target": "0000000000000513c50000000000000000000000000000000000000000000000",
            "mintime": 1351655621,
            "mutable": ["time", "transactions", "prevblock"],
            "noncerange": "00000000ffffffff",
            "sigoplimit": 20000,
            "sizelimit": 1000000,
            "curtime": 1351659940,
            "bits": "21008000",
            "height": len(self.blocks),
        }
コード例 #3
0
ファイル: test_node.py プロジェクト: coins-dev/p2pool-dash
 def rpc_getblocktemplate(self, param):
     if param['mode'] == 'template':
         pass
     elif param['mode'] == 'submit':
         result = param['data']
         block = dash_data.block_type.unpack(result.decode('hex'))
         if sum(tx_out['value'] for tx_out in block['txs'][0]['tx_outs']) != sum(tx['tx_outs'][0]['value'] for tx in block['txs'][1:]) + 5000000000:
             print 'invalid fee'
         if block['header']['previous_block'] != self.blocks[-1]:
             return False
         if dash_data.hash256(result.decode('hex')) > block['header']['bits'].target:
             return False
         header_hash = dash_data.hash256(dash_data.block_header_type.pack(block['header']))
         self.blocks.append(header_hash)
         self.headers[header_hash] = block['header']
         reactor.callLater(0, self.new_block.happened)
         return True
     else:
         raise jsonrpc.Error_for_code(-1)('invalid request')
     
     txs = []
     for i in xrange(100):
         fee = i
         txs.append(dict(
             data=dash_data.tx_type.pack(dict(version=1, tx_ins=[], tx_outs=[dict(value=fee, script='hello!'*100)], lock_time=0)).encode('hex'),
             fee=fee,
         ))
     return {
         "version" : 3,
         "previousblockhash" : '%064x' % (self.blocks[-1],),
         "transactions" : txs,
         "coinbaseaux" : {
             "flags" : "062f503253482f"
         },
         "coinbasevalue" : 5000000000 + sum(tx['fee'] for tx in txs),
         "target" : "00000000000044b9f20000000000000000000000000000000000000000000000",
         "mintime" : 1351655621,
         "mutable" : [
             "time",
             "transactions",
             "prevblock"
         ],
         "noncerange" : "00000000ffffffff",
         "sigoplimit" : 20000,
         "sizelimit" : 1000000,
         "curtime" : 1351659940,
         "bits" : "21008000",
         "height" : len(self.blocks),
     }
コード例 #4
0
    def handle_shares(self, shares, peer):
        """Ingest (share, new_txs) pairs received from *peer*.

        Registers any accompanying transactions with known_txs_var, adds
        unseen shares to the tracker, and recomputes the best share if
        anything new arrived.
        """
        if len(shares) > 5:
            print 'Processing %i shares from %s...' % (
                len(shares), '%s:%i' % peer.addr if peer is not None else None)

        new_count = 0
        all_new_txs = {}
        for share, new_txs in shares:
            if new_txs is not None:
                # Key each accompanying transaction by its hash256.
                all_new_txs.update(
                    (dash_data.hash256(dash_data.tx_type.pack(new_tx)), new_tx)
                    for new_tx in new_txs)

            if share.hash in self.node.tracker.items:
                #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                continue

            new_count += 1

            #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer_addr)

            self.node.tracker.add(share)

        self.node.known_txs_var.add(all_new_txs)

        if new_count:
            self.node.set_best_share()

        if len(shares) > 5:
            print '... done processing %i shares. New: %i Have: %i/~%i' % (
                len(shares), new_count, len(
                    self.node.tracker.items), 2 * self.node.net.CHAIN_LENGTH)
コード例 #5
0
 def test_tx_hash(self):
     """A known coinbase transaction must pack and hash to its known txid."""
     coinbase_script = (
         '70736a0468860e1a0452389500522cfabe6d6d2b2f33cf8f6291b184f1b291d24d82229463fcec239afea0ee34b4bfc622f62401000000000000004d696e656420627920425443204775696c6420ac1eeeed88'
     ).decode('hex')
     payout_script = data.pubkey_hash_to_script2(
         pack.IntType(160).unpack(
             'ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.decode('hex')))
     tx = dict(
         version=1,
         tx_ins=[dict(
             previous_output=None,
             sequence=None,
             script=coinbase_script,
         )],
         tx_outs=[dict(
             value=5003880250,
             script=payout_script,
         )],
         lock_time=0,
     )
     assert data.hash256(data.tx_type.pack(tx)) == 0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c
コード例 #6
0
ファイル: data.py プロジェクト: sriharikapu/p2pool-dash
 def check(self, tracker):
     """Validate this share against the chain held in *tracker*.

     Enforces the share-version switch rules against the previous share,
     rebuilds the generation transaction, and verifies that share_info,
     the gentx hash link, and the merkle link all match what was
     received. Raises p2p.PeerMisbehavingError or ValueError on failure.
     Returns the rebuilt gentx (only used by as_block).
     """
     from p2pool import p2p
     if self.share_data['previous_share_hash'] is not None:
         previous_share = tracker.items[self.share_data['previous_share_hash']]
         if type(self) is type(previous_share):
             pass
         elif type(self) is type(previous_share).SUCCESSOR:
             # A successor share type may only take over once enough history exists.
             if tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
                 from p2pool import p2p
                 raise p2p.PeerMisbehavingError('switch without enough history')

             # switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
             counts = get_desired_version_counts(tracker,
                 tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
             if counts.get(self.VERSION, 0) < sum(counts.itervalues())*85//100:
                 raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
         else:
             raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))

     # Resolve the referenced transaction hashes out of ancestor shares.
     other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]

     # Re-generate the share's transaction and confirm it matches what we received.
     share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net, last_txout_nonce=self.contents['last_txout_nonce'])
     assert other_tx_hashes2 == other_tx_hashes
     if share_info != self.share_info:
         raise ValueError('share_info invalid')
     if dash_data.hash256(dash_data.tx_type.pack(gentx)) != self.gentx_hash:
         raise ValueError('''gentx doesn't match hash_link''')

     if dash_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link:
         raise ValueError('merkle_link and other_tx_hashes do not match')

     return gentx # only used by as_block
コード例 #7
0
ファイル: test_data.py プロジェクト: 690254282/p2pool-dash
 def test_hashlink3(self):
     """check_hash_link with a known suffix must agree with a plain hash256."""
     for _ in xrange(100):
         prefix = random_bytes(random.randrange(2048))
         known = random_bytes(random.randrange(200))
         extra = random_bytes(random.randrange(2048))
         link = data.prefix_to_hash_link(prefix + known, known)
         assert data.check_hash_link(link, extra, known) == dash_data.hash256(prefix + known + extra)
コード例 #8
0
ファイル: data.py プロジェクト: coins-dev/p2pool-dash
 def check(self, tracker):
     """Full consistency check of this share against *tracker*'s chain:
     version-switch rules, regenerated gentx, share_info, hash link and
     merkle link must all agree. Raises p2p.PeerMisbehavingError or
     ValueError on any mismatch; returns the rebuilt gentx (only used
     by as_block)."""
     from p2pool import p2p
     if self.share_data['previous_share_hash'] is not None:
         previous_share = tracker.items[self.share_data['previous_share_hash']]
         if type(self) is type(previous_share):
             pass
         elif type(self) is type(previous_share).SUCCESSOR:
             # Switching to a successor type requires a full history window.
             if tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
                 from p2pool import p2p
                 raise p2p.PeerMisbehavingError('switch without enough history')

             # switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
             counts = get_desired_version_counts(tracker,
                 tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
             if counts.get(self.VERSION, 0) < sum(counts.itervalues())*85//100:
                 raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
         else:
             raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))

     # Look up the transaction hashes this share references in its ancestors.
     other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]

     # Deterministically regenerate the share transaction and compare.
     share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net, last_txout_nonce=self.contents['last_txout_nonce'])
     assert other_tx_hashes2 == other_tx_hashes
     if share_info != self.share_info:
         raise ValueError('share_info invalid')
     if dash_data.hash256(dash_data.tx_type.pack(gentx)) != self.gentx_hash:
         raise ValueError('''gentx doesn't match hash_link''')

     if dash_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link:
         raise ValueError('merkle_link and other_tx_hashes do not match')

     return gentx # only used by as_block
コード例 #9
0
ファイル: node.py プロジェクト: coins-dev/p2pool-dash
 def handle_shares(self, shares, peer):
     """Ingest (share, new_txs) pairs from *peer*: register the incoming
     transactions, add unseen shares to the tracker, and refresh the best
     share if anything new arrived."""
     if len(shares) > 5:
         print 'Processing %i shares from %s...' % (len(shares), '%s:%i' % peer.addr if peer is not None else None)

     new_count = 0
     all_new_txs = {}
     for share, new_txs in shares:
         if new_txs is not None:
             # Key each accompanying transaction by its hash256.
             all_new_txs.update((dash_data.hash256(dash_data.tx_type.pack(new_tx)), new_tx) for new_tx in new_txs)

         if share.hash in self.node.tracker.items:
             #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
             continue

         new_count += 1

         #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer_addr)

         self.node.tracker.add(share)

     self.node.known_txs_var.add(all_new_txs)

     if new_count:
         self.node.set_best_share()

     if len(shares) > 5:
         print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(self.node.tracker.items), 2*self.node.net.CHAIN_LENGTH)
コード例 #10
0
ファイル: stratum.py プロジェクト: mdilai/p2pool-dash
    def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce,
                   *args):
        """Stratum mining.submit handler.

        Reassembles the block header from the stored job *job_id* and the
        miner-supplied extranonce2/ntime/nonce, forwards it to the job's
        response callback, then disconnects miners whose DOA or
        invalid-hash ratios are excessive.
        """
        if job_id not in self.handler_map:
            print >> sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
            #self.other.svc_client.rpc_reconnect().addErrback(lambda err: None)
            return False
        x, got_response = self.handler_map[job_id]
        coinb_nonce = extranonce2.decode('hex')
        assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
        # Full generation tx = fixed prefix + miner nonce + fixed suffix.
        new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
        header = dict(
            version=x['version'],
            previous_block=x['previous_block'],
            merkle_root=dash_data.check_merkle_link(
                dash_data.hash256(new_packed_gentx), x['merkle_link']),
            timestamp=pack.IntType(32).unpack(
                getwork._swap4(ntime.decode('hex'))),
            bits=x['bits'],
            nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
        )
        res = got_response(header, worker_name, coinb_nonce)

        # Disconnect miners with large DOA rates to prevent DoS
        if len(self.wb._inner.my_share_hashes) > 20:
            if float(len(self.wb._inner.my_doa_share_hashes)) / float(
                    len(self.wb._inner.my_share_hashes)) > 0.60:
                self.transport.loseConnection()

        # Disconnect miners with large hash > target to prevent DoS
        if self.wb._inner.total_hashes > 20:
            if float(self.wb._inner.invalid_hashes) / float(
                    self.wb._inner.total_hashes) > 0.05:
                self.transport.loseConnection()

        return res
コード例 #11
0
 def test_hashlink3(self):
     """Hash links must extend correctly when both a known suffix and extra data follow."""
     for _ in xrange(100):
         head = random_bytes(random.randrange(2048))
         mid = random_bytes(random.randrange(200))
         tail = random_bytes(random.randrange(2048))
         link = data.prefix_to_hash_link(head + mid, mid)
         expected = dash_data.hash256(head + mid + tail)
         assert data.check_hash_link(link, tail, mid) == expected
コード例 #12
0
ファイル: helper.py プロジェクト: poiuty/p2pool-dash
def submit_block_p2p(block, factory, net):
    """Send *block* to dashd over p2p; raise RetrySilentlyException when
    there is no active connection (so the caller retries later)."""
    conn = factory.conn.value
    if conn is None:
        header_hash = dash_data.hash256(dash_data.block_header_type.pack(block["header"]))
        print >>sys.stderr, "No dashd connection when block submittal attempted! %s%064x" % (
            net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
        raise deferral.RetrySilentlyException()
    conn.send_block(block=block)
コード例 #13
0
ファイル: helper.py プロジェクト: AYCH-Inc/aych.pool
def submit_block_p2p(block, factory, net):
    """Forward *block* to dashd via the p2p factory.

    If no connection is up, log the block's explorer URL to stderr and
    raise deferral.RetrySilentlyException so the submittal is retried.
    """
    if factory.conn.value is not None:
        factory.conn.value.send_block(block=block)
        return
    packed_header = dash_data.block_header_type.pack(block['header'])
    print >> sys.stderr, 'No dashd connection when block submittal attempted! %s%064x' % (
        net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
        dash_data.hash256(packed_header))
    raise deferral.RetrySilentlyException()
コード例 #14
0
ファイル: data.py プロジェクト: komato3huk/p2pool-pac
 def get_ref_hash(cls, net, share_info, ref_merkle_link):
     """Return the packed 256-bit reference hash committing to
     (net.IDENTIFIER, share_info) through *ref_merkle_link*."""
     packed_ref = cls.ref_type.pack(dict(
         identifier=net.IDENTIFIER,
         share_info=share_info,
     ))
     ref_hash = dash_data.check_merkle_link(
         dash_data.hash256(packed_ref), ref_merkle_link)
     return pack.IntType(256).pack(ref_hash)
コード例 #15
0
ファイル: test_data.py プロジェクト: thelazier/p2pool-dash
 def test_header_hash(self):
     """A known mainnet block header must hash to its known block hash."""
     header = dict(
         version=1,
         previous_block=0x000000000000038a2a86b72387f93c51298298a732079b3b686df3603d2f6282,
         merkle_root=0x37a43a3b812e4eb665975f46393b4360008824aab180f27d642de8c28073bc44,
         timestamp=1323752685,
         bits=data.FloatingInteger(437159528),
         nonce=3658685446,
     )
     packed = data.block_header_type.pack(header)
     assert data.hash256(packed) == 0x000000000000003aaaf7638f9f9c0d0c60e8b0eb817dcdb55fd2b1964efc5175
コード例 #16
0
 def test_header_hash(self):
     """hash256 of a packed known header must equal the expected block hash."""
     expected = 0x000000000000003aaaf7638f9f9c0d0c60e8b0eb817dcdb55fd2b1964efc5175
     fields = {
         'version': 1,
         'previous_block': 0x000000000000038a2a86b72387f93c51298298a732079b3b686df3603d2f6282,
         'merkle_root': 0x37a43a3b812e4eb665975f46393b4360008824aab180f27d642de8c28073bc44,
         'timestamp': 1323752685,
         'bits': data.FloatingInteger(437159528),
         'nonce': 3658685446,
     }
     assert data.hash256(data.block_header_type.pack(fields)) == expected
コード例 #17
0
ファイル: test_p2p.py プロジェクト: coins-dev/p2pool-dash
 def test_get_block(self):
     """Fetch a known block over local p2p and verify merkle root and header hash."""
     factory = p2p.ClientFactory(networks.nets['dash'])
     connector = reactor.connectTCP('127.0.0.1', 9999, factory)
     try:
         want = 0x00000000000132b9afeca5e9a2fdf4477338df6dcff1342300240bc70397c4bb
         fetch = defer.inlineCallbacks(lambda: defer.returnValue((yield (yield factory.getProtocol()).get_block(want))))
         block = yield deferral.retry()(fetch)()
         tx_hashes = map(data.hash256, map(data.tx_type.pack, block['txs']))
         assert data.merkle_hash(tx_hashes) == block['header']['merkle_root']
         assert data.hash256(data.block_header_type.pack(block['header'])) == want
     finally:
         factory.stopTrying()
         connector.disconnect()
コード例 #18
0
ファイル: test_p2p.py プロジェクト: wuhuaping/p2pool-dash
 def test_get_block(self):
     """Download a known block from a local node and check its integrity."""
     net = networks.nets['dash']
     factory = p2p.ClientFactory(net)
     connector = reactor.connectTCP('127.0.0.1', 8333, factory)
     try:
         target = 0x000000000000046acff93b0e76cd10490551bf871ce9ac9fad62e67a07ff1d1e
         getter = defer.inlineCallbacks(lambda: defer.returnValue((yield (yield factory.getProtocol()).get_block(target))))
         block = yield deferral.retry()(getter)()
         packed_txs = map(data.tx_type.pack, block['txs'])
         assert data.merkle_hash(map(data.hash256, packed_txs)) == block['header']['merkle_root']
         assert data.hash256(data.block_header_type.pack(block['header'])) == target
     finally:
         factory.stopTrying()
         connector.disconnect()
コード例 #19
0
 def _getwork(self, request, data, long_poll):
     """Serve a getwork HTTP request (inlineCallbacks-style generator).

     If *data* is present this is a solution submittal, routed to the
     handler remembered for its merkle root. Otherwise new work is
     produced — optionally long-polling until the bridge announces new
     work for this client — and returned via defer.returnValue.
     """
     request.setHeader('X-Long-Polling', '/long-polling')
     request.setHeader('X-Roll-NTime', 'expire=100')
     request.setHeader('X-Is-P2Pool', 'true')
     if request.getHeader('Host') is not None:
         request.setHeader('X-Stratum', 'stratum+tcp://' + request.getHeader('Host'))

     # Submittal path: decode the header and dispatch by merkle root.
     if data is not None:
         header = getwork.decode_data(data)
         if header['merkle_root'] not in self.merkle_root_to_handler:
             print >>sys.stderr, '''Couldn't link returned work's merkle root with its handler. This should only happen if this process was recently restarted!'''
             defer.returnValue(False)
         defer.returnValue(self.merkle_root_to_handler[header['merkle_root']](header, request.getUser() if request.getUser() is not None else '', '\0'*self.worker_bridge.COINBASE_NONCE_LENGTH))

     if p2pool.DEBUG:
         id = random.randrange(1000, 10000)
         print 'POLL %i START is_long_poll=%r user_agent=%r user=%r' % (id, long_poll, request.getHeader('User-Agent'), request.getUser())

     # Long-poll: wait until this client has not yet seen the current work.
     if long_poll:
         request_id = request.getClientIP(), request.getHeader('Authorization')
         if self.worker_views.get(request_id, self.worker_bridge.new_work_event.times) != self.worker_bridge.new_work_event.times:
             if p2pool.DEBUG:
                 print 'POLL %i PUSH' % (id,)
         else:
             if p2pool.DEBUG:
                 print 'POLL %i WAITING' % (id,)
             yield self.worker_bridge.new_work_event.get_deferred()
         self.worker_views[request_id] = self.worker_bridge.new_work_event.times

     x, handler = self.worker_bridge.get_work(*self.worker_bridge.preprocess_request(request.getUser() if request.getUser() is not None else ''))
     # Merkle root is derived with a zeroed coinbase nonce placeholder.
     res = getwork.BlockAttempt(
         version=x['version'],
         previous_block=x['previous_block'],
         merkle_root=dash_data.check_merkle_link(dash_data.hash256(x['coinb1'] + '\0'*self.worker_bridge.COINBASE_NONCE_LENGTH + x['coinb2']), x['merkle_link']),
         timestamp=x['timestamp'],
         bits=x['bits'],
         share_target=x['share_target'],
     )
     assert res.merkle_root not in self.merkle_root_to_handler

     # Remember the handler so the eventual submittal can be routed back.
     self.merkle_root_to_handler[res.merkle_root] = handler

     if p2pool.DEBUG:
         print 'POLL %i END identifier=%i' % (id, self.worker_bridge.new_work_event.times)

     extra_params = {}
     if request.getHeader('User-Agent') == 'Jephis PIC Miner':
         # ASICMINER BE Blades apparently have a buffer overflow bug and
         # can't handle much extra in the getwork response
         extra_params = {}
     else:
         extra_params = dict(identifier=str(self.worker_bridge.new_work_event.times), submitold=True)
     defer.returnValue(res.getwork(**extra_params))
コード例 #20
0
ファイル: test_data.py プロジェクト: coins-dev/p2pool-dash
 def test_tx_hash(self):
     """Pack a known coinbase transaction and verify its txid."""
     tx = dict(
         version=1,
         tx_ins=[dict(
             previous_output=None,
             sequence=None,
             script='70736a0468860e1a0452389500522cfabe6d6d2b2f33cf8f6291b184f1b291d24d82229463fcec239afea0ee34b4bfc622f62401000000000000004d696e656420627920425443204775696c6420ac1eeeed88'.decode('hex'),
         )],
         tx_outs=[dict(
             value=5003880250,
             script=data.pubkey_hash_to_script2(pack.IntType(160).unpack('ca975b00a8c203b8692f5a18d92dc5c2d2ebc57b'.decode('hex'))),
         )],
         lock_time=0,
     )
     expected = 0xb53802b2333e828d6532059f46ecf6b313a42d79f97925e457fbbfda45367e5c
     assert data.hash256(data.tx_type.pack(tx)) == expected
コード例 #21
0
ファイル: stratum.py プロジェクト: DiCE1904/p2pool-drk
 def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
     """Stratum mining.submit handler: rebuild the block header from the
     remembered job and hand it to the job's response callback."""
     try:
         x, got_response = self.handler_map[job_id]
     except KeyError:
         print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
         return False
     coinb_nonce = extranonce2.decode('hex')
     assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
     # Full generation tx = fixed prefix + miner nonce + fixed suffix.
     gentx_data = x['coinb1'] + coinb_nonce + x['coinb2']
     merkle_root = dash_data.check_merkle_link(dash_data.hash256(gentx_data), x['merkle_link'])
     header = dict(
         version=x['version'],
         previous_block=x['previous_block'],
         merkle_root=merkle_root,
         timestamp=pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex'))),
         bits=x['bits'],
         nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
     )
     return got_response(header, worker_name, coinb_nonce)
コード例 #22
0
ファイル: test_p2p.py プロジェクト: AYCH-Inc/aych.pool
 def test_get_block(self):
     """Fetch a known block over p2p; its merkle root and header hash must check out."""
     factory = p2p.ClientFactory(networks.nets['dash'])
     conn = reactor.connectTCP('127.0.0.1', 9999, factory)
     try:
         block_hash = 0x00000000000132b9afeca5e9a2fdf4477338df6dcff1342300240bc70397c4bb
         fetcher = deferral.retry()(defer.inlineCallbacks(
             lambda: defer.returnValue((yield (yield factory.getProtocol()).get_block(block_hash)))))
         block = yield fetcher()
         hashes = map(data.hash256, map(data.tx_type.pack, block['txs']))
         assert data.merkle_hash(hashes) == block['header']['merkle_root']
         assert data.hash256(data.block_header_type.pack(block['header'])) == block_hash
     finally:
         factory.stopTrying()
         conn.disconnect()
コード例 #23
0
ファイル: p2p.py プロジェクト: mygirl8893/p2pool-dash
    def handle_remember_tx(self, tx_hashes, txs):
        """Handle a peer's remember_tx message.

        *tx_hashes* reference transactions we should already know (from
        known_txs_var or the per-peer latency caches); *txs* are full
        transactions sent inline. All are stored in self.remembered_txs.
        Duplicate or unknown references disconnect the peer; exceeding
        max_remembered_txs_size raises PeerMisbehavingError.
        """
        for tx_hash in tx_hashes:
            if tx_hash in self.remembered_txs:
                print >> sys.stderr, 'Peer referenced transaction twice, disconnecting'
                self.disconnect()
                return

            if tx_hash in self.node.known_txs_var.value:
                tx = self.node.known_txs_var.value[tx_hash]
            else:
                # Fall back to recently-dropped transactions kept per peer.
                for cache in self.known_txs_cache.itervalues():
                    if tx_hash in cache:
                        tx = cache[tx_hash]
                        print 'Transaction %064x rescued from peer latency cache!' % (
                            tx_hash, )
                        break
                else:
                    print >> sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (
                        tx_hash, )
                    self.disconnect()
                    return

            self.remembered_txs[tx_hash] = tx
            # 100 bytes of bookkeeping overhead per tx plus its packed size.
            self.remembered_txs_size += 100 + dash_data.tx_type.packed_size(tx)
        added_known_txs = {}
        warned = False
        for tx in txs:
            tx_hash = dash_data.hash256(dash_data.tx_type.pack(tx))
            if tx_hash in self.remembered_txs:
                print >> sys.stderr, 'Peer referenced transaction twice, disconnecting'
                self.disconnect()
                return

            # Warn (once per message) about redundantly-sent transactions.
            if tx_hash in self.node.known_txs_var.value and not warned:
                print 'Peer sent entire transaction %064x that was already received' % (
                    tx_hash, )
                warned = True

            self.remembered_txs[tx_hash] = tx
            self.remembered_txs_size += 100 + dash_data.tx_type.packed_size(tx)
            added_known_txs[tx_hash] = tx
        self.node.known_txs_var.add(added_known_txs)
        if self.remembered_txs_size >= self.max_remembered_txs_size:
            raise PeerMisbehavingError('too much transaction data stored')
コード例 #24
0
 def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce):
     """Handle a stratum share submittal: reassemble the header for the
     remembered job and forward it to that job's response callback."""
     if job_id not in self.handler_map:
         print >> sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
         return False
     x, got_response = self.handler_map[job_id]
     coinb_nonce = extranonce2.decode('hex')
     assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
     packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']
     # ntime/nonce arrive hex-encoded and byte-swapped.
     decode_u32 = lambda s: pack.IntType(32).unpack(getwork._swap4(s.decode('hex')))
     header = dict(
         version=x['version'],
         previous_block=x['previous_block'],
         merkle_root=dash_data.check_merkle_link(
             dash_data.hash256(packed_gentx), x['merkle_link']),
         timestamp=decode_u32(ntime),
         bits=x['bits'],
         nonce=decode_u32(nonce),
     )
     return got_response(header, worker_name, coinb_nonce)
コード例 #25
0
ファイル: test_p2p.py プロジェクト: coins-dev/p2pool-dash
 def got_conn(self, conn):
     """On connect: wait briefly, then publish three huge (~900kB script)
     dummy transactions to mining_txs_var and record the send time."""
     p2p.Node.got_conn(self, conn)
     
     yield deferral.sleep(.5)
     
     updated = dict(self.mining_txs_var.value)
     for n in xrange(3):
         # lock_time varies so each dummy tx gets a distinct hash.
         tx = dict(
             version=0,
             tx_ins=[],
             tx_outs=[dict(
                 value=0,
                 script='x'*900000,
             )],
             lock_time=n,
         )
         updated[dash_data.hash256(dash_data.tx_type.pack(tx))] = tx
     self.mining_txs_var.set(updated)
     
     self.sent_time = reactor.seconds()
コード例 #26
0
ファイル: p2p.py プロジェクト: DiCE1904/p2pool-drk
    def handle_remember_tx(self, tx_hashes, txs):
        """Handle a peer's remember_tx message: store referenced and inline
        transactions in self.remembered_txs, disconnecting the peer on
        duplicates or unknown references, and raising PeerMisbehavingError
        when the stored size exceeds max_remembered_txs_size."""
        for tx_hash in tx_hashes:
            if tx_hash in self.remembered_txs:
                print >> sys.stderr, "Peer referenced transaction twice, disconnecting"
                self.disconnect()
                return

            if tx_hash in self.node.known_txs_var.value:
                tx = self.node.known_txs_var.value[tx_hash]
            else:
                # Fall back to recently-dropped transactions kept per peer.
                for cache in self.known_txs_cache.itervalues():
                    if tx_hash in cache:
                        tx = cache[tx_hash]
                        print "Transaction %064x rescued from peer latency cache!" % (tx_hash,)
                        break
                else:
                    print >> sys.stderr, "Peer referenced unknown transaction %064x, disconnecting" % (tx_hash,)
                    self.disconnect()
                    return

            self.remembered_txs[tx_hash] = tx
            # 100 bytes of bookkeeping overhead per tx plus its packed size.
            self.remembered_txs_size += 100 + dash_data.tx_type.packed_size(tx)
        new_known_txs = dict(self.node.known_txs_var.value)
        warned = False
        for tx in txs:
            tx_hash = dash_data.hash256(dash_data.tx_type.pack(tx))
            if tx_hash in self.remembered_txs:
                print >> sys.stderr, "Peer referenced transaction twice, disconnecting"
                self.disconnect()
                return

            # Warn (once per message) about redundantly-sent transactions.
            if tx_hash in self.node.known_txs_var.value and not warned:
                print "Peer sent entire transaction %064x that was already received" % (tx_hash,)
                warned = True

            self.remembered_txs[tx_hash] = tx
            self.remembered_txs_size += 100 + dash_data.tx_type.packed_size(tx)
            new_known_txs[tx_hash] = tx
        self.node.known_txs_var.set(new_known_txs)
        if self.remembered_txs_size >= self.max_remembered_txs_size:
            raise PeerMisbehavingError("too much transaction data stored")
コード例 #27
0
ファイル: test_p2p.py プロジェクト: sriharikapu/p2pool-dash
            def got_conn(self, conn):
                """Test hook: on connection, publish several huge transactions.

                NOTE(review): body contains `yield`, so this is a generator --
                presumably decorated with @defer.inlineCallbacks just above
                this excerpt; confirm against the full file.
                """
                p2p.Node.got_conn(self, conn)

                # Give the handshake a moment to settle before flooding.
                yield deferral.sleep(.5)

                new_mining_txs = dict(self.mining_txs_var.value)
                for i in xrange(3):
                    # ~900 kB script forces the oversized-tx code path;
                    # distinct lock_time values make each tx hash unique.
                    huge_tx = dict(
                        version=0,
                        tx_ins=[],
                        tx_outs=[dict(
                            value=0,
                            script='x' * 900000,
                        )],
                        lock_time=i,
                    )
                    new_mining_txs[dash_data.hash256(
                        dash_data.tx_type.pack(huge_tx))] = huge_tx
                self.mining_txs_var.set(new_mining_txs)

                # Record when the flood was sent, for latency assertions.
                self.sent_time = reactor.seconds()
コード例 #28
0
 def test_hashlink1(self):
     """Round-trip: a hash link built from a prefix, checked against an
     empty suffix, must equal the plain double-SHA256 of the prefix."""
     for _ in xrange(100):
         blob = random_bytes(random.randrange(2048))
         link = data.prefix_to_hash_link(blob)
         expected = dash_data.hash256(blob)
         assert data.check_hash_link(link, '') == expected
コード例 #29
0
ファイル: node.py プロジェクト: coins-dev/p2pool-dash
 def _(tx):
     """Register a newly observed transaction, keyed by its double-SHA256."""
     tx_hash = dash_data.hash256(dash_data.tx_type.pack(tx))
     self.known_txs_var.add({tx_hash: tx})
コード例 #30
0
 def from_header(cls, header):
     """Construct an instance from a block header: (header hash, previous block)."""
     header_hash = dash_data.hash256(dash_data.block_header_type.pack(header))
     return cls(header_hash, header['previous_block'])
コード例 #31
0
ファイル: test_data.py プロジェクト: 690254282/p2pool-dash
 def test_hashlink1(self):
     """Hash links over random prefixes must match hash256 when the
     appended suffix is empty."""
     trials = 100
     for _ in xrange(trials):
         payload = random_bytes(random.randrange(2048))
         hl = data.prefix_to_hash_link(payload)
         assert data.check_hash_link(hl, '') == dash_data.hash256(payload)
コード例 #32
0
ファイル: data.py プロジェクト: coins-dev/p2pool-dash
 def get_ref_hash(cls, net, share_info, ref_merkle_link):
     """Pack the share's ref structure, hash it, fold it through the ref
     merkle link, and return the 256-bit result as packed bytes."""
     ref = dict(
         identifier=net.IDENTIFIER,
         share_info=share_info,
     )
     ref_hash = dash_data.hash256(cls.ref_type.pack(ref))
     merkle_root = dash_data.check_merkle_link(ref_hash, ref_merkle_link)
     return pack.IntType(256).pack(merkle_root)
コード例 #33
0
    def _getwork(self, request, data, long_poll):
        """Serve one getwork HTTP request (submit or fetch, with long-polling).

        NOTE(review): this body uses `yield` and defer.returnValue, so it is
        presumably decorated with @defer.inlineCallbacks outside this excerpt
        -- confirm against the full file.

        request: Twisted HTTP request object.
        data: hex-encoded block header if the miner is submitting work,
              None if it is asking for new work.
        long_poll: True when this request arrived on the long-polling URL.
        """
        request.setHeader('X-Long-Polling', '/long-polling')
        request.setHeader('X-Roll-NTime', 'expire=100')
        request.setHeader('X-Is-P2Pool', 'true')
        # Advertise the stratum endpoint on the same host, if one was given.
        if request.getHeader('Host') is not None:
            request.setHeader('X-Stratum',
                              'stratum+tcp://' + request.getHeader('Host'))

        # --- submission path ---
        if data is not None:
            header = getwork.decode_data(data)
            # Work is matched back to its handler via the merkle root we
            # generated when the work was handed out.
            if header['merkle_root'] not in self.merkle_root_to_handler:
                print >> sys.stderr, '''Couldn't link returned work's merkle root with its handler. This should only happen if this process was recently restarted!'''
                defer.returnValue(False)
            defer.returnValue(
                self.merkle_root_to_handler[header['merkle_root']](
                    header,
                    request.getUser() if request.getUser() is not None else '',
                    '\0' * self.worker_bridge.COINBASE_NONCE_LENGTH))

        # --- work-request path ---
        if p2pool.DEBUG:
            # Random id correlates the START/PUSH/WAITING/END debug lines
            # for a single poll. (Note: shadows the `id` builtin.)
            id = random.randrange(1000, 10000)
            print 'POLL %i START is_long_poll=%r user_agent=%r user=%r' % (
                id, long_poll, request.getHeader('User-Agent'),
                request.getUser())

        if long_poll:
            # Identify the worker by client IP + auth header; only block if
            # this worker has already seen the current work generation.
            request_id = request.getClientIP(), request.getHeader(
                'Authorization')
            if self.worker_views.get(
                    request_id, self.worker_bridge.new_work_event.times
            ) != self.worker_bridge.new_work_event.times:
                if p2pool.DEBUG:
                    print 'POLL %i PUSH' % (id, )
            else:
                if p2pool.DEBUG:
                    print 'POLL %i WAITING' % (id, )
                # Park this request until new work arrives.
                yield self.worker_bridge.new_work_event.get_deferred()
            self.worker_views[
                request_id] = self.worker_bridge.new_work_event.times

        x, handler = self.worker_bridge.get_work(
            *self.worker_bridge.preprocess_request(
                request.getUser() if request.getUser() is not None else ''))
        # Build the getwork job; the merkle root is computed with a zeroed
        # coinbase nonce placeholder, matching what submission uses above.
        res = getwork.BlockAttempt(
            version=x['version'],
            previous_block=x['previous_block'],
            merkle_root=dash_data.check_merkle_link(
                dash_data.hash256(x['coinb1'] + '\0' *
                                  self.worker_bridge.COINBASE_NONCE_LENGTH +
                                  x['coinb2']), x['merkle_link']),
            timestamp=x['timestamp'],
            bits=x['bits'],
            share_target=x['share_target'],
        )
        assert res.merkle_root not in self.merkle_root_to_handler

        # Remember how to process this job when it comes back.
        self.merkle_root_to_handler[res.merkle_root] = handler

        if p2pool.DEBUG:
            print 'POLL %i END identifier=%i' % (
                id, self.worker_bridge.new_work_event.times)

        extra_params = {}
        if request.getHeader('User-Agent') == 'Jephis PIC Miner':
            # ASICMINER BE Blades apparently have a buffer overflow bug and
            # can't handle much extra in the getwork response
            extra_params = {}
        else:
            extra_params = dict(identifier=str(
                self.worker_bridge.new_work_event.times),
                                submitold=True)
        defer.returnValue(res.getwork(**extra_params))
コード例 #34
0
 def _(tx):
     """Merge one new transaction into known_txs_var via a copy-then-set update."""
     key = dash_data.hash256(dash_data.tx_type.pack(tx))
     updated = dict(self.known_txs_var.value)
     updated[key] = tx
     self.known_txs_var.set(updated)
コード例 #35
0
ファイル: node.py プロジェクト: DiCE1904/p2pool-drk
 def _(tx):
     """Publish a transaction into the known-tx map keyed by its hash256."""
     snapshot = dict(self.known_txs_var.value)
     snapshot[dash_data.hash256(dash_data.tx_type.pack(tx))] = tx
     self.known_txs_var.set(snapshot)
コード例 #36
0
 def _(tx):
     """Add a single transaction to known_txs_var, keyed by its double-SHA256."""
     packed = dash_data.tx_type.pack(tx)
     self.known_txs_var.add({dash_data.hash256(packed): tx})
コード例 #37
0
 def from_header(cls, header):
     """Build an instance from a packed-and-hashed header plus its parent hash."""
     packed = dash_data.block_header_type.pack(header)
     return cls(dash_data.hash256(packed), header['previous_block'])