Example #1
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls,
                 worker_fee, args, pubkeys, dashd):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.node = node

        self.dashd = dashd
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = args.donation_percentage
        self.worker_fee = args.worker_fee

        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)
        self.local_addr_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        self.last_work_shares = variable.Variable({})

        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        self.tracker_view = forest.TrackerView(
            self.node.tracker,
            forest.get_attributedelta_type(
                dict(
                    forest.AttributeDelta.attrs,
                    my_count=lambda share: 1
                    if share.hash in self.my_share_hashes else 0,
                    my_doa_count=lambda share: 1
                    if share.hash in self.my_doa_share_hashes else 0,
                    my_orphan_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'orphan' else 0,
                    my_dead_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'doa' else 0,
                )))

        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [
                    None, 'orphan', 'doa'
                ]  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] +
                    (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] +
                    (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(
                    self.removed_doa_unstales_var.value + 1)

        # MERGED WORK

        self.merged_work = variable.Variable({})

        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.HTTPProxy(
                merged_url,
                dict(Authorization='Basic ' +
                     base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry(
                    'Error while calling merged getauxblock on %s:' %
                    (merged_url, ), 30)(merged_proxy.rpc_getauxblock)()
                self.merged_work.set(
                    math.merge_dicts(
                        self.merged_work.value, {
                            auxblock['chainid']:
                            dict(
                                hash=int(auxblock['hash'], 16),
                                target='p2pool' if auxblock['target']
                                == 'p2pool' else pack.IntType(256).unpack(
                                    auxblock['target'].decode('hex')),
                                merged_proxy=merged_proxy,
                            )
                        }))
                yield deferral.sleep(1)

        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)

        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
            t = self.node.dashd_work.value
            bb = self.node.best_block_header.value
            if bb is not None and bb['previous_block'] == t[
                    'previous_block'] and self.node.net.PARENT.POW_FUNC(
                        dash_data.block_header_type.pack(
                            bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x! NewHeight=%s' % (
                    bb['previous_block'],
                    self.node.net.PARENT.BLOCKHASH_FUNC(
                        dash_data.block_header_type.pack(bb)),
                    t['height'] + 1,
                )
                '''
                # New block template from Dash daemon only
                t = dict(
                    version=bb['version'],
                    previous_block=self.node.net.PARENT.BLOCKHASH_FUNC(dash_data.block_header_type.pack(bb)),
                    bits=bb['bits'], # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 600, # better way?
                    transactions=[],
                    transaction_fees=[],
                    merkle_link=dash_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.dashd_work.value['subsidy'],
                    last_update=self.node.dashd_work.value['last_update'],
                    payment_amount=self.node.dashd_work.value['payment_amount'],
                    packed_payments=self.node.dashd_work.value['packed_payments'],
                )
                '''

            self.current_work.set(t)

        self.node.dashd_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()

        self.merged_work.changed.watch(
            lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
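
All six examples are built on the same observer primitives from p2pool's variable module: a Variable holds .value, .set() publishes a new value, and .changed / .transitioned are Events whose .watch() registers callbacks and whose .happened() invokes them. The following is a minimal sketch of that pattern inferred purely from how it is used above; it is an illustration of the wiring, not p2pool's actual implementation.

# Minimal sketch of the Variable/Event pattern the examples rely on.
# Inferred from usage above; not p2pool's real `variable` module.
class Event(object):
    def __init__(self):
        self._watchers = []

    def watch(self, func):
        # returning func lets .watch be used as a decorator, as in the examples
        self._watchers.append(func)
        return func

    def happened(self, *args):
        for func in list(self._watchers):
            func(*args)


class Variable(object):
    def __init__(self, value):
        self.value = value
        self.changed = Event()       # fired with the new value
        self.transitioned = Event()  # fired with (old value, new value)

    def set(self, value):
        if value == self.value:
            return
        old, self.value = self.value, value
        self.changed.happened(value)
        self.transitioned.happened(old, value)


# Wiring in the same shape as compute_work()/new_work_event above:
current_work = Variable(None)
new_work_event = Event()
current_work.changed.watch(lambda _: new_work_event.happened())

fired = []
new_work_event.watch(lambda: fired.append(True))
current_work.set({'height': 1})
assert fired  # the change propagated to the long-poll event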
Example #2
    def __init__(self, node, my_pubkey_hash, donation_percentage, worker_fee,
                 args, pubkeys, dcrd):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.node = node

        self.dcrd = dcrd
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = args.donation_percentage
        self.worker_fee = args.worker_fee

        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)
        self.local_addr_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        self.last_work_shares = variable.Variable({})
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        self.tracker_view = forest.TrackerView(
            self.node.tracker,
            forest.get_attributedelta_type(
                dict(
                    forest.AttributeDelta.attrs,
                    my_count=lambda share: 1
                    if share.hash in self.my_share_hashes else 0,
                    my_doa_count=lambda share: 1
                    if share.hash in self.my_doa_share_hashes else 0,
                    my_orphan_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'orphan' else 0,
                    my_dead_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'doa' else 0,
                )))

        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [
                    None, 'orphan', 'doa'
                ]  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] +
                    (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] +
                    (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(
                    self.removed_doa_unstales_var.value + 1)

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
            t = self.node.dcrd_work.value
            bb = self.node.best_block_header.value
            if bb is not None and bb['previous_block'] == t[
                    'previous_block'] and self.node.net.PARENT.POW_FUNC(
                        decred_data.block_header_type.pack(
                            bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (
                    bb['previous_block'],
                    decred_data.hash256(
                        decred_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=decred_data.hash256(
                        decred_data.block_header_type.pack(bb)),
                    bits=bb['bits'],  # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 300,  # better way?
                    transactions=[],
                    transaction_fees=[],
                    merkle_link=decred_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.net.PARENT.SUBSIDY_FUNC(
                        self.node.dcrd_work.value['height']),
                    last_update=self.node.dcrd_work.value['last_update'],
                )

            self.current_work.set(t)

        self.node.dcrd_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()

        self.node.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
Example #3
    def __init__(self, my_pubkey_hash, net, donation_percentage, bitcoind_work,
                 best_block_header, merged_urls, best_share_var, tracker,
                 my_share_hashes, my_doa_share_hashes, worker_fee, p2p_node,
                 submit_block, set_best_share, broadcast_share,
                 block_height_var):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.my_pubkey_hash = my_pubkey_hash
        self.net = net
        self.donation_percentage = donation_percentage
        self.bitcoind_work = bitcoind_work
        self.best_block_header = best_block_header
        self.best_share_var = best_share_var
        self.tracker = tracker
        self.my_share_hashes = my_share_hashes
        self.my_doa_share_hashes = my_doa_share_hashes
        self.worker_fee = worker_fee
        self.p2p_node = p2p_node
        self.submit_block = submit_block
        self.set_best_share = set_best_share
        self.broadcast_share = broadcast_share
        self.block_height_var = block_height_var

        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        @tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and tracker.is_child_of(
                    share.hash, self.best_share_var.value):
                assert share.share_data['stale_info'] in [
                    None, 'orphan', 'doa'
                ]  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] +
                    (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] +
                    (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.tracker.is_child_of(
                    share.hash, self.best_share_var.value):
                self.removed_doa_unstales_var.set(
                    self.removed_doa_unstales_var.value + 1)

        # MERGED WORK

        self.merged_work = variable.Variable({})

        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(
                merged_url,
                dict(Authorization='Basic ' +
                     base64.b64encode(merged_userpass)))
            while True:
                auxblock = yield deferral.retry(
                    'Error while calling merged getauxblock:',
                    30)(merged_proxy.rpc_getauxblock)()
                self.merged_work.set(
                    dict(
                        self.merged_work.value, **{
                            auxblock['chainid']:
                            dict(
                                hash=int(auxblock['hash'], 16),
                                target='p2pool' if auxblock['target']
                                == 'p2pool' else pack.IntType(256).unpack(
                                    auxblock['target'].decode('hex')),
                                merged_proxy=merged_proxy,
                            )
                        }))
                yield deferral.sleep(1)

        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)

        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
            t = self.bitcoind_work.value
            bb = self.best_block_header.value
            if bb is not None and bb['previous_block'] == t[
                    'previous_block'] and net.PARENT.POW_FUNC(
                        bitcoin_data.block_header_type.pack(
                            bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (
                    bb['previous_block'],
                    bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(bb)),
                    bits=bb['bits'],  # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 600,  # better way?
                    transactions=[],
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=net.PARENT.SUBSIDY_FUNC(
                        self.block_height_var.value),
                    last_update=self.bitcoind_work.value['last_update'],
                )

            self.current_work.set(t)

        self.bitcoind_work.changed.watch(lambda _: compute_work())
        self.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()

        self.merged_work.changed.watch(
            lambda _: self.new_work_event.happened())
        self.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
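
Each getauxblock poll replaces a single entry in merged_work, keyed by the aux chain's chainid: examples #1, #4, #5 and #6 do this with math.merge_dicts, while this example writes the same shallow, right-biased merge as dict(old, **{chainid: ...}). A hypothetical stand-in for merge_dicts and a tiny demonstration of the accumulation:

def merge_dicts(*dicts):
    # hypothetical stand-in: shallow merge, later dicts win on key clashes
    result = {}
    for d in dicts:
        result.update(d)
    return result


merged_work = {1: dict(hash=0xAA, target='p2pool')}   # aux chain 1, from an earlier poll
fresh = {7: dict(hash=0xCC, target='p2pool')}         # one new getauxblock result, chainid 7
merged_work = merge_dicts(merged_work, fresh)
assert sorted(merged_work) == [1, 7]                  # one slot per chainid, newest poll wins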
Example #4
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []
        
        self.node = node
        self.my_pubkey_hash = my_pubkey_hash
        self.donation_percentage = donation_percentage
        self.worker_fee = worker_fee
        
        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10*60)
        self.local_addr_rate_monitor = math.RateMonitor(10*60)
        
        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        self.last_work_shares = variable.Variable( {} )        
        
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()
        
        self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
            my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
            my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
            my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0, # orphan
            my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0, # dead
        )))


        # Python Logger
        self.logger = logging.getLogger('pool_log')
        formatter = logging.Formatter('[%(levelname)s|%(filename)s - %(lineno)s] : %(message)s')

        fileHandler = logging.FileHandler('./difficulty.log')
        streamHandler = logging.StreamHandler()

        fileHandler.setFormatter(formatter)
        streamHandler.setFormatter(formatter)

        self.logger.addHandler(fileHandler)
        self.logger.addHandler(streamHandler)

        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
        
        # MERGED WORK
        
        self.merged_work = variable.Variable({})
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
                self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # COMBINE WORK
        
        self.current_work = variable.Variable(None)
        def compute_work():
            t = self.node.bitcoind_work.value # nTime?
            bb = self.node.best_block_header.value # Tx?
            if bb is not None and bb['previous_block'] == t['previous_block'] and bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (bb['previous_block'],
                    #bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
                    bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.scrypt(bitcoin_data.block_header_type.pack(bb)),
                    bits=self.node.pow_bits, # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 600, # better way?
                    transactions=[],
                    transaction_fees=[],
                    txn_timestamp=0,
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.pow_subsidy,
                    last_update=self.node.bitcoind_work.value['last_update'],
                )
            
            self.current_work.set(t)
        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()
        
        self.new_work_event = variable.Event()
        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
                self.new_work_event.happened()
        self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
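
Example #4 wires a file handler and a stream handler onto a 'pool_log' logger but never sets a level, so with the stock root configuration only WARNING and above would be emitted, and re-running __init__ would stack duplicate handlers on the same singleton logger. A minimal, self-contained version of that setup (hypothetical, shown outside the class) that avoids both issues:

import logging

logger = logging.getLogger('pool_log')
logger.setLevel(logging.DEBUG)           # without this, the root default (WARNING) applies
formatter = logging.Formatter('[%(levelname)s|%(filename)s - %(lineno)s] : %(message)s')

if not logger.handlers:                  # avoid stacking duplicate handlers on re-init
    file_handler = logging.FileHandler('./difficulty.log')
    stream_handler = logging.StreamHandler()
    file_handler.setFormatter(formatter)
    stream_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    logger.addHandler(stream_handler)

logger.info('difficulty logger ready')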
Example #5
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls,
                 worker_fee, min_difficulty, share_rate, share_rate_type, args,
                 pubkeys, bitcoind):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.node = node

        self.bitcoind = bitcoind
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = donation_percentage
        self.worker_fee = worker_fee
        self.min_difficulty = min_difficulty
        self.share_rate = share_rate
        self.share_rate_type = share_rate_type

        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)
        self.local_addr_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        self.last_work_shares = variable.Variable({})
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        self.tracker_view = forest.TrackerView(
            self.node.tracker,
            forest.get_attributedelta_type(
                dict(
                    forest.AttributeDelta.attrs,
                    my_count=lambda share: 1
                    if share.hash in self.my_share_hashes else 0,
                    my_doa_count=lambda share: 1
                    if share.hash in self.my_doa_share_hashes else 0,
                    my_orphan_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'orphan' else 0,
                    my_dead_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'doa' else 0,
                )))

        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [
                    None, 'orphan', 'doa'
                ]  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] +
                    (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] +
                    (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(
                    self.removed_doa_unstales_var.value + 1)

        # MERGED WORK

        self.merged_work = variable.Variable({})

        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.HTTPProxy(
                merged_url,
                dict(Authorization='Basic ' +
                     base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry(
                    'Error while calling merged getauxblock on %s:' %
                    (merged_url, ), 30)(merged_proxy.rpc_getauxblock)()
                target = auxblock[
                    'target'] if 'target' in auxblock else auxblock['_target']
                self.merged_work.set(
                    math.merge_dicts(
                        self.merged_work.value, {
                            auxblock['chainid']:
                            dict(
                                hash=int(auxblock['hash'], 16),
                                target='p2pool' if target == 'p2pool' else
                                pack.IntType(256).unpack(target.decode('hex')),
                                merged_proxy=merged_proxy,
                            )
                        }))
                yield deferral.sleep(1)

        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)

        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
            t = self.node.bitcoind_work.value
            bb = self.node.best_block_header.value
            if bb is not None and bb['previous_block'] == t[
                    'previous_block'] and self.node.net.PARENT.POW_FUNC(
                        bitcoin_data.block_header_type.pack(
                            bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (
                    bb['previous_block'],
                    bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(bb)),
                    bits=bb['bits'],  # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=max(int(time.time() + 0.5), bb['timestamp'] + 1),
                    transactions=[],
                    transaction_hashes=[],
                    transaction_fees=[],
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.net.PARENT.SUBSIDY_FUNC(
                        self.node.bitcoind_work.value['height']),
                    last_update=t['last_update'],
                    skipping=self.current_work.value.get('skipping', 3) -
                    1 if self.current_work.value is not None else 2)

            self.current_work.set(t)

        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        @defer.inlineCallbacks
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()
            # refetch block template if best block changed
            if after.get('skipping', -2) >= 0:
                time.sleep(0.5)
                self.node.bitcoind_work.set((yield helper.getwork(
                    self.bitcoind,
                    self.node.bitcoind_work.value['use_getblocktemplate'])))
            elif after.get('skipping', -2) == -1:
                # revert if bitcoind doesn't have the new block
                h = yield self.bitcoind.rpc_getblockheader(
                    (yield self.bitcoind.rpc_getbestblockhash()))
                self.node.best_block_header.set(
                    dict(version=h['version'],
                         previous_block=int(h['previousblockhash'], 16),
                         merkle_root=int(h['merkleroot'], 16),
                         timestamp=h['time'],
                         bits=bitcoin_data.FloatingIntegerType().unpack(
                             h['bits'].decode('hex')[::-1]) if isinstance(
                                 h['bits'], (str, unicode)) else
                         bitcoin_data.FloatingInteger(h['bits']),
                         nonce=h['nonce']))

        self.merged_work.changed.watch(
            lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
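
Example #5 adds a 'skipping' counter to the skip-ahead stub template: while bitcoind has not yet produced a template on top of the newly announced block, each recomputation decrements the counter (starting from 2), the transitioned watcher keeps refetching the block template while the counter is >= 0, and at -1 it falls back to re-reading the daemon's best header. A minimal sketch of just that countdown arithmetic, with hypothetical names and no Twisted machinery:

def next_skipping(current_work_value):
    # mirrors: self.current_work.value.get('skipping', 3) - 1 if ... is not None else 2
    if current_work_value is None:
        return 2
    return current_work_value.get('skipping', 3) - 1


work = None
seen = []
for _ in range(4):                       # repeated compute_work() passes while the daemon lags
    work = {'skipping': next_skipping(work)}
    seen.append(work['skipping'])
assert seen == [2, 1, 0, -1]             # refetch while >= 0, revert to the daemon's view at -1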
Example #6
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee, args, pubkeys, bitcoind):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []
        
        self.node = node

        self.bitcoind = bitcoind
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = args.donation_percentage
        self.worker_fee = args.worker_fee
        
        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10*60)
        self.local_addr_rate_monitor = math.RateMonitor(10*60)
        
        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)
        
        self.last_work_shares = variable.Variable( {} )
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        # DONATION_SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
        # print data.script2_to_address(DONATION_SCRIPT, node.net.PARENT)
        # print data.address_to_pubkey_hash("1E482inuE9GckE6kXoX5sBCTD7g4rgGgfN",node.net.PARENT)


        self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
            my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
            my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
            my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
            my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
        )))
        
        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
        
        # MERGED WORK
        
        self.merged_work = variable.Variable({})
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
                target = auxblock['target'] if 'target' in auxblock else auxblock['_target']
                self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target='p2pool' if target == 'p2pool' else pack.IntType(256).unpack(target.decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # COMBINE WORK
        
        self.current_work = variable.Variable(None)
        def compute_work():
            t = self.node.bitcoind_work.value
            bb = self.node.best_block_header.value
            if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (bb['previous_block'],
                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
                    bits=bb['bits'], # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 600, # better way?
                    transactions=[],
                    transaction_fees=[],
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.node.bitcoind_work.value['height']),
                    last_update=self.node.bitcoind_work.value['last_update'],
                )
            
            self.current_work.set(t)
        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()
        
        self.new_work_event = variable.Event()
        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
                self.new_work_event.happened()
        self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
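
All of the compute_work() variants above share the same shape: if the best header announced on the p2p network builds on the daemon's current template (bb['previous_block'] == t['previous_block']) and already meets the network target, the node assumes a new block exists and synthesizes a provisional, empty template on top of it instead of waiting for the daemon. A condensed sketch of that decision, with hypothetical stand-ins for the pack/hash helpers used in the originals:

def combine_work(t, bb, pow_hash, block_hash):
    # t:  the daemon's current block template (getblocktemplate/getwork style dict)
    # bb: best block header announced on the p2p network, possibly ahead of t
    # pow_hash / block_hash: hypothetical stand-ins for net.PARENT.POW_FUNC and
    # hash256 applied to the packed header
    if bb is None:
        return t
    if bb['previous_block'] == t['previous_block'] and pow_hash(bb) <= t['bits_target']:
        # the announced header is a valid child of t's parent: mine on top of it
        # with an empty, provisional template until the daemon catches up
        return dict(
            version=bb['version'],
            previous_block=block_hash(bb),
            bits_target=t['bits_target'],   # "not always true", as the originals note
            height=t['height'] + 1,
            time=bb['timestamp'] + 600,     # rough guess, as the originals note
            transactions=[],
        )
    return t


t = dict(previous_block=0x11, bits_target=2 ** 230, height=100)
bb = dict(version=4, previous_block=0x11, timestamp=1000)
new_t = combine_work(t, bb, pow_hash=lambda h: 1, block_hash=lambda h: 0x22)
assert new_t['previous_block'] == 0x22 and new_t['height'] == 101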