Example #1
File: mixup.py Project: weifind/abepos
def mixup_blocks(store, ds, count, datadir_chain_id = None, seed = None):
    bytes_done = 0
    offsets = []

    for i in xrange(count):
        if ds.read_cursor + 8 <= len(ds.input):
            offsets.append(ds.read_cursor)
            magic = ds.read_bytes(4)
            length = ds.read_int32()
            ds.read_cursor += length
            if ds.read_cursor <= len(ds.input):
                continue
        raise IOError("End of input after %d blocks" % i)

    if seed > 1 and seed <= count:
        for i in xrange(0, seed * int(count/seed), seed):
            offsets[i : i + seed] = offsets[i : i + seed][::-1]
    elif seed == -3:
        for i in xrange(0, 3 * int(count/3), 3):
            offsets[i : i + 3] = offsets[i+1 : i + 3] + [offsets[i]]
        print offsets
    elif seed:
        offsets = offsets[::-1]  # XXX want random

    for offset in offsets:
        ds.read_cursor = offset
        magic = ds.read_bytes(4)
        length = ds.read_int32()

        # Assume blocks obey the respective policy if they get here.
        chain_id = datadir_chain_id
        if chain_id is None:
            rows = store.selectall("""
                SELECT chain.chain_id
                  FROM chain
                  JOIN magic ON (chain.magic_id = magic.magic_id)
                 WHERE magic.magic = ?""",
                                   (store.binin(magic),))
            if len(rows) == 1:
                chain_id = rows[0][0]
        if chain_id is None:
            ds.read_cursor = offset
            raise ValueError(
                "Chain not found for magic number %s in block file at"
                " offset %d." % (repr(magic), offset))

        # XXX pasted out of DataStore.import_blkdat
        end = ds.read_cursor + length

        hash = util.scrypt(
            ds.input[ds.read_cursor : ds.read_cursor + 80])
        # XXX should decode target and check hash against it to
        # avoid loading garbage data.  But not for merged-mined or
        # CPU-mined chains that use different proof-of-work
        # algorithms.  Time to resurrect policy_id?

        block_row = store.selectrow("""
            SELECT block_id, block_height, block_chain_work,
                   block_nTime, block_total_seconds,
                   block_total_satoshis, block_satoshi_seconds
              FROM block
             WHERE block_hash = ?
        """, (store.hashin(hash),))

        if block_row:
            # Block header already seen.  Don't import the block,
            # but try to add it to the chain.
            if chain_id is not None:
                b = {
                    "block_id":   block_row[0],
                    "height":     block_row[1],
                    "chain_work": store.binout_int(block_row[2]),
                    "nTime":      block_row[3],
                    "seconds":    block_row[4],
                    "satoshis":   block_row[5],
                    "ss":         block_row[6]}
                if store.selectrow("""
                    SELECT 1
                      FROM chain_candidate
                     WHERE block_id = ?
                       AND chain_id = ?""",
                                (b['block_id'], chain_id)):
                    #store.log.info("block %d already in chain %d",b['block_id'], chain_id)
                    b = None
                else:
                    if b['height'] == 0:
                        b['hashPrev'] = GENESIS_HASH_PREV
                    else:
                        b['hashPrev'] = 'dummy'  # Fool adopt_orphans.
                    store.offer_block_to_chains(b, frozenset([chain_id]))
        else:
            b = store.parse_block(ds, chain_id, magic, length)
            b["hash"] = hash
            chain_ids = frozenset([] if chain_id is None else [chain_id])
            store.import_block(b, chain_ids = chain_ids)
            if ds.read_cursor != end:
                store.log.debug("Skipped %d bytes at block end",
                                end - ds.read_cursor)

        bytes_done += length
        if bytes_done >= store.commit_bytes:
            store.log.debug("commit")
            store.commit()
            bytes_done = 0

    if bytes_done > 0:
        store.commit()
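The only non-obvious part of mixup_blocks is the seed-driven reordering of offsets: seed > 1 reverses the offset list in chunks of seed, seed == -3 rotates every group of three, and any other truthy seed reverses the whole list. A minimal standalone sketch of just that step (mixup_offsets is a hypothetical helper, not part of Abe):

def mixup_offsets(offsets, seed=None):
    # Re-expression of the reordering logic from mixup_blocks above,
    # operating on a plain list so it can be run in isolation.
    offsets = list(offsets)
    count = len(offsets)
    if seed is None:
        return offsets
    if seed > 1 and seed <= count:
        for i in range(0, seed * int(count / seed), seed):
            offsets[i:i + seed] = offsets[i:i + seed][::-1]
    elif seed == -3:
        for i in range(0, 3 * int(count / 3), 3):
            offsets[i:i + 3] = offsets[i + 1:i + 3] + [offsets[i]]
    elif seed:
        offsets = offsets[::-1]
    return offsets

print(mixup_offsets([0, 100, 200, 300, 400, 500], seed=2))   # [100, 0, 300, 200, 500, 400]
print(mixup_offsets([0, 100, 200, 300, 400, 500], seed=-3))  # [100, 200, 0, 400, 500, 300]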
Example #2
    def submit_share(self, job_id, worker_name, session, extranonce1_bin, extranonce2, ntime, nonce,
                     difficulty):
        '''Check parameters and finalize the block template. If this leads
           to a valid block candidate, asynchronously submit the block
           back to the bitcoin network.

            - extranonce1_bin is binary. No checks performed; it should come from session data
            - job_id, extranonce2, ntime, nonce - in hex form, as sent by the client
            - difficulty - decimal number from the session; again, no checks performed
            - submitblock_callback - reference to the method which receives the result of submitblock()
        '''
        
        # Check that extranonce2 looks correct. extranonce2 is in hex form...
        if len(extranonce2) != self.extranonce2_size * 2:
            raise SubmitException("Incorrect size of extranonce2. Expected %d chars" % (self.extranonce2_size*2))
        
        # Check for job
        job = self.get_job(job_id)
        if job == None:
            raise SubmitException("Job '%s' not found" % job_id)
                
        # Check if ntime looks correct
        if len(ntime) != 8:
            raise SubmitException("Incorrect size of ntime. Expected 8 chars")

        if not job.check_ntime(int(ntime, 16)):
            raise SubmitException("Ntime out of range")
        
        # Check nonce        
        if len(nonce) != 8:
            raise SubmitException("Incorrect size of nonce. Expected 8 chars")
        
        # Check for duplicated submit
        if not job.register_submit(extranonce1_bin, extranonce2, ntime, nonce):
            log.info("Duplicate from %s, (%s %s %s %s)" % \
                    (worker_name, binascii.hexlify(extranonce1_bin), extranonce2, ntime, nonce))
            raise SubmitException("Duplicate share")
        
        # Now let's do the hard work!
        # ---------------------------
        
        # 0. Some sugar
        extranonce2_bin = binascii.unhexlify(extranonce2)
        ntime_bin = binascii.unhexlify(ntime)
        nonce_bin = binascii.unhexlify(nonce)
                
        # 1. Build coinbase
        coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin)
        coinbase_hash = util.doublesha(coinbase_bin)
        
        # 2. Calculate merkle root
        merkle_root_bin = job.merkletree.withFirst(coinbase_hash)
        merkle_root_int = util.uint256_from_str(merkle_root_bin)
                
        # 3. Serialize header with given merkle, ntime and nonce
        header_bin = job.serialize_header(merkle_root_int, ntime_bin, nonce_bin)
    
        # 4. Reverse header and compare it with target of the user
        hash_bin = util.scrypt(''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]))
        hash_int = util.uint256_from_str(hash_bin)
        block_hash_hex = "%064x" % hash_int
        header_hex = binascii.hexlify(header_bin)
                 
        target_user = self.diff_to_target(difficulty)
        if hash_int > target_user and \
                ('prev_jobid' not in session or session['prev_jobid'] < job_id
                 or 'prev_diff' not in session
                 or hash_int > self.diff_to_target(session['prev_diff'])):
            raise SubmitException("Share is above target")

        # Mostly for debugging purposes
        target_info = self.diff_to_target(100000)
        if hash_int <= target_info:
            log.info("Yay, share with diff above 100000")

        # Algebra tells us the diff_to_target is the same as hash_to_diff
        share_diff = int(self.diff_to_target(hash_int))

        # 5. Compare hash with target of the network        
        if hash_int <= job.target:
            # Yay! It is a block candidate!
            log.info("We found a block candidate! %s" % block_hash_hex)
           
            # 6. Finalize and serialize block object 
            job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin, int(ntime, 16), int(nonce, 16))
            
            if not job.is_valid():
                # Should not happen
                log.error("Final job validation failed!")
                            
            # 7. Submit block to the network
            serialized = binascii.hexlify(job.serialize())
            on_submit = self.bitcoin_rpc.submitblock(serialized)
            
            return (header_hex, block_hash_hex, share_diff, on_submit)
        
        return (header_hex, block_hash_hex, share_diff, None)
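Steps 4 and 5 above hinge on the relationship between difficulty and target. A hedged sketch of that relationship, assuming the common bitcoin-style diff1 constant (the exact constant is pool-specific and not taken from this project); it also shows why the code can reuse diff_to_target to turn hash_int back into share_diff:

DIFF1_TARGET = 0x00000000ffff0000000000000000000000000000000000000000000000000000

def diff_to_target(difficulty):
    # Higher difficulty -> smaller (harder) target.  Integer arithmetic is
    # used here for clarity; real pools may divide in float or fixed point.
    return DIFF1_TARGET // int(difficulty)

def hash_to_diff(hash_int):
    # Because target = DIFF1_TARGET / difficulty, the same division maps a
    # hash value back to the difficulty it proves, which is what
    # share_diff = int(self.diff_to_target(hash_int)) relies on above.
    return DIFF1_TARGET // hash_int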
Example #3
def checkShare(share):
	shareTime = share['time'] = time()
	
	username = share['username']
	if 'data' in share:
		# getwork/GBT
		checkData(share)
		data = share['data']
		
		if username not in workLog:
			raise RejectedShare('unknown-user')
		MWL = workLog[username]
		
		shareMerkleRoot = data[36:68]
		if 'blkdata' in share:
			pl = share['blkdata']
			(txncount, pl) = varlenDecode(pl)
			cbtxn = bitcoin.txn.Txn(pl)
			othertxndata = cbtxn.disassemble(retExtra=True)
			coinbase = cbtxn.getCoinbase()
			wliPos = coinbase[0] + 2
			wliLen = coinbase[wliPos - 1]
			wli = coinbase[wliPos:wliPos+wliLen]
			mode = 'MC'
			moden = 1
		else:
			wli = shareMerkleRoot
			mode = 'MRD'
			moden = 0
			coinbase = None
	else:
		# Stratum
		MWL = workLog[None]
		wli = share['jobid']
		buildStratumData(share, b'\0' * 32)
		mode = 'MC'
		moden = 1
		othertxndata = b''
	
	if wli not in MWL:
		raise RejectedShare('unknown-work')
	(wld, issueT) = MWL[wli]
	share[mode] = wld
	
	share['issuetime'] = issueT
	
	(workMerkleTree, workCoinbase) = wld[1:3]
	share['merkletree'] = workMerkleTree
	if 'jobid' in share:
		cbtxn = deepcopy(workMerkleTree.data[0])
		coinbase = workCoinbase + share['extranonce1'] + share['extranonce2']
		cbtxn.setCoinbase(coinbase)
		cbtxn.assemble()
		data = buildStratumData(share, workMerkleTree.withFirst(cbtxn))
		shareMerkleRoot = data[36:68]
	
	if data in DupeShareHACK:
		raise RejectedShare('duplicate')
	DupeShareHACK[data] = None
	
	if ScryptCoin:
		blkhash = scrypt(data)
	else:
		blkhash = dblsha(data)
		if blkhash[28:] != b'\0\0\0\0':
			raise RejectedShare('H-not-zero')
	blkhashn = LEhash2int(blkhash)

	global networkTarget
	logfunc = getattr(checkShare.logger, 'info' if blkhashn <= networkTarget else 'debug')
	logfunc('BLKHASH: %64x' % (blkhashn,))
	logfunc(' TARGET: %64x' % (networkTarget,))
	
	# NOTE: this isn't actually needed for MC mode, but we're abusing it for a trivial share check...
	txlist = workMerkleTree.data
	txlist = [deepcopy(txlist[0]),] + txlist[1:]
	cbtxn = txlist[0]
	cbtxn.setCoinbase(coinbase or workCoinbase)
	cbtxn.assemble()
	
	if blkhashn <= networkTarget:
		logfunc("Submitting upstream")
		RBDs.append( deepcopy( (data, txlist, share.get('blkdata', None), workMerkleTree, share, wld) ) )
		if not moden:
			payload = assembleBlock(data, txlist)
		else:
			payload = share['data']
			if len(othertxndata):
				payload += share['blkdata']
			else:
				payload += assembleBlock(data, txlist)[80:]
		logfunc('Real block payload: %s' % (b2a_hex(payload).decode('utf8'),))
		RBPs.append(payload)
		threading.Thread(target=blockSubmissionThread, args=(payload, blkhash, share)).start()
		bcnode.submitBlock(payload)
		if config.DelayLogForUpstream:
			share['upstreamRejectReason'] = PendingUpstream
		else:
			share['upstreamRejectReason'] = None
			share['upstreamResult'] = True
		MM.updateBlock(blkhash)
	
	# Gotwork hack...
	if gotwork and blkhashn <= config.GotWorkTarget:
		try:
			coinbaseMrkl = cbtxn.data
			coinbaseMrkl += blkhash
			steps = workMerkleTree._steps
			coinbaseMrkl += pack('B', len(steps))
			for step in steps:
				coinbaseMrkl += step
			coinbaseMrkl += b"\0\0\0\0"
			info = {}
			info['hash'] = b2a_hex(blkhash).decode('ascii')
			info['header'] = b2a_hex(data).decode('ascii')
			info['coinbaseMrkl'] = b2a_hex(coinbaseMrkl).decode('ascii')
			thr = threading.Thread(target=submitGotwork, args=(info,))
			thr.daemon = True
			thr.start()
		except:
			checkShare.logger.warning('Failed to build gotwork request')
	
	if 'target' in share:
		workTarget = share['target']
	elif len(wld) > 6:
		workTarget = wld[6]
	else:
		workTarget = None
	
	if workTarget is None:
		workTarget = config.ShareTarget
	if blkhashn > workTarget:
		print(int(blkhashn))
		print(workTarget)
		raise RejectedShare('high-hash')
	share['target'] = workTarget
	share['_targethex'] = '%064x' % (workTarget,)
	
	shareTimestamp = unpack('<L', data[68:72])[0]
	if shareTime < issueT - 120:
		raise RejectedShare('stale-work')
	if shareTimestamp < shareTime - 300:
		raise RejectedShare('time-too-old')
	if shareTimestamp > shareTime + 7200:
		raise RejectedShare('time-too-new')
	
	if config.DynamicTargetting and username in userStatus:
		# NOTE: userStatus[username] only doesn't exist across restarts
		status = userStatus[username]
		target = status[0] or config.ShareTarget
		if target == workTarget:
			userStatus[username][2] += 1
		else:
			userStatus[username][2] += float(target) / workTarget
	
	if moden:
		cbpre = workCoinbase
		cbpreLen = len(cbpre)
		if coinbase[:cbpreLen] != cbpre:
			raise RejectedShare('bad-cb-prefix')
		
		# Filter out known "I support" flags, to prevent exploits
		for ff in (b'/P2SH/', b'NOP2SH', b'p2sh/CHV', b'p2sh/NOCHV'):
			if coinbase.find(ff) > max(-1, cbpreLen - len(ff)):
				raise RejectedShare('bad-cb-flag')
		
		if len(coinbase) > 100:
			raise RejectedShare('bad-cb-length')
		
		if shareMerkleRoot != workMerkleTree.withFirst(cbtxn):
			raise RejectedShare('bad-txnmrklroot')
		
		if len(othertxndata):
			allowed = assembleBlock(data, txlist)[80:]
			if allowed != share['blkdata']:
				raise RejectedShare('bad-txns')
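Example #3 compares the share against its targets by reading the 32-byte hash as a little-endian integer (LEhash2int) and, for SHA256d chains, short-circuits with 'H-not-zero' when the most significant 32 bits are not all zero. A hedged stand-in for that helper (not Eloipool's actual implementation):

from struct import unpack

def LEhash2int(h):
    # Interpret a 32-byte hash as a little-endian 256-bit integer:
    # unpack eight little-endian 32-bit words, least significant first.
    words = unpack('<8L', h)
    n = 0
    for word in reversed(words):
        n = (n << 32) | word
    return n

# Since the hash is little-endian, blkhash[28:] holds the most significant
# 32 bits; if they are not b'\0\0\0\0' the hash is far above any sane share
# target, which is exactly the cheap 'H-not-zero' rejection above.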
Example #4
    def submit_share(self, job_id, worker_name, session, extranonce1_bin,
                     extranonce2, ntime, nonce, difficulty):
        '''Check parameters and finalize the block template. If this leads
           to a valid block candidate, asynchronously submit the block
           back to the bitcoin network.

            - extranonce1_bin is binary. No checks performed; it should come from session data
            - job_id, extranonce2, ntime, nonce - in hex form, as sent by the client
            - difficulty - decimal number from the session; again, no checks performed
            - submitblock_callback - reference to the method which receives the result of submitblock()
        '''

        # Check that extranonce2 looks correct. extranonce2 is in hex form...
        if len(extranonce2) != self.extranonce2_size * 2:
            raise SubmitException(
                "Incorrect size of extranonce2. Expected %d chars" %
                (self.extranonce2_size * 2))

        # Check for job
        job = self.get_job(job_id)
        if job == None:
            raise SubmitException("Job '%s' not found" % job_id)

        # Check if ntime looks correct
        if len(ntime) != 8:
            raise SubmitException("Incorrect size of ntime. Expected 8 chars")

        if not job.check_ntime(int(ntime, 16)):
            raise SubmitException("Ntime out of range")

        # Check nonce
        if len(nonce) != 8:
            raise SubmitException("Incorrect size of nonce. Expected 8 chars")

        # Check for duplicated submit
        if not job.register_submit(extranonce1_bin, extranonce2, ntime, nonce):
            log.info("Duplicate from %s, (%s %s %s %s)" % \
                    (worker_name, binascii.hexlify(extranonce1_bin), extranonce2, ntime, nonce))
            raise SubmitException("Duplicate share")

        # Now let's do the hard work!
        # ---------------------------

        # 0. Some sugar
        extranonce2_bin = binascii.unhexlify(extranonce2)
        ntime_bin = binascii.unhexlify(ntime)
        nonce_bin = binascii.unhexlify(nonce)

        # 1. Build coinbase
        coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin)
        coinbase_hash = util.doublesha(coinbase_bin)

        # 2. Calculate merkle root
        merkle_root_bin = job.merkletree.withFirst(coinbase_hash)
        merkle_root_int = util.uint256_from_str(merkle_root_bin)

        # 3. Serialize header with given merkle, ntime and nonce
        header_bin = job.serialize_header(merkle_root_int, ntime_bin,
                                          nonce_bin)

        # 4. Reverse header and compare it with target of the user
        hash_bin = util.scrypt(''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)]))
        hash_int = util.uint256_from_str(hash_bin)
        block_hash_hex = "%064x" % hash_int
        header_hex = binascii.hexlify(header_bin)

        target_user = self.diff_to_target(difficulty)
        if hash_int > target_user and \
                ('prev_jobid' not in session or session['prev_jobid'] < job_id
                 or 'prev_diff' not in session
                 or hash_int > self.diff_to_target(session['prev_diff'])):
            raise SubmitException("Share is above target")

        # Mostly for debugging purposes
        target_info = self.diff_to_target(100000)
        if hash_int <= target_info:
            log.info("Yay, share with diff above 100000")

        # Algebra tells us the diff_to_target is the same as hash_to_diff
        share_diff = int(self.diff_to_target(hash_int))

        # 5. Compare hash with target of the network
        if hash_int <= job.target:
            # Yay! It is a block candidate!
            log.info("We found a block candidate! %s" % block_hash_hex)

            # 6. Finalize and serialize block object
            job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin,
                         int(ntime, 16), int(nonce, 16))

            if not job.is_valid():
                # Should not happen
                log.error("Final job validation failed!")

            # 7. Submit block to the network
            serialized = binascii.hexlify(job.serialize())
            on_submit = self.bitcoin_rpc.submitblock(serialized)

            return (header_hex, block_hash_hex, share_diff, on_submit)

        return (header_hex, block_hash_hex, share_diff, None)
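Step 4 in both submit_share variants above byte-reverses each 4-byte word of the 80-byte header before hashing it with scrypt. A small standalone illustration of that transform (reverse_words is a hypothetical helper used only here); note that the transform is its own inverse:

def reverse_words(header_bin):
    # Split the 80-byte header into twenty 4-byte words and reverse the
    # bytes inside each word, exactly as the list comprehension above does.
    assert len(header_bin) == 80
    return b''.join(header_bin[i * 4:i * 4 + 4][::-1] for i in range(20))

header = bytes(bytearray(range(80)))            # dummy header for illustration
assert reverse_words(reverse_words(header)) == header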
Example #5
File: mixup.py Project: flywalk/arcoin-abe
def mixup_blocks(store, ds, count, datadir_chain_id=None, seed=None):
    bytes_done = 0
    offsets = []

    for i in xrange(count):
        if ds.read_cursor + 8 <= len(ds.input):
            offsets.append(ds.read_cursor)
            magic = ds.read_bytes(4)
            length = ds.read_int32()
            ds.read_cursor += length
            if ds.read_cursor <= len(ds.input):
                continue
        raise IOError("End of input after %d blocks" % i)

    if seed > 1 and seed <= count:
        for i in xrange(0, seed * int(count / seed), seed):
            offsets[i:i + seed] = offsets[i:i + seed][::-1]
    elif seed == -3:
        for i in xrange(0, 3 * int(count / 3), 3):
            offsets[i:i + 3] = offsets[i + 1:i + 3] + [offsets[i]]
        print offsets
    elif seed:
        offsets = offsets[::-1]  # XXX want random

    for offset in offsets:
        ds.read_cursor = offset
        magic = ds.read_bytes(4)
        length = ds.read_int32()

        # Assume blocks obey the respective policy if they get here.
        chain_id = datadir_chain_id
        if chain_id is None:
            rows = store.selectall(
                """
                SELECT chain.chain_id
                  FROM chain
                  JOIN magic ON (chain.magic_id = magic.magic_id)
                 WHERE magic.magic = ?""", (store.binin(magic), ))
            if len(rows) == 1:
                chain_id = rows[0][0]
        if chain_id is None:
            ds.read_cursor = offset
            raise ValueError(
                "Chain not found for magic number %s in block file at"
                " offset %d." % (repr(magic), offset))

        # XXX pasted out of DataStore.import_blkdat
        end = ds.read_cursor + length

        hash = util.scrypt(ds.input[ds.read_cursor:ds.read_cursor + 80])
        # XXX should decode target and check hash against it to
        # avoid loading garbage data.  But not for merged-mined or
        # CPU-mined chains that use different proof-of-work
        # algorithms.  Time to resurrect policy_id?

        block_row = store.selectrow(
            """
            SELECT block_id, block_height, block_chain_work,
                   block_nTime, block_total_seconds,
                   block_total_satoshis, block_satoshi_seconds
              FROM block
             WHERE block_hash = ?
        """, (store.hashin(hash), ))

        if block_row:
            # Block header already seen.  Don't import the block,
            # but try to add it to the chain.
            if chain_id is not None:
                b = {
                    "block_id": block_row[0],
                    "height": block_row[1],
                    "chain_work": store.binout_int(block_row[2]),
                    "nTime": block_row[3],
                    "seconds": block_row[4],
                    "satoshis": block_row[5],
                    "ss": block_row[6]
                }
                if store.selectrow(
                        """
                    SELECT 1
                      FROM chain_candidate
                     WHERE block_id = ?
                       AND chain_id = ?""", (b['block_id'], chain_id)):
                    store.log.info("block %d already in chain %d",
                                   b['block_id'], chain_id)
                    b = None
                else:
                    if b['height'] == 0:
                        b['hashPrev'] = GENESIS_HASH_PREV
                    else:
                        b['hashPrev'] = 'dummy'  # Fool adopt_orphans.
                    store.offer_block_to_chains(b, frozenset([chain_id]))
        else:
            b = store.parse_block(ds, chain_id, magic, length)
            b["hash"] = hash
            chain_ids = frozenset([] if chain_id is None else [chain_id])
            store.import_block(b, chain_ids=chain_ids)
            if ds.read_cursor != end:
                store.log.debug("Skipped %d bytes at block end",
                                end - ds.read_cursor)

        bytes_done += length
        if bytes_done >= store.commit_bytes:
            store.log.debug("commit")
            store.commit()
            bytes_done = 0

    if bytes_done > 0:
        store.commit()
Example #6
    def block_header_hash(chain, header):
        return util.scrypt(header)
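Example #6 is only meaningful together with util.scrypt itself. In Abe- and pool-derived scrypt forks this is typically a thin wrapper around the ltc_scrypt C extension; the sketch below assumes that module and its getPoWHash function (an assumption based on similar projects, not code taken from the ones listed here):

import ltc_scrypt

def scrypt(data):
    # scrypt(N=1024, r=1, p=1) proof-of-work hash of an 80-byte block
    # header, as used by Litecoin-style chains; returns 32 bytes.
    return ltc_scrypt.getPoWHash(data)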