def __init__(self, blk_hash, raw_hex):
    """Parse a LevelDB block-index record (CBlockIndex serialization).

    The value is a sequence of varints — client version, height, status
    flags, tx count, then optional file/data/undo positions depending on
    status — followed by the 80-byte serialized block header.

    :param blk_hash: hex id of the block this record describes
    :param raw_hex: raw bytes of the index record value
    """
    self.hash = blk_hash
    pos = 0

    # Serialization client version; read only to advance the cursor.
    n_version, i = _read_varint(raw_hex[pos:])
    pos += i

    self.height, i = _read_varint(raw_hex[pos:])
    pos += i

    self.status, i = _read_varint(raw_hex[pos:])
    pos += i

    self.n_tx, i = _read_varint(raw_hex[pos:])
    pos += i

    # File number is only serialized when block data or undo data exists.
    if self.status & (BLOCK_HAVE_DATA | BLOCK_HAVE_UNDO):
        self.file, i = _read_varint(raw_hex[pos:])
        pos += i
    else:
        self.file = -1

    if self.status & BLOCK_HAVE_DATA:
        self.data_pos, i = _read_varint(raw_hex[pos:])
        pos += i
    else:
        self.data_pos = -1

    if self.status & BLOCK_HAVE_UNDO:
        self.undo_pos, i = _read_varint(raw_hex[pos:])
        pos += i
    else:
        # Bug fix: undo_pos was previously left undefined when the block
        # has no undo data; keep it consistent with file/data_pos above.
        self.undo_pos = -1

    # The remaining 80 bytes must be exactly the block header.
    assert (pos + 80 == len(raw_hex))
    self.version, p, m, time, bits, self.nonce = unpack(
        "<I32s32sIII", raw_hex[-80:])
    self.prev_hash = format_hash(p)
    self.merkle_root = format_hash(m)
def parse_from_hex(self, raw_hex):
    """Populate this block header from its 80-byte raw serialization.

    Layout: version(4) | prev_hash(32) | merkle_root(32) | time(4) |
    bits(4) | nonce(4). Also derives difficulty and the header hash.

    :param raw_hex: exactly 80 bytes of serialized header
    :returns: self, for call chaining
    """
    assert (len(raw_hex) == 80)
    self._version = decode_uint32(raw_hex[:4])
    self._previous_block_hash = format_hash(raw_hex[4:36])
    self._merkle_root = format_hash(raw_hex[36:68])
    self._timestamp = decode_uint32(raw_hex[68:72])
    self._bits = decode_uint32(raw_hex[72:76])
    self._nonce = decode_uint32(raw_hex[76:80])
    self._difficulty = self.calc_difficulty(self._bits)
    # Header hash is the double SHA-256 of the full 80 bytes.
    self.hash = format_hash(double_sha256(raw_hex))
    # Bug fix: removed Python-2-only `print self.__repr__()` debug
    # statement, which is a syntax error on Python 3 and noisy output
    # in any case.
    return self
def parse_from_hex(self, raw_hex):
    """Fill in header fields from an 80-byte raw header and return self.

    Field layout: version(4) | prev hash(32) | merkle root(32) |
    timestamp(4) | bits(4) | nonce(4).
    """
    assert (len(raw_hex) == 80)
    header = raw_hex
    self._version = decode_uint32(header[0:4])
    self._previous_block_hash = format_hash(header[4:36])
    self._merkle_root = format_hash(header[36:68])
    self._timestamp = decode_uint32(header[68:72])
    self._bits = decode_uint32(header[72:76])
    self._nonce = decode_uint32(header[76:80])
    # Difficulty is derived from the compact `bits` field.
    self._difficulty = self.calc_difficulty(self._bits)
    # The block id is the double SHA-256 of the whole header.
    self.hash = format_hash(double_sha256(header))
    return self
async def stats():
    """ .stats - Returns all network stats """
    # Bug fix: the daemon was queried three separate times for the same
    # last-block header; one RPC call is sufficient and keeps the derived
    # numbers mutually consistent.
    data = daemon.getlastblockheader()
    header = data["block_header"]
    difficulty = float(header["difficulty"])
    # Network hashrate estimated from difficulty over the target block time.
    hashrate = format_hash(difficulty / 120)
    height = int(header["height"])
    deposits = int(header["deposits"]) / 1000000
    supply = get_supply()

    stats_embed = discord.Embed(title="Conceal",
                                url="https://github.com/TheCircleFoundation/",
                                description="Complete Network Stats",
                                color=0x7F7FFF)
    stats_embed.set_thumbnail(url=config['logo_url'])

    # Sum the hashrate reported by every known pool.
    hashFromPools = 0
    for p in session2.query(pool).all():
        hashFromPools += p.hashrate

    stats_embed.add_field(name="Hashrate (from Pools)", value="{}KH/s".format(hashFromPools / 1000))
    stats_embed.add_field(name="Hashrate (from Difficulty)", value="{}/s".format(hashrate))
    stats_embed.add_field(name="Height", value="{:,}".format(height))
    stats_embed.add_field(name="Difficulty", value="{0:,.0f}".format(difficulty))
    stats_embed.add_field(name="Circulating Supply", value="{:0,.2f} CCX".format(supply))
    stats_embed.add_field(name="Deposits", value="{:0,.2f}".format(deposits))
    stats_embed.set_footer(
        text="Powered by the Conceal Discord bot. Message @katz for any issues."
    )
    await client.say(embed=stats_embed)
def hash(self):
    """Returns the transaction's id.  Equivalent to the hash for non
    SegWit transactions, it differs from it for SegWit ones. """
    cached = self._hash
    if cached is None:
        # Compute lazily on first access, then memoize.
        cached = format_hash(double_sha256(self.hex))
        self._hash = cached
    return cached
def submit(self, header, worker_name):
    """Forward a solved share built from *header* to the pool for
    *worker_name*.

    Returns the pool's ``mining.submit`` RPC result, True when the hash
    does not beat the target (share dropped), or False when no matching
    job is known.
    """
    # Drop unused padding
    header = header[:160]

    # 1. Check if blockheader meets requested difficulty
    header_bin = binascii.unhexlify(header[:160])
    # Reverse each of the 20 little-endian 4-byte words before hashing.
    rev = ''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])
    hash_bin = utils.doublesha(rev)
    # Re-swap the 8 words of the digest to get the displayable block hash.
    block_hash = ''.join([ hash_bin[i*4:i*4+4][::-1] for i in range(0, 8) ])
    #log.info('!!! %s' % header[:160])
    log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))

    if utils.uint256_from_str(hash_bin) > self.target:
        log.debug("Share is below expected target")
        # NOTE(review): True is returned even though the share is not
        # forwarded — presumably to keep the miner from erroring out.
        return True

    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found")
        return False

    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(self.extranonce2_padding(extranonce2))

    # 4. Parse ntime and nonce from header
    ntimepos = 17*8 # 17th integer in datastring
    noncepos = 19*8 # 19th integer in datastring
    ntime = header[ntimepos:ntimepos+8]
    nonce = header[noncepos:noncepos+8]

    # 5. Submit share to the pool
    return self.f.rpc('mining.submit', [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
def parse_from_hex(self, raw_hex):
    """Parse a legacy (non-witness) serialized transaction.

    Layout: version(4) | varint input count | inputs... | varint output
    count | outputs... | lock_time(4).  Sets size, hash, and the parsed
    input/output lists.

    :param raw_hex: raw transaction bytes (may extend past this tx)
    :returns: self, for call chaining
    """
    offset = 4
    self._version = decode_uint32(raw_hex[:4])

    input_cnt, varint_size = decode_varint(raw_hex[offset:])
    self.input_cnt = input_cnt
    offset += varint_size
    self._inputs = []
    for _ in range(input_cnt):
        tx_input = TransactionInput().parse_from_hex(raw_hex[offset:])
        offset += tx_input.size
        # Bug fix: the original appended the `input` builtin instead of
        # the freshly parsed tx_input, so _inputs held garbage.
        self._inputs.append(tx_input)

    output_cnt, varint_size = decode_varint(raw_hex[offset:])
    offset += varint_size
    self.output_cnt = output_cnt
    self._outputs = []
    for _ in range(output_cnt):
        tx_output = TransactionOutput().parse_from_hex(raw_hex[offset:])
        offset += tx_output.size
        self._outputs.append(tx_output)

    # Bug fix: removed Python-2-only `print ... .__repr__()` debug
    # statements (syntax errors on Python 3).
    # Trailing 4 bytes are the lock time.
    self.size = offset + 4
    self.hash = format_hash(double_sha256(raw_hex[:self.size]))
    return self
def submit(self, header, worker_name):
    """Submit a share built from the 80-byte *header* for *worker_name*.

    Returns the pool's ``mining.submit`` RPC result, or False when the
    originating job cannot be found.
    """
    # Drop unused padding
    header = header[:80]

    # 1. Check if blockheader meets requested difficulty
    hash_bin = utils.gen_hash(header)
    block_hash = hash_bin
    log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))
    # Target check intentionally disabled: every share is forwarded.
    #if utils.uint256_from_str(hash_bin) > self.target:
    #    log.debug("Share does not meet expected target")
    #    return True

    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found")
        return False

    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(self.extranonce2_padding(extranonce2))

    # 4. Parse ntime and nonce from header
    ntimepos = 5*8 # ntime: 6th 4-byte field of the header (byte offset 40)
    noncepos = 4*8 # nonce: 5th 4-byte field of the header (byte offset 32)
    ntime = binascii.hexlify(header[ntimepos:ntimepos+8])
    nonce = binascii.hexlify(header[noncepos:noncepos+8])

    # 5. Submit share to the pool
    return self.f.rpc('mining.submit', [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
def submit(self, newjob, nonce, worker_name):
    """Reconstruct a block header from *newjob* + *nonce* and submit it.

    Looks the job up by merkle root, serializes the header, checks it
    against the target, then forwards the share via ``mining.submit``.
    Returns the RPC result, True for an under-target share, or False if
    the job cannot be found.
    """
    #job_id = newjob['library_number']
    #merkle = newjob['solutions']
    ntime = newjob['timestamp']
    # Render the merkle tree root byte list as a lowercase hex string.
    merkle = ''.join('{:02x}'.format(x) for x in newjob['merkle tree root'])

    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_merkle(merkle)
    except KeyError:
        log.info("Job not found from merkle")
        return False

    ### GET HEADER
    header = job.serialize_header(merkle, ntime, nonce)
    #print(header[:160])

    ### BELOW IS LIKE SUBMIT(header, worker_name)
    # Drop unused padding
    header = header[:160]

    # 1. Check if blockheader meets requested difficulty
    header_bin = binascii.unhexlify(header[:160])
    # Word-swap the 20 little-endian 4-byte words before hashing.
    rev = ''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ])
    hash_bin = utils.doublesha(rev)
    block_hash = ''.join([ hash_bin[i*4:i*4+4][::-1] for i in range(0, 8) ])

    if utils.uint256_from_str(hash_bin) > self.target:
        log.debug("Share is below expected target")
        return True
    else:
        #log.info('!!! %s' % header[:160])
        log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))

    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found from header")
        return False

    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(self.extranonce2_padding(extranonce2))

    # 4. Parse ntime and nonce from header
    ntimepos = 17*8 # 17th integer in datastring
    noncepos = 19*8 # 19th integer in datastring
    ntime = header[ntimepos:ntimepos+8]
    nonce = header[noncepos:noncepos+8]

    # 5. Submit share to the pool
    #print(worker_name, job.job_id, extranonce2_hex, ntime, nonce)
    return self.f.rpc('mining.submit', [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
def parse_from_hex(self, raw_hex):
    """Parse one serialized transaction input and return self.

    Layout: prev tx hash(32) | prev output index(4) | varint script
    length | script | sequence(4).
    """
    # Outpoint occupies the first 36 bytes; the script varint follows it.
    script_length, consumed = decode_varint(raw_hex[36:])
    script_start = 36 + consumed
    total = script_start + script_length + 4
    self.size = total
    data = raw_hex[:total]
    self._previous_transaction_hash = format_hash(data[:32])
    self._previous_transaction_index = decode_uint32(data[32:36])
    self._script = Script(data[script_start:script_start + script_length])
    # Sequence number is the final 4 bytes of the input.
    self._sequence_number = decode_uint32(data[total - 4:total])
    return self
def hash(self):
    """Returns the transaction's hash"""
    if self._hash is not None:
        return self._hash
    # segwit transactions have two transaction ids/hashes, txid and wtxid
    # txid is a hash of all of the legacy transaction fields only
    if self.is_segwit:
        # Skip the 2 marker/flag bytes and the witness section: keep the
        # 4-byte version, the legacy body, and the 4-byte locktime.
        data = (self.hex[:4]
                + self.hex[6:self._offset_before_tx_witnesses]
                + self.hex[-4:])
    else:
        data = self.hex
    self._hash = format_hash(double_sha256(data))
    return self._hash
def txid(self):
    """Returns the transaction's id. Equivalent to the hash for non
    SegWit transactions, it differs from it for SegWit ones. """
    if self._txid is None:
        # segwit transactions have two ids: txid hashes only the legacy
        # fields, skipping the marker/flag bytes and the witness data.
        if self.is_segwit:
            txid_data = self.hex[:4]
            txid_data += self.hex[6:self._offset_before_tx_witnesses]
            txid_data += self.hex[-4:]
        else:
            txid_data = self.hex
        self._txid = format_hash(double_sha256(txid_data))
    return self._txid
def __getBlockIndexes(self, index):
    """Load, sort, and cache the block index records from the LevelDB
    at *index*.

    The database is reopened only when the requested path differs from
    the cached one, and it is closed immediately after reading so the
    LevelDB lock is released (helps during concurrent operations).
    """
    if self.indexPath != index:
        db = plyvel.DB(index, compression=None)
        # Keys prefixed with b'b' are block index records; strip the
        # prefix byte for the block hash and wrap each value.
        self.blockIndexes = [
            DBBlockIndex(format_hash(k[1:]), v)
            for k, v in db.iterator() if k[0] == ord('b')
        ]
        db.close()
        self.blockIndexes.sort(key=lambda x: x.height)
        self.indexPath = index
    return self.blockIndexes
def submit(self, header, worker_name):
    """Submit a share built from the 80-byte *header* for *worker_name*.

    Returns the pool's ``mining.submit`` RPC result, or False when the
    originating job cannot be found.
    """
    # Drop unused padding
    header = header[:80]

    # 1. Check if blockheader meets requested difficulty
    hash_bin = utils.gen_hash(header)
    block_hash = hash_bin
    log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))
    # Target check intentionally disabled: every share is forwarded.
    #if utils.uint256_from_str(hash_bin) > self.target:
    #    log.debug("Share does not meet expected target")
    #    return True

    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found")
        return False

    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(
        self.extranonce2_padding(extranonce2))

    # 4. Parse ntime and nonce from header
    ntimepos = 5 * 8  # ntime: 6th 4-byte field of the header (byte offset 40)
    noncepos = 4 * 8  # nonce: 5th 4-byte field of the header (byte offset 32)
    ntime = binascii.hexlify(header[ntimepos:ntimepos + 8])
    nonce = binascii.hexlify(header[noncepos:noncepos + 8])

    # 5. Submit share to the pool
    return self.f.rpc(
        'mining.submit',
        [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
def hash(self):
    """Returns the block's hash (double sha256 of its 80 bytes header"""
    if self._hash is None:
        # Only the first 80 bytes (the header) participate in the hash.
        header = self.hex[:80]
        self._hash = format_hash(double_sha256(header))
    return self._hash
def transaction_hash(self):
    """Returns the hash of the transaction containing the output
    redeemed by this input"""
    if self._transaction_hash is None:
        # The outpoint txid is the first 32 bytes of the input.
        outpoint_txid = self.hex[:32]
        self._transaction_hash = format_hash(outpoint_txid)
    return self._transaction_hash
def handle_event(self, method, params, connection_ref):
    '''Handle RPC calls and notifications from the pool'''

    # Yay, we received something from the pool,
    # let's restart the timeout.
    self.reset_timeout()

    if method == 'mining.notify':
        '''Proxy just received information about new mining job'''

        (job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs) = params[:9]
        #print len(str(params)), len(merkle_branch)

        '''
        log.debug("Received new job #%s" % job_id)
        log.debug("prevhash = %s" % prevhash)
        log.debug("version = %s" % version)
        log.debug("nbits = %s" % nbits)
        log.debug("ntime = %s" % ntime)
        log.debug("clean_jobs = %s" % clean_jobs)
        log.debug("coinb1 = %s" % coinb1)
        log.debug("coinb2 = %s" % coinb2)
        log.debug("merkle_branch = %s" % merkle_branch)
        '''

        # Broadcast to Stratum clients
        stratum_listener.MiningSubscription.on_template(
            job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs)

        # Broadcast to getwork clients
        job = Job.build_from_broadcast(job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime)
        log.info("New job %s for prevhash %s, clean_jobs=%s" % \
            (job.job_id, utils.format_hash(job.prevhash), clean_jobs))

        self.job_registry.add_template(job, clean_jobs)

    elif method == 'mining.set_difficulty':
        difficulty = params[0]
        log.info("Setting new difficulty: %s" % difficulty)

        stratum_listener.DifficultySubscription.on_new_difficulty(difficulty)
        self.job_registry.set_difficulty(difficulty)

    elif method == 'client.reconnect':
        # Pool-directed failover: fall back to the current host for any
        # field the pool left empty.
        (hostname, port, wait) = params[:3]
        new = list(self.job_registry.f.main_host[::])
        if hostname: new[0] = hostname
        if port: new[1] = port
        log.info("Server asked us to reconnect to %s:%d" % tuple(new))
        self.job_registry.f.reconnect(new[0], new[1], wait)

    elif method == 'client.add_peers':
        '''New peers which can be used on connection failure'''
        return False
        '''
        peerlist = params[0] # TODO
        for peer in peerlist:
            self.job_registry.f.add_peer(peer)
        return True
        '''

    elif method == 'client.get_version':
        return "stratum-proxy/%s" % _version.VERSION

    elif method == 'client.show_message':
        # Displays message from the server to the terminal
        utils.show_message(params[0])
        return True

    elif method == 'mining.get_hashrate':
        return {} # TODO

    elif method == 'mining.get_temperature':
        return {} # TODO

    else:
        '''Pool just asked us for something which we don't support...'''
        log.error("Unhandled method %s with params %s" % (method, params))
async def hashrate():
    """ Returns network hashrate """
    # Estimate from the last block's difficulty over the block time.
    header = daemon.getlastblockheader()["block_header"]
    rate = format_hash(float(header["difficulty"]) / 30)
    await client.say("The current global hashrate is **{}/s**".format(rate))
def handle_event(self, method, params, connection_ref):
    '''Handle RPC calls and notifications from the pool'''

    # Yay, we received something from the pool,
    # let's restart the timeout.
    self.reset_timeout()

    if method == 'mining.notify':
        '''Proxy just received information about new mining job'''

        (job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs) = params[:9]
        #print len(str(params)), len(merkle_branch)

        '''
        log.debug("Received new job #%s" % job_id)
        log.debug("prevhash = %s" % prevhash)
        log.debug("version = %s" % version)
        log.debug("nbits = %s" % nbits)
        log.debug("ntime = %s" % ntime)
        log.debug("clean_jobs = %s" % clean_jobs)
        log.debug("coinb1 = %s" % coinb1)
        log.debug("coinb2 = %s" % coinb2)
        log.debug("merkle_branch = %s" % merkle_branch)
        '''

        # Broadcast to Stratum clients
        stratum_listener.MiningSubscription.on_template(
            job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs)

        # Broadcast to getwork clients
        job = Job.build_from_broadcast(job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime)
        log.info("New job %s for prevhash %s, clean_jobs=%s" % \
            (job.job_id, utils.format_hash(job.prevhash), clean_jobs))

        self.job_registry.add_template(job, clean_jobs)

    elif method == 'mining.set_difficulty':
        difficulty = params[0]
        log.info("Setting new difficulty: %s" % difficulty)

        stratum_listener.DifficultySubscription.on_new_difficulty(
            difficulty)
        self.job_registry.set_difficulty(difficulty)

    elif method == 'client.reconnect':
        # Pool-directed failover: fall back to the current host for any
        # field the pool left empty.
        (hostname, port, wait) = params[:3]
        new = list(self.job_registry.f.main_host[::])
        if hostname: new[0] = hostname
        if port: new[1] = port
        log.info("Server asked us to reconnect to %s:%d" % tuple(new))
        self.job_registry.f.reconnect(new[0], new[1], wait)

    elif method == 'client.add_peers':
        '''New peers which can be used on connection failure'''
        return False
        '''
        peerlist = params[0] # TODO
        for peer in peerlist:
            self.job_registry.f.add_peer(peer)
        return True
        '''

    elif method == 'client.get_version':
        return "stratum-proxy/%s" % _version.VERSION

    elif method == 'client.show_message':
        # Displays message from the server to the terminal
        utils.show_message(params[0])
        return True

    elif method == 'mining.get_hashrate':
        return {} # TODO

    elif method == 'mining.get_temperature':
        return {} # TODO

    else:
        '''Pool just asked us for something which we don't support...'''
        log.error("Unhandled method %s with params %s" % (method, params))
def submit(self, newjob, nonce, worker_name):
    """Reconstruct a block header from *newjob* + *nonce* and submit it.

    Looks the job up by merkle root, serializes the header, checks it
    against the target, then forwards the share via ``mining.submit``.
    Returns the RPC result, True for an under-target share, or False if
    the job cannot be found.
    """
    #job_id = newjob['library_number']
    #merkle = newjob['solutions']
    ntime = newjob['timestamp']
    # Render the merkle tree root byte list as a lowercase hex string.
    merkle = ''.join('{:02x}'.format(x) for x in newjob['merkle tree root'])

    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_merkle(merkle)
    except KeyError:
        log.info("Job not found from merkle")
        return False

    ### GET HEADER
    header = job.serialize_header(merkle, ntime, nonce)
    #print(header[:160])

    ### BELOW IS LIKE SUBMIT(header, worker_name)
    # Drop unused padding
    header = header[:160]

    # 1. Check if blockheader meets requested difficulty
    header_bin = binascii.unhexlify(header[:160])
    # Word-swap the 20 little-endian 4-byte words before hashing.
    rev = ''.join(
        [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)])
    hash_bin = utils.doublesha(rev)
    block_hash = ''.join(
        [hash_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 8)])

    if utils.uint256_from_str(hash_bin) > self.target:
        log.debug("Share is below expected target")
        return True
    else:
        #log.info('!!! %s' % header[:160])
        log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash)))

    # 2. Lookup for job and extranonce used for creating given block header
    try:
        (job, extranonce2) = self.get_job_from_header(header)
    except KeyError:
        log.info("Job not found from header")
        return False

    # 3. Format extranonce2 to hex string
    extranonce2_hex = binascii.hexlify(
        self.extranonce2_padding(extranonce2))

    # 4. Parse ntime and nonce from header
    ntimepos = 17 * 8  # 17th integer in datastring
    noncepos = 19 * 8  # 19th integer in datastring
    ntime = header[ntimepos:ntimepos + 8]
    nonce = header[noncepos:noncepos + 8]

    # 5. Submit share to the pool
    #print(worker_name, job.job_id, extranonce2_hex, ntime, nonce)
    return self.f.rpc(
        'mining.submit',
        [worker_name, job.job_id, extranonce2_hex, ntime, nonce])
def handle_event(self, method, params, connection_ref):
    """Handle RPC calls and notifications from the pool.

    Dispatches on *method*: new-job broadcasts, difficulty changes,
    reconnect requests, and a few informational calls.
    """
    log.warn("@@ %s @@" % method)

    # Yay, we received something from the pool,
    # let's restart the timeout.
    self.reset_timeout()

    if method == "mining.notify":
        # Proxy just received information about new mining job.
        (job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs) = params[:9]
        # print len(str(params)), len(merkle_branch)

        if clean_jobs:
            log.warn("--Clean Jobs!--")
            # Tell every connected websocket client to drop stale work.
            rpc_tx = {"method": "clean_job", "params": [], "id": "cj"}
            for c in ws_svr:
                # Bug fix: Python-2-only `print` statements replaced with
                # the function form so this module parses on Python 3 too.
                print("ws_svr")
                print(c)
                c.sendMessage(json.dumps(rpc_tx))

        """
        log.debug("Received new job #%s" % job_id)
        log.debug("prevhash = %s" % prevhash)
        log.debug("version = %s" % version)
        log.debug("nbits = %s" % nbits)
        log.debug("ntime = %s" % ntime)
        log.debug("clean_jobs = %s" % clean_jobs)
        log.debug("coinb1 = %s" % coinb1)
        log.debug("coinb2 = %s" % coinb2)
        log.debug("merkle_branch = %s" % merkle_branch)
        """

        # Broadcast to Stratum clients
        stratum_listener.MiningSubscription.on_template(
            job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs
        )

        # Broadcast to getwork clients
        job = Job.build_from_broadcast(job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime)
        log.info(
            "New job %s for prevhash %s, clean_jobs=%s"
            % (job.job_id, utils.format_hash(job.prevhash), clean_jobs)
        )

        self.job_registry.add_template(job, clean_jobs)

    elif method == "mining.set_difficulty":
        difficulty = params[0]
        log.info("Setting new difficulty: %s" % difficulty)

        stratum_listener.DifficultySubscription.on_new_difficulty(difficulty)
        self.job_registry.set_difficulty(difficulty)

    elif method == "client.reconnect":
        # Pool-directed failover: keep the current host/port for any
        # field the pool left empty.
        (hostname, port, wait) = params[:3]
        new = list(self.job_registry.f.main_host[::])
        if hostname:
            new[0] = hostname
        if port:
            new[1] = port
        log.info("Server asked us to reconnect to %s:%d" % tuple(new))
        self.job_registry.f.reconnect(new[0], new[1], wait)

    elif method == "client.add_peers":
        # New peers which can be used on connection failure.
        return False
        """
        peerlist = params[0] # TODO
        for peer in peerlist:
            self.job_registry.f.add_peer(peer)
        return True
        """

    elif method == "client.get_version":
        return "stratum-proxy/%s" % _version.VERSION

    elif method == "client.show_message":
        # Displays message from the server to the terminal
        utils.show_message(params[0])
        return True

    elif method == "mining.get_hashrate":
        return {}  # TODO

    elif method == "mining.get_temperature":
        return {}  # TODO

    else:
        # Pool just asked us for something which we don't support...
        log.error("Unhandled method %s with params %s" % (method, params))
def merkle_root(self):
    """Returns the block's merkle root"""
    if self._merkle_root is None:
        # Bytes 36..68 of the 80-byte header hold the merkle root.
        raw_root = self.hex[36:68]
        self._merkle_root = format_hash(raw_root)
    return self._merkle_root
def previous_block_hash(self):
    """Return the hash of the previous block"""
    if self._previous_block_hash is None:
        # Bytes 4..36 of the header hold the previous block's hash.
        raw_prev = self.hex[4:36]
        self._previous_block_hash = format_hash(raw_prev)
    return self._previous_block_hash