def transacted(self, txDict):
    # validate the incoming transaction
    TX = Transaction.parseTransaction(txDict)
    Node.logger.info("receive transaction {}".format(TX.hash))
    if TX.isValid():
        utxoSet = copy.deepcopy(self.isolateUTXO.utxoSet)
        if self.isolateUTXO.updateWithTX(TX, utxoSet):
            self.isolateUTXO.utxoSet = utxoSet
            # persist to the transaction pool
            self.database["transaction"].update(
                {"hash": TX.hash},
                {"$set": utils.obj2dict(TX, sort_keys=True)},
                upsert=True)
            # retry transactions parked in the isolate pool
            isolatePool = copy.copy(self.isolatePool)
            for isolateTX in isolatePool:
                if self.isolateUTXO.updateWithTX(isolateTX, utxoSet):
                    self.isolatePool.remove(isolateTX)
                    # persist to the transaction pool
                    self.database["transaction"].update(
                        {"hash": isolateTX.hash},
                        {"$set": utils.obj2dict(isolateTX, sort_keys=True)},
                        upsert=True)
                else:
                    # roll back to the last committed UTXO snapshot
                    utxoSet = copy.deepcopy(self.isolateUTXO.utxoSet)
        else:
            # inputs not visible yet: park the transaction in the isolate pool
            self.isolatePool.append(TX)
        return True
    else:
        # ditch it
        utils.warning("transaction is not valid, hash is:", TX.hash)
        return False
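A minimal usage sketch, not from the source: transacted() takes a plain dict, e.g. the payload of a "newTX" broadcast or the output of utils.obj2dict on a Transaction, which is exactly how trade() further below calls it. The rawBody name is hypothetical.

    # hedged sketch: feed a received JSON payload into transacted()
    txDict = json.loads(rawBody)  # hypothetical: body of a "newTX" broadcast
    if not node.transacted(txDict):
        Node.logger.warn("transaction rejected: {}".format(txDict.get("hash")))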
def getRemoteBlocks(peer, index):
    result = node.httpProcess("http://" + peer + "/blockchain/index/" + str(index))
    blocksDict = result["response"].json()
    if blocksDict:
        block = Block(blocksDict[0])
        if block.isValid():
            # save the candidate block into the block pool
            node.database["blockpool"].update(
                {"hash": block.hash},
                {"$set": utils.obj2dict(block, sort_keys=True)},
                upsert=True)
            return jsonify(utils.obj2dict(block))
    else:
        return "no index {} from peer {}".format(index, peer)
def mine(self, coinbase):
    Node.logger.info("is Mining...")
    # sync transactions from the txPool
    txPool = self.txPoolSync()
    txPoolDict = [coinbase]
    for item in txPool:
        txPoolDict.append(utils.obj2dict(item, sort_keys=True))
    # currentChain = self.syncLocalChain()  # gather from the last node
    # prevBlock = currentChain.lastblock()
    prevBlock = self.blockchain.lastblock()
    # mine a block with a valid nonce
    index = int(prevBlock.index) + 1
    timestamp = date.datetime.now().strftime('%s')
    data = txPoolDict
    prev_hash = prevBlock.hash
    nonce = 0
    blockDict = utils.args2dict(CONVERSIONS=BLOCK_VAR_CONVERSIONS,
                                index=index,
                                timestamp=timestamp,
                                data=data,
                                prev_hash=prev_hash,
                                nonce=nonce)
    Node.logger.info("begin mine...{}".format(index))
    newBlock = self.findNonce(Block(blockDict))
    if newBlock is None:
        Node.logger.warn("other miner mined")
        return "other miner mined"
    Node.logger.info("end mine {}-{}.".format(index, newBlock.nonce))
    # remove mined transactions from the txPool
    self.txPoolRemove(newBlock)
    blockDict = utils.obj2dict(newBlock)
    # push to the blockPool
    self.mined(blockDict)
    Node.logger.info("broadcast block {}-{}".format(newBlock.index, newBlock.nonce))
    self.broadcast(blockDict, type="newBlock")
    Node.logger.info("mine broadcast finished")
    # the steps below are handled by blockPoolSync:
    # newBlock.save()
    # self.blockchain.add_block(newBlock)
    # self.updateUTXO(newBlock)
    return newBlock
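A hedged sketch of driving mine() by hand, mirroring what minerProcess() further below does; mywallet and node are assumed to be the same module-level objects used elsewhere in these snippets.

    # build a coinbase transaction and try to mine a block with it
    t1 = Transaction.newCoinbase(mywallet.address)
    coinbase = utils.obj2dict(t1)
    result = node.mine(coinbase)
    if result == "other miner mined":
        log.info("lost the race; block sync will pick up the winner")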
def preprocess_cameras(self, cameras):
    ret = []
    for camera in cameras:
        image = camera.image_data.reshape(camera.capture_height,
                                          camera.capture_width, 3)
        depth = camera.depth_data.reshape(camera.capture_height,
                                          camera.capture_width)
        start_preprocess = time.time()
        if self.preprocess_with_tensorflow:
            import tf_utils  # avoid hard requirement on tensorflow
            if self.sess is None:
                raise Exception(
                    'No tensorflow session. Did you call set_tf_session?')
            # This runs ~2x slower (18ms on a GTX 980) than CPU when we are not
            # running a model, due to transfer overhead, but we do it anyway to
            # keep training and testing as similar as possible.
            image = tf_utils.preprocess_image(image, self.sess)
            depth = tf_utils.preprocess_depth(depth, self.sess)
        else:
            image = utils.preprocess_image(image)
            depth = utils.preprocess_depth(depth)
        end_preprocess = time.time()
        log.debug('preprocess took %rms',
                  (end_preprocess - start_preprocess) * 1000.)
        camera_out = obj2dict(camera, exclude=['image', 'depth'])
        camera_out['image'] = image
        if self.pyglet_render:
            # Keep copy of image without mean subtraction etc. that agent does
            camera_out['image_raw'] = image
        camera_out['depth'] = depth
        ret.append(camera_out)
    return ret
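preprocess_observation() further below is the actual caller of this method. A hedged standalone sketch, assuming a camera object with the fields referenced above; the stub class and the agent receiver are hypothetical, for illustration only.

    # hypothetical minimal camera stub to exercise preprocess_cameras()
    import numpy as np

    class CameraStub(object):
        capture_height, capture_width = 2, 3
        image_data = np.zeros(2 * 3 * 3, dtype=np.uint8)
        depth_data = np.zeros(2 * 3, dtype=np.float32)

    out = agent.preprocess_cameras([CameraStub()])  # agent is assumed
    frame = out[0]['image']  # preprocessed HxWx3 array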
def saveToPool(self):
    index = self.index
    nonce = self.nonce
    Block.logger.warn("save block {}-{} to pool".format(index, nonce))
    Block.database["blockpool"].update(
        {"hash": self.hash},
        {"$set": utils.obj2dict(self, sort_keys=True)},
        upsert=True)
def main():
    episode_return = EpisodeReturn()
    from utils import obj2dict
    now = time.time()
    ser = obj2dict(episode_return)
    took = time.time() - now
    print('took %f s' % took)
    print(ser)
def to_xml(self):
    extra = dict(id=self.key().id(),
                 cityId=self.cityRef.key().id(),
                 creationDateTime=unicode(self.updateDateTime.isoformat() + "Z"),
                 updateDateTime=unicode(self.updateDateTime.isoformat() + "Z"),
                 locationsUpdated=unicode(self.locationsUpdated.isoformat() + "Z"))
    provider = obj2dict(self, self.properties(), exclude=['cityRef'], extra=extra)
    provider = dict_to_xml(provider, "provider")
    return provider
def to_xml(self):
    extra = dict(id=self.key().id(),
                 latitude=self.geoloc.lat,
                 longitude=self.geoloc.lon,
                 creationDateTime=unicode(self.updateDateTime.isoformat() + "Z"),
                 updateDateTime=unicode(self.updateDateTime.isoformat() + "Z"))
    city = obj2dict(self, self.properties(), exclude=['geoloc'], extra=extra)
    city = dict_to_xml(city, "city")
    return city
def to_xml(self):
    extra = dict(id=self.key().id(),
                 stationRef=self.stationRef.key().id(),
                 cityId=self.city.key().id(),
                 stationId=self.stationRef.key().id(),
                 creationDateTime=unicode(self.updateDateTime.isoformat() + "Z"),
                 updateDateTime=unicode(self.updateDateTime.isoformat() + "Z"))
    station = obj2dict(self, self.properties(),
                       exclude=['stationRef', 'geoloc'], extra=extra)
    station = dict_to_xml(station, "stationStatus")  # stationStatuses
    return station
def serialize(self):
    # obj2dict on the class itself yields class-level defaults, so unset
    # instance fields fall back cleanly
    defaults = utils.obj2dict(EpisodeReturn)
    prop_names = defaults.keys()
    ret = {}
    for k in prop_names:
        v = getattr(self, k, defaults[k])
        if k in ['start_time', 'end_time']:
            v = str(arrow.get(v).to('local'))
        ret[k] = v
    return ret
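A hedged usage sketch of the round trip serialize() implies; only start_time and end_time are named in the method itself, the rest of the construction is assumed.

    er = EpisodeReturn()
    er.start_time = time.time()  # rendered via arrow in local time
    print(er.serialize())        # other keys come from the class defaults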
def to_xml(self):
    extra = dict(id=self.key().id(),
                 latitude=self.geoloc.lat,
                 longitude=self.geoloc.lon,
                 providerId=self.providerRef.key().id(),
                 cityId=self.providerRef.cityRef.key().id(),
                 creationDateTime=unicode(self.updateDateTime.isoformat() + "Z"),
                 updateDateTime=unicode(self.updateDateTime.isoformat() + "Z"))
    station = obj2dict(self, self.properties(),
                       exclude=['providerRef', 'geoloc'], extra=extra)
    station = dict_to_xml(station, "station")
    return station
def get(self):
    page = self.get_argument('page', 1)
    rows = self.get_argument('rows', 20)
    start = (int(page) - 1) * int(rows)
    limit = int(rows)  # arguments arrive as strings
    rows = NewsCategory.gets(start, limit)
    rows = [obj2dict(r) for r in rows]
    total = NewsCategory.get_count()
    response = {'total': total, 'rows': rows}
    return self.write(date_encode(response))
def preprocess_observation(self, observation):
    if observation:
        ret = obj2dict(observation, exclude=['cameras'])
        if observation.camera_count > 0 and \
                getattr(observation, 'cameras', None) is not None:
            cameras = observation.cameras
            ret['cameras'] = self.preprocess_cameras(cameras)
        else:
            ret['cameras'] = []
    else:
        ret = None
    return ret
def mined(self, blockDict):
    # validate the candidate block
    block = Block(blockDict)
    Node.logger.info("receive block index {}-{}".format(block.index, block.nonce))
    if block.isValid():
        # save the block to the block pool
        self.database["blockpool"].update(
            {"hash": block.hash},
            {"$set": utils.obj2dict(block, sort_keys=True)},
            upsert=True)
        self.otherMined = True
        return True
    else:
        return False
def get(self):
    page = self.get_argument('page', 1)
    rows = self.get_argument('rows', 20)
    sort = self.get_argument('sort', '')
    order = self.get_argument('order', 'ASC')
    start = (int(page) - 1) * int(rows)
    limit = int(rows)  # arguments arrive as strings
    role_list = AdminRole.gets(start, limit)
    total = AdminRole.get_count()
    response = {
        'total': total,
        'rows': [obj2dict(role) for role in role_list]
    }
    return self.write(json_encode(response))
def trade(self, inPrvkey, inPubkey, outPubkey, amount, script=""):
    newTX = Transaction.newTransaction(inPrvkey, inPubkey, outPubkey,
                                       amount, self.tradeUTXO, script)
    if isinstance(newTX, dict):
        # newTransaction returned an error object instead of a transaction
        errObj = newTX
        Node.logger.critical(errObj["errCode"], errObj["errText"])
        return errObj
    newTXdict = utils.obj2dict(newTX)
    self.transacted(newTXdict)
    # use socket to broadcast instead of http
    Node.logger.info("broadcast transaction {}".format(newTX.hash))
    self.broadcast(newTXdict, type="newTX")
    Node.logger.info("transaction broadcast finished")
    return newTXdict
def minerProcess():
    while True:
        if args.debug and len(threading.enumerate()) != 4:
            # only used while debugging
            continue
        node.eMining.wait()
        node.eBlockSyncing.wait()
        node.eMining.clear()
        # log.info("minerProcess start.")
        try:
            txPoolCount = node.database["transaction"].count()
            if txPoolCount >= TRANSACTION_TO_BLOCK:
                t1 = Transaction.newCoinbase(mywallet.address)
                coinbase = utils.obj2dict(t1)
                # mine
                newBlock = node.mine(coinbase)
        except Exception as e:
            log.critical(traceback.format_exc())
        # log.info("minerProcess stop.")
        node.eMining.set()
def get(self):
    page = self.get_argument('page', 1)
    rows = self.get_argument('rows', 20)
    title = self.get_argument('title', '')
    begin = self.get_argument('begin', '')
    end = self.get_argument('end', '')
    query = {}
    if title:
        query['title'] = title
    if begin:
        query['begin'] = begin
    if end:
        query['end'] = end
    offset = (int(page) - 1) * int(rows)
    limit = int(rows)  # arguments arrive as strings
    rows = News.gets(offset, limit, **query)
    rows = [obj2dict(r) for r in rows]
    total = News.get_count()
    response = {'total': total, 'rows': rows}
    return self.write(date_encode(response))
@classmethod
def gets(cls, start=0, limit=20):
    rs = db_session.query(Admin.user_id, Admin.realname, Admin.email,
                          Admin.username, Admin.last_login_ip,
                          Admin.last_login_time, Admin.login_times,
                          Admin.status).offset(start).limit(limit)
    return [obj2dict(r) for r in rs.all()]
def getRangeBlocks(fromIndex, toIndex):
    blocks = node.blockchain.findRangeBlocks(fromIndex, toIndex)
    return jsonify(utils.obj2dict(blocks)), 200
def mine():
    t1 = Transaction.newCoinbase(mywallet.address)
    coinbase = utils.obj2dict(t1)
    # mine
    newBlock = node.mine(coinbase)
    return jsonify(utils.obj2dict(newBlock, indent=2)), 200
def lastblock():
    newBlock = node.blockchain.lastblock()
    return jsonify(utils.obj2dict(newBlock)), 200
def utxoGetTrade():
    utxoSet = node.tradeUTXO.utxoSet
    utxoSummary = node.tradeUTXO.getSummary()
    return jsonify({"summary": utxoSummary,
                    "utxoSet": utils.obj2dict(utxoSet, sort_keys=False)})
def utxoReindex():
    utxoSet = node.resetUTXO()
    return jsonify(utils.obj2dict(utxoSet))
def findTransaction(hash):
    transaction = node.blockchain.findTransaction(hash)
    return jsonify(utils.obj2dict(transaction))
def findIsolateUTXO(address):
    utxo = node.isolateUTXO.findUTXO(address)
    return jsonify(utils.obj2dict(utxo))
def getBlockByIndex(blockIndex):
    block = node.blockchain.findRangeBlocks(blockIndex, blockIndex)
    return jsonify(utils.obj2dict(block))
import sys
sys.path.append("../")

import utils
import logger


class A(object):
    log = logger.logger

    def __init__(self):
        self.a = 1
        self.b = 2

    def update(self, c):
        self.c = c
        print(A.log)

    def __dicta__(self):
        return {"a": self.a, "c": self.c}


x = A()
x.update(3)
print(x)
print(x.__dicta__())
print(utils.obj2dict(x))
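Every snippet here leans on utils.obj2dict, whose implementation is not shown. A minimal sketch of the behavior the call sites imply (attribute dump with exclude/extra support, json-style kwargs like sort_keys and indent, and acceptance of classes as well as instances); this is an assumption, not the project's actual code.

    import json

    def obj2dict_sketch(obj, exclude=(), extra=None, **json_kwargs):
        # hypothetical stand-in for utils.obj2dict, for illustration only
        d = {k: v for k, v in vars(obj).items()
             if not k.startswith('_') and not callable(v) and k not in exclude}
        if extra:
            d.update(extra)
        # round-trip through json so kwargs seen at the call sites
        # (sort_keys=True, indent=2) are meaningful
        return json.loads(json.dumps(d, default=str, **json_kwargs))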
def getIsolatePool():
    return jsonify(utils.obj2dict(node.isolatePool, sort_keys=False))
def utxoGet():
    utxoSet = node.blockchain.utxo.utxoSet
    utxoSummary = node.blockchain.utxo.getSummary()
    return jsonify({"summary": utxoSummary,
                    "utxoSet": utils.obj2dict(utxoSet, sort_keys=False)})
def getBlockByHash(blockHash):
    block = node.blockchain.findBlockByHash(blockHash)
    return jsonify(utils.obj2dict(block))
def save(self):
    Block.database["blockchain"].update(
        {"index": self.index},
        {"$set": utils.obj2dict(self, sort_keys=True)},
        upsert=True)
def findUTXO(address):
    utxo = node.blockchain.utxo.findUTXO(address)
    return jsonify(utils.obj2dict(utxo))
def save(self):
    try:
        # replace the whole stored UTXO set in one shot
        UTXO.database["utxo"].remove({})
        UTXO.database["utxo"].insert(utils.obj2dict(self.utxoSet))
    except Exception as e:
        UTXO.logger.error("error writing utxo file. {}".format(e))
def findTradeUTXO(address):
    utxo = node.tradeUTXO.findUTXO(address)
    return jsonify(utils.obj2dict(utxo))