def process_new_transaction(request_data: bytes) -> str:
    """Validate a transaction pushed by a peer and add it to the mempool.

    Decompresses *request_data*, parses it into a ``Transaction``, validates
    it against the active chain and, on success, stores it in the mempool and
    re-broadcasts the raw compressed payload to all known peers.

    Returns a short human-readable status string; any parse/validation
    exception is logged and re-raised to the caller.
    """
    global BLOCKCHAIN
    transaction_json = decompress(request_data)
    if transaction_json:
        try:
            tx = Transaction.from_json(transaction_json).object()
            # Add transaction to Mempool (skip duplicates we already hold).
            if tx not in BLOCKCHAIN.mempool:
                if BLOCKCHAIN.active_chain.is_transaction_valid(tx):
                    logger.debug(
                        "Valid Transaction received, Adding to Mempool")
                    BLOCKCHAIN.mempool.add(tx)
                    # Broadcast the transaction to other peers.
                    send_to_all_peers("/newtransaction", request_data)
                else:
                    # BUGFIX: this failed-validation branch previously
                    # returned "Transaction Already received" (the two else
                    # bodies were swapped; compare the Tuple-returning
                    # version of this function).
                    logger.debug(
                        "The transation is not valid, not added to Mempool")
                    return "Not Valid Transaction"
            else:
                # Duplicate: the mempool already contains this transaction.
                return "Transaction Already received"
        except Exception as e:
            logger.error("Server: New Transaction: Invalid tx received: " +
                         str(e))
            # Bare raise preserves the original traceback; the unreachable
            # `return` that followed the raise has been removed.
            raise
    return "Done"
def process_new_block(request_data: bytes) -> str:
    """Handle a block pushed to us by a peer.

    Inflates and parses the payload, ignores blocks already stored in the
    database, appends a valid new block to the chain, forwards the raw
    payload to every known peer, and finally schedules the local miner to
    stop because the chain tip has moved.
    """
    global BLOCKCHAIN
    block_json = decompress(request_data)
    if not block_json:
        logger.error("Server: Invalid Block Received")
        return "Invalid Block"
    try:
        block = Block.from_json(block_json).object()
        # A block whose header hash is already persisted needs no work.
        if get_block_from_db(dhash(block.header)):
            logger.info("Server: Received block exists, doing nothing")
            return "Block already Received Before"
        if not BLOCKCHAIN.add_block(block):
            return "Block Received, but was not added, due to some error"
        logger.info("Server: Received a New Valid Block, Adding to Chain")
        logger.debug("Server: Sending new block to peers")
        # Forward the original compressed payload to the other peers.
        send_to_all_peers("/newblock", request_data)
    except Exception as e:
        logger.error("Server: New Block: invalid block received " + str(e))
        return "Invalid Block Received"
    # Kill Miner shortly after the new block has been accepted.
    Timer(1, miner.stop_mining).start()
    return "Block Received"
def get_tx():
    """Return the contract code, output and participants stored at a
    contract address, serialized as a JSON object with keys cc/co/cp."""
    log_ip(request, inspect.stack()[0][3])
    # The request body carries a compressed contract address.
    raw_body = request.body.read()
    address = decompress(raw_body)
    code, output, participants = get_cc_co_cp_by_contract_address(address)
    return json.dumps({'cc': code, 'co': output, 'cp': participants})
def sync(max_peer):
    """Fetch every block hash above our current chain height from
    *max_peer* and append the corresponding blocks one by one."""
    fork_height = BLOCKCHAIN.active_chain.length
    url = get_peer_url(max_peer) + "/getblockhashes"
    response = requests.post(url, data={"myheight": fork_height})
    hashes = json.loads(decompress(response.text.encode()))
    for block_hash in hashes:
        candidate = receive_block_from_peer(max_peer, block_hash)
        if not BLOCKCHAIN.add_block(candidate):
            # Abort on the first rejected block — the rest would not link.
            logger.error("Sync: Block received is invalid, Cannot Sync")
            break
    return
def sync(max_peer):
    """Sync our chain with *max_peer*: find the fork point, request all
    block hashes above it, then fetch and append each block in order."""
    fork_height = find_fork_height(max_peer)
    endpoint = get_peer_url(max_peer) + "/getblockhashes"
    reply = requests.post(endpoint, data={"myheight": fork_height})
    hash_list = json.loads(decompress(reply.text.encode()))
    for block_hash in hash_list:
        fetched = receive_block_from_peer(max_peer, block_hash)
        if not BLOCKCHAIN.add_block(fetched):
            # Stop syncing as soon as one received block fails validation.
            logger.error("Sync: Block received is invalid, Cannot Sync")
            break
    return
def process(self, p=None): petition = self.dequeue() if p is None else p names = self.splitPath(petition) try: list = decompress(names[0], petition) except OSError as osE: print "error with a file" os.chdir(self.path) shutil.rmtree("../out/" + names[0]) self.process(petition) listOut = [] os.mkdir(list['dir'] + "/result/") try: for l in list['listDir']: if names[0].split("_")[1] == "wiki": params = self.wiki.search(l, names[0], list['dir'] + "/result/") if params and len(params) > 0: for d in params: listOut.append(d) compress_file = names[0] + "_out.tar.gz" compress(listOut, list['dir'] + "/result/", list['dir'] + "/../" + compress_file) elif names[0].split("_")[1] == "youtube": self.youtube.search(l, names[0], list['dir'] + "/result/", names[0]) except OSError as osE: print osE print "error with a file" os.chdir(self.path) shutil.rmtree(list['dir']) except Exception as eEx: print eEx os.chdir(self.path) shutil.rmtree(list['dir']) time.sleep(15) self.process(petition) except e: print e print "cannot get resources, check internet connection!" os.chdir(self.path) shutil.rmtree(list['dir']) time.sleep(15) self.process(petition) print "remove" print petition print os.getcwd() os.remove(petition) os.chdir(self.path)
def post(self, request, *args, **kwargs):
    """Receive an uploaded media file, verify its CRC and persist it.

    Raises ParseError when no file is attached. If a file with the same
    name and identical MD5 CRC already exists the upload is skipped (200);
    a name clash with a different CRC is saved under a uuid-suffixed name.
    Returns 201 on save, 500 on a write failure.
    """
    if 'file' not in request.data:
        raise ParseError("Empty content")
    # BUGFIX: the format placeholder was missing, so the file argument
    # was silently dropped from the log line.
    logger.debug("Getting message content of upload: {}".format(
        request.data["file"]))
    msg_content = get_uploaded_mediafile_content(request)
    logger.debug("Decompressing file")
    msg_content["content"] = utils.decompress(msg_content["content"])
    logger.debug("Checking mediafile crc")
    check_mediafile_crc(msg_content)
    media_dir_path = get_mediafile_dir(msg_content)
    mediafile_path = os.path.join(media_dir_path, msg_content["filename"])
    if not os.path.isdir(media_dir_path):
        logger.debug("Creating path: {}".format(media_dir_path))
        filesystem.create_media_path_if_needed(media_dir_path)
    logger.debug("Checking whether file exists")
    if os.path.isfile(mediafile_path):
        existing_file_content = filesystem.read_file_as_binary(
            mediafile_path)
        existing_file_crc = utils.md5_raw_content(existing_file_content)
        if existing_file_crc == msg_content["md5_crc"]:
            # Identical content already stored — nothing to do.
            logger.info("File w/ same CRC already exists, skipping")
            return Response(
                dict(msg="File w/ same CRC already exists, skipping"),
                status=200)
        else:
            # Same name, different content: keep both by suffixing a uuid.
            msg_content["filename"] = "{}.{}".format(
                msg_content["filename"], str(uuid4()))
            logger.warning(
                "File w/ same filename already exists, but CRC differ. Saving new file as {}"
                "".format(msg_content["filename"]))
            mediafile_path = os.path.join(media_dir_path,
                                          msg_content["filename"])
    logger.info("Saving {}".format(mediafile_path))
    try:
        filesystem.write_file_as_binary(mediafile_path,
                                        msg_content["content"])
    except Exception as e:
        logger.error("{}".format(str(e)))
        return Response(dict(msg=str(e)), status=500)
    return Response(dict(msg="created"), status=201)
def process(self): petition = self.dequeue() names = self.splitPath(petition) list = decompress(names[0], petition) listOut = [] os.mkdir(list['dir']+"/result/") for l in list['listDir']: params = self.wiki.search(l, names[0], list['dir']+"/result/") for d in params: listOut.append(d) print listOut compress_file = names[0]+"_out.tar.gz" compress(listOut,list['dir']+"/result/", list['dir']+"/../"+compress_file) os.chdir(self.path) shutil.rmtree(list['dir'])
def process_new_transaction(request_data: bytes) -> Tuple[bool, str]:
    """Validate a peer-submitted transaction and enqueue it in the mempool.

    Returns an ``(ok, message)`` pair. On success the transaction is added
    to the mempool and the raw payload is re-broadcast to all peers.
    """
    global BLOCKCHAIN
    transaction_json = decompress(request_data)
    if not transaction_json:
        return True, "Done"
    try:
        tx = Transaction.from_json(transaction_json).object()
        # Validations: funds may never originate from a contract address.
        for vin_entry in tx.vin.values():
            if is_valid_contract_address(vin_entry.pub_key):
                return False, "Cannot send funds from a contract address"
        if tx.contract_output is not None:
            return False, "Contract Output should be None"
        if tx.contract_code != "":
            # This transaction deploys a contract; its target address must
            # not already host another contract.
            deploy_address = tx.get_contract_address()
            existing_code, _, _ = get_cc_co_cp_by_contract_address(
                deploy_address)
            if existing_code != "":
                return False, "There is already some other contract at this contract address"
        # Add transaction to Mempool unless we already hold it.
        if tx in BLOCKCHAIN.mempool:
            return True, "Transaction Already received"
        valid, reason = BLOCKCHAIN.active_chain.is_transaction_valid(tx)
        if not valid:
            logger.debug(
                "The transation is not valid, not added to Mempool - "
                + reason)
            return False, "Not Valid Transaction: " + reason
        logger.debug("Valid Transaction received, Adding to Mempool")
        BLOCKCHAIN.mempool.add(tx)
        logger.debug(
            f"Mempool now contains {len(BLOCKCHAIN.mempool)} transaction(s)"
        )
        # Broadcast the raw payload to other peers.
        send_to_all_peers("/newtransaction", request_data)
        return True, "Done"
    except Exception as e:
        logger.error("Server: New Transaction: Invalid tx received: " +
                     str(e))
        return False, "Not Valid Transaction"
def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block:
    """Fetch from *peer* the single block identified by *header_hash*."""
    endpoint = get_peer_url(peer) + "/getblock"
    response = requests.post(endpoint, data={"headerhash": header_hash})
    block_json = decompress(response.text)
    return Block.from_json(block_json).object()