def handleClient(self, conn, addr):
    """Serve one chat client: register its username, then relay messages until drop/disconnect."""
    trans = Transfer(conn)
    username = trans.recvData().decode()
    # Reject duplicate usernames; the client is notified and the handler exits.
    if username in self.clients:
        trans.send(b"in-use")
        return
    self.clients[username] = trans
    print(f"{username} has connected!")
    trans.send(b"success")
    while True:
        raw = trans.recvData()
        if not raw:  # empty payload: connection closed by peer
            break
        if raw == b"drop":  # explicit client-side disconnect request
            break
        # SECURITY NOTE(review): pickle.loads on data received from the network
        # allows arbitrary code execution by a malicious client — consider JSON.
        data = pickle.loads(raw)
        if data["type"] == "msg":
            msg = data["content"]
            date = datetime.datetime.now().strftime("%d-%m-%Y %H:%M")
            # Rebuild the payload server-side with sender and timestamp attached.
            data = {"type": "msg", "sender": username, "date": date, "content": msg}
            self.sendAll(pickle.dumps(data))
    print(f"{username} has disconnected.")
    del self.clients[username]
def mine_block(self):
    """Create a new block containing all open transfers plus a mining reward.

    Returns:
        The newly appended Block, or None when no hosting node is configured
        or any open transfer fails signature verification.
    """
    # PEP 8: compare to None with 'is', not '=='.
    if self.hosting_node is None:
        return None
    # Fetch the currently last block of the blockchain.
    last_block = self.__chain[-1]
    print(last_block)
    # Hash the chain tip so the new block links back to it.
    hashed_block = hash_block(last_block)
    proof = self.proof_of_work()
    # Miners should be rewarded; sender is the special "MINING" marker.
    reward_transaction = Transfer(self.hosting_node, "MINING", MINING_REWARD)
    # Work on a copy so a failed verification leaves the open transfers
    # untouched and never stores the reward transaction in them.
    copied_transactions = self.__open_transfers[:]
    for tx in copied_transactions:
        if not Wallet.verify_transfer(tx):
            return None
    copied_transactions.append(reward_transaction)
    block = Block(len(self.__chain), hashed_block, copied_transactions, proof)
    self.__chain.append(block)
    self.__open_transfers = []
    self.save_data()
    return block
def test_evaluate_entire_cnn_model(self):
    """Smoke-test whole-model evaluation using a pre-trained DenseNet checkpoint."""
    cfg = Params()
    cfg.load_config_file(JSON_PATH)
    net = Transfer(cfg, MODEL_NAME, PATCH_TYPE)
    weights_name = "densenet121_500_128_0045-0.1972-0.9267.h5"
    model_file = "{}/models/trained/{}".format(cfg.PROJECT_ROOT, weights_name)
    net.evaluate_entire_cnn_model(SAMPLE_FIlENAME, 100, model_file)
def run(self):
    """Start the transmitter for this radio, then idle until shutdown is signaled."""
    transmitter = Transfer(self.radioNum)
    transmitter.run()
    # Poll the shutdown event twice per second instead of blocking forever.
    while not self.shutdown_flag.is_set():
        time.sleep(0.5)
def setUp(self):
    """Build a Transfer around a mocked Spotify client with a stubbed iTunes lookup."""
    mock_spotify = MagicMock(spotipy.Spotify())
    self.transfer = Transfer(mock_spotify, True)
    self.abbey_road = ['Abbey Road', 'The Beatles']
    # Short-circuit the real iTunes lookup with a fixed album/artist pair.
    self.transfer.get_itunes_album = MagicMock(return_value=self.abbey_road)
    self.transfer.curr_album_artist = None
def test_fine_tuning_inception_v3_249(self):
    """Fine-tune with the first 311 layers frozen.

    NOTE(review): the method name says 249 but freezed_num is 311 — confirm
    which layer count is intended.
    """
    cfg = Params()
    cfg.load_config_file(JSON_PATH)
    net = Transfer(cfg, MODEL_NAME, PATCH_TYPE)
    net.fine_tuning_model_with_freezed("T_NC_500_128",
                                       batch_size=32,
                                       freezed_num=311,
                                       epochs=500,
                                       initial_epoch=0)
def audio(self):
    """Send the selected audio folder's file list to the server, or warn if none chosen."""
    print("audio")
    # Idiomatic truthiness check replaces `len(...) != 0`; covers both an
    # empty string and an empty collection identically.
    if self.audio_folder:
        files = Transfer(self.audio_folder)
        self.s.send(str.encode("!list_server_audio_files"))
        self.listen_list(files)
    else:
        message_box = Popups("Error", "Please select audio folder")
        message_box.message_box()
def get_chosen_transfer(self, startTime, flightTime):
    """Return the transfer with the specified start and flight times.

    Arguments:
        startTime (float): time in seconds since epoch of transfer start
        flightTime (float): time in seconds of transfer duration

    Returns:
        The Transfer for self.transferType; for 'optimal', the cheaper of
        the ballistic and plane-change candidates by total delta-v.

    Raises:
        Exception: if self.transferType is not a recognized type.
    """
    if self.transferType == 'ballistic':
        trs = Transfer(self.startOrbit, self.endOrbit, startTime,
                       flightTime, False, self.ignoreInsertion,
                       self.cheapStartOrb, self.cheapEndOrb)
    elif self.transferType == 'plane change':
        trs = Transfer(self.startOrbit, self.endOrbit, startTime,
                       flightTime, True, self.ignoreInsertion,
                       self.cheapStartOrb, self.cheapEndOrb)
    elif self.transferType == 'optimal':
        # Build both candidates and keep whichever needs less total delta-v.
        btr = Transfer(self.startOrbit, self.endOrbit,
                       startTime, flightTime,
                       False, self.ignoreInsertion,
                       self.cheapStartOrb, self.cheapEndOrb)
        ptr = Transfer(self.startOrbit, self.endOrbit,
                       startTime, flightTime,
                       True, self.ignoreInsertion,
                       self.cheapStartOrb, self.cheapEndOrb)
        bdv = btr.get_total_delta_v()
        pdv = ptr.get_total_delta_v()
        trs = btr if bdv <= pdv else ptr
    else:
        # Typo fixed: message previously read 'uncrecognized'.
        raise Exception('unrecognized transfer type')
    # gen = trs.genetic_refine()
    return trs  # , gen
def test_train_top_rf(self):
    """Train the top-level random forest from saved train/test feature archives."""
    cfg = Params()
    cfg.load_config_file(JSON_PATH)
    net = Transfer(cfg, MODEL_NAME, PATCH_TYPE)
    train_file = "{}_{}_train_features.npz".format(MODEL_NAME, SAMPLE_FIlENAME)
    test_file = "{}_{}_test_features.npz".format(MODEL_NAME, SAMPLE_FIlENAME)
    net.train_top_rf(train_file, test_file)
def debit_points(self, user, signature, amount=0.0):
    """Debit points from a user after verifying a sufficient balance.

    Returns:
        True when the transfer was queued and saved, False otherwise.
    """
    # PEP 8: compare to None with 'is', not '=='.
    if self.hosting_node is None:
        return False
    transfer = Transfer(user, signature, amount)
    # Balance-aware verification: debits must be covered by the user's funds.
    if Verification.verify_single_transfer(transfer, self.get_balance):
        self.__open_transfers.append(transfer)
        self.save_data()
        return True
    return False
def credit_points(self, user, signature, amount=0.0):
    """Credit points to a user; only the signature is verified (no balance check).

    Returns:
        True when the transfer was queued and saved, False otherwise.
    """
    # PEP 8: compare to None with 'is', not '=='.
    if self.hosting_node is None:
        return False
    transfer = Transfer(user, signature, amount)
    if not Wallet.verify_transfer(transfer):
        return False
    self.__open_transfers.append(transfer)
    self.save_data()
    return True
def load_data(self):
    """Initialize blockchain + open transfers data from a file.

    Reads three JSON-encoded lines from blockchain.txt: the chain, the open
    transfers, and the peer-node list. A missing or truncated file is
    silently ignored, keeping the current in-memory state.
    """
    try:
        with open("blockchain.txt", mode="r") as f:
            file_content = f.readlines()
            # [:-1] strips the trailing newline before JSON parsing.
            blockchain = json.loads(file_content[0][:-1])  # OrderedDict
            updated_blockchain = []
            for block in blockchain:
                # Rehydrate plain dicts back into Transfer objects.
                converted_transfers = [
                    Transfer(tx["user"], tx["signature"], tx["amount"])
                    for tx in block["transfers"]
                ]
                # converted_transfers = [OrderedDict(
                #     [('user', tx['user']), ('amount', tx['amount'])]) for tx in block['transfers']]
                updated_block = Block(
                    block["index"],
                    block["previous_hash"],
                    converted_transfers,
                    block["proof"],
                    block["timestamp"],
                )
                updated_blockchain.append(updated_block)
            self.__chain = updated_blockchain
            open_transfers = json.loads(file_content[1][:-1])  # OrderedDict
            updated_transfers = []
            for tx in open_transfers:
                updated_transfer = Transfer(tx["user"], tx["signature"], tx["amount"])
                # updated_transfer = OrderedDict(
                #     [('user', tx['user']), ('amount', tx['amount'])])
                updated_transfers.append(updated_transfer)
            self.__open_transfers = updated_transfers
            # Third line has no trailing-newline strip; json tolerates it.
            peer_nodes = json.loads(file_content[2])
            self.__peer_nodes = set(peer_nodes)
    except (IOError, IndexError):
        # Deliberate best-effort load: absent/short file means fresh state.
        pass
def test_extract_features(self):
    """Extract per-seed CNN features from a tumor slide and print their spread."""
    cfg = Params()
    cfg.load_config_file(JSON_PATH)
    net = Transfer(cfg, MODEL_NAME, PATCH_TYPE)
    slide = ImageCone(cfg, Open_Slide())
    # Open the whole-slide digital scan image.
    tag = slide.open_slide("Tumor/Tumor_004.tif", None, "Tumor_004")
    seeds = [(8800, 12256)] * 10  # C, C, S
    features = net.extract_features(None, slide, 5, 128, seeds)
    print(np.std(features, axis=1))
    print(np.std(features, axis=0))
def getEntity(self, id):
    """get an entity from db by id

    Arguments:
        id {[string]} -- [entity id]

    Returns:
        [Transfer] -- [Transfer with matching id, or None when no entity exists]
    """
    key = self._client.key(self._kind, id)
    transferData = self._client.get(key)
    # No walrus here: fetch, then guard explicitly.
    if transferData is None:
        return None
    transfer = Transfer(**transferData)
    transfer.transferId = id
    return transfer
def test_fine_tuning_data_file(self):
    """Fine-tune the top CNN from saved train/test feature archives."""
    cfg = Params()
    cfg.load_config_file(JSON_PATH)
    net = Transfer(cfg, MODEL_NAME, PATCH_TYPE)
    train_file = "{}_{}_train_features.npz".format(MODEL_NAME, SAMPLE_FIlENAME)
    test_file = "{}_{}_test_features.npz".format(MODEL_NAME, SAMPLE_FIlENAME)
    net.fine_tuning_top_cnn_model_saved_file(train_file, test_file,
                                             batch_size=None,
                                             epochs=500,
                                             initial_epoch=0)
def __init__(self):
    """Build the main budget window with one notebook tab per feature area."""
    wx.Frame.__init__(self, None, -1, "Budget ver. 1.0", size=(1100, 650))
    self.Centre(wx.BOTH)
    panel = wx.Panel(self, -1)
    notebook = wx.Notebook(panel)
    # Tab pages, in display order.
    pages = [
        (Expenses, "Expenses"),
        (Income, "Income"),
        (Transfer, "Transfer"),
        (Accounts, "Accounts"),
        (Analysis, "Analysis"),
        (Manage, "Manage"),
    ]
    for page_cls, title in pages:
        notebook.AddPage(page_cls(notebook), title)
    sizer = wx.BoxSizer()
    sizer.Add(notebook, 1, wx.EXPAND)
    panel.SetSizer(sizer)
def updateEntity(self, id, updated_entity):
    """update existing entity by id

    Arguments:
        id {[string]} -- [entity id to update]
        updated_entity {[type]} -- [new transfer data]

    Returns:
        [Transfer] -- [updated Transfer]
    """
    # Fetch, merge the new field values, and persist in place.
    entity = self._client.get(self._client.key(self._kind, id))
    entity.update(updated_entity.get_dict())
    self._client.put(entity)
    result = Transfer(**entity)
    result.transferId = id
    return result
def create_transfer(self):
    """Create a bare transfer that items can be added to and uploaded later.

    Returns:
        The Transfer object when creation succeeds, otherwise None.
    """
    transfer = Transfer(
        key=self.key,
        name=self.name,
        token=self.token,
        server=self.server,
    )
    if not transfer.create():
        return None
    return transfer
def add_transfer(self, outgoing_id, receiving_id, transfer_amount):
    """Record a points transfer between programs and update both balances."""
    new_transfer = Transfer(user_id=self.user_id,
                            outgoing_program=outgoing_id,
                            receiving_program=receiving_id,
                            outgoing_amount=transfer_amount)
    db.session.add(new_transfer)
    # Balance rows for both sides of the transfer.
    source_balance = self.get_balance(outgoing_id)
    target_balance = self.get_balance(receiving_id)
    conversion = ratio_instance(outgoing_id, receiving_id).ratio_to()
    # Debit the source program; credit the target at the conversion ratio.
    source_balance.transferred_from(transfer_amount)
    target_balance.transferred_to(transfer_amount, conversion)
    return new_transfer
def updateTransfer(userId, transferId):
    """update transfer in db with matching Entity id

    Returns:
        [Response] -- [updated transfer, response code]
    """
    try:
        # Validate once and reuse the result (was computed twice before).
        validation = validateTransferBody(connexion.request.json)
        if validation is True:
            transfer = dsHelper.updateEntity(
                transferId, Transfer(userId=userId, **connexion.request.json))
            # NOTE(review): this pops from the dict returned by get_dict();
            # if get_dict() builds a fresh dict each call, the pop has no
            # effect on the returned value — confirm against Transfer.
            transfer.get_dict().pop("deleted", None)
            return transfer.get_dict(), 200
        return validation, 400
    except Exception as e:
        # Boundary handler: log the failure and answer with a 400.
        print(e)
        traceback.print_tb(e.__traceback__)
        return False, 400
    # Unreachable `return None` after try/except removed.
def test_predict(self):
    """Run single and batched prediction on three seed patches and print results."""
    cfg = Params()
    cfg.load_config_file(JSON_PATH)
    net = Transfer(cfg, MODEL_NAME, PATCH_TYPE)
    model = net.load_model(mode=0)
    model.compile(optimizer="RMSprop",
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    slide = ImageCone(cfg, Open_Slide())
    # Open the whole-slide digital scan image.
    tag = slide.open_slide("Tumor/Tumor_004.tif", None, "Tumor_004")
    seeds = [(34880, 48960), (36224, 49920), (13312, 57856)]  # C, C, S
    single_result = net.predict(model, slide, 20, 256, seeds)
    print(single_result)
    batch_result = net.predict_on_batch(model, slide, 20, 256, seeds, 2)
    print(batch_result)
def getEntityByFilter(self, filters):
    """get all entities matching a filter

    Arguments:
        filters {[array of filters]} -- [filters for datastore of the form
            ["key", "operator", "val"]]

    Returns:
        [list] -- [list of matching entity dicts]
    """
    query = self._client.query(kind=self._kind)
    # Renamed loop variable: 'filter' shadowed the builtin filter().
    for filter_spec in filters:
        query.add_filter(*filter_spec)
    transfers_list = []
    # Iterate the fetch result directly — no need to materialize a list first.
    for entity in query.fetch():
        transfer = Transfer(**entity)
        transfer.transferId = entity.key.id
        transfers_list.append(transfer.get_dict())
    return transfers_list
def test_extract_features_for_train_batch(self):
    """Batch feature extraction across patch types and backbone networks."""
    cfg = Params()
    cfg.load_config_file(JSON_PATH)
    # Candidate backbones: "inception_v3", "densenet121", "densenet169",
    # "densenet201", "resnet50", "inception_resnet_v2", "vgg16", "mobilenet_v2"
    model_names = ["densenet169"]
    # Other sample sets: "T_NC_500_128"; patch types: "500_128".
    sample_files = ["T_NC_2000_256", "T_NC_4000_256"]
    patch_types = ["2000_256", "4000_256"]
    for patch_type, sample_file in zip(patch_types, sample_files):
        for model_name in model_names:
            net = Transfer(cfg, model_name, patch_type)
            net.extract_features_for_train(sample_file, 100)
def connect(self):
    """Open a TCP connection to the chat server and register this username.

    Returns True on success, or a human-readable error string on failure.
    Implicitly returns None for an unexpected server response.
    """
    self.s = socket.socket()
    # Short timeout so an unreachable server fails fast during connect.
    self.s.settimeout(5)
    try:
        self.s.connect(self.addr)
    except socket.error:
        return "Server not found. Try again."
    self.trans = Transfer(self.s)
    self.trans.send(self.username.encode())
    response = self.trans.recvData()
    if not response:
        return "Server not responding."
    if response == b"in-use":
        return "Username already in use."
    if response == b"success":
        # Back to blocking mode for the long-lived session.
        self.s.settimeout(None)
        self.connected = True
        # Windows-only keepalive tuning: (enable, idle ms, interval ms).
        self.s.ioctl(socket.SIO_KEEPALIVE_VALS, (1, 5000, 3000))
        threading.Thread(target=self.mainThread, daemon=True).start()
        return True
def show_menu(self):
    """Display the main menu and dispatch on the user's numeric choice."""
    print()
    print(' MAIN MENU')
    print(' ---------')
    print(' 1: View Pokemon List')
    print(' 2: Transfer Menu')
    print(' 3: Evolve Menu')
    print(' 4: Rename Menu')
    print(' 0: Exit')
    # Robustness fix: non-numeric input used to crash with ValueError.
    # Treat it like any unrecognized choice and exit (same as the else arm).
    try:
        choice = int(input("\nEnter choice: "))
    except ValueError:
        quit()
    if choice == 1:
        self.print_pokemons(self.pokemons)
    elif choice == 2:
        Transfer(self).run()
    elif choice == 3:
        Evolve(self).run()
    elif choice == 4:
        Renamer(self).run()
    else:
        # Covers 0 and every unrecognized number (both quit() in the original).
        quit()
def rekordbox_sync(self, drive):
    """Send the rekordbox database, artwork and waveform files to the server.

    Args:
        drive: drive string whose first three chars form the root, e.g. "C:\\".
    """
    username = os.getlogin()
    # Roaming rekordbox data for the current Windows user on the given drive.
    files = Transfer(
        f"{drive[:3]}Users\\{username}\\AppData\\Roaming\\Pioneer\\rekordbox\\"
    )
    # rb_files = ["automixPlaylist6.xml"]
    # Core database and playlist files to sync first.
    rb_files = [
        "master.db", "master.backup.db", "networkAnalyze6.db",
        "masterPlaylists6.xml", "automixPlaylist6.xml"
    ]
    for rb_file in rb_files:
        files.prepare_to_send_file(self.s, files.path, rb_file)
    # Walk the PIONEER share (artwork/waveforms) and send every file in it.
    for root, d, file in os.walk(f"{files.path}share\\PIONEER"):
        if files.send_root(self.s, root):
            for f in file:
                files.prepare_to_send_file(self.s, root, f)
    message_box = Popups(
        "Finished",
        "Finished sending database, artwork and waveform files")
    message_box.message_box()
def postTransfer(userId):
    """add a new transfer to db

    Returns:
        [Response] -- [id of the newly inserted datastore entity, status code]
    """
    # NOTE(review): the except clause for this try lies outside the visible
    # chunk — the function continues past the last line shown here.
    try:
        if validateTransferBody(connexion.request.json) is True:
            # Walrus: run the transaction and keep its result for error reporting.
            if (transactionCheck := makeTransaction(  # noqa: E231,E203,E999,E251,E261
                    userId, connexion.request.json["amount"])) is not True:
                print(transactionCheck)
                return (
                    {
                        "error": "error creating transaction: {}".format(
                            transactionCheck["error"])
                    },
                    400,
                )
            transferId = dsHelper.putEntity(
                Transfer(userId=userId, **connexion.request.json))
            return {"transferId": transferId}, 201
        return validateTransferBody(connexion.request.json), 400
async def upload_file(
    *,
    path,
    force: bool = Query(
        False,
        title='Force create even existed, old file will be deleted',
    ),
    transfer_id: str = Query(
        None,
        alias='transfer',
        title='Transfer UUIDv4',
    ),
    request: Request,
):
    """Create (or force-overwrite) the file at *path* from the request body stream.

    Raises:
        HTTPException: 409 when the file already exists and force is not set.
    """
    ensure_me(request)
    f = File(path)
    # Uploading over an existing directory is never allowed.
    ensure_not_type(f, NodeType.Dir)
    if f and not force:
        raise HTTPException(409, 'Existed')
    if transfer_id:
        # Track upload progress against the client-declared content length.
        transfer = Transfer(transfer_id, int(request.headers['content-length']))
    else:
        transfer = None
    await f.create(request.stream(), transfer=transfer)
# NOTE(review): this chunk begins mid-method — the def enclosing this return
# starts outside the visible source; indentation reconstructed.
        return self.DAILY_REPORT

    def daily_activity_report_output(self):
        """Write the daily activity report to a dated text file."""
        self.file_name = 'SuburbanDigitalAdReport_' + self.date_strftime(
        ) + '.txt'
        output = open(self.file_name, 'w')
        output.write('Date: ' + str(self.todays_date()) + '\n' +
                     str(self.daily_activity_report()))
        output.close()

    def monthly_activity_report(self):
        """Fetch the monthly report from the analytics backend and cache it."""
        self.MONTHLY_REPORT = str(self.analytics.getMonthlyReport())
        return self.MONTHLY_REPORT

    def monthly_activity_report_output(self):
        """Write the monthly activity report to a dated text file."""
        self.file_name = 'MonthlyDealerBudgetTemplate_' + self.date_strftime(
        ) + '.txt'
        output = open(self.file_name, 'w')
        output.write('Date: ' + str(self.todays_date()) + '\n' +
                     str(self.monthly_activity_report()))
        output.close()


if __name__ == "__main__":
    # Generate both report files, then upload the daily one via FTP.
    suburban = SuburbanCollection()
    print(suburban.todays_date())
    suburban.daily_activity_report_output()
    suburban.monthly_activity_report_output()
    send = Transfer()
    send.ftp_daily_report()
# NOTE(review): Python 2 script fragment (uses raw_input). The menu header
# and the branches for choices "6"/"7" lie outside the visible source.
print("Precione 6 para realizar um estorno com split")
print("Precione 7 para verificar o status de um postback")
esco = raw_input("")
if (esco == "1"):
    # Split transaction.
    print("A transação numero " + str(Split()) + " foi criada!")
elif (esco == "2"):
    # Recurring subscription.
    print("A assinatura numero " + str(Recorrencia()) + " foi criada!")
elif (esco == "3"):
    # Bank transfer: returns (id, amount in cents, recipient).
    retorno = Transfer()
    print("A transferencia: " + str(retorno[0]) + ", de " + str(retorno[1]) +
          " centavos foi realizada para a conta bancária do recebedor " +
          str(retorno[2]) + "!")
elif (esco == "4"):
    # Anticipation: returns (id, amount in cents, recipient).
    retorno = Antecipacao()
    print("A antecipação: " + str(retorno[0]) + ", de " + str(retorno[1]) +
          " centavos foi criada para o recebedor " + str(retorno[2]) + "!")
elif (esco == "5"):
    # Recurring subscription with split.
    print("A assinatura com split numero " + str(RecorrenciaSplit()) +
          " foi criada!")