def begin_invoke(namespase):
    """Run the selected image-processing operation over every file in the input folder.

    namespase: parsed CLI arguments; reads .infld (input folder), .exec
    (operation name) and .outfld (optional output folder).
    Returns True when processing was dispatched; None when either required
    argument is missing.
    """
    # Guard clause instead of wrapping the whole body in one big `if`.
    if not (namespase.infld and namespase.exec):
        return None
    # Collect the full path of every file under the input folder.
    tree = []
    files = None  # NOTE(review): only the LAST directory's file list survives the walk,
                  # exactly as in the original call below — confirm this is intended.
    for d, dirs, files in os.walk(namespase.infld):
        for f in files:
            tree.append(os.path.join(d, f))
    # Dispatch table replaces the original if/elif chain; unknown names
    # yield None, matching the original behavior.
    operations = {
        'boundaries': BoundariesOperation.execute,
        'semantic_segmentation': SemanticSegmentation.execute,
        'saliency': Saliency.execute,
        'face_recognition': FaceRecognition.execute,
    }
    func = operations.get(namespase.exec)
    settings = serialization.deserialize()
    output_folder = namespase.outfld if namespase.outfld else ''
    multithreading.execute_processing(tree, files, func, settings, output_folder)
    return True
def do_POST(self):
    """Handle an HTTP POST: decode the body, run the callback, write the reply."""
    body_size = int(self.headers['Content-Length'])
    raw_body = self.rfile.read(body_size)
    result = callback(serialization.deserialize(raw_body))
    self._set_response()
    # Only write a response body when the callback produced something.
    if result is not None:
        self.wfile.write(serialization.serialize(result))
def _handle_pipe(self, pipe):
    """Service one message from the control pipe.

    Returns True when the 'STOP' sentinel was received, False otherwise.
    """
    message = pipe.recv()
    if message == 'STOP':
        return True
    # Messages are (method, params); dispatch to the matching on_<method> handler.
    method, params = serialization.deserialize(message)
    handler = getattr(self, 'on_%s' % method)
    pipe.send(serialization.s_res(handler(params)))
    return False
def load_course(infile):
    """Load a serialized course from *infile* and refresh the category list.

    On a failed load the error is reported and the UI is left untouched.
    """
    try:
        course = serialization.deserialize(infile)
    except Exception:
        # Original used a bare `except:` and then fell through to use the
        # undefined `course`, raising NameError. Report and bail out instead.
        print("error")
        return
    category_list.delete(0, tk.END)
    for category in course.categories:
        category_list.insert(tk.END, category.name)
    assignment_list.delete(0, tk.END)
def join_room(player, room, sock):
    """Ask the server to add *player* to *room* and return the initial state.

    Returns (board, game_status, opponent_id, symbol) taken from the
    server's response.
    """
    payload = {
        "command": Command.JOIN_ROOM.value,
        "player": player,
        "room": room,
    }
    sock.send(serialize(payload))
    reply = deserialize(sock.recv(4096))
    return (reply["board"], reply["game_status"],
            reply["opponent_id"], reply["symbol"])
def test_serialize_and_deserialize_other_data():
    """Round-trip a list of Bite records through the pickle helpers."""
    expected = [
        Bite('Sum of Numbers', 1, 'Beginner'),
        Bite('Regex Fun', 2, 'Advanced'),
    ]
    # Unique-ish file name under the temp dir, keyed on the current time.
    pkl_file = TMP / str(int(time.time()))
    serialize(pkl_file, data=expected)
    assert deserialize(pkl_file) == expected
def test_deserialize_movie_rented_data():
    """Verify deserialize() reproduces the known MovieRented records."""
    download_pickle_file()
    expected = [
        MovieRented('Mad Max Fury Road', 4, date(2020, 12, 1)),
        MovieRented('Mad Max Fury Road', 4, date(2020, 12, 17)),
        MovieRented('Die Hard', 4, date(2020, 12, 3)),
        MovieRented('Tenet', 20, date(2020, 12, 1)),
        MovieRented('Breach', 7, date(2020, 11, 17)),
        MovieRented('Spider-Man', 12, date(2020, 12, 28)),
        MovieRented('Sonic', 10, date(2020, 11, 4)),
    ]
    assert deserialize() == expected
def add_file(self, filename):
    """Share the file at the full path *filename*.

    Basenames must be unique among shared files, because the frontend
    identifies served files by basename.

    Raises IndexError when a file with the same basename is already shared.
    """
    self.pipe.send(serialization.s_req('add_file', filename))
    reply = self.pipe.recv()
    # A non-empty reply whose .result is False signals a basename collision.
    if reply and serialization.deserialize(reply).result is False:
        raise IndexError("File already present")
def parse_request(data):
    """Decode raw request bytes and return (command, full_request_dict)."""
    parsed = deserialize(data)
    return parsed["command"], parsed
def send(self, data):
    """POST the serialized *data* to the configured URL.

    Returns (ok, payload): (False, None) when the connection fails,
    otherwise (res.ok, deserialized response body).
    """
    try:
        response = requests.post(self._url, serialization.serialize(data))
    except requests.exceptions.ConnectionError:
        return False, None
    return response.ok, serialization.deserialize(response.content)
def get_bound_port(self):
    """Ask the worker over the pipe for the port it bound and return it."""
    request = serialization.s_req('get_bound_port', None)
    self.pipe.send(request)
    return serialization.deserialize(self.pipe.recv()).result
def get_files(self):
    """Ask the worker over the pipe for its list of shared files."""
    request = serialization.s_req('get_files', None)
    self.pipe.send(request)
    return serialization.deserialize(self.pipe.recv()).result
def refresh_game_status(sock):
    """Poll the server for the current game state.

    Returns (board, game_status, opponent_id) from the server response.
    """
    sock.send(serialize({"command": Command.GET_GAME_STATUS.value}))
    reply = deserialize(sock.recv(4096))
    return reply["board"], reply["game_status"], reply["opponent_id"]
# Training hyper-parameters and state.
learning_rate = 1e-5
device = 'cuda'  # assumes a CUDA-capable GPU is available — TODO confirm
maxPatience = 30  # early-stopping patience, in epochs presumably — verify against EarlyStopper
train_loss = []
test_loss = []
# Instatiation
model = AddNet()
model.to(device)
cost_func = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
earlyStopper = EarlyStopper(maxPatience=maxPatience)
tensorboard = SummaryWriter()
# Load Dataset — deserialize() returns the (train, val, test) splits from the pickle.
inputs_train, inputs_val, inputs_test = deserialize(
    'C:\\Users\\kaspe\\Code Projects\\Add_DNN\\inputs.pickle')
# Visualize
print(inputs_train.shape, inputs_val.shape, inputs_test.shape)
# Create tensor dataset
dataset_train = TensorDataset(inputs_train)
dataset_val = TensorDataset(inputs_val)
dataset_test = TensorDataset(inputs_test)
# Create dataloaders
# NOTE(review): `batch_size` is defined outside this chunk — confirm it exists upstream.
dataloader_train = DataLoader(dataset_train, batch_size=batch_size,
                              shuffle=False)
dataloader_val = DataLoader(dataset_val, batch_size=batch_size,
                            shuffle=False)
# NOTE(review): this statement is truncated in the visible chunk and continues past it.
dataloader_test = DataLoader(dataset_test, batch_size=batch_size,
# Build or load the random-addition dataset used by the Add_DNN training script.
import torch
import pickle
import numpy as np
from torch.utils.data import TensorDataset, DataLoader
from sklearn.model_selection import train_test_split
from os import path
from serialization import deserialize, serialize

# Check if dataset already exists?
if path.isfile('C:\\Users\\kaspe\\Code Projects\\Add_DNN\\inputs.pickle'):
    print('Data does already exist..')
    # Load Data
    inputs = deserialize(
        'C:\\Users\\kaspe\\Code Projects\\Add_DNN\\inputs.pickle')
    # Visualize — presumably the (train, val, test) splits; verify against the pickle's writer.
    print(inputs[0].shape, inputs[1].shape, inputs[2].shape)
else:
    print('Data does not already exist..')
    # Create Data: 10000 random integer pairs, each component in [0, 10).
    inputs = np.random.randint(low=0, high=10, size=[10000, 2])
    # Convert to torch tensors
    inputs = torch.from_numpy(inputs)
    # Convert to float
    inputs = inputs.float()
# -*- coding: utf-8 -*- import sys, os, os.path import serialization import report if __name__ == "__main__": if len(sys.argv) > 1: for reportForm in sys.argv[1:]: output = "./%s.pdf" % os.path.splitext(reportForm)[0] print "Creating %s from report form %s..." % (output, reportForm) reportObj = serialization.deserialize(reportForm, report.Report) reportObj.evaluate({}) reportObj.write(output, reportObj.getTestCursor()) else: print "Usage: reportWriter <specFile> [<specFile>...]"
def fill_position(position, sock):
    """Tell the server to mark *position* for the current player.

    The server's acknowledgement is read and discarded.
    """
    payload = {"command": Command.FILL_POSITION.value, "position": position}
    sock.send(serialize(payload))
    deserialize(sock.recv(4096))