Example #1
    def printBlock(self, height):
        # Walk back from the chain tip until the block at `height` is found
        latest = self.db.get('latest')
        current_block = utils.deserialize(self.db.get(latest))
        while current_block.height != height:
            current_block = utils.deserialize(
                self.db.get(current_block.prev_hash))
        current_block.printBlock()
Example #2
    def printChain(self):
        # Print every block, walking back from the tip to the genesis block
        latest = self.db.get('latest')
        current_block = utils.deserialize(self.db.get(latest))
        while True:
            current_block.printBlock()
            if current_block.prev_hash is None:
                break
            current_block = utils.deserialize(
                self.db.get(current_block.prev_hash))
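A note on Examples #1 and #2: both assume a key-value store where the key 'latest' maps to the hash of the chain tip and every block is stored, serialized, under its own hash. A minimal sketch of the utils.serialize / utils.deserialize pair they rely on, assuming a pickle-based encoding (the real project may use a different codec):

import pickle

def serialize(obj):
    # Encode an arbitrary Python object as bytes for the key-value store
    return pickle.dumps(obj)

def deserialize(raw):
    # Inverse of serialize(): rebuild the original object from bytes
    return pickle.loads(raw)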
Example #3
def calculate_req_water(type, age, temperature, shower, moisture):
    min_l_reg = deserialize('min_l_reg')
    rec_l_reg = deserialize('rec_l_reg')

    X = pd.DataFrame({'TYPE': type, 'AGE': age, 'TEMPERATURE': temperature, 'SHOWER': shower, 'MOISTURE': moisture},
                     index=[0])
    X = X[['TYPE', 'AGE', 'TEMPERATURE', 'SHOWER', 'MOISTURE']]

    min_prediction = min_l_reg.predict(X)[0]
    rec_prediction = rec_l_reg.predict(X)[0]

    return [int(min_prediction), int(rec_prediction)]
Example #4
    def remote_gateway_info(self, gateway):
        '''
          Return remote gateway information for the specified gateway string id.

          @param gateway : gateway id string to search for
          @type string
          @return remote gateway information
          @rtype gateway_msgs.RemoteGateway or None
        '''
        firewall = self._redis_server.get(
            hub_api.create_rocon_gateway_key(gateway, 'firewall'))
        ip = self._redis_server.get(
            hub_api.create_rocon_gateway_key(gateway, 'ip'))
        if firewall is None:
            return None  # equivalent to saying no gateway of this id found
        else:
            remote_gateway = gateway_msgs.RemoteGateway()
            remote_gateway.name = gateway
            remote_gateway.ip = ip
            remote_gateway.firewall = bool(int(firewall))
            remote_gateway.public_interface = []
            encoded_advertisements = self._redis_server.smembers(
                hub_api.create_rocon_gateway_key(gateway, 'advertisements'))
            for encoded_advertisement in encoded_advertisements:
                advertisement = utils.deserialize_connection(
                    encoded_advertisement)
                remote_gateway.public_interface.append(advertisement.rule)
            remote_gateway.flipped_interface = []
            encoded_flips = self._redis_server.smembers(
                hub_api.create_rocon_gateway_key(gateway, 'flips'))
            for encoded_flip in encoded_flips:
                [target_gateway, name, connection_type,
                 node] = utils.deserialize(encoded_flip)
                remote_rule = gateway_msgs.RemoteRule(
                    target_gateway,
                    gateway_msgs.Rule(connection_type, name, node))
                remote_gateway.flipped_interface.append(remote_rule)
            remote_gateway.pulled_interface = []
            encoded_pulls = self._redis_server.smembers(
                hub_api.create_rocon_gateway_key(gateway, 'pulls'))
            for encoded_pull in encoded_pulls:
                [target_gateway, name, connection_type,
                 node] = utils.deserialize(encoded_pull)
                remote_rule = gateway_msgs.RemoteRule(
                    target_gateway,
                    gateway_msgs.Rule(connection_type, name, node))
                remote_gateway.pulled_interface.append(remote_rule)
            return remote_gateway
Example #5
def get_classes_dict(serialized_file: str,
                     sparql_file: str,
                     repository: str,
                     endpoint: str,
                     endpoint_type: str,
                     limit: int = 1000) -> ResourceDictionary:
    """
    Return a ResourceDictionary with the list of classes in the ontology
    :param serialized_file: The file where the properties ResourceDictionary is serialized
    :param sparql_file: The file containing the SPARQL query
    :param repository: The repository containing the ontology
    :param endpoint: The SPARQL endpoint
    :param endpoint_type: GRAPHDB or VIRTUOSO (to change the way the endpoint is called)
    :param limit: The sparql query limit
    :return: A ResourceDictionary with the list of classes in the ontology
    """
    classes_dictionary = deserialize(serialized_file)
    if classes_dictionary:
        return classes_dictionary
    classes_dictionary = ResourceDictionary()
    with open(sparql_file) as f:
        classes_sparql_query = f.read()
    classes_sparql_query_template = Template(classes_sparql_query +
                                             " limit $limit offset $offset ")
    for class_uri in get_sparql_results(classes_sparql_query_template,
                                        ["class"], endpoint, repository,
                                        endpoint_type, limit):
        classes_dictionary.add(class_uri[0])

    serialize(classes_dictionary, serialized_file)
    return classes_dictionary
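Examples #5 and #6 use deserialize as a cache probe: it must return a falsy value (such as None) when the serialized file does not exist yet, so the function recomputes and re-serializes. A file-backed sketch consistent with that contract, assuming pickle as the on-disk format:

import os
import pickle

def serialize(obj, path):
    # Persist obj so a later run with the same path can skip recomputation
    with open(path, 'wb') as f:
        pickle.dump(obj, f)

def deserialize(path):
    # Return None (falsy) when no cached file exists, as the callers expect
    if not os.path.isfile(path):
        return None
    with open(path, 'rb') as f:
        return pickle.load(f)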
Example #6
def get_properties_dict(serialized_file: str,
                        sparql_file: str,
                        repository: str,
                        endpoint: str,
                        endpoint_type: str,
                        limit: int = 1000) -> ResourceDictionary:
    """
    Return a ResourceDictionary with the list of properties in the ontology
    :param serialized_file: The file where the properties ResourceDictionary is serialized
    :param sparql_file: The file containing the SPARQL query
    :param repository: The repository containing the ontology
    :param endpoint: The SPARQL endpoint
    :param endpoint_type: GRAPHDB or VIRTUOSO (to change the way the endpoint is called)
    :param limit: The sparql query limit
    :return: A ResourceDictionary with the list of properties in the ontology
    """
    global_properties_dict = deserialize(serialized_file)
    if global_properties_dict:
        return global_properties_dict

    global_properties_dict = ResourceDictionary()
    global_properties_dict.add(RDF.type)
    with open(sparql_file) as f:
        properties_sparql_query = f.read()
    properties_sparql_query_template = Template(
        properties_sparql_query + " limit $limit offset $offset ")
    for rdf_property in get_sparql_results(properties_sparql_query_template,
                                           ["property"], endpoint, repository,
                                           endpoint_type, limit):
        global_properties_dict.add(rdf_property[0])

    serialize(global_properties_dict, serialized_file)
    return global_properties_dict
Example #7
def dataset(Dir, fontList=None):
    imgs, other_info = utils.deserialize(Dir)  # fontList is accepted but unused here
    labels = np.concatenate(
        [np.ones(i.shape[0], np.int32) * k for k, i in enumerate(imgs)],
        0
    )
    imgs = np.concatenate(imgs, 0).astype(np.float32) / 255
    imgs = np.expand_dims(imgs, -1)

    random.seed(23333)
    indices = [i for i in range(imgs.shape[0])]
    random.shuffle(indices)
    imgs = imgs[indices]
    labels = labels[indices]
    num_train = imgs.shape[0] * 3 // 5
    num_valid = imgs.shape[0] // 5

    train = tf.data.Dataset.from_tensor_slices(
        (imgs[:num_train], labels[:num_train])
    ).map(
        lambda img, label: (tf.image.resize_image_with_pad(
            tf.random_crop(img, [48, 48, 1]),
            64, 64
        ), label)
    ).shuffle(256).repeat().batch(batch_size)
    valid = tf.data.Dataset.from_tensor_slices(
        (imgs[num_train:num_train + num_valid],
         labels[num_train:num_train + num_valid])
    ).repeat().batch(batch_size)
    test = tf.data.Dataset.from_tensor_slices(
        (imgs[num_train + num_valid:], labels[num_train + num_valid:])
    ).batch(batch_size)

    return train, valid, test, num_train
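The dataset function above returns TF 1.x Dataset objects, so consuming them requires an iterator. A hypothetical usage sketch, assuming TensorFlow 1.x graph mode and a module-level batch_size (which the function itself references but does not define); the 'data/fonts' path is made up:

import tensorflow as tf

train, valid, test, num_train = dataset('data/fonts')
iterator = train.make_one_shot_iterator()
images, labels = iterator.get_next()

with tf.Session() as sess:
    batch_images, batch_labels = sess.run([images, labels])
    print(batch_images.shape, batch_labels.shape)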
Example #8
def main():
    random.seed(23333)
    imgs, other_infos = utils.deserialize("../data", fontList)
    imgs_shuffled = []
    for i in imgs:
        indices = [j for j in range(i.shape[0])]
        random.shuffle(indices)
        imgs_shuffled.append(i[indices])
    train = np.array([
        utils.daisy(img) for i in imgs_shuffled for img in i[:len(i) * 4 // 5]
    ])
    train = np.reshape(train, [train.shape[0], -1])
    train_label = np.array(
        [i for i, _ in enumerate(imgs) for j in _[:len(_) * 4 // 5]])
    test = np.array([
        utils.daisy(img) for i in imgs_shuffled for img in i[len(i) * 4 // 5:]
    ])
    test = np.reshape(test, [test.shape[0], -1])
    test_label = np.array(
        [i for i, _ in enumerate(imgs) for j in _[len(_) * 4 // 5:]])

    print(train.shape)

    label, center, cnt = meanshift(train, .7, 20, 4e-2, uniform_weight)
    center_label = np.zeros([label.ntree, len(fontList)], int)
    for i in range(train.shape[0]):
        center_label[label.classidx[label.find(i)], train_label[i]] += 1

    tp = 0
    for idx, t in enumerate(test):
        lst, dist = utils.nearest_neighbour(t, center)
        if np.argmax(center_label[lst[0]]) == test_label[idx]:
            tp += 1
    print('acc =', tp / test.shape[0])
Example #9
def test_on_Jacobsen_with_50_stages():
    #     # Just to document how the test DAG was created
    #     from equations import read_bipartite_graph
    #     g, eqs, forbidden = read_bipartite_graph('JacobsenILOSimpBounds')
    #     for eq, var in g.edges_iter(eqs):
    #         g[eq][var]['weight'] = 1 if (eq, var) in forbidden else 10
    #     mate = nx.max_weight_matching(g, maxcardinality=True)
    #     # Orient according to the matching, and also label
    #     dig = nx.DiGraph()
    #     for eq, var in g.edges_iter(eqs):
    #         if mate[eq]==var:
    #             dig.add_edge(eq, var, weight=1, orig_edges=[(eq,var)])
    #         else:
    #             dig.add_edge(var, eq, weight=1, orig_edges=[(var,eq)])
    #     assert not nx.is_directed_acyclic_graph(dig)
    #     from utils import serialize
    #     serialize(dig, 'data/JacobsenILOSimpBounds_as_DAG.pkl.gz')
    dig = deserialize(DATADIR + 'JacobsenILOSimpBounds_as_DAG.pkl.gz')
    #     # Uncomment to prove that this graph has more than 10M simple cycles:
    #     cutoff = 10000000
    #     from itertools import islice
    #     n_cycles = sum(1 for _ in islice(nx.simple_cycles(dig), cutoff+1))
    #     if n_cycles == cutoff+1:
    #         print('More than', cutoff, 'simple cycles, giving up...')
    #     else:
    #         print('There are', n_cycles, 'simple cycles in total')
    _, cost = solve_problem(dig)
    print('Cost with ILP:', cost)  # 107 with this matching; optimal tearing 53
    cost, _ = run_mfes_heuristic(dig, try_one_cut=True, is_labeled=True)
    print('Cost with heuristic:', cost)  # 160
Example #10
    def upload_model(self, model_class, result):
        from google.appengine.ext import db
        from utils import deserialize

        save = []
        to_delete = []
        for entity in result:

            if "id" in entity:
                existing_entry = model_class.get(
                    db.Key.from_path(model_class.kind(), entity["id"]))
                if existing_entry:
                    # Remove the existing entry with numeric ID
                    to_delete.append(existing_entry)

            obj = deserialize(model_class, entity)
            save.append(obj)

            # Flush in batches rather than one datastore call per entity
            if len(to_delete) > 100:
                db.delete(to_delete)
                to_delete = []
            if len(save) > 100:
                db.put(save)
                save = []

        db.delete(to_delete)
        db.put(save)
Example #11
def send_message(address, command, data, response=False):
    message = prepare_message(command, data)
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.connect(address)
        s.sendall(serialize(message))
        if response:
            return deserialize(s.recv(5000))
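The networking snippets in this set (Examples #11, #13, #14, #17, #20, #21, #23, #30, #31) share one wire pattern: wrap a command and payload in a dict, serialize it, and read up to a fixed byte count on the other end. A plausible sketch of the prepare_message helper they all assume, based on the keys the handlers read back out:

def prepare_message(command, data):
    # Receivers index into message["command"] and message["data"]
    return {"command": command, "data": data}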
Example #12
def load_nar_module_resources(nar_encoders_dict_path):
    nar_encoders_dict = deserialize(nar_encoders_dict_path)

    print("Read NAR label encoders dict for: {}".format(nar_encoders_dict.keys()))

    return nar_encoders_dict
Example #13
    def handle(self):
        message_bytes = self.request.recv(5000).strip()
        message = deserialize(message_bytes)
        command = message["command"]
        data = message["data"]

        logger.info(f"Received {message}")

        if command == "ping":
            self.respond(command="pong", data="")

        if command == "tx":
            try:
                bank.handle_tx(data)
                self.respond(command="tx-response", data="accepted")
            except Exception:
                self.respond(command="tx-response", data="rejected")

        if command == "block":
            bank.handle_block(data)

        if command == "utxos":
            balance = bank.fetch_utxos(data)
            self.respond(command="utxos-response", data=balance)

        if command == "balance":
            balance = bank.fetch_balance(data)
            self.respond(command="balance-response", data=balance)
Example #14
    def handle(self):
        message_bytes = self.request.recv(1024 * 4).strip()
        message = deserialize(message_bytes)
        command = message["command"]
        data = message["data"]

        if command == "ping":
            self.respond(command="pong", data="")

        if command == "block":
            # If the block isn't new, ignore it
            chain_index, height = node.find_block(data)
            if chain_index == height == None:
                logging.info(f"Received block from peer")

                node.handle_block(data)
                # Tell the mining thread mine the new tip
                logger.info(f"in tcphandler. mempool has {len(node.mempool)}")
                mining_interrupt.set()

            # logging.info(f"Ignoring block: {data}")

        if command == "tx":
            node.handle_tx(data)

        if command == "balance":
            balance = node.fetch_balance(data)
            self.respond(command="balance-response", data=balance)

        if command == "utxos":
            utxos = node.fetch_utxos(data)
            self.respond(command="utxos-response", data=utxos)
Example #15
def get_numeric_jacobian(problem_name):
    # Always return the Jacobian as list of (i, j, J_ij)
    jacfile = 'data/' + problem_name + '.jac.pkl.gz'
    if isfile(jacfile):
        print('Using cached Jacobian for', problem_name)
        return deserialize(jacfile)
    #
    print('Computing then saving the Jacobian for', problem_name)
    g, problem = create_dag(problem_name)
    code = numeric_diff(g, problem)
    globals_ = {}
    try:
        exec_(code, globals_)
    except:
        print('===============================================================')
        print(code)
        print('===============================================================')
        raise
    J = globals_['J']
    #
    Jrows = get_J_rowwise(problem)
    jac = [(i, j, J[i, j]) for i, cols in enumerate(Jrows) for j in cols]
    serialize(jac, 'data/' + problem.name + '.jac.pkl.gz')
    return jac
Example #16
    def print_utxo(self):
        utxos = []

        for _, outs in self._bucket.kv.items():
            outs = utils.deserialize(outs)
            for out in outs:
                print(out.value)
Example #17
    def handle(self):
        message_data = self.request.recv(5000).strip()
        message = deserialize(message_data)
        print(f"got a message: {message}")

        command = message["command"]
        if command == "ping":
            self.respond("pong", "")

        if command == "balance":
            public_key = message["data"]
            balance = bank.fetch_balance(public_key)
            self.respond("balance-response", balance)

        if command == "utxo":
            public_key = message["data"]
            utxo = bank.fetch_utxo(public_key)
            self.respond("utxo-response", utxo)

        if command == "tx":
            tx = message["data"]
            try:
                bank.handle_tx(tx)
                self.respond("tx-response", data="accepted")
            except Exception:
                self.respond("tx-response", data="rejected")
Example #18
    def test_packing(self):
        ik, tk = KG(length=8)
        u = ik.encode(b'\x0f')

        serialize(ik, PACK_TEST_IK_FILE)
        serialize(tk, PACK_TEST_TK_FILE)
        serialize(u, PACK_TEST_CT_FILE)

        ik1 = deserialize(PACK_TEST_IK_FILE)
        tk1 = deserialize(PACK_TEST_TK_FILE)
        u1 = deserialize(PACK_TEST_CT_FILE)

        #call to_affine on all GE objects in codec
        self.assertEqual(u.all_to_affine(), u1)
        self.assertEqual(ik.all_to_affine(), ik1)
        self.assertEqual(tk.all_to_affine(), tk1)
Example #19
def run_training(n_hidden=256, ds_type='2d_star_11', n_parallel=1000, n_epochs=1000000,
                 random_bias=False, act='relu', n_layers=1, device_number=0, version=0):
    print('Start time:', datetime.datetime.now())

    if n_layers > 1:
        name = f'{n_hidden}x{n_layers}-{n_parallel}-{random_bias}-{act}-v{version}'
    else:
        name = f'{n_hidden}-{n_parallel}-{random_bias}-{act}-v{version}'

    x_train, y_train = get_2d_star_dataset(k=11, dist=0.1)

    print(f'Running model for {n_epochs} epochs on dataset {ds_type}: {name}')
    base_dir = Path(get_results_path())
    file_dir = base_dir/ds_type/name
    file_path = file_dir/'model_trainer.p'
    if utils.existsFile(file_path):
        print('Loading existing model')
        mt = utils.deserialize(file_path)
        mt.to(get_device(device_number))
    else:
        print('Creating new model')
        mt = ModelTrainer(x_train, y_train, n_parallel=n_parallel,
                          hidden_sizes=[n_hidden] * n_layers, n_virtual_samples=n_hidden**2,
                          random_bias=random_bias, act=act, device_number=device_number, version=version)
    mt.train(n_epochs)
    mt.to('cpu')
    utils.serialize(file_path, mt)
    utils.serialize(file_dir/'config.p', dict(ds_type=ds_type, n_parallel=n_parallel, n_layers=n_layers,
                                              random_bias=random_bias, act=act, n_epochs=n_epochs, version=version))
    print('Saved trained model')
    print('End time:', datetime.datetime.now())
Example #20
    def handle(self):
        message_bytes = self.request.recv(1024 * 8).strip()
        message = deserialize(message_bytes)
        command = message["command"]
        data = message["data"]

        # logger.info(f"Received {message}")
        if command == "ping":
            self.respond(command="pong", data="")

        if command == "tx":
            try:
                node.handle_tx(data)
                self.respond(command="tx-response", data="accepted")
            except Exception:
                self.respond(command="tx-response", data="rejected")

        if command == "block":
            # handle only if it builds on the tip
            if data.prev_id == node.blocks[-1].id:
                node.handle_block(data)
                # interrupt mining thread
                mining_interrupt.set()

        if command == "utxos":
            balance = node.fetch_utxos(data, True)
            self.respond(command="utxos-response", data=balance)

        if command == "balance":
            balance = node.fetch_balance(data, True)
            self.respond(command="balance-response", data=balance)
Example #21
    def handle(self):
        while True:
            message_data = self.request.recv(5000).strip()
            # Need to break early if we got empty bytes,
            # otherwise we get a deserialization error
            # trying to deserialize empty bytes
            if message_data == b"":
                print("Closing connection, bye!")
                break

            message = deserialize(message_data)

            print(f'Received {message}')

            if message["command"] == "ping":
                self.respond("pong", "")

            if message["command"] == "balance":
                public_key = message["data"]
                balance = bank.fetch_balance(public_key)
                self.respond("balance-response", balance)

            if message["command"] == "utxo":
                public_key = message["data"]
                utxo = bank.fetch_utxo(public_key)
                self.respond("utxo-response", utxo)

            if message["command"] == "tx":
                tx = message["data"]
                try:
                    bank.handle_tx(tx)
                    self.respond("tx-response", data="accepted")
                except Exception:
                    self.respond("tx-response", data="rejected")
Example #22
def process_result(result):
    value_format = result.headers['Content-Type']
    device_body = utils.deserialize(result, value_format)
    utils.save_data(device_body, value_format)

    print("\nHTTP Response successful!")  # debug
Example #23
    def handle(self):
        message_bytes = self.request.recv(1024 * 4).strip()
        message = deserialize(message_bytes)
        command = message["command"]
        data = message["data"]

        logger.info(f"received {command}")

        if command == "ping":
            self.respond(command="pong", data="")

        if command == "block":
            if data.prev_id == node.blocks[-1].id:
                node.handle_block(data)
                # Interrupt mining thread
                mining_interrupt.set()

        if command == "tx":
            node.handle_tx(data)

        if command == "balance":
            balance = node.fetch_balance(data)
            self.respond(command="balance-response", data=balance)

        if command == "utxos":
            utxos = node.fetch_utxos(data)
            self.respond(command="utxos-response", data=utxos)
Example #24
    def __deserialise_from_split(self, name, split):
        # Temporarily point utils.DATA_DIR at the split directory, then restore it
        data_dir = utils.DATA_DIR
        split_dir = utils.DATA_DIR + '/split-' + str(split)
        utils.DATA_DIR = split_dir
        data = utils.deserialize(name)
        utils.DATA_DIR = data_dir
        return data, split_dir
Example #25
def solve_difficult_for_ilp(solve_function, log):
    for filename, opt in DIFFICULT_FOR_ILP:
        log(filename)
        g, eqs = deserialize(DATADIR + filename)
        _, _, _, tear_set, _ = solve_function(g, eqs)
        cost = len(tear_set)
        assert opt == cost, (opt, cost)
        log()
Example #26
    def handle(self):
        message_bytes = self.request.recv(1024 * 4).strip()
        message = deserialize(message_bytes)
        command = message["command"]
        data = message["data"]

        if command == "ping":
            self.respond(command="pong", data="")

        if command == "block":
            if data == None:
                logger.info(f"Initial block download complete")
                node.syncing = False
                return

            logging.info(f"Received block from peer")

            try:
                node.handle_block(data)
                mining_interrupt.set()
            except:
                pass

            # If syncing, request next block
            if node.syncing:
                node.initial_block_download()

        if command == "tx":
            node.handle_tx(data)

        if command == "balance":
            balance = node.fetch_balance(data)
            self.respond(command="balance-response", data=balance)

        if command == "utxos":
            utxos = node.fetch_utxos(data)
            self.respond(command="utxos-response", data=utxos)

        if command == "join":
            node.peers.add(data)
            self.respond(command="peers", data=node.peers)
            logger.info("received join msg")

        if command == "peers":
            logger.info("received peer list")

        if command == "get_blocks":
            next_block = None
            # locate the block in the main chain
            # FIXME: this should call a general-purpose function
            for block in node.active_chain:
                if block.prev_id == data:
                    next_block = block
                    break

            # Sending None tells the peer that initial block download is done
            send_message(self.peer(), command="block", data=next_block)
            logger.info(f"sent 'block' message: {next_block}")
Example #27
def run_finer_lrs(init_param='kaiming', device='cpu'):
    dist_grid = [ExampleDistribution()
                 ] + [RadialDataDistribution(d=2**k) for k in range(7)]
    std_grid = [0.1, 0.5, 1.0, 2.0]
    # bi_grid = [('zero', 0.0), ('he+5', 0.0), ('he+1', 0.0), ('kink_uniform', 0.0)] \
    #             + [(bim, big) for big in std_grid for bim in ['normal', 'uniform']] \
    #             + [('pos-unif', 1.0), ('neg-unif', 1.0), ('kink-unif', 1.0), ('kink-neg-unif', 1.0),
    #                ('kink-neg-point', 0.0)]
    bi_grid = [('zero', 0.0), ('unif', 1.0), ('unif-pos', 1.0),
               ('unif-neg', 1.0), ('kink-neg-unif', 1.0), ('pytorch', 1.0),
               ('kink-neg-point', 0.0)]
    for opt in ['gd', 'gd-mom', 'adam']:
        for dist in dist_grid:
            d = dist.get_x_dim()
            for bim, big in bi_grid:
                folder_name = f'{init_param}_{opt}_{dist.get_name()}_{bim}-{big:g}'
                results_path = Path(custom_paths.get_results_path())
                path = results_path / 'nn_comparison' / folder_name
                best_lr_file = results_path / 'nn_comparison' / f'{folder_name}_bestlr.pkl'
                if not utils.existsFile(best_lr_file):
                    sys.stderr.write(
                        f'best lr file {best_lr_file} does not exist!\n')
                    continue
                best_lr = utils.deserialize(best_lr_file)
                lr_grid = [best_lr * (2**(k / 8)) for k in range(-3, 4)]
                for lr in lr_grid:
                    print(f'Running combination {folder_name} with lr {lr:g}')
                    file = path / f'{lr:g}.pkl'
                    utils.ensureDir(file)
                    if utils.existsFile(file):
                        continue
                    n_rep = 2 if d == 64 else 1
                    trainer = SimpleParallelTrainer(n_parallel=100 // n_rep,
                                                    n_train=256 * d,
                                                    n_valid=1024,
                                                    n_test=1024,
                                                    data_distribution=dist,
                                                    lr=lr,
                                                    bias_init_gain=big,
                                                    batch_size=256,
                                                    bias_init_mode=bim,
                                                    init_param=init_param,
                                                    n_epochs=8192 // d,
                                                    seed=0,
                                                    device=device,
                                                    n_hidden=512,
                                                    opt=opt,
                                                    valid_epoch_interval=64 // d,
                                                    n_rep=n_rep)
                    results = trainer.fit(do_plot=False, verbose=False)
                    if results is None:
                        print('Got NaN values')
                    utils.serialize(file, {
                        'trainer': trainer,
                        'results': results
                    })
Example #28
    def test_serialization(self):
        serialize(MATRIX, MATRIX_TEST_SERIALIZE_FILE)
        n_matrix = deserialize(MATRIX_TEST_SERIALIZE_FILE)

        MATRIX.all_to_affine()

        self.assertEqual(n_matrix, MATRIX)
        self.assertTrue(MATRIX.all_validate())
        self.assertTrue(n_matrix.all_validate())
Example #29
    def test_serialization(self):
        serialize(VECTOR, VECTOR_TEST_SERIALIZE_FILE)
        n_vector = deserialize(VECTOR_TEST_SERIALIZE_FILE)

        VECTOR.all_to_affine()

        self.assertEqual(n_vector, VECTOR)
        self.assertTrue(VECTOR.all_validate())
        self.assertTrue(n_vector.all_validate())
Example #30
def send_message(command, data):
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.connect(client_address)
        message = prepare_message(command, data)
        serialized_message = serialize(message)
        s.sendall(serialized_message)
        message_data = s.recv(5000)
        message = deserialize(message_data)
        print(f'Received {message}')
Example #31
def send_message(command, data):
    # Use a context manager so the socket is closed even on error
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.connect(address)
        message = serialize(prepare_message(command, data))
        sock.sendall(message)
        message_data = sock.recv(5000)
        message = deserialize(message_data)
    print(f"Received data: {message}")

    return message
Example #32
    def from_raw(raw):
        result = Message(deserialize(raw))
        result._serialized_size = len(raw)
        return result
Example #33
def load_workflow(path):
    '''Load workflow from directory path.'''
    tokenizer = cPickle.load(gzip.open(path + "/tok.pkl.gz"))
    scaler = cPickle.load(gzip.open(path + "/scaler.pkl.gz"))
    projector = utils.deserialize(cPickle.load(gzip.open(path + "/model.univ.pkl.gz")))
    return Workflow(tokenizer, projector, scaler)