Example 1
0
    def update_weights(self, content, tag):
        """
            Feed a received weight block into this node's batch updater and
            yield the resulting (receiver, package) transmissions.
            note: only one working process on each node;
                  nodes may progress through batches at different speeds.
        """
        # lazily construct the per-node updater on first use
        if self.batch_updater is None:
            self.batch_updater = self.Updater(tag.Node_No)

        # remember which batch this node is currently working on
        self.current_batch = tag.Batch_No

        incoming = Block_Weight(tag.Layer_No,
                                tag.Batch_No,
                                tag.Block_No,
                                tag.Company,
                                content=content)

        packs = self.batch_updater.update_blocks(incoming)
        if packs is None:
            packs = []

        for pack in packs:
            payload = {
                SynchronizedSGD.STR_BATCH_NO: tag.Batch_No,
                SynchronizedSGD.DATA: pack.content()
            }
            yield (pack.target(), payload)
Example 2
0
    def decompose_compack(self, params=None):
        """
            Rebuild a Block_Weight from this package's content.
            note: `params` is accepted for interface compatibility and ignored.
        """
        assignment = GlobalSettings.get_default().block_assignment
        owned_blocks = assignment.node_2_block[self.Node_ID]

        return Block_Weight(self.Layer_ID,
                            self.Batch_ID,
                            owned_blocks[0],
                            set(owned_blocks),
                            self.Content)
Example 3
0
    def decoding(self, pbw: PartialBlockWeight):
        """
            Try to reassemble a full Block_Weight for pbw's block from the
            buffered partial pieces.  A result is stored in
            self.block_weights_have only when every redundancy slice of the
            block is present; otherwise nothing happens.
        """
        settings = GlobalSettings.get_default()

        # too few pieces buffered overall — decoding cannot succeed yet
        if len(self.partial_block_weights_buffer) < settings.redundancy:
            return

        # gather every slice of this block, bailing out if any is missing
        pieces = []
        for pos in range(settings.redundancy):
            piece = self.partial_block_weights_buffer.get((pbw.Block_ID, pos))
            if piece is None:
                return None
            pieces.append(piece)

        # stitch the slices back together in positional order
        pieces.sort(key=lambda item: item.Position)
        merged = np.concatenate([item.Content for item in pieces],
                                axis=CodedBlockWeight.SPLIT_AXIS)

        owners = settings.block_assignment.block_2_node[pbw.Block_ID]
        self.block_weights_have[pbw.Block_ID] = Block_Weight(
            0, 0, pbw.Block_ID, set(owners), merged)
Example 4
0
        # Pair each partial-block id with its slice position for iteration.
        iteration = zip(self.Partial_Block_Weights_Content_ID,
                        self.Partial_Block_Weights_Content_Position)
        # XOR accumulator, seeded with this package's own coded content.
        content = self.Partial_Block_Weights_Content

        # deprecated usage
        # NOTE(review): assumes block_weights_dic is non-empty — all entries
        # are presumed to share one layer/batch id; verify at the caller.
        layer_id = list(block_weights_dic.values())[0].Layer_ID
        batch_id = list(block_weights_dic.values())[0].Batch_ID

        parts_absent = 0
        decompose_part_id = 0
        decompose_part_pos = 0

        # XOR every available piece into the accumulator; remember the single
        # absent piece, which the accumulated XOR then reconstructs.
        for id, pos in iteration:
            if block_weights_dic.get(id):
                content ^= block_weights_dic[id].get_by_position(pos).Content
            else:
                parts_absent += 1
                decompose_part_id = id
                decompose_part_pos = pos

        # XOR erasure decoding can recover exactly one missing part.
        assert parts_absent == 1, 'Invalid decode process, value absent: {}'.format(parts_absent)

        return PartialBlockWeight(layer_id, batch_id, decompose_part_id, decompose_part_pos, content)


if __name__ == '__main__':
    # smoke test: wrap a plain Block_Weight into its coded counterpart
    sample = Block_Weight(0, 0, 0, {0, 1}, [1, 2, 3])
    print(CodedBlockWeight.from_block_weight(sample))
Example 5
0
# default setting
Default = GlobalSettings.get_default()

# build codec
# one slave codec instance per participating node
slave_codec = [SLAVE_CODEC(node_id=i) for i in range(SLAVE_CNT)]

for i in range(TEST_ROUNDS):
    # starting consensus stage
    node_id = 0
    for slave in slave_codec:
        # build each block assigned to this node
        for block_id in Default.block_assignment.node_2_block[node_id]:
            # random payload standing in for real layer weights
            arr = np.random.random(size=WEIGHTS_SHAPE)
            # build blockweights
            blockweight = Block_Weight(LAYER, i, block_id, Default.block_assignment.block_2_node[block_id], arr)
            # send consensus package
            for package in slave.update_blocks(blockweight):
                # deliver to every declared receiver of the package
                for tgt in package.target():
                    assert tgt in range(SLAVE_CNT)
                    recv = slave_codec[tgt]
                    # recv pkg
                    recv.receive_blocks(package.content())
                    print("INFO: ----------- Node:{} Transmitting to {} successful -----------".format(node_id, tgt))

        node_id += 1

    node_id = 0
    for slave in slave_codec:
        # wait until done (loop body continues past this chunk)
Example 6
0
# network layer index used for every generated block
LAYER = 0
GlobalSettings.set_default(len(SLAVE_IDS), 1, 1, None)

# build codec: one slave instance per node, plus the master
slave_codec = [SLAVE_CODEC(node_id=i) for i in SLAVE_IDS]
master_codec = MASTER_CODEC(node_id=MASTER_ID)

for i in range(TEST_ROUNDS):
    # consensus stage: each slave publishes its block and handles the replies
    for node_id, slave in enumerate(slave_codec):
        # random payload standing in for real layer weights
        arr = np.random.random(size=WEIGHTS_SHAPE)
        blockweight = Block_Weight(LAYER, i, node_id, {node_id}, content=arr)
        # send consensus package
        for package in slave.update_blocks(blockweight):
            # every outgoing package must be addressed to the parameter server
            assert Parameter_Server in package.target()
            # route each reply from the master back to its sender
            for reply in master_codec.receive_blocks(package.content()):
                # the reply header must name this node as a target
                assert node_id in reply.target()
                slave.receive_blocks(reply.content())
        arr_res = slave.get_result()

    print("INFO: -----------Test complete {}/{} -----------".format(i, TEST_ROUNDS))