Example 1
def task_parallel(f, jobs, device_list=[0, 1, 2, 3]):
    '''
    jobs is a list in which each element holds the parameters for f;
    f must still accept the device as a keyword argument.
    '''
    jobs_group = groupby(jobs, len(device_list), key='num')
    jobs, results = jobs_claim(f, jobs_group, device_list=device_list)
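All of these examples rely on a groupby(items, n, key=...) helper (and, here, a jobs_claim dispatcher) that is not shown. Below is a minimal sketch of groupby consistent with how it is called in Examples 1-6: key='num' splits the items into a fixed number of groups (one per device), key='mini' splits them into fixed-size minibatches. The original implementation may differ in detail.

def groupby(items, n, key='num'):
    # Hypothetical reconstruction inferred from the call sites in these examples.
    if key == 'num':
        # n groups of roughly equal size, assigned round-robin (one per device)
        return [items[i::n] for i in range(n)]
    if key == 'mini':
        # consecutive chunks of size n (minibatches)
        return [items[i:i + n] for i in range(0, len(items), n)]
    raise ValueError("unknown key: {!r}".format(key))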
Example 2
def train_multi(loader, device_list=[1, 2, 3], num_epoch=100):

    loss = chainer.functions.softmax_cross_entropy
    optimizer = chainer.optimizers.Adam()

    _model = model(device=3)

    optimizer.setup(_model)

    ctx = mp.get_context('forkserver')

    for epoch in range(num_epoch):
        start_time = timer()
        num = 0
        count = 0
        correct = 0
        l = 0
        for x, y, index in loader.get():
            if index == 0:
                initial(x, y, _model, loss, 3)

            q = ctx.Queue()
            param_dict = _model.param_to_dict()

            groups = groupby(list(range(y.shape[0])), len(device_list), key='num')
            # half_size = y.shape[0]//2
            # x_0, x_1 = x[:half_size], x[half_size:]
            # y_0, y_1 = y[:half_size], y[half_size:]

            jobs = []

            for device, group in zip(device_list, groups):
                _x, _y = x[group], y[group]

                p = ctx.Process(name='device {}'.format(device),
                                target=single,
                                args=(_x, _y, param_dict, loss, q, device))
                p.daemon = True
                jobs.append(p)
                p.start()

            for job in jobs:
                grad_dict = q.get()
                print(grad_dict.keys())
                print(_model.grad_to_dict().keys())
                _model.grad_add_from_dict(grad_dict)
                job.join()

            optimizer.update()
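The single worker launched by train_multi is not shown. Below is a hypothetical sketch of what it has to do, inferred from how the parent consumes the queue; model, param_from_dict, and the dict-based gradient exchange are assumptions about the surrounding codebase, not its actual API.

def single(x, y, param_dict, loss, q, device):
    # Hypothetical worker: rebuild the model on this process's GPU, load the
    # parent's current parameters, compute gradients on this shard of the
    # minibatch, and send them back as the dict the parent passes to
    # grad_add_from_dict().
    _model = model(device=device)        # same constructor used in train_multi
    _model.param_from_dict(param_dict)   # assumed counterpart of param_to_dict()
    _model.cleargrads()
    batch_loss = loss(_model(x), y)
    batch_loss.backward()
    q.put(_model.grad_to_dict())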
Example 3
def multi_inference(img, pos, checkPath, device_list=[1, 2, 3]):
    '''
    img should be a numpy array
    pos should be a list
    '''
    num_gpu = len(device_list)
    pos_groups = groupby(pos, num_gpu, key='num')

    ctx = mp.get_context('forkserver')

    # ctx = mp.get_context('spawn')
    # que = mp.Queue()

    # que = ctx.Array('i', range(10))

    processes = []
    img = img.astype(np.int32)
    compare = np.arange(4095, dtype=np.int32)
    compare = np.expand_dims(compare, axis=0)

    for device, pos_group in zip(device_list, pos_groups):
        # with device_guard(device):
        p = ctx.Process(target=inference_kernel,
                        args=(img, pos_group, checkPath, device))
        p.start()
        processes.append(p)
        print(device, "start")

    for p in processes:
        p.join()

    PRED = []
    pos = []
    for device in device_list:
        PRED.append(np.load("./log/{}_PRED.npy".format(device)))
        pos.append(np.load("./log/{}_pos.npy".format(device)))

    PRED = np.concatenate(PRED, axis=0)
    pos = np.concatenate(pos, axis=0)

    return PRED, pos
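inference_kernel is also defined elsewhere; multi_inference only depends on each worker writing ./log/{device}_PRED.npy and ./log/{device}_pos.npy. Here is a hypothetical skeleton of that contract; load_model and predict are placeholders rather than functions from the original code.

import numpy as np

def inference_kernel(img, pos_group, checkPath, device):
    # Hypothetical worker: run this device's share of positions and save the
    # results where multi_inference() expects to collect them.
    model = load_model(checkPath, device)                         # placeholder
    pred = np.stack([predict(model, img, p) for p in pos_group])  # placeholder
    np.save("./log/{}_PRED.npy".format(device), pred)
    np.save("./log/{}_pos.npy".format(device), np.asarray(pos_group))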
Example 4
    def get(self):
        indices = np.random.permutation(len(self.X)).tolist()
        groups = groupby(indices, self.minibatch, key='mini')
        for group in groups:
            yield self.X[group]
Example 5
    def get(self):
        indices = np.random.permutation(len(self.X)).tolist()
        groups = groupby(indices, self.minibatch, key='mini')
        for index, group in enumerate(groups):
            yield self.X[group], self.Y[group]
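Examples 4 and 5 are minibatch generators built on the same groupby helper with key='mini'. A minimal usage sketch, assuming a small loader class that only stores X, Y, and the minibatch size (the real class presumably holds more state) and the groupby sketch given under Example 1:

import numpy as np

class Loader:
    # Hypothetical container matching the get() methods above.
    def __init__(self, X, Y, minibatch):
        self.X, self.Y, self.minibatch = X, Y, minibatch

    def get(self):
        indices = np.random.permutation(len(self.X)).tolist()
        for group in groupby(indices, self.minibatch, key='mini'):
            yield self.X[group], self.Y[group]

loader = Loader(np.random.rand(10, 3), np.arange(10), minibatch=4)
for x_batch, y_batch in loader.get():
    print(x_batch.shape, y_batch.shape)   # (4, 3) (4,), (4, 3) (4,), (2, 3) (2,)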
Example 6
    def get(self, epoch, pertube=True):
        # in addition to the X, Y data collected in __init__,
        # we next add perturbed training data using the pertubation() helper
        if self.shuffle:
            p_posi, p_nege = self._shuffle()
        else:
            p_posi = np.arange(len(self.X_posi)).tolist()
            p_nege = np.arange(len(self.X_nege)).tolist()

        groups_posi = groupby(p_posi, self.minigroup, key='mini')
        groups_nege = groupby(p_nege, self.minigroup, key='mini')

        # if(epoch==0):
        # 	if(pertube==True):
        # 		print("minibatch is :{}(step) * {}(minigroup) * {}(num) = {} with 1 pertubation, where step should be smaller than 15, total should be smaller than 80".format(self.step, self.minigroup, 3, self.step*self.minigroup*3))
        # 	else:
        # 		print("minibatch is :{}(step) * {}(minigroup) * {}(num) = {} with no pertubation, where step should be smaller than 15, total should be smaller than 80".format(self.step, self.minigroup, 2, self.step*self.minigroup*2))

        for index, (g_posi, g_nege) in enumerate(zip(groups_posi,
                                                     groups_nege)):

            x_posi = [self.X_posi[x] for x in g_posi]
            y_posi = [self.Y_posi[x] for x in g_posi]
            POS_posi = [self.POS_posi[x] for x in g_posi]
            img_posi = [self.IMG_posi[x] for x in g_posi]

            x_nege = [self.X_nege[x] for x in g_nege]
            y_nege = [self.Y_nege[x] for x in g_nege]
            POS_nege = [self.POS_nege[x] for x in g_nege]
            img_nege = [self.IMG_nege[x] for x in g_nege]

            x_posi = self.op.concatenate([self.op.array(x) for x in x_posi])
            y_posi = self.op.concatenate([self.op.array(x) for x in y_posi])
            x_nege = self.op.concatenate([self.op.array(x) for x in x_nege])
            y_nege = self.op.concatenate([self.op.array(x) for x in y_nege])

            if pertube:
                pertub = [pertubation(x.shape[0]) + x for x in POS_posi]

                # print("start pertubation")
                x_pertub, y_pertub, _, _ = prepare(
                    0,
                    img_posi,
                    pertub,
                    0,
                    channels_pooling_stride=self.channels_pooling_stride,
                    device=self.device,
                    patch_size=self.patch_size,
                    key='return',
                    step=self.step,
                    cpu=False,
                    dataLimit=self.dataLimit,
                    channels=self.channels)

                # print("finished pertubation")

                if len(x_pertub) > 0:
                    x_pertub = self.op.concatenate(x_pertub)
                    y_pertub = self.op.concatenate(y_pertub)

                    x = self.op.concatenate([x_posi, x_nege, x_pertub])
                    y = self.op.concatenate([y_posi, y_nege, y_pertub])
                else:
                    x = self.op.concatenate([x_posi, x_nege])
                    y = self.op.concatenate([y_posi, y_nege])

            else:
                x = self.op.concatenate([x_posi, x_nege])
                y = self.op.concatenate([y_posi, y_nege])

            yield x, y, index
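A minimal consumption sketch for this generator, mirroring how the loader in Example 2 is iterated; num_epoch and train_step are placeholders rather than names from the original code.

num_epoch = 100
for epoch in range(num_epoch):
    for x, y, index in loader.get(epoch, pertube=True):
        # x, y are the concatenated positive/negative (and, when pertube=True,
        # perturbed) blocks for one minigroup; index counts minigroups.
        train_step(x, y)   # placeholder for the actual optimizer step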