Example #1
0
def fnctot_mean(input_file_list, dist_num):
    """Compute STFT means over a file list in parallel.

    Splits ``input_file_list`` into ``dist_num`` chunks via
    ``utils.chunkIt`` and runs ``get_stft_mean`` on each chunk in its own
    process, collecting one result per chunk through a dedicated queue.

    Args:
        input_file_list: Files to process (split into chunks).
        dist_num: Number of worker processes / chunks.

    Returns:
        list: One ``get_stft_mean`` result per chunk, in chunk order.
    """
    split_file_list = utils.chunkIt(input_file_list, dist_num)

    queue_list = []
    procs = []

    for i, file_list in enumerate(split_file_list):
        # One queue per worker so results can be matched to their chunk.
        queue_list.append(Queue())
        procs.append(
            Process(target=get_stft_mean,
                    args=(file_list, queue_list[i])))

    for p in procs:  # process start
        p.start()

    M_list = []

    # Drain each queue BEFORE joining the workers: a child process blocks
    # on its queue's feeder thread until the data is consumed, so
    # joining first could deadlock.
    for i in range(dist_num):
        M_list.append(queue_list[i].get())
        queue_list[i].close()

    # FIX: join() instead of terminate(). All results are already
    # collected, so waiting for a clean exit is safe and avoids killing a
    # worker mid-flush (also consistent with lpsd_dist in this file).
    for p in procs:
        p.join()

    return M_list
Example #2
0
 def run(self):
     """Migrate all packets to the new bucket using a process pool.

     Reads the packet-name list from ``packetListPath``, optionally
     truncates it to ``conf.records`` entries, splits it into
     ``conf.threads`` chunks, logs the size of each chunk, then maps
     ``runner`` over the chunks with a multiprocessing pool.
     """
     self.m.createBucket(conf.new_bucket_name)
     packet_names = getJsonFile(packetListPath)
     myPrint("Total " + str(len(packet_names)) + " packets found", 12)
     if conf.records is not None:
         packet_names = packet_names[0:conf.records]
     packet_names_chunks = chunkIt(packet_names, conf.threads)
     # BUG FIX: the original counter `i` was never incremented, so every
     # log line reported "Chunk 1". enumerate() yields the real 1-based
     # chunk index.
     for i, packet_names_chunk in enumerate(packet_names_chunks, start=1):
         myPrint("Chunk " + str(i) +
                 ": total packet to be migrated are " +
                 str(len(packet_names_chunk)))
     pool = Pool(conf.threads)
     pool.map(runner, packet_names_chunks)
Example #3
0
    def lpsd_dist_p(self, data, dist_num, is_patch=True):
        """Extract LPSD features from `data` using parallel worker processes.

        Splits `data` into up to `dist_num` chunks, runs `self.get_lpsd_p`
        on each chunk in its own process, concatenates the per-chunk
        results, normalizes them, and (optionally) extracts time patches.
        The whole extraction is retried from scratch if a worker's result
        takes too long to arrive (see the `get_time` check below).

        Args:
            data: Input array; converted via `tolist()` before chunking
                (assumed to be a NumPy array — TODO confirm).
            dist_num: Number of chunks / worker processes requested.
            is_patch: When True, pad and extract sliding 2-D patches of
                width `config.time_width`.

        Returns:
            LPSD features with a trailing singleton axis
            (shape depends on `is_patch`).
        """

        data = data.tolist()
        chunk_list = utils.chunkIt(data, dist_num)
        data_list = []
        # Drop chunks with fewer than 2 items; note this can make
        # len(data_list) < dist_num, while the result-collection loop
        # below still iterates range(dist_num) — presumably the caller
        # guarantees enough data per chunk (TODO confirm).
        for item in chunk_list:
            if len(item) > 1:
                data_list.append(item)

        # Retry loop: the entire fork/collect cycle restarts if a queue
        # get() was slow (taken as a sign of a stuck/failed worker).
        while True:

            queue_list = []
            procs = []

            # Debug: queue_list is empty here, so this prints nothing.
            for queue_val in queue_list:
                print(queue_val.empty())

            for i, data in enumerate(data_list):
                queue_list.append(Queue(
                ))  # define queues for saving the outputs of functions

                procs.append(
                    Process(target=self.get_lpsd_p,
                            args=(data, queue_list[i])))  # define process

            # Debug: expected to print True for every (still empty) queue.
            for queue_val in queue_list:
                print(queue_val.empty())

            for p in procs:  # process start
                p.start()

            # Busy-wait until every queue has at least one item; burns CPU
            # but guarantees the timed get() below won't block on startup.
            for queue_val in queue_list:
                while queue_val.empty():
                    pass

            # Debug: expected to print False for every queue now.
            for queue_val in queue_list:
                print(queue_val.empty())

            M_list = []

            for i in range(
                    dist_num):  # save results from queues and close queues
                # while not queue_list[i].empty():
                # Time the get(); a slow get (>= 3 s, or a timeout raise)
                # triggers a full restart of the extraction below.
                get_time = time.time()
                M_list.append(queue_list[i].get(timeout=3))
                get_time = time.time() - get_time
                queue_list[i].close()
                queue_list[i].join_thread()

            for p in procs:  # close process
                p.terminate()

            # get_time holds the duration of the LAST get() only;
            # NOTE(review): unbound if dist_num == 0 — assumed > 0.
            if get_time < 3:
                break
            else:
                print('Some error occurred, restarting the lpsd extraction...')

        result = np.concatenate(M_list, axis=0)
        # result = np.asarray(M_list)
        # print(result.shape)
        # result = np.reshape(result, (-1, result.shape[2], result.shape[3]))

        # Channel 0 is the LPSD magnitude — TODO confirm against
        # get_lpsd_p's output layout.
        lpsd = np.expand_dims(
            result[:, :, 0], axis=2
        )  # expand_dims for normalization (shape matching for broadcast)

        # if not self._is_training:
        # Channel 1 is kept as phase information for later reconstruction
        # (presumably — verify against consumers of self.phase).
        self.phase.append(result[:, :, 1])

        # Zero padding of half a patch width on the time axis, used only
        # in the is_patch branch below.
        pad = np.expand_dims(np.zeros(
            (int(config.time_width / 2), lpsd.shape[1])),
                             axis=2)  # pad for extracting the patches

        # NOTE(review): both branches run the identical normalization, so
        # this conditional is currently dead code.
        if is_patch:
            mean, std = self.norm_process(self._norm_dir + '/norm_noisy.mat')
            lpsd = self._normalize(mean, std, lpsd)
        else:
            mean, std = self.norm_process(self._norm_dir + '/norm_noisy.mat')
            lpsd = self._normalize(mean, std, lpsd)
        # print(result.shape)

        if is_patch:
            lpsd = np.squeeze(np.concatenate((pad, lpsd, pad),
                                             axis=0))  # padding for patching
            # print(result.shape)
            lpsd = image.extract_patches_2d(lpsd,
                                            (config.time_width, lpsd.shape[1]))

        # Trailing singleton channel axis (for downstream model input).
        lpsd = np.expand_dims(lpsd, axis=3)

        return lpsd
Example #4
0
    def lpsd_dist(self, data, dist_num, is_patch=True):
        """Extract LPSD features from `data` using `dist_num` worker processes.

        Splits `data` into `dist_num` chunks, runs `self.get_lpsd` on each
        chunk in its own process, concatenates the per-chunk results,
        normalizes them, and (optionally) extracts sliding time patches.

        Args:
            data: Input array; converted via `tolist()` before chunking
                (assumed to be a NumPy array — TODO confirm).
            dist_num: Number of chunks / worker processes.
            is_patch: When True, pad on the time axis and extract 2-D
                patches of width `config.time_width`.

        Returns:
            LPSD features with a trailing singleton axis
            (shape depends on `is_patch`).
        """
        data = data.tolist()
        data_list = utils.chunkIt(data, dist_num)

        queue_list = []
        procs = []

        for i, chunk in enumerate(data_list):
            # One queue per worker so results can be matched to chunks.
            queue_list.append(Queue())
            procs.append(
                Process(target=self.get_lpsd,
                        args=(chunk, queue_list[i])))

        for p in procs:  # process start
            p.start()

        M_list = []

        # Drain each queue BEFORE joining: a child blocks on its queue's
        # feeder thread until the data is consumed, so join()-first
        # could deadlock.
        for i in range(dist_num):
            M_list.append(queue_list[i].get())
            queue_list[i].close()

        for p in procs:  # wait for clean worker exit
            p.join()

        result = np.concatenate(M_list, axis=0)

        # Channel 0 is the LPSD; keep a trailing axis so mean/std
        # broadcasting in _normalize lines up (TODO confirm layout
        # against get_lpsd's output).
        lpsd = np.expand_dims(result[:, :, 0], axis=2)

        if not self._is_training:
            # Channel 1 kept as phase info during evaluation only.
            self.phase.append(result[:, :, 1])

        # Zero padding of half a patch width on the time axis, used only
        # when patching below.
        pad = np.expand_dims(
            np.zeros((int(config.time_width / 2), lpsd.shape[1])), axis=2)

        # FIX: the original if/else applied the *identical* normalization
        # in both branches, so the conditional was dead code — normalize
        # once unconditionally (no behavior change).
        mean, std = self.norm_process(self._norm_dir + '/norm_noisy.mat')
        lpsd = self._normalize(mean, std, lpsd)

        if is_patch:
            # Pad, then extract all sliding (time_width x freq) patches.
            lpsd = np.squeeze(np.concatenate((pad, lpsd, pad), axis=0))
            lpsd = image.extract_patches_2d(
                lpsd, (config.time_width, lpsd.shape[1]))

        # Trailing singleton channel axis for downstream model input.
        lpsd = np.expand_dims(lpsd, axis=3)

        return lpsd