Example No. 1
def make_dataset(data_path, mode=None):
    try:
        mixture_array = sa.attach(f"shm://{mode}_mixture_array")
        vocal_array = sa.attach(f"shm://{mode}_vocal_array")

    except FileNotFoundError:  # shared arrays not created yet; build them from musdb
        mus = musdb.DB(root=data_path, is_wav=True, subsets=mode)
        mixture_list = list()
        vocal_list = list()
        for track in tqdm(mus):
            #mixture_list.append(track.audio.sum(axis=-1))
            mixture_list.append(norm(track.audio)[0])
            #vocal_list.append(track.targets['vocals'].audio.sum(axis=-1))
            vocal_list.append(norm(track.targets['vocals'].audio)[0])
        mixture_array = np.concatenate(mixture_list)
        vocal_array = np.concatenate(vocal_list)

        assert mixture_array.shape == vocal_array.shape

        mixture_array_sa = sa.create(f"shm://{mode}_mixture_array",
                                     mixture_array.shape)
        vocal_array_sa = sa.create(f"shm://{mode}_vocal_array",
                                   vocal_array.shape)
        mixture_array_sa[::] = mixture_array
        vocal_array_sa[::] = vocal_array

    return dict(mixture_array=mixture_array, vocal_array=vocal_array)
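A minimal companion sketch (not part of the original example; the function name and the "train" mode value are illustrative): once make_dataset has populated the segments, another process can re-attach to them by name alone.

import SharedArray as sa

def attach_dataset(mode="train"):
    # Attach to the segments created by make_dataset; raises FileNotFoundError
    # if they have not been created yet.
    mixture_array = sa.attach(f"shm://{mode}_mixture_array")
    vocal_array = sa.attach(f"shm://{mode}_vocal_array")
    return dict(mixture_array=mixture_array, vocal_array=vocal_array)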
Example No. 2
    def shard_array_to_s3_mp(self, array, indices, s3_bucket, s3_keys):
        """Shard array to S3 in parallel.

        :param ndarray array: array to be put into S3
        :param list indices: indices corresponding to the s3 keys
        :param str s3_bucket: S3 bucket to use
        :param list s3_keys: List of S3 keys corresponding to the indices.
        """
        def work_shard_array_to_s3(s3_key, index, array_name, s3_bucket):
            array = sa.attach(array_name)
            if sys.version_info >= (3, 5):
                data = bytes(array[index].data)
            else:
                data = bytes(np.ascontiguousarray(array[index]).data)

            if self.enable_compression:
                cctx = zstd.ZstdCompressor(level=9, write_content_size=True)
                data = cctx.compress(data)

            self.s3aio.s3io.put_bytes(s3_bucket, s3_key, data)

        array_name = '_'.join(['SA3IO', str(uuid.uuid4()), str(os.getpid())])
        sa.create(array_name, shape=array.shape, dtype=array.dtype)
        shared_array = sa.attach(array_name)
        shared_array[:] = array
        results = self.pool.map(work_shard_array_to_s3, s3_keys, indices,
                                repeat(array_name), repeat(s3_bucket))

        sa.delete(array_name)
Example No. 3
    def transform(self, fundus, vessel, grade):
        shared_array_fundus_mean_subt_name = str(uuid4())
        shared_array_fundus_z_name = str(uuid4())
        shared_array_vessel_name = str(uuid4())
        
        try:
            shared_array_fundus_mean_subt = SharedArray.create(
                shared_array_fundus_mean_subt_name, [len(fundus), img_h, img_w, 3], dtype=np.float32)
            shared_array_fundus_z = SharedArray.create(
                shared_array_fundus_z_name, [len(fundus), img_h, img_w, 3], dtype=np.float32)
            shared_array_vessel = SharedArray.create(
                shared_array_vessel_name, [len(fundus), img_h, img_w, 1], dtype=np.float32)
            
            n_grades = len(grade)
            if self.grade_type == "DR":
                grade_onehot = np.zeros((n_grades, n_grade_dr))
            elif self.grade_type == "DME":
                grade_onehot = np.zeros((n_grades, n_grade_dme))
            for i in range(n_grades):
                grade_onehot[i, grade[i]] = 1
            
            args = []
            for i, _ in enumerate(fundus):
                args.append((i, shared_array_fundus_mean_subt_name, shared_array_fundus_z_name, shared_array_vessel_name, fundus[i], vessel[i], self.is_train, self.normalize))

            self.pool.map(load_shared, args)
            fundus_mean_subt_img = np.array(shared_array_fundus_mean_subt, dtype=np.float32)
            fundus_z_img = np.array(shared_array_fundus_z, dtype=np.float32)
            vessel_img = np.array(shared_array_vessel, dtype=np.float32)
        finally:
            SharedArray.delete(shared_array_fundus_mean_subt_name)
            SharedArray.delete(shared_array_fundus_z_name)
            SharedArray.delete(shared_array_vessel_name)

        return fundus, fundus_mean_subt_img, fundus_z_img, vessel_img, grade_onehot
Example No. 4
def load(data_name):

    try:
        Xtr = sa.attach('shm://%s_Xtr' % (data_name))
        Ytr = sa.attach('shm://%s_Ytr' % (data_name))
        Ytr_p = sa.attach('shm://%s_Ytr_pitch' % (data_name))
        Ytr_s = sa.attach('shm://%s_Ytr_stream' % (data_name))
    except FileNotFoundError:  # shared arrays not created yet; load from the HDF5 file
        # load cqt
        trdata = h5py.File('../ex_data/tr.h5', 'r')
        Xtr = sa.create('shm://%s_Xtr' % (data_name), (trdata['x'].shape),
                        dtype='float32')
        Xtr[:] = trdata['x'][:]
        #load instrument label
        Ytr = sa.create('shm://%s_Ytr' % (data_name), (trdata['yi'].shape),
                        dtype='float32')
        Ytr[:] = trdata['yi'][:]
        #load pitch label
        Ytr_p = sa.create('shm://%s_Ytr_pitch' % (data_name),
                          (trdata['yp'].shape),
                          dtype='float32')
        Ytr_p[:] = trdata['yp'][:]
        #load pianoroll label
        Ytr_s = sa.create('shm://%s_Ytr_stream' % (data_name),
                          (trdata['ys'].shape),
                          dtype='float32')
        Ytr_s[:] = trdata['ys'][:]

    return Xtr, Ytr, Ytr_p, Ytr_s
Example No. 5
    def transform(self, fundus, vessel, coords):
        shared_array_fundus_name = str(uuid4())
        shared_array_vessel_name = str(uuid4())
        shared_array_lm_name = str(uuid4())
        try:
            shared_array_fundus = SharedArray.create(
                shared_array_fundus_name, [len(fundus), img_h, img_w, 3],
                dtype=np.float32)
            shared_array_vessel = SharedArray.create(
                shared_array_vessel_name, [len(fundus), img_h, img_w, 1],
                dtype=np.float32)
            shared_array_lm = SharedArray.create(shared_array_lm_name,
                                                 [len(fundus), 4],
                                                 dtype=np.float32)

            args = []

            for i, fname in enumerate(fundus):
                args.append((i, shared_array_fundus_name,
                             shared_array_vessel_name, shared_array_lm_name,
                             fundus[i], vessel[i], coords[i], self.is_train))

            self.pool.map(load_shared, args)
            fundus_img = np.array(shared_array_fundus, dtype=np.float32)
            vessel_img = np.array(shared_array_vessel, dtype=np.float32)
            coords_arr = np.array(shared_array_lm, dtype=np.float32)
        finally:
            SharedArray.delete(shared_array_fundus_name)
            SharedArray.delete(shared_array_vessel_name)
            SharedArray.delete(shared_array_lm_name)

        return fundus_img, vessel_img, coords_arr, fundus
Example No. 6
    def transform(self, fundus, grade):
        shared_array_fundus_rescale_name = str(uuid4())
        shared_array_fundus_rescale_mean_subtract_name = str(uuid4())

        try:
            shared_array_fundus_mean_subt = SharedArray.create(
                shared_array_fundus_rescale_name,
                [len(fundus), img_h, img_w, 3],
                dtype=np.float32)
            shared_array_fundus_z = SharedArray.create(
                shared_array_fundus_rescale_mean_subtract_name,
                [len(fundus), img_h, img_w, 3],
                dtype=np.float32)

            args = []
            for i, _ in enumerate(fundus):
                args.append((i, shared_array_fundus_rescale_name,
                             shared_array_fundus_rescale_mean_subtract_name,
                             fundus[i], self.is_train))

            self.pool.map(load_shared, args)
            fundus_rescale = np.array(shared_array_fundus_mean_subt,
                                      dtype=np.float32)
            fundus_rescale_mean_subtract = np.array(shared_array_fundus_z,
                                                    dtype=np.float32)
        finally:
            SharedArray.delete(shared_array_fundus_rescale_name)
            SharedArray.delete(shared_array_fundus_rescale_mean_subtract_name)

        return fundus, fundus_rescale, fundus_rescale_mean_subtract, grade
Example No. 7
    def run(self):
        """
        # TODO: write description
        """
        try:
            self.t0 = time.time()
            self.t1 = self.t0
            q = self.channel.queue_declare(queue='detector')
            self.channel.queue_declare(queue='time_logs')
            if q.method.message_count >= 59:
                time.sleep(1)

            frame_num, timestamp, images_list = self.batch_generator.__next__()
            self.log_time("Took next batch:")

            sh_mem_adress = f"shm://{self.module_name}_{frame_num}"
            try:
                shared_mem = sa.create(sh_mem_adress, np.shape(images_list))
            except FileExistsError:  # stale segment left over from a previous run
                sa.delete(sh_mem_adress)
                shared_mem = sa.create(sh_mem_adress, np.shape(images_list))
            self.log_time('Created shared memory')
            shared_mem[:] = np.array(images_list)
            self.log_time('Copied to shared memory:')
            self.channel.basic_publish(exchange='',
                                       routing_key='detector',
                                       body=sh_mem_adress)

            self.log_time('Published message:')
            ########################################################################
            del frame_num, timestamp, images_list

            self.log_time('Full time:', from_start=True)
        except StopIteration:  # no more frames left in videos_provider
            print('stop iter')
Example No. 8
def main():
    """Main function"""
    filepath, name, prefix, dtype = parse_arguments()

    if name is None:
        name = os.path.splitext(os.path.basename(filepath))[0]
        if prefix is not None:
            name = prefix + '_' + name

    print("Loading data from '{}'.".format(filepath))
    if filepath.endswith('.npy'):
        data = np.load(filepath)
        data = data.astype(dtype)
        print("Saving data to shared memory.")
        sa.delete(name)
        sa_array = sa.create(name, data.shape, data.dtype)
        np.copyto(sa_array, data)
    else:
        with np.load(filepath) as loaded:
            print("Saving data to shared memory.")
            sa_array = sa.create(name, loaded['shape'], dtype)
            sa_array[[x for x in loaded['nonzero']]] = True

    print("Successfully saved: (name='{}', shape={}, dtype={})".format(
        name, sa_array.shape, sa_array.dtype))
Example No. 9
def CreateShared(Name, shape, dtype):
    try:
        a = SharedArray.create(Name, shape, dtype=dtype)
    except OSError:
        print >> log, ModColor.Str("File %s exists, deleting" % Name)
        DelArray(Name)
        a = SharedArray.create(Name, shape, dtype=dtype)
    return a
Example No. 10
    def callback(self, method, body):
        with self.cycle_time.labels(module=self.module_name,
                                    name=socket.gethostname()).time():
            self.t0 = time.time()
            self.t1 = self.t0
            torch.cuda.set_device(np.random.randint(10 % 3))

            message = body.decode()
            if message == 'END':
                self.channel.basic_publish(exchange='',
                                           routing_key='reid',
                                           body=body)
                return

            frame_num = int(
                message.split('_')[-1])  # takes frame_num from the address

            images_list = sa.attach(message)
            self.log_time("Read image from shm:")

            bboxes = self.detector.predict_with_scores(images_list)
            self.log_time("Detector predicted:")

            bboxes = np.array([tensor[0].numpy() for tensor in bboxes[0]])
            self.log_time("Detector output into array converted:")

            if bboxes.shape[0] != 0:
                sh_mem_adress = f"shm://{self.module_name}_{frame_num}"
                try:
                    shared_mem = sa.create(sh_mem_adress, bboxes.shape)
                except FileExistsError:  # stale segment left over from a previous run
                    sa.delete(sh_mem_adress)
                    shared_mem = sa.create(sh_mem_adress, bboxes.shape)
                self.log_time("Shared memory created:")

                # copy image to shared memory
                shared_mem[:] = np.array(bboxes)
                self.log_time("Detector copied to shared memory:")

                sa.delete(message)
                sa.delete(sh_mem_adress)

                del images_list, bboxes
                torch.cuda.empty_cache()

            self.channel.basic_ack(delivery_tag=method.delivery_tag)
            self.log_time('Full time:', from_start=True)
            self.last_success.labels(
                module=self.module_name,
                name=socket.gethostname()).set_to_current_time()
            #with self.cycle_time.labels(module=self.module_name, name=socket.gethostname()).time():
            push_to_gateway('pushgateway:9091',
                            job='Test ' + str(self.start_time),
                            registry=self.registry)
Example No. 11
    def get_byte_range_mp(self,
                          s3_bucket,
                          s3_key,
                          s3_start,
                          s3_end,
                          block_size,
                          new_session=False):
        """Gets bytes from a S3 object within a range in parallel.

        :param str s3_bucket: name of the s3 bucket.
        :param str s3_key: name of the s3 key.
        :param int s3_start: start of the byte range.
        :param int s3_end: end of the byte range.
        :param int block_size: block size for download.
        :param bool new_session: Flag to create a new session or reuse existing session.
            True: create new session
            False: reuse existing session
        :return: Requested bytes
        """
        def work_get(block_number, array_name, s3_bucket, s3_key, s3_max_size,
                     block_size):
            start = block_number * block_size
            end = (block_number + 1) * block_size
            if end > s3_max_size:
                end = s3_max_size
            d = self.get_byte_range(s3_bucket, s3_key, start, end, True)
            # d = np.frombuffer(d, dtype=np.uint8, count=-1, offset=0)
            shared_array = sa.attach(array_name)
            shared_array[start:end] = d

        if not self.enable_s3:
            return self.get_byte_range(s3_bucket, s3_key, s3_start, s3_end,
                                       new_session)

        s3 = self.s3_resource(new_session)

        s3o = s3.Bucket(s3_bucket).Object(s3_key).get()
        s3_max_size = s3o['ContentLength']
        s3_obj_size = s3_end - s3_start
        num_streams = int(np.ceil(s3_obj_size / block_size))
        blocks = range(num_streams)
        array_name = '_'.join(
            ['S3IO', s3_bucket, s3_key,
             str(uuid.uuid4()),
             str(os.getpid())])
        sa.create(array_name, shape=s3_obj_size, dtype=np.uint8)
        shared_array = sa.attach(array_name)

        self.pool.map(work_get, blocks, repeat(array_name), repeat(s3_bucket),
                      repeat(s3_key), repeat(s3_max_size), repeat(block_size))

        sa.delete(array_name)
        return shared_array
Example No. 12
def ToShared(Name, A):

    try:
        a = SharedArray.create(Name, A.shape, dtype=A.dtype)
    except FileExistsError:
        log.print(ModColor.Str("File %s exists, delete it..." % Name))
        #DelArray(Name.decode("byte"))
        DelArray(Name)
        a = SharedArray.create(Name, A.shape, dtype=A.dtype)

    a[:] = A[:]
    return a
Example No. 13
def create_shared_array(name, shape, dtype):
    """Create shared array. Prompt if a file with the same name existed."""
    try:
        return sa.create(name, shape, dtype)
    except FileExistsError:
        response = ""
        while response.lower() not in ["y", "n", "yes", "no"]:
            response = input("Existing array (also named " + name +
                             ") was found. Replace it? (y/n) ")
        if response.lower() in ("n", "no"):
            sys.exit(0)
        sa.delete(name)
        return sa.create(name, shape, dtype)
Example No. 14
def save_on_sa(data_dir, use_only_84_keys = True, rescale = True, postfix=''):
    print('Reading...')
    print('[*]',data_dir)

    ##data_prefix = ['Bass', 'Drum', 'Guitar', 'Other', 'Piano', 'Chord']
    ##data_prefix = ['mel_phr','acc_phr']
    
    subdirs = ['tra', 'val']
#    subdirs = ['val']

    for sd in subdirs:
        data = []
        
        # midi setting for training
#         data_X = np.load(os.path.join(data_dir, sd, 'x_bar_chroma.npy'))
#         if sd is 'tra':
#             data_y = np.load(os.path.join(data_dir, sd ,'y_bar_chroma.npy'))
#         else:
#             data_y = np.load(os.path.join(data_dir, sd ,'y_bar_chroma_humanlifeleadsheet.npy'))
#             #data_y = np.load(os.path.join(data_dir, sd ,'y_bar_chroma.npy'))
            
            
        # midi setting for testing
        data_X = np.load(os.path.join(data_dir, 'val', 'x_bar_chroma.npy'))
        if sd == 'tra':
            data_y = np.load(os.path.join(data_dir, 'val' ,'y_bar_chroma.npy'))
        else:
            data_y = np.load(os.path.join(data_dir, 'val' ,'y_bar_chroma_test.npy'))
            #data_y = np.load(os.path.join(data_dir, sd ,'y_bar_chroma.npy'))

        print(data_X.dtype)
        print(data_y.dtype)
        
        if sd == 'tra':
            print(sd)
            print('Shuffling...')
            data_X, data_y = shuffle(data_X, data_y, random_state=0)
            ##data_X = shuffle(data_X, random_state=0)
        else:
            print(sd)
            pass
        name = sd + '_X_' + postfix
        print(name, data_X.shape)
        tmp_arr_x = sa.create(name, data_X.shape, dtype=bool)
        np.copyto(tmp_arr_x, data_X)

        name = sd + '_y_' + postfix
        print(name, data_y.shape)
        ##tmp_arr_y = sa.create(name, data_y.shape, dtype=bool)
        tmp_arr_y = sa.create(name, data_y.shape, dtype=float)
        np.copyto(tmp_arr_y, data_y)
Example No. 15
def save_on_sa(data_dir, use_only_84_keys=True, rescale=True, postfix=''):
    print('Reading...')
    print('[*]', data_dir)

    ##data_prefix = ['Bass', 'Drum', 'Guitar', 'Other', 'Piano', 'Chord']
    ##data_prefix = ['mel_phr','acc_phr']
    subdirs = ['tra', 'val']

    for sd in subdirs:
        data = []
        # lead sheet setting
        ##for dp in range (2):
        ##    x_name = data_prefix[dp]
        ##    print (os.path.join(data_dir, sd , x_name+'.npy'))
        ##    tmp_data =  np.reshape(np.load(os.path.join(data_dir, sd , x_name+'.npy')),(-1,384,128, 1))
        ##    if(use_only_84_keys):
        ##        tmp_data = tmp_data[:, :, 24:108, :]
        ##    data.append(tmp_data)
        ##
        ##data_X = np.concatenate(data,axis = 3)

        # midi setting
        data_X = np.load(os.path.join(data_dir, sd, 'x_bar_chroma.npy'))
        if sd == 'tra':
            data_y = np.load(os.path.join(data_dir, sd, 'y_bar_chroma.npy'))
        else:
            data_y = np.load(
                os.path.join(data_dir, sd, 'y_bar_chroma_4bar_vae.npy'))

        print(data_X.dtype)
        print(data_y.dtype)

        if sd == 'tra':
            print(sd)
            print('Shuffling...')
            data_X, data_y = shuffle(data_X, data_y, random_state=0)
            ##data_X = shuffle(data_X, random_state=0)
        else:
            print(sd)
            pass
        name = sd + '_X_' + postfix
        print(name, data_X.shape)
        tmp_arr_x = sa.create(name, data_X.shape, dtype=bool)
        np.copyto(tmp_arr_x, data_X)

        name = sd + '_y_' + postfix
        print(name, data_y.shape)
        ##tmp_arr_y = sa.create(name, data_y.shape, dtype=bool)
        tmp_arr_y = sa.create(name, data_y.shape, dtype=float)
        np.copyto(tmp_arr_y, data_y)
Example No. 16
def getDistance(data, func_name, pool, start=0, allowed_missing=0.0):
    with NamedTemporaryFile(dir='.', prefix='HCC_') as file :
        prefix = 'file://{0}'.format(file.name)
        func = eval(func_name)
        mat_buf = '{0}.mat.sa'.format(prefix)
        mat = sa.create(mat_buf, shape = data.shape, dtype = data.dtype)
        mat[:] = data[:]
        dist_buf = '{0}.dist.sa'.format(prefix)
        dist = sa.create(dist_buf, shape = [mat.shape[0] - start, mat.shape[0], 2], dtype = np.int32)
        dist[:] = 0
        __parallel_dist(mat_buf, func, dist_buf, mat.shape, pool, start, allowed_missing)
        sa.delete(mat_buf)
        os.unlink(dist_buf[7:])
    return dist
Example No. 17
def load():
    avg, std = np.load('data/cqt_avg_std.npy')
    try:
        Xtr = sa.attach('shm://%s_Xtr' % (data_name))
        Ytr = sa.attach('shm://%s_Ytr' % (data_name))
    except FileNotFoundError:  # shared arrays not created yet; load from the HDF5 files
        vadata = h5py.File('ex_data/' + data_name + '/va.h5', 'r')
        trdata = h5py.File('ex_data/' + data_name + '/tr.h5', 'r')
        Xtr = sa.create('shm://%s_Xtr' % (data_name), (trdata['x'].shape),
                        dtype='float32')
        Xtr[:] = trdata['x'][:]
        Ytr = sa.create('shm://%s_Ytr' % (data_name), (trdata['y'].shape),
                        dtype='float32')
        Ytr[:] = trdata['y'][:]  # copy labels into shared memory (mirrors the Xtr handling above)

    return Xtr, Ytr, avg, std
Example No. 18
    def transform(self, Xb, yb):

        shared_array_name = str(uuid4())
        try:
            shared_array = SharedArray.create(
                shared_array_name, [len(Xb), 3, self.config.get('w'), 
                                    self.config.get('h')], dtype=np.float32)
                                        
            fnames, labels = super(SharedIterator, self).transform(Xb, yb)
            args = []

            for i, fname in enumerate(fnames):
                kwargs = {k: self.config.get(k) for k in ['w', 'h']}
                if not self.deterministic:
                    kwargs.update({k: self.config.get(k) 
                                   for k in ['aug_params', 'sigma']})
                kwargs['transform'] = getattr(self, 'tf', None)
                kwargs['color_vec'] = getattr(self, 'color_vec', None)
                args.append((i, shared_array_name, fname, kwargs))

            self.pool.map(load_shared, args)
            Xb = np.array(shared_array, dtype=np.float32)

        finally:
            SharedArray.delete(shared_array_name)

        if labels is not None:
            labels = labels[:, np.newaxis]

        return Xb, labels
Example No. 19
def PackListArray(Name, LArray):
    DelArray(Name)

    NArray = len(LArray)
    ListNDim = [len(LArray[i].shape) for i in xrange(len(LArray))]
    NDimTot = np.sum(ListNDim)
    # [NArray,NDim0...NDimN,shape0...shapeN,Arr0...ArrN]

    dS = LArray[0].dtype
    TotSize = 0
    for i in xrange(NArray):
        TotSize += LArray[i].size

    S = SharedArray.create(Name, (1 + NArray + NDimTot + TotSize, ), dtype=dS)
    S[0] = NArray
    idx = 1
    # write ndims
    for i in xrange(NArray):
        S[idx] = ListNDim[i]
        idx += 1

    # write shapes
    for i in xrange(NArray):
        ndim = ListNDim[i]
        A = LArray[i]
        S[idx:idx + ndim] = A.shape
        idx += ndim

    # write arrays
    for i in xrange(NArray):
        A = LArray[i]
        S[idx:idx + A.size] = A.ravel()
        idx += A.size
Example No. 20
def to_shared_memory(object, name):
    logging.info("Writing to shared memory %s" % name)
    meta_information = {}
    for property_name in object.properties:
        data = object.__getattribute__(property_name)

        if data is None:
            data = np.zeros(0)

        # Wrap single ints in arrays
        if data.shape == ():
            data = np.array([data], dtype=data.dtype)

        data_type = data.dtype
        data_shape = data.shape
        meta_information[property_name] = (data_type, data_shape)

        # Make shared memory and copy data to buffer
        #logging.info("Field %s has shape %s and type %s" % (property_name, data_shape, data_type))
        try:
            sa.delete(name + "_" + property_name)
            logging.info("Deleted already shared memory")
        except FileNotFoundError:
            logging.info("No existing shared memory, can create new one")

        shared_array = sa.create(name + "_" + property_name, data_shape, data_type)
        shared_array[:] = data

    f = open(name + "_meta.shm", "wb")
    pickle.dump(meta_information, f)
    logging.info("Done writing to shared memory")
Example No. 21
    def __init__(self, file_path, file_name):
        print('class', DBSCAN.eps, DBSCAN.minpts)
        self.core_points = []
        self.core_point_labels = []
        self.core_points_index = []
        self.border_points_index = []
        self.border_points = []
        self.border_point_labels = []
        self.noise_points = []
        # self.nearest_neighbours = {}      # use for small values, space complexity is O(n^2)
        self.n_threads = cpu_count()
        self.features = []
        self.labels = []
        self.features, self.labels = process_dataset(file_path, file_name)
        # limit the size of the dataset
        size = 10000
        self.features, self.labels = self.features[:size, :], self.labels[:size]
        print('features: \n', self.features.shape)
        try:
            sa.delete("shm://features")
        except FileNotFoundError:
            print('file does not exist')
        self.shared_memory = sa.create("shm://features", self.features.shape)

        # copy the array into the shared memory
        for row_index in range(self.features.shape[0]):
            for point_index in range(self.features.shape[1]):
                self.shared_memory[row_index,
                                   point_index] = self.features[row_index,
                                                                point_index]
        self.clusters = []
Example No. 22
    def transform(self, Xb, yb):
        shared_array_name = str(uuid4())
        fnames, labels = Xb, yb
        args = []
        da_args = self.da_args()
        for i, fname in enumerate(fnames):
            args.append((i, shared_array_name, fname, da_args))

        if self.num_image_channels is None:
            test_img = data.load_augment(fnames[0], **da_args)
            self.num_image_channels = test_img.shape[-1]

        try:
            shared_array = SharedArray.create(
                shared_array_name,
                [len(Xb), self.w, self.h, self.num_image_channels],
                dtype=np.float32)

            self.pool.map(load_shared, args)
            Xb = np.array(shared_array, dtype=np.float32)

        finally:
            SharedArray.delete(shared_array_name)

        # if labels is not None:
        #     labels = labels[:, np.newaxis]

        return Xb, labels
Example No. 23
    def transform(self, Xb, yb):

        shared_array_name = str(uuid4())
        try:
            shared_array = SharedArray.create(
                shared_array_name,
                [len(Xb), 3,
                 self.config.get('w'),
                 self.config.get('h')],
                dtype=np.float32)

            fnames, labels = super(SharedIterator, self).transform(Xb, yb)
            args = []

            for i, fname in enumerate(fnames):
                kwargs = {k: self.config.get(k) for k in ['w', 'h']}
                if not self.deterministic:
                    kwargs.update({
                        k: self.config.get(k)
                        for k in ['aug_params', 'sigma']
                    })
                kwargs['transform'] = getattr(self, 'tf', None)
                kwargs['color_vec'] = getattr(self, 'color_vec', None)
                args.append((i, shared_array_name, fname, kwargs))

            self.pool.map(load_shared, args)
            Xb = np.array(shared_array, dtype=np.float32)

        finally:
            SharedArray.delete(shared_array_name)

        if labels is not None:
            labels = labels[:, np.newaxis]

        return Xb, labels
Example No. 24
    def __create_or_link(self, name, shape, type):
        try:
            data = SharedArray.attach("shm://%s" % name)
            return (data, True)
        except FileNotFoundError:
            data = SharedArray.create("shm://%s" % name, shape=shape, dtype=type)
            return (data, False)
Example No. 25
def f_load(m_name, fp):
    try:
        out = sa.attach(m_name)
    except FileNotFoundError:  # not in shared memory yet; load from disk and publish it
        out = np.load(fp)
        X = sa.create(m_name, (out.shape), dtype='float32')
        X[:] = out
    return out.astype('float32')
Example No. 26
def create_new_sa_array(name, shape, dtype):
    try:
        sa.delete(name)
    except FileNotFoundError:
        pass
    finally:
        sa_array = sa.create(name, shape, dtype=dtype)
    return sa_array
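A brief usage sketch (the segment name and shape are illustrative): create_new_sa_array replaces any stale segment of the same name, so callers can treat the result as a fresh scratch buffer and delete it when finished.

import numpy as np
import SharedArray as sa

scratch = create_new_sa_array("shm://scratch_buffer", (4, 4), np.float32)
scratch[:] = np.eye(4, dtype=np.float32)  # visible to any process that attaches to the name
# ... other processes can read it via sa.attach("shm://scratch_buffer") ...
sa.delete("shm://scratch_buffer")         # release the segment when done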
Example No. 27
def create_new_sa_array(name, shape, dtype):
    try:
        sa.delete(name)
    except FileNotFoundError:
        pass
    finally:
        sa_array = sa.create(name, shape, dtype=dtype)
    return sa_array
Example No. 28
    def createSharedGrads(_name, _data_list):
        self.shared_grads_dict[_name] = []
        self.shared_grads_name_dict[_name] = []
        for i in range(len(_data_list)):
            array_name = 'shm://' + _name + '_grad_' + str(i)
            self.shared_grads_name_dict[_name].append(array_name)
            array = sa.create(array_name, _data_list[i].shape, np.float32)
            self.shared_grads_dict[_name].append(array)
Example No. 29
def attach_or_create(name):
    name = "shm://" + name

    try:
        return SharedArray.attach(name)
    except Exception:
        pass

    return SharedArray.create(name, BUFFER_SIZE, dtype=np.uint8)
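A short usage sketch, assuming BUFFER_SIZE is a module-level constant (say 1 MiB): whichever process calls attach_or_create first creates the uint8 buffer, and later callers see the same bytes.

import numpy as np
import SharedArray as sa

BUFFER_SIZE = 1024 * 1024  # assumed module-level constant

buf = attach_or_create("frame_buffer")            # first call creates shm://frame_buffer
buf[:4] = np.frombuffer(b"ping", dtype=np.uint8)  # write into the shared buffer

other = attach_or_create("frame_buffer")          # later calls attach to the same segment
print(bytes(other[:4]))                           # b'ping'
sa.delete("shm://frame_buffer")                   # clean up when done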
Example No. 30
def create(sa_name, npy_name):
    print(sa_name, npy_name)
    print('[*] Loading...')
    X = np.load(npy_name)
    print(X.shape)

    print('[*] Saving...')
    tmp_arr = sa.create(sa_name, X.shape)
    np.copyto(tmp_arr, X)
Example No. 31
    def createSharedVars(_name, _data_list):
        self.shared_vars_dict[_name] = []
        self.shared_vars_name_dict[_name] = []
        for i in range(len(_data_list)):
            array_name = 'shm://' + _name + '_' + str(i)
            self.shared_vars_name_dict[_name].append(array_name)
            array = sa.create(array_name, _data_list[i].shape, np.float32)
            np.copyto(array, _data_list[i])
            self.shared_vars_dict[_name].append(array)
Example No. 32
    def init_mem(self, create=False):
        self.is_shared_memory = True
        import SharedArray as sa
        num_samples = int(self.BUFFER_DURATION * self.fs)
        if create:
            self.data = sa.create(self.SHARED_MEM_NAME, num_samples,
                                  np.float32)
            atexit.register(self.cleanup_mem)  # Run cleanup on exit
        else:
            self.data = sa.attach(self.SHARED_MEM_NAME)
Example No. 33
def main():
    """Main function"""
    filepath, name, prefix = parse_arguments()

    data = np.load(filepath)

    if name is None:
        name = os.path.splitext(os.path.basename(filepath))[0]
        if prefix is not None:
            name = prefix + '_' + name

    sa_array = sa.create(name, data.shape, data.dtype)
    np.copyto(sa_array, data)

    print("Successfully saved: {}, {}, {}".format(name, data.shape, data.dtype))
Example No. 34
def main():
    """Main function"""
    filepath, name, prefix, dtype = parse_arguments()

    if name is None:
        name = os.path.splitext(os.path.basename(filepath))[0]
        if prefix is not None:
            name = prefix + '_' + name

    print("Loading data from '{}'.".format(filepath))
    if filepath.endswith('.npy'):
        data = np.load(filepath)
        data = data.astype(dtype)
        print("Saving data to shared memory.")
        sa_array = sa.create(name, data.shape, data.dtype)
        np.copyto(sa_array, data)
    else:
        with np.load(filepath) as loaded:
            print("Saving data to shared memory.")
            sa_array = sa.create(name, loaded['shape'], dtype)
            sa_array[[x for x in loaded['nonzero']]] = True

    print("Successfully saved: (name='{}', shape={}, dtype={})".format(
        name, sa_array.shape, sa_array.dtype))
Example No. 35
    def _setup_arrays(self, inshape):
        """ Setup instance variabels and arrays for processing """
        self.inshape = inshape

        # use shared array
        if self.wfunc is None:
            self.id = str(uuid.uuid4())
            self.arrout = sa.create(self.id, self.inshape)
            self.arrout[:] = np.nan
            self.chunks = self.chunk(self.inshape)

            # TODO - allow custom write function
            #self.arrout = np.empty((self.nbandsout, self.inshape[1], self.inshape[2]))

            def wfunc(output, chunk):
                self.arrout[:, chunk[1]:chunk[1] + chunk[3], chunk[0]:chunk[0] + chunk[2]] = output

            self.wfunc = wfunc