Example #1
import sys

import numpy as np

from nbodykit.lab import BigFileMesh
from scipy.ndimage import rotate

#plt.rc('text', usetex=True)

name = 'CDM'
numb = '095'
bs = 600
nmesh = 1024

num = 200

#mesh = BigFileMesh('ethos4_mesh_%s_%s_h07.bigfile' % (nmesh,bs),'Field')
mesh1 = BigFileMesh(
    '/n/dvorkin_lab/anadr/%s_%s_1024_600_h0.6909.bigfile' % (name, numb),
    'Field')
#mesh2 = BigFileMesh('/n/dvorkin_lab/anadr/%s_%s_1024_600_hbugfix.bigfile' % (name,numb),'Field')

mesh1 = np.fft.fftshift(mesh1.preview(Nmesh=nmesh))
print('done')

rot = rotate(mesh1, 30, reshape=False)

sys.exit()  # NOTE: everything below this early exit is never reached in this snippet.

# Random rotation axis (normalized to a unit vector below) and rotation angle.
nnx = np.random.uniform(0, 10)
nny = np.random.uniform(0, 10)
nnz = np.random.uniform(0, 10)
theta = np.random.uniform(0, 2 * np.pi)

nx = 1 / np.sqrt(nnx**2 + nny**2 + nnz**2) * nnx
ny = 1 / np.sqrt(nnx**2 + nny**2 + nnz**2) * nny
nz = 1 / np.sqrt(nnx**2 + nny**2 + nnz**2) * nnz
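
The random axis (nnx, nny, nnz, normalized to nx, ny, nz) and angle theta above are the inputs to a rotation(nx, ny, nz, theta) helper that Example #4 calls but the listing never shows. A minimal sketch, assuming the helper returns the standard axis-angle (Rodrigues) rotation matrix:

import numpy as np

def rotation(nx, ny, nz, theta):
    # Rotation matrix about the unit axis (nx, ny, nz) by angle theta,
    # via Rodrigues' formula: R = I + sin(theta) K + (1 - cos(theta)) K^2,
    # where K is the skew-symmetric cross-product matrix of the axis.
    K = np.array([[0.0, -nz,  ny],
                  [ nz, 0.0, -nx],
                  [-ny,  nx, 0.0]])
    return np.eye(3) + np.sin(theta) * K + (1.0 - np.cos(theta)) * (K @ K)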
Example #2
        ##  128 x 128 x 128 pixels, with 60K items in train and 10K in validation.
        X = np.zeros((nsplit, 128, 128, 128, 1))

        for iid in np.arange(nsplit):
            mid = zero + iid

            print('Loading %d' % mid)

            fpath = '/global/cscratch1/sd/mjwilson/MLRSD/fastpm/fpm-%d-1.0000' % mid

            mesh = BigFileMesh(fpath,
                               dataset='1/Field',
                               mode='real',
                               header='Header')
            X[iid, :, :, :, 0] = mesh.preview()[:, :, sslice]

        X_train = X[nvalid:, :, :, :, :]
        X_test = X[:nvalid, :, :, :, :]

        _labels = labels[zero:zero + nsplit]

        _y_train = np.digitize(_labels[:, 2][nvalid:], bins)
        _y_test = np.digitize(_labels[:, 2][:nvalid], bins)

        ##  One-hot encode target column.
        y_train = to_categorical(_y_train, num_classes=nhot)
        y_test = to_categorical(_y_test, num_classes=nhot)

        ##
        model = prep_model3D()
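
prep_model3D() is not defined in this excerpt. A minimal sketch, assuming a small Keras 3D convolutional classifier over the (128, 128, 128, 1) cubes with nhot one-hot output classes; the layer sizes are placeholders, not the original architecture:

from tensorflow.keras import Input, layers, models

def prep_model3D(nmesh=128, nhot=10):
    # Small 3D CNN: two conv/pool blocks, then a softmax over nhot bins.
    model = models.Sequential([
        Input(shape=(nmesh, nmesh, nmesh, 1)),
        layers.Conv3D(8, 3, activation='relu'),
        layers.MaxPooling3D(2),
        layers.Conv3D(16, 3, activation='relu'),
        layers.MaxPooling3D(2),
        layers.GlobalAveragePooling3D(),
        layers.Dense(nhot, activation='softmax'),
    ])
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model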
Example #3
import numpy as np

from nbodykit.lab import BigFileMesh
from tensorflow.keras.utils import to_categorical


def generator(fpath,
              batch_size=32,
              nmesh=128,
              nslice=16,
              mode='train',
              regress=True,
              nhot=10,
              _print=False,
              Field='kField'):
    f = open(fpath, 'r')

    while True:
        images = []
        labels = []

        jump = int(np.floor(nmesh / nslice))  # note: np.int is removed in recent NumPy.

        while len(images) < batch_size:
            line = f.readline()
            line = line.replace('\n', '')

            if line == '':
                ##  https://www.pyimagesearch.com/2018/12/24/how-to-use-keras-fit-and-fit_generator-a-hands-on-tutorial/
                f.seek(0)
                line = f.readline()

                if mode == 'eval':
                    break

            if Field == 'Field':
                _file = BigFileMesh(line,
                                    dataset='1/Field',
                                    mode='real',
                                    header='Header')
                attrs = _file.attrs

                ##  (1. + delta) on preview.
                mesh = _file.preview() - 1.0

                if _print:
                    print(
                        'Loading mock:  (h = %.3lf, Om = %.3lf, f = %.3lf).' %
                        (attrs['h'], attrs['Om0'], attrs['Om0']**0.545))

                for ii, sslice in enumerate(np.arange(0, nmesh, jump)):
                    ##  Split 3D sim into _nslice_ 2D (x, z) slices;  Mesh returns (1 + delta).
                    images.append(mesh[:, sslice, :])
                    labels.append(
                        [attrs['h'], attrs['Om0'], attrs['Om0']**0.545])

                    ##  Bin sims in f and use bin index as a supervised label.

            elif Field == 'kField':
                _file = BigFileMesh(line,
                                    dataset='1/kField',
                                    mode='complex',
                                    header='Header')
                attrs = _file.attrs

                if _print:
                    print(
                        'Loading mock:  (h = %.3lf, Om = %.3lf, f = %.3lf).' %
                        (attrs['h'], attrs['Om0'], attrs['Om0']**0.545))

                field = _file.to_complex_field()
                shape = field.shape

                ##  Complex field (convolution?);  Arbitrary slice.
                images.append(field[:, :, 0])
                labels.append([attrs['h'], attrs['Om0'], attrs['Om0']**0.545])

            else:
                raise ValueError('\n\nRequested Field is not available.\n\n')

        images = np.array(images).reshape(batch_size, nmesh, nmesh, 1)
        labels = np.array(labels)

        ## Use f as label.
        labels = labels[:, 2]

        if not regress:
            fmin = .25**0.545
            fmax = .35**0.545

            ##  Number of one-hot encodings == number of bins.
            bins = np.linspace(fmin, fmax, nhot)

            labels = np.digitize(labels, bins)

            ##  One-hot encode target column.
            labels = to_categorical(labels, num_classes=nhot)

        ##
        yield (images, labels)
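
A hypothetical smoke test for the generator: pull one batch and check the shapes. The file list train_meshes.txt (one BigFileMesh path per line) is an assumption, not a file from the original code.

gen = generator('train_meshes.txt', batch_size=4, nmesh=128,
                regress=False, nhot=10, Field='Field')

images, labels = next(gen)

print(images.shape)   # (4, 128, 128, 1)
print(labels.shape)   # (4, 10): one-hot f bins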
Example #4
    nnx = np.random.uniform(0, 10)
    nny = np.random.uniform(0, 10)
    nnz = np.random.uniform(0, 10)
    theta = np.random.uniform(0, 2 * np.pi)

    nx = 1 / np.sqrt(nnx**2 + nny**2 + nnz**2) * nnx
    ny = 1 / np.sqrt(nnx**2 + nny**2 + nnz**2) * nny
    nz = 1 / np.sqrt(nnx**2 + nny**2 + nnz**2) * nnz

    R = rotation(nx, ny, nz, theta)

    subcat['Position'] = da.transpose(
        da.dot(R, da.transpose(subcat['Position'])))

    mesh = subcat.to_mesh(Nmesh=nmesh, BoxSize=bs)

    proj1 = np.fft.fftshift(mesh.preview(axes=[0, 1], Nmesh=nmesh))
    proj2 = np.fft.fftshift(mesh.preview(axes=[0, 2], Nmesh=nmesh))
    proj3 = np.fft.fftshift(mesh.preview(axes=[1, 2], Nmesh=nmesh))

    np.save(
        '/n/dvorkin_lab/anadr/%s_proj/%s/hbugfix/%s_%s_projxy_%s_%s' %
        (name, numb, name, numb, count, item), proj1)
    np.save(
        '/n/dvorkin_lab/anadr/%s_proj/%s/hbugfix/%s_%s_projxz_%s_%s' %
        (name, numb, name, numb, count, item), proj2)
    np.save(
        '/n/dvorkin_lab/anadr/%s_proj/%s/hbugfix/%s_%s_projyz_%s_%s' %
        (name, numb, name, numb, count, item), proj3)

    print("%s seconds" % (time.time() - start_time))