Code Example #1
File: train.py Project: Allensmile/ModelZoo
 def prepare_data(self):
     from tensorflow.python.keras.datasets import boston_housing
     from sklearn.preprocessing import StandardScaler
     (x_train, y_train), (x_eval, y_eval) = boston_housing.load_data()
     ss = StandardScaler()
     ss.fit(x_train)
     x_train, x_eval = ss.transform(x_train), ss.transform(x_eval)
     train_data, eval_data = (x_train, y_train), (x_eval, y_eval)
     return train_data, eval_data
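For context, a minimal sketch of how the returned tuples might be consumed. The instance name `data_model`, the layer sizes, and the training settings below are assumptions, not part of the original project.

from tensorflow.python.keras import layers, models

# Hypothetical call site: `data_model` is an assumed instance of the class above.
(x_train, y_train), (x_eval, y_eval) = data_model.prepare_data()

# Illustrative regressor for the 13-feature Boston Housing inputs.
model = models.Sequential([
    layers.Dense(64, activation='relu', input_shape=(x_train.shape[1],)),
    layers.Dense(1),
])
model.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])
model.fit(x_train, y_train, validation_data=(x_eval, y_eval), epochs=10, batch_size=32)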
Code Example #2
File: evaluate.py Project: zzzz123321/ModelZoo
 def prepare_data(self):
     from tensorflow.python.keras.datasets import boston_housing
     from sklearn.preprocessing import StandardScaler
     (x_train, y_train), (x_eval, y_eval) = boston_housing.load_data()
     ss = StandardScaler()
     ss.fit(x_train)
     x_eval = ss.transform(x_eval)
     return x_eval, y_eval
Code Example #3
    def __init__(self):
        (x_train, y_train), (x_test, y_test) = boston_housing.load_data()

        mean = x_train.mean(axis=0)
        x_train -= mean
        std = x_train.std(axis=0)
        x_train /= std

        x_test -= mean
        x_test /= std

        super().__init__(x_train, x_test, y_train, y_test, (x_train.shape[1:]),
                         1, 'boston_housing')
Code Example #4
from tensorflow.python.keras.datasets import boston_housing


def load_data():
    '''Loads and normalizes the Boston Housing data'''
    (train_data, train_targets), (test_data, test_targets) =\
            boston_housing.load_data()

    mean = train_data.mean(axis=0)
    train_data -= mean
    std = train_data.std(axis=0)
    train_data /= std

    test_data -= mean
    test_data /= std

    return (train_data, train_targets), (test_data, test_targets)
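A quick sanity check for this helper; the shape comments below assume the default Keras train/test split.

import numpy as np

(train_data, train_targets), (test_data, test_targets) = load_data()

# Training features should now be approximately zero-mean and unit-variance;
# the test set is close but not exact, since it was scaled with training statistics.
print(train_data.shape, test_data.shape)          # e.g. (404, 13) (102, 13)
print(np.abs(train_data.mean(axis=0)).max())      # ~0
print(np.abs(train_data.std(axis=0) - 1).max())   # ~0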
Code Example #5
import os

import numpy as np
import tensorflow as tf
from tensorflow.contrib.eager.python import tfe
from tensorflow.python.keras.datasets import boston_housing

# enable eager mode
tf.enable_eager_execution()
tf.set_random_seed(0)
np.random.seed(0)

if not os.path.exists('weights/'):
    os.makedirs('weights/')

# constants
batch_size = 128
epochs = 25

# dataset loading
(x_train, y_train), (x_test, y_test) = boston_housing.load_data()

# normalization of dataset
mean = x_train.mean(axis=0)
std = x_train.std(axis=0)

x_train = (x_train - mean) / (std + 1e-8)
x_test = (x_test - mean) / (std + 1e-8)

print('x train', x_train.shape, x_train.mean(), x_train.std())
print('y train', y_train.shape, y_train.mean(), y_train.std())
print('x test', x_test.shape, x_test.mean(), x_test.std())
print('y test', y_test.shape, y_test.mean(), y_test.std())


# model definition (canonical way)
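The snippet cuts off at the model-definition comment. As a hedged continuation, here is a minimal sketch of a "canonical" tf.keras regressor that fits this eager-mode setup; the architecture and optimizer choice are assumptions, not the original author's code.

# Illustrative only: a small dense regressor in the canonical tf.keras style (TF 1.x eager).
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(64, activation='relu', input_shape=(13,)),  # 13 Boston Housing features
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(1),
])
model.compile(optimizer=tf.train.AdamOptimizer(), loss='mse', metrics=['mae'])
model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs,
          validation_data=(x_test, y_test))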
Code Example #6
 def prepare_data(self):
     from tensorflow.python.keras.datasets import boston_housing
     (x_train, y_train), (x_eval, y_eval) = boston_housing.load_data()
     x_train, x_eval = standardize(x_train, x_eval)
     train_data, eval_data = (x_train, y_train), (x_eval, y_eval)
     return train_data, eval_data
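The standardize helper is not shown in this snippet. A plausible reconstruction, assuming it mirrors the StandardScaler pattern from Code Example #1 (fit on the training split, transform both splits); the implementation below is hypothetical.

from sklearn.preprocessing import StandardScaler

def standardize(x_train, x_other):
    # Hypothetical helper: fit scaling statistics on the training features only,
    # then apply the same transform to both splits.
    ss = StandardScaler()
    ss.fit(x_train)
    return ss.transform(x_train), ss.transform(x_other)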
Code Example #7
from tensorflow.python.keras.datasets import boston_housing
(train_data, train_targets), (test_data, test_targets) = boston_housing.load_data()
print(train_data.shape)

#from google.colab import drive
#drive.mount('/content/drive')
#/content/drive/My Drive/ANN Mahesh Anand/


# ### Collect Data

# In[1]:


from tensorflow.python.keras.datasets import boston_housing

#Load data
(features, actual_prices),_ = boston_housing.load_data(test_split=0)


# In[2]:


print('Number of examples: ', features.shape[0])
print('Number of features for each example: ', features.shape[1])
print('Shape of actual prices data: ', actual_prices.shape)


# # Building the graph

# Define input data placeholders

# In[6]:
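The cell that defines the placeholders is not included above. A minimal sketch of what this step typically looks like in TF 1.x graph mode; the tensor names and shapes are assumptions.

import tensorflow as tf

# Boston Housing provides 13 features per example.
x = tf.placeholder(tf.float32, shape=[None, 13], name='features')
y = tf.placeholder(tf.float32, shape=[None], name='actual_prices')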
Code Example #9
File: 3-24-0.py Project: WebGLer/on-my-window
from tensorflow.python.keras.datasets import boston_housing
data_path = "D:\\data\\boston_housing.npz"
(train_datas, train_targets), (test_datas, test_targets) = boston_housing.load_data(path=data_path)
print(train_datas.shape)

# Standardize the data
# mean of each column
mean = train_datas.mean(axis=0)
# subtract the mean
train_datas -= mean
# standard deviation of each column
std = train_datas.std(axis=0)
train_datas /= std
test_datas -= mean
test_datas /= std


# Build the network
from tensorflow.python.keras import layers, models
def build_model():
    model = models.Sequential()
    model.add(layers.Dense(64, activation='relu', input_shape=(train_datas.shape[1],)))
    model.add(layers.Dense(64, activation='relu'))
    model.add(layers.Dense(1))
    model.compile(
        optimizer='rmsprop',
        loss= 'mse',
        metrics=['mae']
    )
    return model
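For completeness, a short usage sketch of build_model(); the epoch count and batch size below are illustrative assumptions, not values from the original file.

# Train on the standardized data and report test-set MAE.
model = build_model()
model.fit(train_datas, train_targets, epochs=80, batch_size=16, verbose=0)
test_mse, test_mae = model.evaluate(test_datas, test_targets, verbose=0)
print('Test MAE:', test_mae)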
Code Example #10
import os

import numpy as np
import tensorflow as tf
from tensorflow.contrib.eager.python import tfe
from tensorflow.python.keras.datasets import boston_housing

get_session()  # helper assumed to be defined elsewhere in this project
tfe.enable_eager_execution()
tfe.executing_eagerly()  # => True
tf.set_random_seed(0)
np.random.seed(0)

if not os.path.exists('weights/'):
    os.makedirs('weights/')

# 2. parameters
batch_size = 128
epochs = 100

# 3. train data
(x_train, y_train), (x_test, y_test) = boston_housing.load_data(test_split=0.1)

mean = x_train.mean(axis=0)
std = x_train.std(axis=0)

x_train = (x_train - mean) / (std + 1e-8)
x_test = (x_test - mean) / (std + 1e-8)

print('x train', x_train.shape, x_train.mean(), x_train.std())
print('y train', y_train.shape, y_train.mean(), y_train.std())
print('x test', x_test.shape, x_test.mean(), x_test.std())
print('y test', y_test.shape, y_test.mean(), y_test.std())


# 4. model (linear regression)
def build_model(input_shape=None):
Code Example #11
 def prepare_data(self):
     from tensorflow.python.keras.datasets import boston_housing
     from sklearn.preprocessing import StandardScaler
     (x_train, y_train), (x_test, y_test) = boston_housing.load_data()
     ss = StandardScaler()
     ss.fit(x_train)
     x_test = ss.transform(x_test)
     return x_test
Code Example #12
 def prepare_data(self):
     from tensorflow.python.keras.datasets import boston_housing
     (x_train, y_train), (x_test, y_test) = boston_housing.load_data()
     _, x_test = standardize(x_train, x_test)
     return x_test
Code Example #13
File: lab06_3.py Project: wharah/cs344
'''
@assignment: Lab 6, Exercise 3
@student: Sarah Whitten
@date: March 14, 2020
'''

import numpy as np
from tensorflow.python.keras.datasets import boston_housing

# load data
(train_images, train_labels), (test_images,
                               test_labels) = boston_housing.load_data()


# print number of training and testing examples
# from the class example guide
def print_structures():
    print(
        'training images \
            \n\tcount: {} \
            \n\tdimensions: {} \
            \n\tshape: {} \
            \n\tdata type: {}\n\n'.format(len(train_images), train_images.ndim,
                                          train_images.shape,
                                          train_images.dtype),
        'testing images \
            \n\tcount: {} \
            \n\tdimensions: {} \
            \n\tshape: {} \
            \n\tdata type: {} \
            \n\tvalues: {}\n'.format(len(test_labels), train_labels.ndim,