# Example #1
# Load a previously trained cloud-detector network plus its input
# normalization statistics from a .mat file, then set up the accumulators
# used to evaluate it over a training dataset (confusion matrices per
# image and in total, plus a printable text log).
netfilename = 'net_python_cloud_detector4.mat'
data = cloud.load_net(netfilename)

net = data['net']
# Per-feature normalization statistics; transposed to match the sample layout.
sample_mean = np.transpose(data['sample_mean'])
sample_std = np.transpose(data['sample_std'])
# MATLAB scalars come back as 2-D arrays; [0][0] unwraps to a plain int.
patchsize = data['patchsize'][0][0]

# Half-width of a (presumably odd-sized) patch. Floor division keeps the
# result an int — plain `/` would produce a float under Python 3 and break
# array indexing downstream.
offset = (patchsize - 1) // 2
# 7 channels per pixel, patchsize x patchsize pixels per sample.
inputsize = 7 * patchsize**2

d = cloud.load_all_data('/local_scratch/cloudmasks/mat_training_notime')

confusion_matrix = {}  # per-image confusion matrices, filled in the loop below
total_confusion_matrix = np.zeros((num_classes, num_classes))
test_confusion_matrix = np.zeros((num_classes, num_classes))
f_out = open(os.path.join(imgdir, 'classification_printout.txt'), 'w')

np.set_printoptions(suppress=True)  # no scientific notation in printed matrices
print('test rate for this dataset: ' + str(data['test_rate']))
# Iterate over every loaded image. `d` holds parallel lists:
# d[0] image arrays, d[1] masks, d[2] a second mask (presumably a
# filtered/fill variant — TODO confirm), d[3] source filenames.
# NOTE(review): the loop body is truncated in this chunk; the rest of the
# per-image processing continues beyond the visible lines.
for i in range(len(d[0])):
    A = d[0][i]  # image data for sample i
    MASK = d[1][i]  # classification mask for sample i
    MASKF = d[2][i]  # second mask for sample i — semantics unverified here
    filename = d[3][i]
    [fname, fext] = os.path.splitext(filename)  # basename/extension for output naming
# Example #2
# Training-data loader setup: import dependencies, seed the RNG, and
# pre-allocate the flat sample/class arrays that the loop below fills.
import sys
import numpy as np
import gc
import time

use_sample_storage = True  #off by default - below import may turn on
from cloud_params import *
from nnet_toolkit import nnet

# BUG FIX: the original wrote `np.random.seed = randomseed1`, which
# rebinds the seed *function* to an int and never seeds the generator.
# It must be called.
np.random.seed(randomseed1)

# 7 channels per pixel, patchsize x patchsize pixels per sample.
inputsize = 7 * patchsize**2

# Half-width of a patch; floor division keeps it an int for safe indexing
# under Python 3 (plain `/` would give a float).
offset = (patchsize - 1) // 2

d = cloud.load_all_data(data_path)

# Estimate how many samples to pre-allocate. The constants presumably
# encode image area (1000x1000), image count (44) and a safety factor
# (2.4), scaled by the smallest per-class load fraction — TODO confirm.
estimated_load_percentage = np.min(class_load_percentage)
estimated_size = int(1000.0 * 1000.0 * 44.0 * 2.4 * estimated_load_percentage)
sample_list = np.zeros((estimated_size, inputsize), dtype=np.float32)
class_list = np.zeros((estimated_size, 3), dtype=np.int8)
sample_list_test = []  # held-out samples accumulate here
class_list_test = []

samples_stored = 0  # running count of rows actually filled in sample_list

print('estimated size: ' + str(estimated_size))
# Loop over all loaded images (body truncated in this chunk).
# NOTE(review): the body indexes d[0][0] / d[1][0] rather than [i], so every
# iteration re-reads the FIRST image. The analogous loop elsewhere in this
# file uses d[*][i]; this looks like a bug — confirm against the original
# before relying on it.
for i in range(len(d[0])):
    A = d[0][0]
    MASK = d[1][0]
# (Duplicate of the earlier detector-evaluation setup, normalized to
# PEP 8 style.) Load the trained network and normalization statistics,
# then prepare confusion-matrix accumulators and an output log.
netfilename = 'net_python_cloud_detector4.mat'
data = cloud.load_net(netfilename)

net = data['net']
# Per-feature normalization statistics; transposed to match sample layout.
sample_mean = np.transpose(data['sample_mean'])
sample_std = np.transpose(data['sample_std'])
# MATLAB scalars arrive as 2-D arrays; [0][0] unwraps to a plain int.
patchsize = data['patchsize'][0][0]

# Floor division keeps the half-width an int (plain `/` floats under
# Python 3); also dropped the stray MATLAB-style semicolons.
offset = (patchsize - 1) // 2
inputsize = 7 * patchsize**2  # 7 channels x patchsize^2 pixels

d = cloud.load_all_data('/local_scratch/cloudmasks/mat_training_notime')

confusion_matrix = {}  # per-image confusion matrices, filled in the loop below
total_confusion_matrix = np.zeros((num_classes, num_classes))
test_confusion_matrix = np.zeros((num_classes, num_classes))
f_out = open(os.path.join(imgdir, 'classification_printout.txt'), 'w')

np.set_printoptions(suppress=True)  # no scientific notation in printed matrices
print('test rate for this dataset: ' + str(data['test_rate']))
# Iterate over every loaded image. `d` holds parallel lists:
# d[0] images, d[1] masks, d[2] a second mask (presumably filtered/fill —
# TODO confirm), d[3] source filenames. The loop body is truncated here.
for i in range(len(d[0])):
	A = d[0][i]
	MASK = d[1][i]
	MASKF = d[2][i]
	filename = d[3][i]
	[fname, fext] = os.path.splitext(filename)
# (Duplicate of the earlier data-loader setup, normalized to PEP 8 style.)
# Import dependencies, seed the RNG, and pre-allocate sample storage.
import sys
import numpy as np
import gc
import time

use_sample_storage = True  #off by default - below import may turn on
from cloud_params import *
from nnet_toolkit import nnet

# BUG FIX: `np.random.seed = randomseed1` rebinds the seed *function* to an
# int and never seeds the generator — it must be called.
np.random.seed(randomseed1)

inputsize = 7 * patchsize**2  # 7 channels x patchsize^2 pixels per sample

# Floor division keeps the patch half-width an int for safe indexing.
offset = (patchsize - 1) // 2

d = cloud.load_all_data(data_path)

# Pre-allocation estimate: constants presumably encode image area
# (1000x1000), image count (44) and a safety factor (2.4), scaled by the
# smallest per-class load fraction — TODO confirm.
estimated_load_percentage = np.min(class_load_percentage)
estimated_size = int(1000.0 * 1000.0 * 44.0 * 2.4 * estimated_load_percentage)
sample_list = np.zeros((estimated_size, inputsize), dtype=np.float32)
class_list = np.zeros((estimated_size, 3), dtype=np.int8)
sample_list_test = []  # held-out samples accumulate here
class_list_test = []

samples_stored = 0  # rows of sample_list actually filled so far

print('estimated size: ' + str(estimated_size))
# Loop over all loaded images (body truncated in this chunk).
# NOTE(review): indexes d[0][0] / d[1][0] rather than [i] — every iteration
# re-reads the first image. Likely should be d[*][i] as in the analogous
# loop elsewhere in this file; confirm before relying on it.
for i in range(len(d[0])):
	A = d[0][0]
	MASK = d[1][0]