示例#1
0
        test_inds[value_inds[n:]] = True

    return np.where(train_inds)[0], np.where(test_inds)[0]


#==============================================================================
# SET THESE PARAMETERS!
#==============================================================================
curdb_dir = 'roi'
data_dir = r'c:\Users\picturio\OneDrive\KS-XR\Images'

#==============================================================================
# RUN CONFIG
#==============================================================================

# NOTE(review): `training_id` is not defined in this fragment — presumably it
# is set earlier in the full script; confirm before running standalone.
cfg = train_params(data_dir, curdb_dir=curdb_dir, training_id=training_id)

# Read the data description file (semicolon-delimited CSV referenced by the
# training config).
df_db = pd.read_csv(cfg.db_file, delimiter=';')

# Select classes to process (currently: keep every row).
df_filtered = df_db.copy()

# Build the labeled table expected downstream: one row per image with
# columns ['image', 'category'].
df_labeled = df_db[['Filename']].copy()
# Fix: assign the Series, not a one-column DataFrame — assigning
# df_db[['Class name']] (a DataFrame) to a single column relies on fragile
# index-alignment behavior; the Series form is the documented idiom.
df_labeled['category'] = df_db['Class name']
df_labeled.columns = ['image', 'category']
"""
示例#2
0
#==============================================================================
# SET THESE PARAMETERS!
#==============================================================================

training_id = '20180308'
curdb_dir = 'db_cropped_rot'

# Alternative data roots (Windows machines), kept for reference:
# data_dir = os.path.join('C:', 'Users', 'picturio', 'OneDrive', 'WaterScope')
# data_dir = os.path.join('E:', 'OneDrive', 'WaterScope')
data_dir = os.path.join('/', 'home', 'mikesz', 'ownCloud', 'WaterScope')

# Cropped results are saved here.
# save_dir = os.path.join('D:', 'DATA', 'WaterScope', 'tmp_problem')
save_dir = os.path.join('/', 'home', 'mikesz', 'Data', 'WaterScope', 'tmp_cropped')

#==============================================================================
# RUN CONFIG
#==============================================================================

cfg = train_params(data_dir,
                   base_db='db_categorized',
                   curdb_dir=curdb_dir,
                   training_id=training_id)


#==============================================================================
# Create database using folder names
#==============================================================================
image_list = fh.imagelist_in_depth(cfg.base_imagedb_dir, level=1)

# Class names are taken from the immediate parent folder of each image file.
class_names = [os.path.dirname(image_file).split(os.sep)[-1]
               for image_file in image_list]
df_db = pd.DataFrame(data={'Filename': image_list, 'Class name': class_names})
# df_db = df_db[df_db['Class name'] == 'Others']

#==============================================================================
示例#3
0
# Flattened input size: one value per pixel per channel.
numFeature = image_height * image_width * num_channels

#==============================================================================
# SET training parameters
#==============================================================================
max_epochs = 150                    # number of training epochs
model_func = create_shallow_model   # network-architecture factory

epoch_size = 3300      # training samples per epoch
minibatch_size = 128   # training samples per minibatch

#==============================================================================
# RUN CONFIG
#==============================================================================

cfg = train_params(data_dir, training_id=training_id)

# Artifacts read/written under the training directory.
data_mean_file = os.path.join(cfg.train_dir, 'data_mean.xml')
model_file = os.path.join(cfg.train_dir, 'cnn_model.dnn')
model_temp_file = os.path.join(cfg.train_dir, 'cnn_model_temp.dnn')
train_log_file = os.path.join(cfg.train_log_dir, 'progress_log.txt')

# Map files listing the image paths of the train/test splits.
train_map_image = os.path.join(cfg.train_dir, 'train_map_image.txt')
test_map_image = os.path.join(cfg.train_dir, 'test_map_image.txt')


#
# Evaluation action
#
def evaluate_test(input_map,
                  reader_test,
示例#4
0
import matplotlib.pyplot as plt

#from cntk.device import try_set_default_device, gpu
from cntk import cross_entropy_with_softmax, classification_error, input_variable, softmax, element_times
from cntk.io import MinibatchSource, ImageDeserializer, StreamDef, StreamDefs, transforms
from cntk import Trainer, UnitType
from cntk import momentum_sgd, learning_rate_schedule, momentum_as_time_constant_schedule
#from cntk.learners import momentum_schedule
from cntk.logging import log_number_of_parameters, ProgressPrinter, TensorBoardProgressWriter

from src_train.model_functions import create_basic_model, create_advanced_model
from src_train.train_config import train_params
#from src_train.readers import create_reader

# Data root on the OneDrive share.
# Fix: use a raw string for 'OneDrive\WaterScope' — the previous plain literal
# relied on '\W' not being a recognized escape sequence, which raises a
# DeprecationWarning (a SyntaxWarning on Python >= 3.12). The runtime value is
# unchanged.
data_dir = os.path.join(r'C:\Users', 'picturio', r'OneDrive\WaterScope')
cfg = train_params(data_dir, crop=True, training_id='20171113')

# Model and log artifacts under the training directory.
model_file = os.path.join(cfg.train_dir, 'cnn_model.dnn')
model_temp_file = os.path.join(cfg.train_dir, 'cnn_model_temp.dnn')
train_log_file = os.path.join(cfg.train_log_dir, 'progress_log.txt')

# Train/test map files — produced by the prepare4train step.
train_map = os.path.join(cfg.train_dir, 'train_map.txt')
test_map = os.path.join(cfg.train_dir, 'test_map.txt')

data_mean_file = os.path.join(cfg.train_dir, 'data_mean.xml')

# Model input dimensions.
image_height = 64
image_width = 64
示例#5
0
from src_train.train_config import train_params
import src_tools.file_helper as fh

from matplotlib import pyplot as plt
import matplotlib.patches as patches


import crop


# Data root (Linux box). Earlier Windows roots kept for reference:
# data_dir = os.path.join('C:', 'Users', 'picturio', 'OneDrive', 'WaterScope')
# data_dir = os.path.join('E:', 'OneDrive', 'WaterScope')
data_dir = os.path.join('/', 'home', 'mikesz', 'ownCloud', 'WaterScope')


cfg = train_params(data_dir, base_db='db_categorized', curdb_dir='crop_problems')


# Problem crops are written here.
# save_dir = os.path.join('D:\\', 'DATA', 'WaterScope', 'tmp_problem')
save_dir = os.path.join('/', 'home', 'mikesz', 'Data', 'WaterScope', 'tmp_problem')

# Collect every image two levels deep under the current database directory.
# image_list = fh.imagelist_in_depth(cfg.base_imagedb_dir, level=2)
image_list = fh.imagelist_in_depth(cfg.curdb_dir, level=2)

# Example cropping loop, currently disabled:
# for image_file in image_list:
#     img = Image.open(image_file)
#     img_square = crop.crop(img, pad_rate=0.25,
#                            save_file=os.path.join(data_dir, 'Images', 'tmp',
#                                                   os.path.basename(image_file)),
#                            category='dummy')