import os


def get_dataset(path):
    # List the dataset folder and verify it holds only the expected splits.
    content = os.listdir(path)
    if not all(x in ['train', 'valid', 'test'] for x in content):
        print_error('Folder should contain only the train, valid and test '
                    'folders. Path probably not correct')
        raise Exception('Fix dataset_path in config')
    content.sort()
    return content
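# Expected layout under dataset_path (a sketch; these are exactly the names
# get_dataset checks for):
#
#     dataset_path/
#         train/
#         valid/
#         test/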
def _chunkify(self, dataset, nr_of_chunks, batch_size):
    # Round items per chunk down until there is an exact number of
    # minibatches, i.e. a multiple of batch_size.
    items_per_chunk = len(dataset[0]) / nr_of_chunks
    if items_per_chunk < batch_size:
        print_error('Chunk limit in config set too small, or batch size too '
                    'large.\nEach chunk must include at least one batch.')
        raise Exception('Fix chunk_size and batch_size')
    temp = int(items_per_chunk / batch_size)
    items_per_chunk = batch_size * temp

    data, labels = dataset
    # TODO: do floatX operation twice.
    chunks = [[AbstractDataset._floatX(data[x:x + items_per_chunk]),
               AbstractDataset._floatX(labels[x:x + items_per_chunk])]
              for x in xrange(0, len(dataset[0]), items_per_chunk)]

    # If the last chunk is smaller than the batch size, it is cut. No reason
    # for an unnecessary swap.
    last_chunk_size = len(chunks[-1][0])
    # TODO: Quick fix
    if last_chunk_size < batch_size * 15:
        chunks.pop(-1)
        print('---- Removed last chunk. '
              '{} elements not enough for at least one minibatch of {}'.format(
                  last_chunk_size, batch_size))
    return chunks
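# Worked example with hypothetical sizes (not taken from any config): for 100
# examples, nr_of_chunks=3 and batch_size=10, items_per_chunk starts at
# 100 / 3 = 33 and is floored to the nearest multiple of the batch size, 30.
# Slicing at offsets 0/30/60/90 yields chunks of 30/30/30/10 examples; the
# trailing 10-element chunk fails the batch_size * 15 guard and is popped,
# leaving three chunks of exactly three minibatches each.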
def dataset_check(name, dataset, batch_size):
    # If there are too few examples for at least one batch, the dataset is
    # invalid.
    if len(dataset[0]) < batch_size:
        print_error('Insufficient examples in {}. '
                    '{} examples not enough for at least one minibatch'.format(
                        name, len(dataset[0])))
        raise Exception('Decrease batch_size or increase samples_per_image')
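# Minimal sketch exercising dataset_check (hypothetical numpy arrays; a split
# with fewer examples than one batch should raise). Guarded so it only runs
# when the module is executed directly:
if __name__ == '__main__':
    import numpy as np
    tiny = (np.zeros((4, 8), dtype='float32'), np.zeros(4, dtype='int32'))
    dataset_check('tiny', tiny, batch_size=16)  # raises: 4 examples < 16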
def callback(response):
    # The GUI server answers 'Unauthorized' when the supplied token is wrong;
    # otherwise the decoded body carries the id of the current job.
    if response.body == 'Unauthorized':
        print_error('Gui is enabled, but token in secret.py is invalid')
        raise Exception('Token is invalid')
    global current_id
    current_id = response.body['id']