Example #1

import os
import sys

# Make the repository root importable so the shared project modules resolve.
root_path = os.path.abspath('..')
if root_path not in sys.path:
    sys.path.append(root_path)

import config
from birds_loader import load_birds, build_dataset
from ssd_utils.ssd_loss import SSDLoss
from utils import import_by_name, train_test_split_tensors, MeanAveragePrecisionCallback

# Load train and validation data
train_image_paths, train_bnd_boxes = load_birds(split='train')
valid_image_paths, valid_bnd_boxes = load_birds(split='valid')

valid_data = build_dataset(valid_image_paths, valid_bnd_boxes,
                           image_size=config.IMAGE_SIZE,
                           batch_size=config.BATCH_SIZE)

for run in range(1, config.NUM_RUNS + 1):
    weights_dir = 'weights_{}'.format(run)
    history_dir = 'history_{}'.format(run)

    os.makedirs(weights_dir, exist_ok=True)
    os.makedirs(history_dir, exist_ok=True)

    for architecture in config.ARCHITECTURES:
        model_class = import_by_name('ssd_utils.networks.' + architecture)

        model_name = architecture.lower() + '_pretrained'
        model_file = model_name + '.h5'
        model_path = os.path.join(weights_dir, model_file)
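        # The excerpt ends here. A minimal sketch of how this loop plausibly
        # continues, reusing only helper signatures shown elsewhere in these
        # examples. The optimizer choice, config.EPOCHS, and the SSDLoss()
        # constructor arguments are assumptions, not the original code; the
        # imported MeanAveragePrecisionCallback would typically be passed via
        # callbacks=[...], but its constructor is not shown here.
        model = model_class(num_classes=len(config.CLASSES))

        train_data = build_dataset(train_image_paths, train_bnd_boxes,
                                   image_size=config.IMAGE_SIZE,
                                   batch_size=config.BATCH_SIZE)

        model.compile(optimizer='adam', loss=SSDLoss())
        history = model.fit(train_data,
                            validation_data=valid_data,
                            epochs=config.EPOCHS)
        # history.history would then be written under history_dir.
        model.save_weights(model_path)

Example #2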

import os
import sys

import tensorflow as tf

root_path = os.path.abspath('..')
if root_path not in sys.path:
    sys.path.append(root_path)

import config_naive_pasting_ablation as config
from birds_loader import load_birds, build_dataset
from ssd_utils import output_encoder
from ssd_utils.metrics import MeanAveragePrecision
from utils import import_by_name, MeanAveragePrecisionCallback

test_image_paths, test_bnd_boxes = load_birds(split='test')

test_data = build_dataset(test_image_paths,
                          test_bnd_boxes,
                          image_size=config.IMAGE_SIZE,
                          batch_size=config.BATCH_SIZE)

meanAP_metric = MeanAveragePrecision()

# One row per evaluated model, one mAP column per training run.
results = {'architecture': []}

for run in range(1, config.NUM_RUNS + 1):
    results['run_{}'.format(run)] = []

for architecture in config.ARCHITECTURES:
    model_class = import_by_name('ssd_utils.networks.' + architecture)
    model = model_class(num_classes=len(config.CLASSES))
    # Anchors are required later to decode the model's raw box offsets.
    anchors = model.get_anchors(image_shape=config.IMAGE_SIZE + (3,))

    # Only the fine-tuned variant is evaluated here.
    for train_type in ['finetuned']:
        for train_samples in config.TRAIN_SAMPLES:
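            # The excerpt is truncated here, leaving the loop above without a
            # body. A minimal sketch of the evaluation step: the weight-file
            # naming scheme and the assumption that MeanAveragePrecision
            # follows the Keras metric convention (reset_state, update_state,
            # result) are both guesses, not the original code.
            for run in range(1, config.NUM_RUNS + 1):
                weights_file = '{}_{}_{}.h5'.format(
                    architecture.lower(), train_type, train_samples)
                model.load_weights(
                    os.path.join('weights_{}'.format(run), weights_file))

                meanAP_metric.reset_state()
                for images, labels in test_data:
                    predictions = model.predict(images, verbose=0)
                    meanAP_metric.update_state(labels, predictions)
                results['run_{}'.format(run)].append(
                    float(meanAP_metric.result()))

            results['architecture'].append(
                '{} ({}, {} samples)'.format(architecture, train_type,
                                             train_samples))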

Example #3

import os
import sys

root_path = os.path.abspath('..')
if root_path not in sys.path:
    sys.path.append(root_path)

import config_pretraining_ablation as config
from birds_loader import load_birds, build_dataset
from ssd_utils.ssd_loss import SSDLoss
from utils import import_by_name, train_test_split_tensors, MeanAveragePrecisionCallback

# Load train and validation data
train_image_paths, train_bnd_boxes = load_birds(split='train')
valid_image_paths, valid_bnd_boxes = load_birds(split='valid')
# Synthetic images generated for the pretraining ablation.
fake_image_paths, fake_bnd_boxes = load_birds(
    root=os.path.join('..', 'data', 'birds_fake'),
    split='fake')


valid_data = build_dataset(valid_image_paths, valid_bnd_boxes,
                           image_size=config.IMAGE_SIZE,
                           batch_size=config.BATCH_SIZE)


for run in range(1, config.NUM_RUNS + 1):
    weights_dir = 'weights_pretraining_ablation_{}'.format(run)
    history_dir = 'history_pretraining_ablation_{}'.format(run)

    os.makedirs(weights_dir, exist_ok=True)
    os.makedirs(history_dir, exist_ok=True)

    for architecture in config.ARCHITECTURES:
        model_class = import_by_name('ssd_utils.networks.' + architecture)

        for prop_fake_samples in config.PROP_FAKE_SAMPLES:
            num_fake_samples = int(prop_fake_samples * config.NUM_REAL_SAMPLES)
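            # The excerpt ends here. A minimal sketch of a plausible
            # continuation: sample num_fake_samples synthetic images, mix
            # them into the real training set, and train as in the other
            # examples. The train_test_split_tensors return order, the list
            # concatenation, config.EPOCHS, and the weight-file name are all
            # assumptions, not the original code.
            fake_paths_sample, _, fake_boxes_sample, _ = train_test_split_tensors(
                fake_image_paths, fake_bnd_boxes,
                train_size=num_fake_samples)

            train_data = build_dataset(
                list(train_image_paths) + list(fake_paths_sample),
                list(train_bnd_boxes) + list(fake_boxes_sample),
                image_size=config.IMAGE_SIZE,
                batch_size=config.BATCH_SIZE)

            model = model_class(num_classes=len(config.CLASSES))
            model.compile(optimizer='adam', loss=SSDLoss())
            model.fit(train_data, validation_data=valid_data,
                      epochs=config.EPOCHS)
            model.save_weights(os.path.join(
                weights_dir,
                '{}_fake_{:.2f}.h5'.format(architecture.lower(),
                                           prop_fake_samples)))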