def view_results():
    model = torch.load('../temp/exp_results/model_epoch_100.pkl', map_location='cpu')
    config = json.load(open('../config.json'))

    model.eval()
    data_manager = DataManager(config)
    val_loader, _ = data_manager.get_train_eval_dataloaders()

    for idx, (inputs, labels) in enumerate(val_loader, 0):
        print(idx)
        if config['use_cuda']:
            inputs = inputs.cuda().float()
            labels = labels.cuda().float()
            model = model.cuda()
        else:
            inputs = inputs.float()
            labels = labels.float()

        predictions = model(inputs)

        predictions = predictions.data.cpu().numpy()
        print('predictions', np.unique(predictions[0]))
        labels = labels.cpu().numpy()
        print('labels', np.unique(labels[0]))
        mask = create_mask(labels[0], palette=config['palette'])
        img = Image.fromarray(mask.astype(np.uint8))
        img.save('gt_' + str(idx) + '.jpg')

        predictions = np.argmax(predictions[0], axis=0)
        mask = create_mask(predictions, palette=config['palette'])
        img = Image.fromarray(mask.astype(np.uint8))
        img.save('pred_' + str(idx) + '.jpg')
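The snippet assumes a create_mask helper that turns a 2-D label map into an RGB image using the palette from the config. A minimal sketch of such a palette-lookup function (a hypothetical reimplementation, not the project's actual helper) could be:

import numpy as np

def create_mask(label_map, palette):
    # Hypothetical sketch: map each class id in an (H, W) label map to the
    # RGB triple stored at that index in `palette`.
    palette = np.asarray(palette, dtype=np.uint8)   # (num_classes, 3)
    return palette[label_map.astype(np.int64)]      # (H, W, 3)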
Example 3
def store_and_or_load_data(outputdir, dataset, data_dir):
    save_path = os.path.join(outputdir, dataset + "_Manager.pkl")
    if not os.path.exists(save_path):
        lock = lockfile.LockFile(save_path)
        while not lock.i_am_locking():
            try:
                lock.acquire(timeout=60)  # wait up to 60 seconds
            except lockfile.LockTimeout:
                lock.break_lock()
                lock.acquire()
        print("I locked", lock.path)
        # The file may have been created while we waited for the lock
        try:
            if not os.path.exists(save_path):
                D = DataManager(dataset, data_dir, verbose=True)
                with open(save_path, 'wb') as fh:
                    pickle.dump(D, fh, -1)
            else:
                with open(save_path, 'rb') as fh:
                    D = pickle.load(fh)
        finally:
            lock.release()
    else:
        with open(save_path, 'rb') as fh:
            D = pickle.load(fh)
        print("Loaded data")
    return D
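Note that the os.path.exists check is deliberately repeated after the lock is acquired: another process may have written the pickle while this one was waiting (double-checked locking). A hypothetical call, with illustrative paths, would be:

# Paths and dataset name are illustrative.
D = store_and_or_load_data(outputdir='/tmp/cache',
                           dataset='adult',
                           data_dir='/data/datasets')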
Example 4
def test_net():
    # Evaluates a pretrained network on the test set.
    config = json.load(open('./config.json'))
    config['device'] = 'cuda' if torch.cuda.is_available() else 'cpu'

    model = EarthNetComplex().to(config['device'])
    checkpoint = torch.load(os.path.join(config['exp_path'],
                                         config['exp_name'],
                                         'latest_checkpoint.pkl'),
                            map_location=config['device'])
    model.load_state_dict(checkpoint['model_weights'])

    criterion = getattr(loss_functions, config['loss_function'])

    data_manager = DataManager(config)
    _, _, test_loader = data_manager.get_train_eval_test_dataloaders()

    trainer = Trainer(model, None, None, criterion, None, None, config)
    trainer.test_net(test_loader)
Example 5
def select_variables(size=DataManager.shared().get_number_of_columns()):
    # Note: the default for `size` is evaluated once, when the module loads.
    number_of_variables = randint(MIN_NUMBER_OF_VARIABLES,
                                  MAX_NUMBER_OF_VARIABLES)
    variables_to_use = set(sample(range(size), number_of_variables))
    # Build a binary inclusion mask: 1 if a column is selected, 0 otherwise.
    return [1 if i in variables_to_use else 0 for i in range(size)]
Example 6
def setup_population():
    dm = DataManager.shared()

    from genetic_algorithm.population import Population
    from neural_network.neural_network import NeuralNetwork

    population = Population.instantiate(NeuralNetwork.instantiate)
    population.calculate_fitness(dm.get_X_train(), dm.get_y_train())
    population.get_chromosomes().sort(key=lambda ann: ann.get_fitness(),
                                      reverse=True)
    print_population(population, 0)
    return population
Example 7
def main():
    config = json.load(open('./config.json'))
    config['device'] = 'cuda' if torch.cuda.is_available() else 'cpu'

    try:
        os.mkdir(os.path.join(config['exp_path'], config['exp_name']))
    except FileExistsError:
        print("Director already exists! It will be overwritten!")

    model = EarthNetComplex().to(config['device'])
    model.apply(EarthNetComplex.init_weights)

    # Save info about experiment
    save_logs_about(os.path.join(config['exp_path'], config['exp_name']),
                    json.dumps(config, indent=2))
    shutil.copy(model.get_path(),
                os.path.join(config['exp_path'], config['exp_name']))

    criterion = getattr(loss_functions, config['loss_function'])

    optimizer = optim.Adam(model.parameters(),
                           lr=config['lr'],
                           weight_decay=config['weight_decay'])
    lr_scheduler = torch.optim.lr_scheduler.StepLR(
        optimizer,
        config['lr_sch_step'],
        gamma=config['lr_sch_gamma'],
        last_epoch=-1)

    data_manager = DataManager(config)
    train_loader, validation_loader, test_loader = \
        data_manager.get_train_eval_test_dataloaders()

    trainer = Trainer(model, train_loader, validation_loader, criterion,
                      optimizer, lr_scheduler, config)
    trainer.train()

    trainer.test_net(test_loader)
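main() pulls every hyperparameter from config.json. A sketch of an illustrative configuration covering the keys read above (values are invented; DataManager and Trainer may require additional keys):

import json

config = {
    'exp_path': './experiments',       # where checkpoints and logs go
    'exp_name': 'earthnet_complex_v1',
    'loss_function': 'mse_loss',       # must name an attribute of loss_functions
    'lr': 1e-4,
    'weight_decay': 1e-5,
    'lr_sch_step': 30,                 # StepLR step size
    'lr_sch_gamma': 0.1,               # StepLR decay factor
}
with open('./config.json', 'w') as f:
    json.dump(config, f, indent=2)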
Example 8
def main():
    config = json.load(open('./config.json'))

    experiment_name = 'Fully Conv'
    vis_legend = ['Training Loss', 'Eval Loss']
    visualizer = VisdomVisualizer(experiment_name, vis_legend, config=config)

    model = UNetV3(n_channels=config['n_channels'], n_classes=config['n_classes'])
    model.apply(init_weights)

    weights = torch.ones(21)  # per-class loss weights
    if config['use_cuda']:
        weights = weights.cuda()
    weights[0] = 0.07   # strongly down-weight class 0
    weights[15] = 0.5   # down-weight class 15

    criterion = SegmentationLosses(weight=weights).build_loss(mode='ce')
    optimizer = optim.SGD(model.parameters(), lr=0.00001, momentum=0.9)

    data_manager = DataManager(config)
    train_loader, val_loader = data_manager.get_train_eval_dataloaders()

    trainer = Trainer(model, train_loader, val_loader, criterion, optimizer, visualizer, experiment_name, config)
    trainer.train()
Example 9
class TestDataManager(TestCase):
    def setUp(self):
        self.sess = None
        self.data_manager = DataManager(hparams=hparams)

    def tearDown(self):
        del self.data_manager

    def test_indexing(self):
        sentence = '1 4 6 8 2 3'
        answer = [2, 4, 0, 0, 0, 5, 6, 1, 1, 3]

        result = self.data_manager.indexing(sentence)

        self.assertListEqual(answer, result)
Example 10
def main():
    setup_data_manager()
    dm = DataManager.shared()
    from genetic_algorithm.genetic_algorithm import GeneticAlgorithm

    population = setup_population()
    generation_number = 1

    while (generation_number != NUMBER_OF_GENERATIONS
           and population.get_chromosomes()[0].get_fitness() < TARGET_FITNESS):
        population = GeneticAlgorithm.evolve(population)
        population.calculate_fitness(dm.get_X_train(), dm.get_y_train())
        population.get_chromosomes().sort(key=lambda ann: ann.get_fitness(),
                                          reverse=True)
        print_population(population, generation_number)
        generation_number += 1
Example 11
def setup_data_manager():
    data_manager = DataManager.shared()
    data_manager.read_data(FILE_PATH)
    # - Data preprocessing -
    # Scaling
    data_manager.scale_data(SCALE_EXCLUDE_VARIABLES)
    # Encoding
    data_manager.one_hot_encode_data(VARIABLES_TO_OH_ENCODE)
    data_manager.binary_encode_data(VARIABLES_TO_B_ENCODE)
    # Remove variables
    data_manager.drop_columns(VARIABLES_TO_DELETE)

    # - Data Split -
    data_manager.split_data(VARIABLE_TO_CLASSIFY)

    # - Data Analysis -
    # Recursive feature elimination
    data_manager.rfe_analysis(12)
    # PCA analysis
    data_manager.pca_analysis(0.95)

    # - Print Variables -
    data_manager.print_variables()
Example 12
def run(arg_unseen_data_path: Path, arg_output_path: Path):
    """
    main script of new_offer_success_predictor repo

    predicts offer acceptance probabilities (probabilities of success i.e. customer will
    accept our offer) using the best overall model (via roc_auc, recall, accuracy and precision
     metrics)

     saves results to excel file (xlsx) in form:
     customer_name | success_probability
        1                   x
        2                   y
    etc.
     """
    """
    read train data to help encode test set
    """
    DM = DataManager()
    train_df = DM.load_data()

    """
    firefighting
    """
    arg_unseen_data_path = Path(Path(str(arg_unseen_data_path).split('.')[0]))

    """
    read unseen data to predict their class
    """
    DM_unseen = DataManager(local_path=Path(arg_unseen_data_path),
                            project_path=Path(''),
                            filename='',
                            suffix='.parquet',
                            csv_suffix='.csv')
    unseen_df = DM_unseen.load_data()

    """
    only for testing reasons
    """
    if 'accepted' in unseen_df.columns:
        unseen_df = unseen_df.drop(columns=['accepted'])

    """
    process both train and test data
    """
    customer_names = unseen_df.reset_index()[['name']].rename(columns={'name': 'customer_name'})
    DP = DataProcessor(train_df=train_df)
    processed_train_df = DP.perform_initial_features_engineering()
    TDP = TestDataProcessor(not_processed_train_df=train_df,
                            processed_train_df=processed_train_df,
                            test_df=unseen_df,
                            sneaky_peaky=True)
    processed_unseen_df = TDP.perform_initial_features_engineering()

    columns_to_encode = ['offer_class',
                         'gender',
                         'customer_type',
                         'center',
                         'phone_calls',
                         'cc_len',
                         'cc_startswith']
    """
    encoding test set
    """
    enc = LeaveOneOutEncoder(train_df=processed_train_df,
                             test_df=processed_unseen_df,
                             columns_to_encode=columns_to_encode,
                             target_column='target',
                             random_state=42,
                             mean=1,
                             std=0.05)
    _, test_df_encoded = enc.fit()
    test_df_encoded_ohemails = test_df_encoded.copy(deep=True)

    """
    dictionary for email ohe mapping
    """
    email_ohe_names = {0: '0_emails',
                       1: '1_email',
                       2: '2_emails',
                       3: '3_emails',
                       4: '4_emails',
                       5: '5_emails'}

    test_df_encoded_ohemails = (
        pd.concat([test_df_encoded_ohemails, pd.get_dummies(test_df_encoded_ohemails['emails'])],
                  axis=1).rename(columns=email_ohe_names)).drop(columns=['emails'])

    """
    features used to predict on test set
    """
    test_columns = ['log_salary', 'log_estimated_expenses_knn', 'log_offer_value_knn',
                    'nan_age', 'not_nan_age', '0_emails', '1_email', '2_emails',
                    '3_emails', '4_emails', '5_emails', 'encoded_offer_class',
                    'encoded_gender', 'encoded_customer_type', 'encoded_center',
                    'encoded_phone_calls', 'encoded_cc_len', 'encoded_cc_startswith']
    unseen_data = test_df_encoded_ohemails[test_columns]

    """
    load model
    """
    models_path = (Path('/Users/mjasiecz/PycharmProjects/new_offer_success_predictor/models/final_model.pickle'))
    if not models_path.exists():
        print('be sure to change model path my friend :)')
    final_model = pickle.load(open(models_path, 'rb'))

    """
    predict probability
    """
    probabilities = pd.DataFrame(
        {'success_probability': final_model.predict_proba(unseen_data)[:, 1]}
    )
    result = pd.merge(customer_names,
                      probabilities,
                      how='inner',
                      on=customer_names.index).drop(columns='key_0')

    """
    generate results
    """
    result.to_excel(arg_output_path,
                    sheet_name='cust_prob_list',
                    engine='xlsxwriter')

    print('results were generated to: '+str(arg_output_path))
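The e-mail one-hot step above relies on pd.get_dummies producing one column per distinct value in 'emails'. A standalone sketch of that transformation on toy data:

import pandas as pd

df = pd.DataFrame({'emails': [0, 2, 5, 1]})   # stand-in for the real frame
email_ohe_names = {0: '0_emails', 1: '1_email', 2: '2_emails',
                   3: '3_emails', 4: '4_emails', 5: '5_emails'}
df = (pd.concat([df, pd.get_dummies(df['emails'])], axis=1)
        .rename(columns=email_ohe_names)
        .drop(columns=['emails']))
# Only values present in the data yield columns (here 0, 1, 2 and 5), so
# unseen e-mail counts would leave gaps in the expected test_columns.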
Example 13
from data.data_manager import DataManager
from data.person import Person

new_person = Person()
new_person.set_id(3)
new_person.set_name("Michael")
new_person.set_surname("Jackson")
new_person.set_year(1900)

DataManager().add_person(new_person)
Example 14
def main(_):
    # Set Domain Adaptation Network
    net_opts = Network.OPTS()
    net_opts.network_name = 'dom_adapt_net'
    net_opts.weight_path = 'pretrained/vgg-face.mat'  #download link: http://www.vlfeat.org/matconvnet/models/vgg-face.mat
    net_opts.num_class = 30
    net = Network(net_opts)
    net.construct()

    # Set Dataset Manager
    from data.data_manager import Manager as DataManager
    dataset = DataManager('./data', net_opts.num_class)

    # Set Optimizer (fine-tuning VGG-Face)
    with tf.variable_scope('optimizer'):
        net.trainable_vars = tf.get_collection(
            tf.GraphKeys.TRAINABLE_VARIABLES)
        net.trainable_var_names = [v.name for v in net.trainable_vars]
        to_select_names = ('fc6', 'dom', 'class')
        net.sel_vars = []
        for i in range(len(net.trainable_var_names)):
            if net.trainable_var_names[i].startswith(to_select_names):
                net.sel_vars.append(net.trainable_vars[i])
        net.adam = tf.train.AdamOptimizer(
            learning_rate=FLAGS.learning_rate).minimize(net.loss,
                                                        var_list=net.sel_vars)

    # Start Session
    saver = tf.train.Saver(tf.global_variables())
    with tf.Session() as sess:
        # Load Pretrained Model (VGG-Face)
        sess.run(tf.global_variables_initializer())
        net.vgg_net.load_pretrained(sess)

        # Set Writer, Logger, Checkpoint folder
        train_writer = tf.summary.FileWriter(FLAGS.summaries_dir + '/train',
                                             sess.graph)
        logger = Logger(FLAGS.summaries_dir)
        logger.write(str(FLAGS.__flags))
        checkpoint_dir = os.path.join(FLAGS.summaries_dir, 'checkpoints')
        checkpoint_prefix = os.path.join(checkpoint_dir, "model.ckpt")
        if not os.path.exists(checkpoint_dir):
            os.makedirs(checkpoint_dir)

        # Restore Checkpoint
        step = 0
        ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)
            step = int(ckpt.model_checkpoint_path.split('-')[-1])
            print('Session restored successfully. step: {0}'.format(step))
            step += 1

        # Generate Mini-batch
        train_batch = dataset.batch_generator_thread(FLAGS.batch_size, 'train')
        test_batch = dataset.batch_generator_thread(FLAGS.test_batch_size,
                                                    'test')

        # Run Session
        for i in range(step, FLAGS.max_steps):
            p = float(i) / (FLAGS.max_steps)
            lamb = (2. / (1. + np.exp(-10. * p)) - 1.)
            x_batch, y_batch, idx, dom_label = next(train_batch)
            sess.run(net.adam,
                     feed_dict={
                         net.x: x_batch,
                         net.y_: y_batch,
                         net.d_: dom_label,
                         net.with_class_idx: idx,
                         net.keep_prob: FLAGS.keep_prob,
                         net.l: lamb
                     })

            if (i + 1) % FLAGS.display_step == 0:
                loss, d_loss, c_loss, d_acc, c_acc = sess.run(
                    [
                        net.loss, net.dom_loss, net.class_loss,
                        net.dom_accuracy, net.class_accuracy
                    ],
                    feed_dict={
                        net.x: x_batch,
                        net.y_: y_batch,
                        net.d_: dom_label,
                        net.with_class_idx: idx,
                        net.keep_prob: 1.,
                        net.l: lamb
                    })
                logger.write(
                    "[iter %d] costs(a,d,c)=(%4.4g,%4.4g,%4.4g) dom_acc: %.6f, class_acc: %.6f"
                    % (i + 1, loss, d_loss, c_loss, d_acc, c_acc))
                short_summary = tf.Summary(value=[
                    tf.Summary.Value(tag="loss/loss", simple_value=float(
                        loss)),
                    tf.Summary.Value(tag="loss/dom",
                                     simple_value=float(d_loss)),
                    tf.Summary.Value(tag="loss/cat",
                                     simple_value=float(c_loss)),
                    tf.Summary.Value(tag="acc/dom", simple_value=float(d_acc)),
                    tf.Summary.Value(tag="acc/cat", simple_value=float(c_acc)),
                    tf.Summary.Value(tag="lambda", simple_value=float(lamb)),
                ])
                train_writer.add_summary(short_summary, i)

            if (i + 1) % FLAGS.test_step == 0:
                x_batch, y_batch, idx, dom_label = next(test_batch)
                loss, d_loss, c_loss, d_acc, c_acc = sess.run(
                    [
                        net.loss, net.dom_loss, net.class_loss,
                        net.dom_accuracy, net.class_accuracy
                    ],
                    feed_dict={
                        net.x: x_batch,
                        net.y_: y_batch,
                        net.d_: dom_label,
                        net.with_class_idx: idx,
                        net.keep_prob: 1.,
                        net.l: lamb
                    })
                logger.write(
                    "[Test iter %d] costs(a,d,c)=(%4.4g,%4.4g,%4.4g) dom_acc: %.6f, class_acc: %.6f"
                    % (i + 1, loss, d_loss, c_loss, d_acc, c_acc))

            if (i + 1) % FLAGS.save_step == 0:
                saver.save(sess, checkpoint_prefix, global_step=i + 1)
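The lamb variable follows the usual gradient-reversal ramp-up from domain-adversarial training, lambda(p) = 2 / (1 + e^(-10p)) - 1, which rises from 0 at the start of training towards 1. A quick check of the schedule:

import numpy as np

for p in (0.0, 0.25, 0.5, 1.0):
    lamb = 2. / (1. + np.exp(-10. * p)) - 1.
    print('p=%.2f -> lambda=%.4f' % (p, lamb))
# p=0.00 -> 0.0000, p=0.25 -> 0.8483, p=0.50 -> 0.9866, p=1.00 -> 0.9999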
Example 15
    cuda = torch.cuda.is_available()
    if cuda:
        print('Using GPU for acceleration')
    else:
        print('Using CPU...')

    model = Net()
    # Start from checkpoint, if specified
    if opt.pretrained_weights:
        model.load_state_dict(torch.load(opt.pretrained_weights))
        print("pretrained model loaded!")
    if cuda:
        model = model.cuda()
        print('Loaded model on GPU')

    data_manager = DataManager(opt)
    dataset = data_manager.get_datasets()
    train_dataset = dataset["train"]
    test_dataset = dataset["validation"]

    train_dataloader = data_manager.get_dataloaders()["train"]
    test_dataloader = data_manager.get_dataloaders()["validation"]

    grad_cam = GradCam(model=model, feature_module=model.layer2,
                       target_layer_names=["0"],
                       use_cuda=cuda)  # follow the availability check above

    bce = nn.BCELoss()
    cross_entropy = nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

    for epoch in range(num_epochs):
Example 16
    def setUp(self):
        self.dataManager = DataManager(MovieValidator)
        self.movieService = MovieService(self.dataManager)

        for i in range(1, 10):
            self.movieService.addMovie(Movie(i, "Title" + str(i), "Desc" + str(i), "Genre" + str(i)))
Example 17
class Amity():
    """This class is the entry point for the system"""
    def __init__(self,
                 rooms_file='data_files/rooms',
                 persons_file='data_files/persons'):
        """Initializes the class and get the rooms file and persons file"""
        self.rooms_data = shelve.open(rooms_file)
        self.persons_data = shelve.open(persons_file)
        self.manager = DataManager(self.rooms_data, self.persons_data)

    def add_person(self, firstname, lastname, person_type, living_choice='N'):
        """Calls the add_person method from the DataManager class"""
        self.manager.add_person(firstname, lastname, person_type,
                                living_choice)
        self.close_file()

    def create_room(self, room_name, room_type):
        """Checks room_type and calls save_room method from DataManager"""
        if room_type.upper() in ('OFFICE', 'LIVING'):
            self.manager.save_room(room_name, room_type)
        else:
            raise Exception('Room type invalid. Must be office or living')
        self.close_file()

    def print_allocations(self, filename=''):
        """Checks room_type and calls save_room method from DataManager"""
        self.manager.load_all_rooms(filename)
        self.close_file()

    def print_unallocated(self, filename=''):
        """Calls print_unallocated from DataManager class"""
        self.manager.print_unallocated(filename)
        self.close_file()

    def print_room(self, room_name):
        """Calls print_room method from DataManager class"""
        self.manager.print_room(room_name)
        self.close_file()

    def reallocate_person(self, person_id, new_room_name):
        """Calls reallocate_person method from DataManager class"""
        self.manager.reallocate_person(person_id, new_room_name)
        self.close_file()

    def load_people(self, filename):
        """Calls load_people method from DataManager class"""
        self.manager.load_people(filename)
        self.close_file()

    def save_state(self, filename):
        """Calls save_state method from DatabaseManager class"""
        DatabaseManager(self.rooms_data, self.persons_data,
                        filename).save_state()
        self.close_file()

    def load_state(self, filename):
        """Calls save_state method from DatabaseManager class"""
        DatabaseManager(self.rooms_data, self.persons_data,
                        filename).load_state()
        self.close_file()

    def print_people(self):
        """Calls save_state method from DataManager class"""
        self.manager.print_people()
        self.close_file()

    def close_file(self):
        """Close files"""
        self.rooms_data.close()
        self.persons_data.close()
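Given that every public method ends with close_file(), each Amity instance can service only one call before its shelve files are closed. A hypothetical session (room and person_type values are illustrative):

Amity().create_room('Valhalla', 'OFFICE')
Amity().add_person('Jane', 'Doe', 'FELLOW', living_choice='Y')
Amity().print_allocations()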
Example 18
    def setUp(self):
        self.manager = DataManager(RentalValidator)
        self.rentalService = RentalService(self.manager)
        self.rentalService.addRental(Rental(564, 1, 1, 214, 2144, None))
        for i in range(1, 10):
            self.rentalService.addRental(Rental(i, i, i, 100+i, 200+i, None))
Example 19
class TestDataService(TestCase):

    def setUp(self):
        self.dataManager = DataManager(ValidateTest)

    def test_saveEntity(self):
        o = ObjForTest(1, "Name1")
        self.dataManager.saveEntity(o)

        with self.assertRaises(Exception):
            o = ObjForTest(3, 2321)
            self.dataManager.saveEntity(o)

    def test_getEntityById(self):
        self.dataManager.saveEntity(ObjForTest(1, "name"))

        self.assertEqual(self.dataManager.getEntityById(-2), None)
        self.assertEqual(self.dataManager.getEntityById(1).ID, 1, "ID error")
        self.assertEqual(self.dataManager.getEntityById(1).name, "name", "Name error")

    def test_updateEntity(self):
        self.dataManager.saveEntity(ObjForTest(2, "Jhon"))
        self.dataManager.saveEntity(ObjForTest(3, "Marry"))

        with self.assertRaises(Exception):
            self.dataManager.updateEntity(5, ObjForTest(3, "Asd"))

        self.dataManager.updateEntity(2, ObjForTest(2, "Martin"))
        self.assertEqual(self.dataManager.getEntityById(2).ID, 2, "ID error")
        self.assertEqual(self.dataManager.getEntityById(2).name, "Martin", "Name error")

    def test_deleteEntityById(self):
        self.dataManager.saveEntity(ObjForTest(1, "Jhon"))
        self.dataManager.saveEntity(ObjForTest(2, "Marry"))
        with self.assertRaises(Exception):
            self.dataManager.deleteEntityById(5)

        print(self.dataManager.getEntities()[2])
        self.dataManager.deleteEntityById(2)

        c = self.dataManager.getEntityById(2)
        self.assertEqual(c, None, "Failed deleting")

    def test_entityExists(self):
        o = ObjForTest(1, "asd")
        self.dataManager.saveEntity(o)
        self.assertEqual(self.dataManager.entityExists(2), False)
        self.assertEqual(self.dataManager.entityExists(1), True)

    def test_getEntities(self):
        o = ObjForTest(1, "asd")
        o2 = ObjForTest(2, "sadas")
        self.dataManager.saveEntity(o)
        self.dataManager.saveEntity(o2)

        self.assertEqual(len(self.dataManager.getEntities()), 2)
Example 21
    def setUp(self):
        self.dataManager = DataManager(ValidateTest)
Example 22
    def setUp(self):
        self.dataManager = DataManager(ClientValidator)
        self.clientService = ClientService(self.dataManager)

        for i in range(1, 10):
            self.clientService.addClient(Client(i, "Name" + str(i)))
Example 23
def main(_):
    FLAGS = flags.FLAGS

    print(FLAGS.summaries_dir)
    print("test_batch_size: ", FLAGS.test_batch_size)
    # Set Dataset Manager
    from data.data_manager import Manager as DataManager
    dataset = DataManager('./data', FLAGS.dataset, FLAGS.exp_mode)

    # Set Domain Adaptation Network
    net_opts = Network.OPTS()
    net_opts.network_name = 'dom_adapt_net'
    net_opts.weight_path = 'pretrained/vgg-face.mat'
    net_opts.num_class = dataset.num_class
    net = Network(net_opts, FLAGS.emb_layer)
    net.construct()
    net.probe_prob = tf.nn.softmax(net.class_score)
    net.probe_correct_pred = tf.equal(
        tf.argmax(net.probe_prob[:, 0:net.opts.num_class], 1),
        tf.argmax(net.y_, 1))
    net.probe_class_accuracy = tf.reduce_mean(
        tf.cast(net.probe_correct_pred, tf.float32))

    # Start Session
    saver = tf.train.Saver(tf.global_variables())
    with tf.Session() as sess:
        # Load Pretrained Model (VGG-Face)
        sess.run(tf.global_variables_initializer())
        net.vgg_net.load_pretrained(sess)

        # Set Writer, Checkpoint folder
        train_writer = tf.summary.FileWriter(FLAGS.summaries_dir + '/train',
                                             sess.graph)
        checkpoint_dir = os.path.join(FLAGS.summaries_dir, 'checkpoints')
        checkpoint_prefix = os.path.join(checkpoint_dir, "model.ckpt")
        if not os.path.exists(checkpoint_dir):
            os.makedirs(checkpoint_dir)

        # Restore Checkpoint
        step = 0
        ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(sess, ckpt.model_checkpoint_path)
            step = int(ckpt.model_checkpoint_path.split('-')[-1])
            print('Session restored successfully. step: {0}'.format(step))
            step += 1

        # Generate Mini-batch
        test_batch = dataset.eval_batch_generator(FLAGS.test_batch_size)

        # Run Session
        acc_loss, acc_d_loss, acc_c_loss, acc_d_acc, acc_c_acc = 0., 0., 0., 0., 0.
        acc_b = 0
        num_left = 1
        while num_left > 0:
            x_batch, y_batch, idx, dom_label, num_left = test_batch.__next__()
            b_size = x_batch.shape[0]
            loss, d_loss, c_loss, d_acc, c_acc, embed_feat = sess.run(
                [
                    net.loss, net.dom_loss, net.class_loss, net.dom_accuracy,
                    net.probe_class_accuracy, net.embedded_with_class
                ],
                feed_dict={
                    net.x: x_batch,
                    net.y_: y_batch,
                    net.d_: dom_label,
                    net.with_class_idx: idx,
                    net.keep_prob: 1.,
                    net.l: 1.
                })
            acc_loss += loss * b_size
            acc_d_loss += d_loss * b_size
            acc_c_loss += c_loss * b_size
            acc_d_acc += d_acc * b_size
            acc_c_acc += c_acc * b_size
            acc_b += b_size
        print(
            "[AGGR. TEST iter %d] costs(a,d,c)=(%4.4g,%4.4g,%4.4g) dom_acc: %.6f, class_acc: %.6f\n"
            % (step, acc_loss / acc_b, acc_d_loss / acc_b, acc_c_loss / acc_b,
               acc_d_acc / acc_b, acc_c_acc / acc_b))
Example 24
def run_application():
    """
    The main method of the application
    :return: nothing
    """
    try:
        undoRedoHandler = UndoHandler()
        dataManagerType = Settings.getDataManagerType()

        clientManager = None
        movieManager = None
        rentalManager = None

        if dataManagerType == "memory":
            # IN MEMORY
            clientManager = DataManager(ClientValidator)
            movieManager = DataManager(MovieValidator)
            rentalManager = DataManager(RentalValidator)
        elif dataManagerType == "pickle":
            # WITH PICKLE SERIALIZATION
            clientManager = DataManagerPickle(ClientValidator,
                                              "clients.pickle")
            movieManager = DataManagerPickle(MovieValidator, "movies.pickle")
            rentalManager = DataManagerPickle(RentalValidator,
                                              "rentals.pickle")
        elif dataManagerType == "text":
            # WITH SIMPLE TXT FILES:
            clientManager = DataManagerText(ClientValidator, "clients.text",
                                            Client)
            movieManager = DataManagerText(MovieValidator, "movies.text",
                                           Movie)
            rentalManager = DataManagerText(RentalValidator, "rentals.text",
                                            Rental)
        elif dataManagerType == "sql":
            # WITH SQL
            clientManager = DataManagerSql(ClientValidator, "moviestore",
                                           "clients", Client)
            movieManager = DataManagerSql(MovieValidator, "moviestore",
                                          "movies", Movie)
            rentalManager = DataManagerSql(RentalValidator, "moviestore",
                                           "rentals", Rental)
        elif dataManagerType == "json":
            # WITH JSON
            clientManager = DataManagerJson(ClientValidator, "clientsJSON",
                                            "clients", Client)
            movieManager = DataManagerJson(MovieValidator, "moviesJSON",
                                           "movies", Movie)
            rentalManager = DataManagerJson(RentalValidator, "rentalsJSON",
                                            "rentals", Rental)

        print("clientManager: ", type(clientManager))
        print("movieManager: ", type(movieManager))
        print("rentalManager: ", type(rentalManager))

        clientService = ClientService(clientManager)
        movieService = MovieService(movieManager)
        rentalService = RentalService(rentalManager)

        console = Console(clientService, movieService, rentalService,
                          undoRedoHandler)
        console.startConsole()
    except Exception as ex:
        print("Exception: ", ex)
        traceback.print_exc()
Example 25
    def setUp(self):
        self.sess = None
        self.data_manager = DataManager(hparams=hparams)