Example #1
def main(args):
    if args.model == 'GAN':
        model = GAN(args)
    elif args.model == 'DCGAN':
        model = DCGAN_MODEL(args)
    elif args.model == 'WGAN-CP':
        model = WGAN_CP(args)
    elif args.model == 'WGAN-GP':
        model = WGAN_GP(args)
    else:
        print("Model type non-existing. Try again.")
        exit(-1)

    # Load datasets to train and test loaders
    train_loader, test_loader = get_data_loader(args)
    # feature_extraction = FeatureExtractionTest(train_loader, test_loader, args.cuda, args.batch_size)

    # Start model training
    if args.is_train == 'True':
        model.train(train_loader)

    # Start evaluating on test data
    else:
        model.evaluate(test_loader, args.load_D, args.load_G)
        # for i in range(50):
        #    model.generate_latent_walk(i)


if __name__ == '__main__':
    args = parse_args()
    main(args)
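
The parse_args() called above is not shown in this excerpt. The sketch below is only an assumption reconstructed from the attributes the snippet reads (model, is_train, load_D, load_G); the original project's parser may define more options and different defaults.

import argparse

def parse_args():
    # Assumed flags: only the options the snippet above actually reads are declared here.
    parser = argparse.ArgumentParser(description='GAN training / evaluation entry point')
    parser.add_argument('--model', type=str, default='WGAN-GP',
                        choices=['GAN', 'DCGAN', 'WGAN-CP', 'WGAN-GP'])
    parser.add_argument('--is_train', type=str, default='True',
                        help="'True' to train, anything else to evaluate")
    parser.add_argument('--load_D', type=str, default='',
                        help='path to a saved discriminator checkpoint')
    parser.add_argument('--load_G', type=str, default='',
                        help='path to a saved generator checkpoint')
    return parser.parse_args()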
Example #2
            # only the top-1 retrieval result is kept for the final output
            if scores[0][0] >= self.config["IndexProcess"]["score_thres"]:
                preds["rec_docs"] = self.id_map[docs[0][0]].split()[1]
                preds["rec_scores"] = scores[0][0]
                output.append(preds)

        # step 5: apply NMS to the final results to avoid duplicate detections
        output = self.nms_to_rec_results(
            output, self.config["Global"]["rec_nms_thresold"])

        return output


def main(config):
    system_predictor = SystemPredictor(config)
    image_list = get_image_list(config["Global"]["infer_imgs"])

    assert config["Global"]["batch_size"] == 1
    for idx, image_file in enumerate(image_list):
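        # cv2.imread returns BGR; reversing the last axis converts the image to RGB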
        img = cv2.imread(image_file)[:, :, ::-1]
        output = system_predictor.predict(img)
        draw_bbox_results(img, output, image_file)
        print(output)
    return


if __name__ == "__main__":
    args = config.parse_args()
    config = config.get_config(args.config, overrides=args.override, show=True)
    main(config)
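
The nms_to_rec_results method used in step 5 above is outside this excerpt. Below is a minimal sketch of such a filter; it assumes each result dict carries a 'bbox' of [x1, y1, x2, y2] pixel coordinates next to its scalar 'rec_scores', and the real PaddleClas implementation may differ in its details.

def nms_to_rec_results(results, thresh=0.1):
    # Greedy NMS: keep the highest-scoring result, drop any remaining result
    # whose IoU with an already-kept box exceeds `thresh`.
    # Assumes each dict has 'bbox' = [x1, y1, x2, y2] and a scalar 'rec_scores'.
    filtered_results = []
    for result in sorted(results, key=lambda r: r["rec_scores"], reverse=True):
        x1, y1, x2, y2 = result["bbox"]
        keep = True
        for kept in filtered_results:
            kx1, ky1, kx2, ky2 = kept["bbox"]
            ix1, iy1 = max(x1, kx1), max(y1, ky1)
            ix2, iy2 = min(x2, kx2), min(y2, ky2)
            inter = max(0, ix2 - ix1) * max(0, iy2 - iy1)
            union = (x2 - x1) * (y2 - y1) + (kx2 - kx1) * (ky2 - ky1) - inter
            if union > 0 and inter / union > thresh:
                keep = False
                break
        if keep:
            filtered_results.append(result)
    return filtered_results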
Example #3
  proc = subprocess.Popen(
      command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True,
      universal_newlines=True)

  for line in proc.stdout:
    sys.stdout.write(line)
    log_file.write(line)

  proc.wait()
  log_file.close()

def main(_):
  begin = time.time()

  tf.gfile.MakeDirs(FLAGS.model_dir)

  # redirects tf logs to file
  log_file = logging.init_logger(FLAGS.model_dir, FLAGS.do_debug)
  config.display_args(FLAGS)

  if FLAGS.model == "bert":
    run_bert_classifier(log_file)
  else:
    E = DRSCExperiment(FLAGS)
    E.run()

  tf.logging.info("Execution Time: {:.2f}s".format(time.time() - begin))

if __name__ == '__main__':
  FLAGS = config.parse_args()
  tf.app.run()
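
The excerpt above streams a subprocess's output to both the console and a log file. A self-contained sketch of that tee pattern follows; the run_and_tee name and its signature are illustrative, not taken from the original code.

import subprocess
import sys

def run_and_tee(command, log_path):
    # Illustrative helper: launch the command, merge stderr into stdout, and
    # copy each line to the console and to the log file as it arrives.
    proc = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True,
        universal_newlines=True)
    with open(log_path, 'w') as log_file:
        for line in proc.stdout:
            sys.stdout.write(line)
            log_file.write(line)
    return proc.wait()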
Example #4
File: ncp.py Project: lilujunai/NCP
        optimizer.step()

        # Clamp each dimension of the updated point back into its allowed normalized range
        for i in range(normalized_data.shape[1]):
            if normalized_data[0][i] > cfg.data.normalized_max_value[i]:
                normalized_data[0][i] = cfg.data.normalized_max_value[i]
            if normalized_data[0][i] < cfg.data.normalized_min_value[i]:
                normalized_data[0][i] = cfg.data.normalized_min_value[i]
        # Rebuild as a fresh leaf tensor so the next optimizer step can compute gradients
        normalized_data = normalized_data.detach().clone()
        normalized_data.requires_grad = True

        denormalized_data = denormalize(normalized_data, cfg)
        rounded_data = denormalized_data.copy()
        for i in range(rounded_data.shape[0]):
            rounded_data[i] = _make_divisible(rounded_data[i],
                                              cfg.data.min_value[i])
        flops, params = net2flops(list(rounded_data.astype(int)), device)

    pickle.dump(edit_net_set, open(f"{cfg.log_dir}/NCP.pkl", "wb"))


if __name__ == '__main__':
    cfg = parse_args()
    # alias = f'epoch_{cfg.optimization.epoch}-bs_{cfg.optimization.batch_size}' \
    #         f'-{cfg.optimization.optimizer}-{cfg.optimization.scheduler}'
    # cfg.timestamp = time.strftime('{}-%Y%m%d-%H%M%S-{}'.format(cfg.data.dataset, alias))
    cfg.log_dir = '{}/{}'.format(cfg.log_dir, cfg.data.dataset)
    setup_logging(cfg.log_dir, file_name='NCP.log')
    logging.info(cfg)

    main(cfg)
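
The _make_divisible helper called on rounded_data above is not shown in this excerpt. A common implementation of that rounding utility (the variant popularized by the MobileNet reference code) is sketched below; the NCP project's own version may differ.

def _make_divisible(v, divisor, min_value=None):
    # Common MobileNet-style rounding (assumption, not the project's code):
    # round v to the nearest multiple of divisor, never dropping below
    # min_value and never reducing v by more than 10%.
    if min_value is None:
        min_value = divisor
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    if new_v < 0.9 * v:
        new_v += divisor
    return new_v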
Example #5
"""
@author: tompx-nobug
"""
from utils.config import parse_args
from utils.data_loader import get_data_loader
from models.nk_model import nkModel
import pandas as pd


def main(args):
    train_loader, val_loader, test_loader = get_data_loader(args)
    # TODO: the test_loader still needs to be created
    model = nkModel(args, train_loader, val_loader, test_loader)

    if args.is_train:
        model.train()
    else:
        temp_list = model.test()
        print(temp_list)
        my_df = pd.DataFrame(temp_list)
        my_df.to_csv('my_csv.csv', index=False, header=False)


if __name__ == '__main__':
    config = parse_args()
    main(config)
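
Because to_csv() above is called with index=False and header=False, reading the exported results back requires header=None, for example:

import pandas as pd

# Read the exported test results back; header=None because the file was
# written without a header row.
results = pd.read_csv('my_csv.csv', header=None)
print(results.head())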