Example #1
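A unit test checking that DatasetFactory.create_dataset builds a Spark DataFrame from a Parquet configuration dict, asserting on the resulting columns and row count.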
    def test_create_dataset_should_load_parquet(self):
        dataset_conf = {
            "path": "resources/datasets/test.parquet",
            "format": "parquet"
        }

        dataset = DatasetFactory.create_dataset(self.spark, dataset_conf)

        self.assertEqual(dataset.columns, ["id", "text"])
        self.assertEqual(dataset.count(), 4)
Example #2
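The companion test for CSV input: the configuration carries the extra reader options ("sep", "header") that the CSV format needs, and the same assertions hold.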
    def test_create_dataset_should_load_csv(self):
        dataset_conf = {
            "path": "resources/datasets/test.csv",
            "format": "csv",
            "sep": ",",
            "header": True
        }

        dataset = DatasetFactory.create_dataset(self.spark, dataset_conf)

        self.assertEqual(dataset.columns, ["id", "text"])
        self.assertEqual(dataset.count(), 4)
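For context, a minimal PySpark implementation consistent with these two tests could look like the sketch below. This is an assumption, not the project's actual code; in particular, treating every key other than "path" and "format" as a Spark reader option is a guess.

    # Hypothetical sketch, not the project's real DatasetFactory:
    # "path" and "format" are consumed, every remaining key is
    # forwarded as a Spark reader option (e.g. "sep", "header").
    class DatasetFactory:

        @staticmethod
        def create_dataset(spark, dataset_conf):
            conf = dict(dataset_conf)  # avoid mutating the caller's dict
            path = conf.pop("path")
            data_format = conf.pop("format")
            return spark.read.format(data_format).options(**conf).load(path)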
Example #3
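A stage factory: an already-instantiated stage is returned untouched; otherwise the stage is resolved by name, after any nested dataset configuration in its parameters has been materialized through DatasetFactory.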
    def create_stage(spark, stage_conf):
        if not isinstance(stage_conf, dict):
            # Stage conf is already a stage
            return stage_conf

        name = stage_conf["name"]
        params = stage_conf["params"]

        if "dataset" in params and isinstance(params["dataset"], dict):
            # Create dataset if needed in stage parameters
            dataset_conf = params["dataset"]
            params["dataset"] = DatasetFactory.create_dataset(
                spark, dataset_conf)

        stage = StageFactory.get_stage(name)
        StageFactory.set_params(spark, stage, params)

        return stage
Example #4
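The step factory builds on the stage factory: a step's stage can itself be the output of another step (created and executed recursively) or a plain stage configuration, and dataset parameters are resolved exactly as in create_stage. A hypothetical configuration exercising this is sketched after the code.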
    def create_step(spark, step_conf):
        name = step_conf["name"]
        params = step_conf["params"]
        stage = step_conf["stage"]

        if isinstance(stage, dict) and "stage" in stage:
            # Stage comes from an executed step
            stage = StepFactory.create_step(spark, stage).execute()
        else:
            # Create a stage
            stage = StageFactory.create_stage(spark, stage)

        if "dataset" in params and isinstance(params["dataset"], dict):
            # Create dataset if needed in step parameters
            dataset_conf = params["dataset"]
            params["dataset"] = DatasetFactory.create_dataset(
                spark, dataset_conf)

        return Step(name, params, stage)
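A hypothetical configuration for the factory above (the stage and step names are illustrative, not taken from the project):

    # Illustrative only: "normalize" and "tokenizer" are made-up names.
    step_conf = {
        "name": "normalize",
        "params": {
            "dataset": {"path": "resources/datasets/test.parquet",
                        "format": "parquet"}
        },
        "stage": {"name": "tokenizer", "params": {}}
    }
    step = StepFactory.create_step(spark, step_conf)
    result = step.execute()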
Example #5
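An evaluation script: it obtains the test dataset from DatasetFactory.get_test_dataset, loads it as one large batch through a DataLoader, and hands that batch to eval_coco.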
    parser.add_argument('--t7', type=str, required=True)
    parser.add_argument('--gpu', type=str, required=True)
    parser.add_argument('--model', type=str, required=True)  # referenced below as args.model
    args = parser.parse_args()

    modelpath = args.t7

    device = torch.device("cuda" if len(args.gpu) > 0 else "cpu")

    # user defined parameters
    num_threads = multiprocessing.cpu_count()
    PATH_PREFIX = "./results/{}".format(modelpath.split(".")[0])

    input_size = 224
    modelname = args.model

    test_dataset = DatasetFactory.get_test_dataset("resnet", input_size)

    print("Loading testing dataset, wait...")
    bs_test = len(test_dataset)
    test_dataloader = DataLoader(test_dataset,
                                 batch_size=bs_test,
                                 shuffle=False,
                                 num_workers=num_threads)

    # batch_size equals len(test_dataset), so the loader yields the whole
    # test set in a single batch
    all_test_data = {}
    for i_batch, sample_batched in enumerate(tqdm(test_dataloader)):
        all_test_data = sample_batched
        eval_coco(all_test_data, modelname, modelpath,
                  os.path.join(PATH_PREFIX, 'result-gt-json.txt'),
                  os.path.join(PATH_PREFIX, 'result-pred-json.txt'))
Example #6
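Two fragments: a constructor that resolves a dataset interface by name through DatasetFactory, and an excerpt that selects a checkpoint path and input size per model type before fetching the matching test dataset.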
    def __init__(self, dataset_name, data_dir, cfg):
        self.dataset = DatasetFactory.get_dataset_interface(dataset_name, data_dir)
        self.cfg = cfg

    ROOT_DIR = "../deeppose_tf/datasets/mpii"

    if modeltype == 'resnet':
        # transforms: Rescale, Expansion, ToTensor
        full_name = "/home/yuliang/code/MobilePose-pytorch/models/demo/resnet18_227x227.t7"
        input_size = 227
    elif modeltype == 'mobilenet':
        # transforms: Wrap, Expansion, ToTensor
        full_name = "/home/yuliang/code/MobilePose-pytorch/models/demo/mobilenetv2_224x224-robust.t7"
        input_size = 224

    test_dataset = DatasetFactory.get_test_dataset(modeltype, input_size)

    print("Loading testing dataset, wait...")
Example #8
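Two fused fragments: a Keras pipeline that builds train/validation splits through a DatasetFactory instance and fits the model, followed by an excerpt from a separate PyTorch training script that loads train and test datasets and configures an RMSprop optimizer.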
import numpy as np
import cv2

from dataset_factory import DatasetFactory
from model_factory import Model
from constants import *

import tensorflow as tf
from tensorflow import keras

data_fact = DatasetFactory(train_path=TRAIN_PATH,
                           test_path=TEST_PATH,
                           image_size=IMAGE_SIZE,
                           batch_size=BATCH_SIZE)
model_fact = Model(TRAIN)

model = model_fact.get_model()

if LOAD:
    model.load_weights(SAVE_PATH)
    print(" - Modello caricato. - ")

else:
    train_data, val_data = data_fact.create_pandas_train_splitted_dataset()
    train_images, val_images = data_fact.create_images_train_splitted_dataset(
        train_data=train_data, val_data=val_data)

    history = model.fit(train_images,
                        epochs=10,
                        validation_data=val_images,
                        validation_steps=val_data.shape[0] // BATCH_SIZE)

    print("GPU NUM: %d" % (torch.cuda.device_count()))

    logname = modeltype + '-log.txt'

    if not args.retrain:
        # load the pretrained model
        net = torch.load('./models/%s/%s' % (modeltype, modelname)).cuda()
    net = net.train()

    ROOT_DIR = "../deeppose_tf/datasets/mpii"  # root dir to the dataset
    PATH_PREFIX = './models/{}/'.format(modeltype)  # path to save the model

    tmp_modeltype = "resnet"
    train_dataset = DatasetFactory.get_train_dataset(tmp_modeltype, inputsize)
    train_dataloader = DataLoader(train_dataset,
                                  batch_size=batchsize,
                                  shuffle=False,
                                  num_workers=num_threads)

    test_dataset = DatasetFactory.get_test_dataset(tmp_modeltype, inputsize)
    test_dataloader = DataLoader(test_dataset,
                                 batch_size=batchsize,
                                 shuffle=False,
                                 num_workers=num_threads)

    criterion = nn.MSELoss().cuda()
    # optimizer = optim.Adam(net.parameters(), lr=learning_rate, betas=(0.9, 0.999), eps=1e-08)
    # optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9)
    optimizer = optim.RMSprop(net.parameters(), lr=learning_rate, momentum=0.9)