Example #1
0
    def __init__(
        self,
        instence_id=0,
        config_dir='./cfg',
    ):
        """Initialise the instance and build train/val dataloaders.

        Prints workspace information, extends ``sys.path`` with the parent
        directory, initialises both parent classes, and constructs shuffled
        ``DataLoader`` objects for the 'train' and 'val' splits.

        NOTE(review): ``root`` is not defined in this block — presumably a
        module-level global; confirm it exists at import time.
        """
        self.root = root
        print('root in :\n', os.path.join(self.root, '..'))
        # Make the parent directory importable for project-local modules.
        sys.path.append(os.path.join(sys.path[0], '../'))
        print('workspace in:\n')
        for search_entry in sys.path:
            print(search_entry)

        # Initialise both parents explicitly.  BatchSize / worker_num used
        # below are presumably set by one of them — TODO confirm.
        DatasetGenerator.__init__(self)
        super(Instence, self).__init__()
        print('\n\n-----Instence Class Init-----\n\n')

        # ------------------------------ dataloaders ------------------------------
        # Training split: shuffled loader over the default 'train' dataset.
        self.TrainSet = DatasetGenerator()
        self.TrainSet.DefaultDataset(Mode='train')
        self.Trainloader = DataLoader(
            self.TrainSet,
            self.BatchSize,
            shuffle=True,
            num_workers=self.worker_num,
            collate_fn=self.TrainSet.detection_collate_fn,
        )

        # Validation split: identical configuration, 'val' mode.
        self.ValSet = DatasetGenerator()
        self.ValSet.DefaultDataset(Mode='val')
        self.Valloader = DataLoader(
            self.ValSet,
            self.BatchSize,
            shuffle=True,
            num_workers=self.worker_num,
            collate_fn=self.ValSet.detection_collate_fn,
        )
Example #2
0
    def __init__(
        self,
        instence_id=0,
        configfile='./cfg',
    ):
        """Initialise the instance and build the full data pipeline.

        Args:
            instence_id: numeric id for this instance (unused in the
                visible body).
            configfile: path handed to ``DatasetGenerator`` for
                configuration loading.

        Side effects: prints workspace info, appends the parent directory
        to ``sys.path``, and populates ``self.trainloader`` /
        ``self.valloader`` plus the samplers they depend on.
        """

        # ---------------------------------------------------------------------------- #
        #                                workspace info                                #
        # ---------------------------------------------------------------------------- #

        # NOTE(review): `root` is not defined in this block — presumably a
        # module-level global; confirm it exists at import time.
        self.root = root
        self.configfile = configfile
        print('root in :\n', os.path.join(self.root, '..'))
        # Make the parent directory importable for project-local modules.
        sys.path.append(os.path.join(sys.path[0], '../'))
        print('workspace in:\n')
        for i in sys.path:
            print(i)

        # Parent init — presumably populates the config-driven attributes read
        # below (BatchSize, worker_num, DefaultDataset, DistributedDataParallel,
        # aspect_ratio_factor, gpu_id, collate_fn) — TODO confirm.
        DatasetGenerator.__init__(self, configfile=configfile)

        # super(Instence,self).__init__()
        print('\n\n-----Instence Class Init-----\n\n')

        # ---------------------------------------------------------------------------- #
        #                                  dataloader                                  #
        # ---------------------------------------------------------------------------- #

        # ------------------------------ dataset object ------------------------------ #

        # COCO-style preprocessing chain: polygon→mask conversion, tensor
        # conversion, then 50% horizontal flip augmentation.
        transforms = []
        transforms.append(ConvertCocoPolysToMask())
        transforms.append(T.ToTensor())
        transforms.append(T.RandomHorizontalFlip(0.5))
        self.transform_compose = T.Compose(transforms)

        # ---------------------------------------------------------------------------- #
        #                                   temp part                                  #
        # ---------------------------------------------------------------------------- #

        # Build train/val sets; training images without annotations are
        # filtered out (they contribute no detection targets).
        if self.DefaultDataset:
            self.datasets = DatasetGenerator(transforms=self.transform_compose,
                                             configfile=configfile)
            self.datasets.DefaultDatasetFunction()

            self.trainset = _coco_remove_images_without_annotations(
                self.datasets.trainset)
            self.valset = self.datasets.valset
            print('-----train&val set already done')

        # ----------------------------- DataLoader object ---------------------------- #

        # Distributed path: per-rank samplers, and the model is wrapped in DDP.
        # NOTE(review): self.model must already exist here (presumably created
        # by a parent init) or this branch raises AttributeError — confirm.
        if self.DistributedDataParallel:
            self.train_sampler = torch.utils.data.distributed.DistributedSampler(
                self.trainset)
            self.test_sampler = torch.utils.data.distributed.DistributedSampler(
                self.valset)
            print("-----DistributedDataParallel Sampler build done")
            self.model = torch.nn.parallel.DistributedDataParallel(
                self.model, device_ids=self.gpu_id)
            self.model_without_ddp = self.model.module

        # Single-process path: random order for training, sequential for eval.
        if not self.DistributedDataParallel:

            self.train_sampler = torch.utils.data.RandomSampler(self.trainset)
            self.test_sampler = torch.utils.data.SequentialSampler(self.valset)
            print("-----DataSampler build done")

        # ---------------------------------- Sampler --------------------------------- #

        # Group images of similar aspect ratio into the same batch (reduces
        # padding); fall back to a plain batch sampler when disabled (< 0).
        if self.aspect_ratio_factor >= 0:
            self.group_ids = create_aspect_ratio_groups(
                self.trainset, k=self.aspect_ratio_factor)
            self.train_batch_sampler = GroupedBatchSampler(
                self.train_sampler, self.group_ids, self.BatchSize)
        else:
            self.train_batch_sampler = torch.utils.data.BatchSampler(
                self.train_sampler, self.BatchSize, drop_last=True)

        # ---------------------------------- loader ---------------------------------- #

        # Train loader uses the batch sampler (so batch_size is implicit).
        self.trainloader = torch.utils.data.DataLoader(
            self.trainset,
            batch_sampler=self.train_batch_sampler,
            num_workers=self.worker_num,
            collate_fn=self.collate_fn)

        # Val loader batches sequentially with an explicit batch size.
        self.valloader = torch.utils.data.DataLoader(
            self.valset,
            batch_size=self.BatchSize,
            sampler=self.test_sampler,
            num_workers=self.worker_num,
            collate_fn=self.collate_fn)