Example #1
    def __init__(self, pkl_path=None, normalize=True, num_instance=4):

        self.normalize = normalize
        self.to_tensor = ToTensor(normalize=self.normalize)
        #self.data = []
        #self.generate_index()

        self.random_flip = RandomFlip(flip_prob=0.5)

        # Check that the pkl file exists
        if not os.path.exists(pkl_path):
            raise ValueError('{} does not exist!'.format(pkl_path))
        # Load the pkl: pid -> [_, image_id, camera_id]
        with open(pkl_path, 'rb') as fs:
            self.pkl = pickle.load(fs)

        self.sort_keys = list(sorted(self.pkl.keys()))

        self.len = len(self.pkl)

        # nori
        self.nf = nori.Fetcher()

        # How many images to sample per identity at a time
        self.num_instance = num_instance
Example #2
def NoriReader(paths=[]):
    import nori2 as nori
    from nori2.multi import MultiSourceReader
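    # return the open-source multi-source reader for the community build,
    # otherwise a nori Fetcher over the same paths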
    if config.community_version:
        return MultiSourceReader(paths)
    else:
        return nori.Fetcher(paths)
Example #3
    def __init__(self, img_size=(128, 64), bbox_threshold=200, pkl_path=None, normalize=True, num_instance=4):
        
        self.img_size = img_size
        self.normalize = normalize
        self.to_tensor = ToTensor(normalize=self.normalize)
        
        self.bbox_threshold = bbox_threshold
        

        self.random_flip = RandomFlip(flip_prob=0.5)
        self.resize = Resize(output_size=self.img_size)
        
        # Check that the pkl file exists
        if not os.path.exists(pkl_path):
            raise ValueError('{} does not exist!'.format(pkl_path))
        # Load the pkl: pid -> [_, image_id, camera_id]
        with open(pkl_path, 'rb') as fs:
            self.pkl = pickle.load(fs)

        self.len = len(self.pkl)
        
        # nori
        self.nf = nori.Fetcher()
        
        # How many images to sample per identity at a time
        self.num_instance = num_instance
Example #4
    def __init__(self, nori_list_dir, transform=None):
        self.name_list = []
        self.transform = transform
        with open(nori_list_dir) as fp:
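            # keep the whitespace-separated fields of each line as one record
            # (typically an image name paired with its nori data id)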
            for line in fp.readlines():
                self.name_list.append(line.split())

        self.nori_f = nori.Fetcher()
Example #5
    def fetch(self, file_name, fetcher=None):
        import nori2 as nr
        data_id = self.get(smart_path(file_name).name)
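        # lazily create and cache a Fetcher if the caller did not supply one,
        # then fetch the stored bytes for this data id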
        if fetcher is None:
            if self.fechcer is None:
                self.fechcer = nr.Fetcher()

            fetcher = self.fechcer

        return fetcher.get(data_id)
Example #6
 def __init__(self, data_file):
     self.t_usr = []
     self.t_prd = []
     self.t_hlp = []
     self.t_tme = []
     self.t_label = []
     self.t_docs = []
     self.t_sums = []
     self.nf = nori.Fetcher()
     nori_path = data_file.replace(".ss", ".nori.list")
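     # if a nori id list already exists, load each pickled record from nori;
     # otherwise pack the raw text file into a new nori dataset and write the id list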
     if os.path.exists(nori_path):
         nid_list = [i.strip() for i in open(nori_path, 'r').readlines()]
         for nid in nid_list:
             line = pickle.loads(self.nf.get(nid))
             self.t_usr.append(line[0])
             self.t_prd.append(line[1])
             self.t_label.append(line[2])
             self.t_docs.append(line[3])
             self.t_tme.append(line[4])
             self.t_hlp.append(line[6])
     else:
         with nori.open(nori_path.replace(".list", ""), 'w') as nw:
             nid_list = []
             with open(data_file, 'r') as f:
                 idx = 0
                 for line in f:
                     # line = line.strip().decode('utf8', 'ignore').split('\t\t')
                     line = line.strip().split('\t\t')
                     if idx == 0:
                         print("one input data line: ", line)
                         idx = 2
                         print("length:", len(line))
                     if len(line) < 8:
                         continue
                     line = [i.strip().lower() for i in line]
                     line[2] = int(float(line[2])) - 1
                     nid = nw.put(pickle.dumps(line))
                     nid_list.append(nid)
                     self.t_usr.append(line[0])
                     self.t_prd.append(line[1])
                     self.t_label.append(line[2])
                     self.t_docs.append(line[3])
                     self.t_tme.append(line[4])
                     self.t_hlp.append(line[6])
                     # self.t_sums.append(line[8].strip().lower())
             os.system('nori speedup {} --on'.format(
                 nori_path.replace(".list", "")))
             with open(nori_path, 'w') as of:
                 for nid in nid_list:
                     print(nid, file=of)
     self.data_size = len(self.t_docs)
     self.sum_size = len(self.t_sums)
Example #7
def main(nori_path, lmdb_path=None):
    if lmdb_path is None:
        lmdb_path = nori_path
    env = lmdb.Environment(lmdb_path,
                           map_size=int(5e10),
                           writemap=True,
                           max_dbs=2,
                           lock=False)
    fetcher = nori.Fetcher(nori_path)
    db_extra = env.open_db('extra'.encode(), create=True)
    db_image = env.open_db('image'.encode(), create=True)
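    # copy every nori record into LMDB: per-record metadata goes into the
    # 'extra' sub-database, the raw image bytes into 'image'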
    with nori.open(nori_path, 'r') as nr:
        with env.begin(write=True) as writer:
            for data_id, data, meta in tqdm(nr.scan()):
                value = {}
                image = fetcher.get(data_id)
                value['extra'] = {}
                for key in meta['extra']:
                    value['extra'][key] = meta['extra'][key]
                writer.put(data_id.encode(), pickle.dumps(value), db=db_extra)
                writer.put(data_id.encode(), image, db=db_image)
    env.close()
    print('Finished')
Example #8
 def __init__(self, cmd={}, **kwargs):
     self.load_all(**kwargs)
     self.fetcher = nori.Fetcher()
     if 'mode' in cmd:
         self.mode = cmd['mode']
Example #9
# encoding: utf-8
"""
@author:  liaoxingyu
@contact: [email protected]
"""

import numpy as np
import os.path as osp
from PIL import Image
from torch.utils.data import Dataset

try:
    import nori2 as nori

    nf = nori.Fetcher()
    import cv2

    use_nori = True
except ImportError as e:
    use_nori = False


def read_image(img_path):
    """Keep reading image until succeed.
    This can avoid IOError incurred by heavy IO process."""
    got_img = False
    if not osp.exists(img_path):
        raise IOError("{} does not exist".format(img_path))
    while not got_img:
        try:
            img = Image.open(img_path).convert('RGB')
            got_img = True
        except IOError:
            print("IOError incurred when reading '{}'. Will redo.".format(img_path))
            pass
    return img
Example #10
import numpy as np
import pickle as pkl
import png

import nori2
from ip_basic import depth_map_utils, depth_map_utils_ycb
from ip_basic import vis_utils
import sys
sys.path.append('..')
from lib.utils.my_utils import my_utils
from neupeak.utils.webcv2 import imshow, waitKey
from tqdm import tqdm
import concurrent.futures


nf = nori2.Fetcher()


def show_depth(name, dpt):
    dpt = (dpt / np.max(dpt) * 255).astype(np.uint8)
    imshow(name, dpt)


def get_one_show(nid):
    fill_type = 'multiscale'
    # fill_type = 'fast'
    show_process = False
    extrapolate = True  # or False
    # blur_type = 'gaussian'
    blur_type = 'bilateral'
Example #11
    def __init__(self, dataset_name):
        self.dataset_name = dataset_name
        self.xmap = np.array([[j for i in range(640)] for j in range(480)])
        self.ymap = np.array([[i for i in range(640)] for j in range(480)])
        self.diameters = {}

        self.trancolor = transforms.ColorJitter(0.2, 0.2, 0.2, 0.05)
        self.norm = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.224])
        self.obj_dict={
            'ape':1,
            'cam':2,
            'cat':3,
            'duck':4,
            'glue':5,
            'iron':6,
            'phone':7,
            'benchvise':8,
            'can':9,
            'driller':10,
            'eggbox':11,
            'holepuncher':12,
            'lamp':13,
        }

        self.rng = stable_rng(random.random())
        self.nf = nori2.Fetcher()
        self.cls_id = self.obj_dict[cls_type]

        if dataset_name == 'train':
            self.nid_path = config.train_nid_path
            config.add_noise = False
        elif dataset_name == 'val':
            self.nid_path = config.validation_nid_path
            config.add_noise = False
        else:
            self.nid_path = config.test_nid_path
            config.add_noise = False

        if 'zbuf' in self.nid_path:
            self.depth_scale = 1.0
            self.mm2m = 1.0
        else:
            self.depth_scale = 2.0
            self.mm2m = 1000.0
        dataset_items = self.read_nid_list(self.nid_path)
        fuse_nid_list = []
        if len(config.fuse_nid_path) > 0:
            fuse_nid_list = self.read_nid_list(config.fuse_nid_path)
        if dataset_name == 'validation':
            if os.path.exists(config.validation_preproc_nid_path):
                self.dataset_items = self.read_nid_list(config.validation_preproc_nid_path)
                print('{} val nori exists'.format(cls_type))
            else:
                print('packing {} val nori'.format(cls_type))
                self.dataset_items = self.pack_preproc(dataset_items, "val")
                self.save_nid_list(config.validation_preproc_nid_path, self.dataset_items)
        elif dataset_name == 'test':
            if os.path.exists(config.test_preproc_nid_path):
                self.dataset_items = self.read_nid_list(config.test_preproc_nid_path)
                print('{} test nori exists'.format(cls_type))
            else:
                print('packing {} test nori'.format(cls_type))
                self.dataset_items = self.pack_preproc(dataset_items, "test")
                self.save_nid_list(config.test_preproc_nid_path, self.dataset_items)
        else:
            if len(fuse_nid_list) > 0:
                self.dataset_items = dataset_items + fuse_nid_list
                shuffle(self.dataset_items)
            else:
                self.dataset_items = dataset_items
Example #12
def get(dataset_name):
    rng = stable_rng(stable_rng)
    # get train dataset
    g = nori.Fetcher()

    with open(config.label, 'r') as f:
        file_json = json.load(f)
    rng.shuffle(file_json['results'])

    labels_pose = []
    labels_master_belt = []
    labels_co_belt = []
    labels_master_visor = []
    labels_co_visor = []
    labels_penant = []
    labels_issue = []
    labels_call = []
    imgs = []
    indexs = []

    for one_img in file_json["results"]:
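        # fetch the encoded image bytes by nori ID and decode them with OpenCV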
        img = cv2.imdecode(np.fromstring(g.get(one_img['ID']), np.uint8), cv2.IMREAD_UNCHANGED)
        assert img.shape[0] > 0 and img.shape[1] > 0 and img.shape[2] == 3

        # for crop window
        img_crop = cv2.resize(img, config.image_shape)

        # label = one_img['label']
        label_pose = one_img['label_pose']
        label_master_belt = one_img['label_master_belt']
        label_co_belt = one_img['label_co_belt']
        label_master_visor = one_img['label_master_visor']
        label_co_visor = one_img['label_co_visor']
        label_penant = one_img['label_penant']
        label_issue  = one_img['label_issue']
        label_call = one_img['label_call']
        imgs.append(img_crop)
        indexs.append(one_img['index'])

        labels_pose.append(label_pose)
        labels_master_belt.append(label_master_belt)
        labels_co_belt.append(label_co_belt)
        labels_master_visor.append(label_master_visor)
        labels_co_visor.append(label_co_visor)
        labels_penant.append(label_penant)
        labels_issue.append(label_issue)
        labels_call.append(label_call)

    labels_pose = np.array(labels_pose)
    labels_master_belt = np.array(labels_master_belt)
    labels_co_belt = np.array(labels_co_belt)
    labels_master_visor = np.array(labels_master_visor)
    labels_co_visor = np.array(labels_co_visor)
    labels_penant = np.array(labels_penant)
    labels_issue = np.array(labels_issue)
    labels_call = np.array(labels_call)
    labels = [[labels_pose[i], labels_master_belt[i], labels_co_belt[i],
               labels_master_visor[i], labels_co_visor[i], labels_penant[i],
               labels_issue[i], labels_call[i]]
              for i in range(len(labels_master_belt))]

    imgs = np.array(imgs)
    indexs = np.array(indexs)

    nr_imgs = len(imgs)

    train_ds = (imgs[:int(0.8 * nr_imgs)], labels[:int(0.8 * nr_imgs)],indexs[:int(0.8*nr_imgs)])
    val_ds = (imgs[int(0.8 * nr_imgs):int(0.9 * nr_imgs)], labels[int(0.8 * nr_imgs):int(0.9 * nr_imgs)],indexs[int(0.8*nr_imgs):int(0.9*nr_imgs)])
    test_ds = (imgs[int(0.9 * nr_imgs):], labels[int(0.9 * nr_imgs):], indexs[int(0.9*nr_imgs):])

    # ------- JUST FOR GENERATING DATA WITH SOFT LABELS -------------
    test_ds = (imgs, labels, indexs)
    # ----------------------------------------------------------

    datasets = {
        'train': train_ds,
        'validation': val_ds,
        'test': test_ds,
    }

    # imgs, labels = datasets[dataset_name]
    imgs, labels, indexs = datasets[dataset_name]
    ds_size = imgs.shape[0]

    nr_instances_in_epoch = {
        'train': datasets['train'][0].shape[0],
        'validation': datasets['validation'][0].shape[0],
        'test': datasets['test'][0].shape[0],
    }[dataset_name]

    # do_training = (dataset_name == 'train')
    # def sample_generator():
    #     while True:
    #         try:
    #             i = rng.randint(0, ds_size)
    #             img = imgs[i].swapaxes(1, 2).swapaxes(0, 1)
    #             label = [int(j) for j in labels[i]]
    #         except AssertionError:
    #             continue
    #
    #         yield DatasetMinibatch(
    #             data=np.array(img, dtype=np.float32),
    #             label=np.array(label, dtype=np.int32),
    #             check_minibatch_size=False,
    #         )

    def sample_generator():
        i = 0
        while True:
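            # walk through the images sequentially, wrapping back to the start
            # near the end of the split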
            try:
                img = imgs[i].swapaxes(1, 2).swapaxes(0, 1)
                label = [int(j) for j in labels[i]]
                index = indexs[i]
                i += 1
                if i == ds_size - 1:
                    i = 0
            except AssertionError:
                continue

            yield DatasetMinibatch(
                data=np.array(img, dtype=np.float32),
                label=np.array(label, dtype=np.float32),
                index=np.array(index, dtype=np.int32),
                check_minibatch_size=False,
            )

    dataset = GeneratorDataset(sample_generator)
    dataset = EpochDataset(dataset, nr_instances_in_epoch)
    dataset = StackMinibatchDataset(
        dataset, minibatch_size=config.minibatch_size,
    )

    dataset_dep_files = [
        config.real_path(f) for f in ['common.py', 'dataset.py']
        ]

    servable_name = config.make_servable_name(
        dataset_name, dataset_dep_files,
    )

    dataset = create_servable_dataset(
        dataset, servable_name,
        dataset.nr_minibatch_in_epoch,
        serve_type='combiner',
    )

    return dataset
Example #13
def get(dataset_name):
    rng = stable_rng(stable_rng)
    #get train dataset
    #nr_train = nori.open(config.nori_path)
    nr = nori.Fetcher()
    #imgs = []
    # pos_labels = []
    # neg_labels = []
    boxes = [[], []]
    f = open(config.read_odgt)
    files = f.readlines()
    from tqdm import tqdm
    for file in files:
        file = eval(file)
        for dtbox in file['dtboxes']:
            if dtbox['tag'] == '__TP__':
                class_idx = 1
            elif dtbox['tag'] == '__FP__':
                class_idx = 0
            boxes[class_idx].append({
                'noriID': file['noriID'],
                'box': dtbox['box']
            })
    print(len(boxes[0]), len(boxes[1]))

    #with open(config.json_label, 'r') as f:
    #    load_dict = json.load(f)
    #for k,v in load_dict.items():
    #    imgs.append(k)
    #    labels.append(int(v))
    #imgs = np.array(imgs)
    #labels = np.array(pos_labels)
    #labels = np.array(pos_labels)
    #nr_imgs = len(imgs)
    #train_ds = (imgs[:int(0.8*nr_imgs)], labels[:int(0.8*nr_imgs)])
    #val_ds =(imgs[int(0.8*nr_imgs):int(0.9*nr_imgs)], labels[int(0.8*nr_imgs):int(0.9*nr_imgs)])
    #test_ds = (imgs[int(0.9*nr_imgs):], labels[int(0.9*nr_imgs):])
    train_ds = boxes

    datasets = {
        'train': train_ds,
    }
    # nr = {
    #     'train': nr_train,
    #     'validation': nr_train,
    #     'test': nr_train,
    # }

    boxes = datasets[dataset_name]
    #  imgs = imgs.reshape(-1, 1, 28, 28)
    #ds_size = imgs.shape[0]

    nr_instances_in_epoch = {
        'train': len(boxes[0]) + len(boxes[1]),
    }[dataset_name]

    do_training = (dataset_name == 'train')

    def sample_generator():
        while True:
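            # sample a class (0 = FP, 1 = TP) uniformly, pick one of its boxes,
            # crop that region out of the decoded nori image and yield it as a labelled patch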
            try:
                class_idx = rng.randint(2)
                box_idx = rng.randint(0, len(boxes[class_idx]))
                #print(class_idx, box_idx)
                data = boxes[class_idx][box_idx]
                assert data is not None
                noriID = data['noriID']
                box = data['box']

                img = cv2.imdecode(np.fromstring(nr.get(noriID), np.uint8),
                                   cv2.IMREAD_UNCHANGED)  # maybe gray
                #x,y,w,h = int(box[imgs[i]][0]), int(box[imgs[i]][1]),int(box[imgs[i]][2]), int(box[imgs[i]][3])
                x, y, w, h = int(box[0]), int(box[1]), int(box[2]), int(box[3])
                x, y = min(max(x, 0),
                           img.shape[1]), min(max(y, 0), img.shape[0])
                w, h = min(w, img.shape[1] - x), min(h, img.shape[0] - y)
                img = img[y:y + h, x:x + w]
                #print(x,y,w,h)
                if len(img.shape) <= 2:
                    continue
                assert img.shape[0] > 0 and img.shape[1] > 0 and img.shape[
                    2] == 3
                img = rotate(img, do_training)
                img = scale(img, do_training)

                img = cv2.resize(img, config.image_shape)
                #img = img.swapaxes(1, 2).swapaxes(0, 1)
                label = class_idx  # must 0,1,2
                assert label in [0, 1]
                #print('[debug]', img.shape, label)
            except AssertionError:
                continue

            ic01 = augment(img, rng, do_training)
            yield DatasetMinibatch(
                data=np.array(ic01, dtype=np.float32),
                label=np.array(label, dtype=np.int32),
                check_minibatch_size=False,
            )

    dataset = GeneratorDataset(sample_generator)
    dataset = EpochDataset(dataset, nr_instances_in_epoch)
    dataset = StackMinibatchDataset(
        dataset,
        minibatch_size=config.minibatch_size,
    )

    dataset_dep_files = [
        config.real_path(f) for f in ['common.py', 'dataset.py']
    ]

    servable_name = config.make_servable_name(
        dataset_name,
        dataset_dep_files,
    )

    dataset = create_servable_dataset(
        dataset,
        servable_name,
        dataset.nr_minibatch_in_epoch,
        serve_type='combiner',
    )

    return dataset