def loadIdent(matches=None,
              mode='unit',
              organise=False,
              sorting=False,
              top=None,
              filtering=None):
    start = time.time()

    if matches is None:
        if mode == 'unit':
            data_path = PATH.OUT.IDE.DATA.UNIT
        elif mode == 'concept':
            data_path = PATH.OUT.IDE.DATA.CONCEPT
        else:
            raise Exception("Error: invalid mode for loading identification")

        print("Identification: loaded and processed from matches data.")
        if os.path.exists(data_path):
            matches = loadObject(data_path)
        else:
            raise Exception("Error: no data for identification")

    if mode == 'unit' and isConceptForm(matches):
        print("Matches: convert from units to concepts.")
        matches = reverseDict(matches)
    elif mode == 'concept' and isUnitForm(matches):
        print("Matches: convert from concepts to units.")
        matches = reverseDict(matches)

    if filtering is not None:
        if isinstance(filtering, int):
            filtering = getClasses(order=filtering)
        matches = filterDict(matches, filtering)

    # organise / sort the matches
    if organise:
        if mode == 'concept':
            matches = organiseMatches(matches, sorting, top)
        else:
            raise Exception(
                "Error: conflicting parameters for 'organise' and 'mode'.")
    elif sorting or top:
        pool = Pool()
        for k, v in matches.items():
            params = (v, False, [0], True, True)
            matches[k] = pool.apply_async(sortDict, args=params)
        pool.close()
        pool.join()
        for k, v in matches.items():
            if top and top >= 0:
                matches[k] = v.get()[:top]
            elif top and top < 0:
                matches[k] = v.get()[top:]
            else:
                matches[k] = v.get()

    end = time.time()
    print("Identification: finished {}s.".format(int(end - start)))
    return matches
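
# Usage sketch (illustrative only, not part of the original module): load the
# unit-level identification results and keep the top-k concepts per unit. The
# helper name below is hypothetical; it only wraps loadIdent() parameters shown above.
def topConceptsPerUnit(k=5):
    return loadIdent(mode='unit', sorting=True, top=k)
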
def loadClassifierClasses():
    global CSF_CLASS
    if CSF_CLASS:
        return CSF_CLASS
    path = PATH.MODEL.CLASSES
    mapping = loadObject(path, split=False)
    CSF_CLASS = mapping
    return mapping
def loadWnidNames():
    global WNID_NAME
    if WNID_NAME:
        return WNID_NAME
    path = PATH.DATA.IMAGENET.WORDS
    wnid_names = loadObject(path, split=False)
    for entry in wnid_names:
        entry = entry.split('\t')
        WNID_NAME[entry[0]] = entry[1]
    return WNID_NAME
def loadIsAMap():
    global ISA_MAP
    if ISA_MAP:
        return ISA_MAP
    path = PATH.DATA.IMAGENET.ISA
    isa_map = loadObject(path, split=False)
    for entry in isa_map:
        entry = entry.split(' ')
        sup, sub = entry
        ISA_MAP[sub] = sup
    return ISA_MAP
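
# Illustrative helper (not in the original source): combine the two caches above
# to resolve a WNID into its chain of human-readable ancestor names. Assumes the
# ImageNet is-a map is acyclic, which holds for the official hierarchy.
def wnidAncestorNames(wnid):
    names = loadWnidNames()
    isa = loadIsAMap()
    chain = [wnid]
    while chain[-1] in isa:
        chain.append(isa[chain[-1]])
    return [names.get(w, w) for w in chain]
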
def mapImageNet(maps=None):
    if maps:
        img_ids, _, img_cls_map = maps
    else:
        img_ids = loadObject(PATH.DATA.IMG_MAP)
        img_cls_map = loadObject(PATH.DATA.IMG_CLS_MAP)

    img_dir = PATH.DATA.IMAGENET.IMGS
    data = getFilesInDirectory(img_dir, "jpg")
    data = [(x[x.rfind('/') + 1:-4], IMAGENET) for x in data]

    idx = len(img_ids)
    for _data in data:
        img_ids.append((idx, ) + _data)
        img_id = _data[0]
        cls = getClassID(img_id.split('_')[0])
        img_cls_map.append([img_id, cls])
        idx += 1

    saveObject(img_ids, PATH.DATA.IMG_MAP)
    saveObject(img_cls_map, PATH.DATA.IMG_CLS_MAP)
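
# Data-shape note (inferred from the appends above, not stated explicitly in the
# source): each img_ids entry added here is (running_index, image_id, IMAGENET)
# and each img_cls_map entry is [image_id, class_id].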
# Example #6
    def img_infos(self):
        if hasattr(self, '_img_infos'):
            return self._img_infos
        else:
            img_infos = loadObject(PATH.DATA.IMG_CLS_MAP)
            self._img_infos = []
            for info in img_infos:
                _info = {}
                _info['id'] = info[0]
                _info['classes'] = info[1:]
                self._img_infos.append(_info)

            return self._img_infos
# Example #7
    def loadConfigs(self):
        configs = loadObject(self.config_file)
        model_configs = configs[0]
        self.input_mean = model_configs["input_mean"]
        self.input_dim = tuple(model_configs["input_dim"])
        self.net_type = model_configs["net_type"]

        archi_configs = configs[1:]
        self.layers = []
        self.configs = {}
        for layer_config in archi_configs:
            name = layer_config[0]
            self.layers.append(name)
            self.configs[name] = adict(layer_config[1])
    def getFieldmaps(self, file_path=None):
        if self.field_maps is not None:
            return self.field_maps

        # load/generate field maps
        file_path = PATH.MODEL.FIELDMAPS if file_path is None else file_path
        if os.path.isfile(file_path):
            print ("Fieldmaps: loading from the stored object file ...")
            field_maps = loadObject(file_path)
        else:
            print ("Fieldmaps: generating ...")
            field_maps = stackedFieldmaps(self.model)
            saveObject(field_maps, file_path)
            print ("Fieldmaps: saved at {}".format(file_path))
        self.field_maps = field_maps
        return self.field_maps
# Example #9
    def initDatasets(self, sources, amount):
        # config data to be loaded
        self.database = loadObject(PATH.DATA.IMG_MAP)
        self.dataset = []
        # filter samples not belonging to target sources
        for _data in self.database:
            if _data[2] in sources:
                self.dataset.append(_data)

        if amount is not None and len(self.dataset) > amount:
            # keep only the first 'amount' samples; the remainder is set aside as backup
            self.data = self.dataset[:amount]
            self.backup = self.dataset[amount:]
        else:
            self.data = self.dataset.copy()
            self.backup = None
        self.amount = len(self.data)
def mapCOCO(maps, source):
    print("Mapping MS-COCO dataset...")
    img_ids, cls_map, img_cls_map = maps

    for subset in ["val", "train"]:
        file_path = PATH.DATA.COCO.ANNOS.format(subset)
        coco = loadObject(file_path)

        # class map
        classes = coco['categories']
        _cls_map = {}
        for idx, cls in enumerate(classes):
            name = convert(cls['name'])
            _cls_id = cls['id']
            cls_id = getClassID(name, cls_map)
            if cls_id is None:
                # new class that does not yet exist in cls_map
                cls_map.append(name)
                _cls_map[_cls_id] = len(cls_map) - 1
            else:
                _cls_map[_cls_id] = cls_id

        annos = {}
        for anno in coco['annotations']:
            img_id = anno['image_id']
            cls = _cls_map[anno['category_id']]
            if img_id not in annos:
                annos[img_id] = set()
            annos[img_id].add(cls)

        data = sorted(annos.keys())
        # only count image_ids that appear in the annotations,
        # not every image_id listed in coco['images']
        for img_id in data:
            img_cls = [img_id] + list(annos[img_id])
            img_cls_map.append(img_cls)
            img_ids.append((img_id, source, subset))
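
# Equivalent sketch of the per-image class accumulation above, rewritten with
# collections.defaultdict purely to illustrate the pattern; the function name and
# arguments here are new, not from the original source.
from collections import defaultdict

def collectImageClasses(coco_annotations, category_to_class):
    img_classes = defaultdict(set)
    for anno in coco_annotations:
        img_classes[anno['image_id']].add(category_to_class[anno['category_id']])
    return dict(img_classes)
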
# Example #11
def reportCorrelations(corrs):
    results = {}
    for unit_1, unit_2, coef in nested(corrs):
        # TODO: aggregate correlation coefficients into 'results'
        pass
    return results

'''
Main Program

'''

if __name__ == "__main__":
    data_path = PATH.OUT.COR.ACTIVS
    if os.path.exists(data_path):
        print("Units activaiton data: load from existing file.")
        data = loadObject(data_path)
    else:
        print ("Units activation data: generate from scratch...")
        bl = BatchLoader(sources=["PASCAL"])
        model = ModelAgent()
        probe_layers = loadObject(PATH.MODEL.PROBE)
        cls_layers = model.getLayers()[-2:]
        probe_layers += cls_layers
        
        data = {}
        while bl:
            batch = bl.nextBatch()
            imgs = batch[1]
    
            activ_maps = model.getActivMaps(imgs, probe_layers)
            conv_activs, cls_activs = splitDic(activ_maps, cls_layers)
# Example #12

def reportMatchesInFigure(matches):
    print("placeholder")


'''
Main program

'''

if __name__ == "__main__":
    path = PATH.OUT.IDE.DATA.UNIT
    if not os.path.exists(path):
        bl = BatchLoader(amount=100)
        probe_layers = loadObject(PATH.MODEL.PROBE)
        model = ModelAgent()
        field_maps = model.getFieldmaps()

        pool = Pool()
        num = pool._processes
        matches = None
        while bl:
            batch = bl.nextBatch()
            images = batch[1]
            annos = batch[2]

            activ_maps = model.getActivMaps(images, probe_layers)
            activ_maps = splitDict(activ_maps, num)
            params = [(amap, field_maps, annos) for amap in activ_maps]
            print("Reflecting and matching activation maps...")
import os
import sys
from os.path import exists

curr_path = os.path.dirname(os.path.abspath(__file__))
root_path = os.path.join(curr_path, "../..")
if root_path not in sys.path:
    sys.path.insert(0, root_path)

from src.config import PATH
from utils.helper.file_manager import loadObject, saveObject
'''
Global variables

'''

path = PATH.DATA.CLS_MAP
class_map = loadObject(path) if exists(path) else []

path = PATH.DATA.IMG_CLS_MAP
img_cls_map = loadObject(path) if exists(path) else []
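
# Shape note (inferred from how these lists are built elsewhere in these examples,
# not stated here): class_map appears to be a flat list of class names whose index
# serves as the class id, and img_cls_map holds rows of [image_id, class_id, ...]
# with one or more classes per image.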
'''
Getter

'''


def getClasses(order=0, mapping=class_map, indices=None):
    classes = []
    for idx, cls in enumerate(mapping):
        if idx == 0 or cls is None:
            # skip the first element of each order: it is either meaningless
            # or belongs to the parent order
            continue
# Example #14
'''
Main program

'''

if __name__ == "__main__":
    data_path = PATH.OUT.UNIT_ATTRS

    if not exists(data_path):
        print("Cannot find existing verification data; starting from scratch.")

        input_num = 15
        bl = BatchLoader(batch_size=input_num)
        model = ModelAgent(input_size=input_num)
        probe_layers = loadObject(PATH.MODEL.PROBE)

        attr_diffs = {}
        patch_data = [[], []]
        while bl:
            batch = bl.nextBatch()
            imgs = batch[1]
            annos = batch[2]

            # obtain original activation maps
            print("Fetching activation maps for specific units ...")
            activ_maps = model.getActivMaps(imgs, probe_layers)
            activ_attrs = activAttrs(activ_maps)
            anno_ids = [[anno[0] for anno in img_annos] for img_annos in annos]
            updateActivAttrDiffs(attr_diffs,
                                 activ_attrs,
# Example #15
    return batch_matches

    
'''
Main Program

'''

if __name__ == "__main__":
    quans = list(range(0, 100, 10))
    file_path = PATH.OUT.ACTIV_THRESH

    if not os.path.exists(file_path):
        print ("Can not find existing match results, thus beginning from scratch.")
        bl = BatchLoader(amount=4000, classes=0)
        probe_layers = loadObject(PATH.MODEL.PROBE)
        model = ModelAgent()
        field_maps = model.getFieldmaps()
        
        num = os.cpu_count()  # number of parallel workers; assumed here, not defined in this excerpt
        matches = [None for x in range(len(quans))]
        while bl:
            batch = bl.nextBatch()
            imgs = batch[1]
            annos = batch[2]

            activ_maps = model.getActivMaps(imgs, probe_layers)
            activ_maps = splitDict(activ_maps, num)
            params = [(amap, field_maps, annos, quans) for amap in activ_maps]
            with Pool() as pool:
                batch_matches = pool.starmap(process, params)
            print ("Combine matches...")