Example #1
 def __init__(self, file):
     from mlib.file import File
     self.file = File(file)
     self.file.allow_autoload = True
     super().__init__(self.file)
     if not self.file.exists:
         self.file.save([])
     self.file.default_quiet = True
Example #2
    def __init__(self, label, file, hw, *args, is_pretrained, **kwargs):
        super().__init__(*args, **kwargs)
        self.ARCH_LABEL = label
        self.file = File(file)
        self.IS_PRETRAINED = is_pretrained
        self.HEIGHT_WIDTH = hw

        self.OUTPUT_IDX = None
Example #3
 def __init__(self, js, onload=True):
     if os.path.isfile(js):
         js = File(js)
     if isinstsafe(js, File):
         if js.ext == 'coffee':
             js = js.coffee2js()
         js = js.read()
     self._raw = js
     self._onload = onload
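A minimal usage sketch; the class name JS is hypothetical (the snippet only shows __init__), and it assumes mlib's File exposes .ext, .coffee2js(), and .read() as used above:

script = JS('static/app.coffee')                # existing path: transpiled from CoffeeScript, then read
inline = JS('console.log("hi")', onload=False)  # raw source string: stored in _raw unchanged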
Example #4
def gen_images(*, folder, class_pairs, ims_per_class):
    N_IMAGES = ims_per_class * 2 * len(class_pairs)
    classes = get_class_dict(class_pairs)
    File(folder).deleteAllContents()

    BLOCK_HEIGHT_WIDTH = 20

    y = []

    band_group_size = N_IMAGES // len(class_pairs)  # exact integer: N_IMAGES is a multiple of len(class_pairs)
    band_group = 0
    band_group_i = 0

    with Progress(N_IMAGES) as prog:
        for i in range(N_IMAGES):
            im_data = np.random.rand(BLOCK_HEIGHT_WIDTH, BLOCK_HEIGHT_WIDTH)
            if BLACK_AND_WHITE:
                im_data = np.vectorize(round)(im_data)

            band = class_pairs[band_group]
            ns_classname, s_classname = band.get_classnames()
            darken = band.dark
            if darken:
                im_data = im_data / 2
            band = band.bandsize
            bar_start = int((BLOCK_HEIGHT_WIDTH / 2) - (band / 2))
            bar_end = bar_start + band
            for w in range(bar_start, bar_end):
                im_data[:, w] = 0.5

            im_data = make255(im_data)

            if iseven(i):
                im_data = nn_lib.symm(im_data, 1)
                y.append(classes[s_classname])
                label = s_classname
            else:
                y.append(classes[ns_classname])
                label = ns_classname

            im_data = np.expand_dims(im_data, 2)

            # I think Darius' data was single channeled
            # im_data = np.concatenate((im_data, im_data, im_data), axis=2)

            # im_file = File(f'{File(folder).abspath}/{label}/sym{i}.png')
            im_file = File(f'{File(folder).abspath}/{label}/{label}_{band_group_i}.png')

            band_group_i = band_group_i + 1
            if band_group_i == band_group_size:
                band_group = band_group + 1
                band_group_i = 0

            im_file.save(im_data, silent=True)
            prog.tick()

    return classes
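gen_images only uses each element of class_pairs through .get_classnames(), .dark, and .bandsize; the stand-in below is hypothetical, written only to illustrate that interface:

from dataclasses import dataclass

@dataclass
class BandPair:  # hypothetical; the real class_pairs element type is not shown here
    ns_classname: str  # label for the non-symmetric class
    s_classname: str   # label for the symmetric class
    dark: bool         # when True, pixel intensities are halved
    bandsize: int      # width in pixels of the central gray bar

    def get_classnames(self):
        return self.ns_classname, self.s_classname

# classes = gen_images(folder='_images', class_pairs=[BandPair('ns4', 's4', False, 4)], ims_per_class=50)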
Example #5
 def __init__(self, RESOURCES_ROOT: Folder, _DEV: bool = None):
     assert _DEV is not None
     self._DEV = _DEV
     self.RESOURCES_ROOT = Folder(RESOURCES_ROOT)
     self.EXP_FOLDER = File(inspect.getfile(self.__class__)).parent
     self.FIG_FOLDER = Folder(self.EXP_FOLDER['figs'])
     self.changelist = self.EXP_FOLDER['changelist.yml']
     self.VERSIONS = self.changelist
     self.THIS_VERSION = listitems(self.VERSIONS.load())[-1]
     self.ROOT = self.EXP_FOLDER['build/site']
Example #6
 def write_reqs(cls):
     File('environment.yml').write(shell('conda env export').all_output())
     reqs_conda = spshell(
         f'{HOME}/miniconda3/bin/conda list -n {pwdf().name} -e'
     ).readlines_and_raise_if_err().filtered(
         lambda l: 'pypi' not in l and (not l.strip().startswith("#"))
     )
     File('reqs_conda.txt').write('\n'.join(reqs_conda))
     conda_prune(just_cache=True)
     good2go = conda_prune()
     return reqs_conda, good2go
Example #7
 def _save(self, pretrained=False):
     model_save_file = f'_arch/{self.ARCH_LABEL}'
     if pretrained:
         model_save_file = f'{model_save_file}_pretrained'
     try:
         self.net.save(model_save_file)
         self.net.save(f'{model_save_file}.h5')
         log('saved model')
     except TypeError:
         warn('could not save model due to tf bug')
         File(model_save_file).deleteIfExists()
         File(f'{model_save_file}.h5').deleteIfExists()
Example #8
class PermaList(DefaultMutableList):
    def __init__(self, file):
        from mlib.file import File
        self.file = File(file)
        self.file.allow_autoload = True
        super().__init__(self.file)
        if not self.file.exists:
            self.file.save([])
        self.file.default_quiet = True
    def __getitem__(self, val):
        return self._l[val]
    def __setitem__(self, key, value):
        self._l[key] = value
    def __delitem__(self, key): del self.file[key]
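A minimal usage sketch, assuming DefaultMutableList supplies the rest of the mutable-sequence protocol (append, len, iteration) on top of the dunders shown:

history = PermaList('_history.json')  # backing file is created holding [] if missing
history.append('first run')           # append is assumed to come from DefaultMutableList
print(history[0])                     # reads through self._l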
Example #9
def take_om_logs(OMP):
    manager = get_manager()
    from mlib.boot.lang import pwd
    from mlib.boot.stream import listitems
    from mlib.file import File, Folder
    with PipelineSection('downloading pipeline section data', log=True):
        OMP.get(
            File(PIPELINE_SECTION_FILE).rel_to(pwd()),
            destination=Folder('_data'),
            overwrite=True
        )
    for seclabel, secdata in listitems(File(PIPELINE_SECTION_FILE).load(silent=True)):
        while seclabel in manager.PIPELINE_SECTIONS:
            seclabel = next_int_suffix(seclabel)
        manager.PIPELINE_SECTIONS[seclabel] = secdata
Example #10
def write_README(proj):
    File('README.md').write(README({
        'Installation' : _sec(
            '[![Anaconda-Server Badge](https://anaconda.org/mgroth0/mlib-mgroth0/badges/version.svg)](https://anaconda.org/mgroth0/mlib-mgroth0)') if proj.mbuild else _sec(
            f'git clone --recurse-submodules https://github.com/mgroth0/{proj.EXECUTABLE}',
            'install [miniconda](https://docs.conda.io/en/latest/miniconda.html)',
            '`conda update conda`',
            f'`conda create --name {proj.EXECUTABLE} --file requirements.txt` (requirements.txt is currently not working, TODO)',
            'might need to separately `conda install -c mgroth0 mlib-mgroth0`',
            '-- When updating, use `conda install --file requirements.txt;`',
            f'`conda activate {proj.EXECUTABLE}`',
            numbered=True
        ),
        'Usage'        : _sec(
            f'./{proj.EXECUTABLE} {proj.registered_flags()}',
            proj.instructions,
        ),
        'Configuration': proj.configuration,
        'Testing'      : 'automatic' if proj.mbuild else 'todo',
        'Development'  : _sec(
            'TODO: have separate development and user modes. Developer mode has PYTHONPATH link to mlib and instructions for resolving and developing in ide in parallel. User mode has mlib as normal dependency. might need to use `conda uninstall mlib-mgroth0 --force`. Also in these public readmes or reqs.txt I have to require a specific mlib version',
            f'./{proj.EXECUTABLE} build'
        ),
        'Credits'      : proj.credits
    }))
Example #11
 def __init__(
         self,
         keep_up,
         restart,
         rebuild
 ):
     super().__init__(
         home='/home'
     )
     self.vagrantfile = File('Vagrantfile')
     if not self.vagrantfile:
         self.myinit()
     self._isup = None
     self.keep_up = keep_up
     self.restart = restart
     self._destroy = rebuild
Example #12
 def sample(self, n_per_class=10, preload=False):
     import random
     tran = listvalues(self.trans)[0]
     splt = listvalues(self.splits)[0]
     the_sample = []
     for cls in listvalues(self.classes):
         cls: ImageDatasetClass
         folder = cls.folder(splt, tran)
         im_paths = folder.paths
         num = len(im_paths)
         assert num > n_per_class
         already_took = []
         n = 0
         while n < n_per_class:
             i = random.randrange(0, num)
             if i in already_took:
                 continue
             else:
                 the_im = ImageDatasetImage(File(im_paths[i]), self, tran,
                                            splt, cls)
                 if preload:
                     the_im.load()
                 the_sample.append(the_im)
                 n += 1
                 already_took += [i]
     return the_sample
Example #13
def finish_dnn_remote():
    from mlib.file import File
    from mlib.boot.crunch import get_manager, PIPELINE_SECTION_FILE
    File(PIPELINE_SECTION_FILE).save(get_manager().PIPELINE_SECTIONS, silent=True)
    from mlib.boot import info
    info('printing dnn is finished string!')
    print('__DNN_IS_FINISHED__')
Example #14
def basic_cache_logic(f,
                      pklfile: File,
                      *args,
                      log_level=LogLevel.PROGRESS,
                      **kwargs):
    if pklfile.exists:
        log_level.handle(f"loading {f.__name__} result from cache")
        return pklfile.load(silent=True), True
    else:
        key = log_level.handle(
            f"no cached result for {f.__name__}, running ...",
            attempt_one_line=True)
        r = f(*args, **kwargs)
        pklfile.save(r, silent=True)
        log_level.handle(f"completed and cached", one_line_key=key)
        return r, False
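A usage sketch for the cache helper; it assumes File.save/File.load pickle arbitrary objects, and the cache path is illustrative:

from mlib.file import File

def expensive_sum(n):
    return sum(i * i for i in range(n))

result, was_cached = basic_cache_logic(expensive_sum, File('_cache/sum.pkl'), 1_000_000)
# first call runs expensive_sum and caches the result; repeat calls return it with was_cached=True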
Example #15
def write_index_webpage(
        htmlDoc: HTMLPage,
        root,
        resource_root_file,
        upload_resources=True,
        WOLFRAM=False,
        DEV=False
):
    resource_root_rel = File(resource_root_file).rel_to(root)

    assert root.edition_local['.'].abspath == root.edition_local.abspath

    assert root[resource_root_rel].rel_to(root['.']) == resource_root_rel

    # assert root[resource_root_rel].abspath == resource_root_file.abspath

    write_sub_webpage(
        htmlDoc=htmlDoc,
        index_root=root,
        rel_resource_root=resource_root_rel,
        rel_root='.',
        upload_resources=upload_resources,
        WOLFRAM=WOLFRAM,
        DEV=DEV
    )
Example #16
 def __post_init__(self):
     from mlib.open_mind import OM_MOUNT
     txt = f'''
     cd {OM_MOUNT}/user/mjgroth/dnn
     {self.bashscript_str}
     echo {self.FINISH_STR}
     '''
     self.file = File(self.name, w=txt)
Example #17
        def gen():
            # twentyData = []
            # twentyLabel = []
            twentyPairs = []
            i = 0

            # did this?
            warn('NEED TO MERGE getReal and PREPROCESSOR CODE. USE PREPROCESSOR.')
            sfilt = MattSalienceFilter()
            with Progress(len(self.imds)) as prog:
                for imd in self.imds:
                    i += 1
                    if i <= nnstate.FLAGS.batchsize:
                        if nnstate.FLAGS.salience:
                            the_new = imd

                            data = File(imd.file).load()
                            if nnstate.FLAGS.cfg_cfg['full_cfg']['SFILT']:
                                data = sfilt.transform(data)

                            the_new.data = preprocessors(HW)[pp_type].preprocess(data)

                            # I think I fixed this. problem was preprocess resize was not resizing if one of the dimensions was right but not the other. Used an 'and' when I should have used an 'or'.
                            # if (str(type(the_new.data)) != "<class 'numpy.ndarray'>") or (
                            #         str(the_new.data.dtype) != "float32") or str(
                            #         the_new.data.shape) != '(299, 299, 3)':  # debug
                            #     breakpoint()
                            # log('finished preprocess')
                            the_new.label = self.class_label_map[imd.clazz]
                        else:
                            the_new = getReal((imd, HW),
                                              self.class_label_map,
                                              self.normalize_single_ims,
                                              self.std_d,
                                              self.USING_STD_DIR)

                        twentyPairs += [the_new]
                        # twentyData.append(imd.data)
                        # twentyLabel.append(imd.label)
                    if i == nnstate.FLAGS.batchsize:
                        # batch = SimpleNamespace()
                        # batch.data = twentyData
                        # batch.label = twentyLabel
                        yield (
                            [imd.data for imd in twentyPairs],
                            [imd.label for imd in twentyPairs]
                        )
                        twentyPairs.clear()
                        # twentyData = []
                        # twentyLabel = []
                        i = 0

                    # this is maybe better than logging in fill_cmat because it also works during net.predict()
                    prog.tick()
Example #18
def makefig(
        subplots,
        file=None,
        show=False,
        width=6,
        height=8
):
    assert subplots is not None
    if not isitr(subplots):
        subplots = [[subplots]]
    subplots = arr(subplots, ndims=2)
    from matplotlib import rcParams
    rcParams['figure.figsize'] = width, height
    rcParams["savefig.dpi"] = 200

    with plt.style.context('dark_background'):
        # if len(subplots.shape) != 2:
        if len(subplots.shape) == 1:
            ncol = 1
        else:
            ncol = subplots.shape[1]
        nrow = subplots.shape[0]

        subplots = make2d(subplots)
        fig, axs = plt.subplots(ncols=ncol, nrows=nrow, squeeze=False)

        if len(axs.shape) == 1:
            # noinspection PyUnresolvedReferences
            axs.shape = (axs.shape[0], 1)
        for r, row in enumerate(subplots):
            for c, fd in enumerate(row):
                if isinstance(fd, MultiPlot):
                    [d.show(axs[r, c]) for d in fd]
                else:
                    fd.show(axs[r, c])
        fig.tight_layout(pad=3.0)
        if file is None:
            plt.show()
        else:
            File(file).mkparents()
            plt.savefig(file)
            plt.clf()
            if show:
                showInPreview(imageFile=File(file).abspath)
            return File(file)
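Each entry in subplots only needs a .show(ax) method (MultiPlot entries are iterated and drawn onto one axes); the plottable below is hypothetical, written to illustrate that contract:

class LinePlot:  # hypothetical; any object with .show(ax) fits makefig
    def __init__(self, ys):
        self.ys = ys
    def show(self, ax):
        ax.plot(self.ys)

# makefig([[LinePlot([1, 4, 9]), LinePlot([9, 4, 1])]], file='figs/lines.png')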
Example #19
def file_cache_logic(f,
                     namefile: File,
                     myfolder: Optional[File],
                     *args,
                     log_level=LogLevel.PROGRESS,
                     **kwargs):
    key = log_level.handle(f"no cached file for {f.__name__}, running ...",
                           attempt_one_line=True)
    r = f(*args, **kwargs)
    assert isinstsafe(
        r, File
    ), f'filecache functions must return a single File, but this is a {type(r)}'

    r = r.copy_into(myfolder, next_new=True)
    namefile.save(r.name, silent=True)

    log_level.handle(f"completed and filecached", one_line_key=key)
    return r
Example #20
def pyshell(script, args=(), cd=None, env=None, PYTHONPATH=None, force=False):
    pyenv = None
    if PYTHONPATH is not None:
        pyenv = {'PYTHONPATH': PYTHONPATH}
    from mlib.file import File
    return shell(f'/Users/matt/miniconda3/envs/{env}/bin/python3',
                 File(script).abspath,
                 *args,
                 cwd=cd,
                 env=pyenv).fake_interact()
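A call sketch; the script path, env name, and PYTHONPATH value are illustrative:

pyshell(
    'scripts/train.py',
    args=('--epochs', '10'),
    env='dnn',                    # resolves to /Users/matt/miniconda3/envs/dnn/bin/python3
    PYTHONPATH='/path/to/mlib',   # passed to the subprocess as its environment
)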
Example #21
 def __init__(self, file, makeObjs=False):
     self.makeObjs = makeObjs
     file = File(file, quiet=True)
     file.backup()
     file.allow_autoload = True
     super().__init__(file)
     self.__dict__['file'] = self._l
     assert file.ext == 'json'
     file.backup()
Example #22
File: stat.py Project: mgroth0/mlib
def py_deps(start, output):
    start = File(start).abspath
    output = File(output).abspath
    if not output.endswith('.svg'):
        output = output + '.svg'
    return shell(
        '/Users/matt/miniconda3/bin/python',
        '-m', 'pydeps',
        '-o', output,

        '-T', 'svg',  # default
        '--noshow',
        # '--display','IntelliJ IDEA'

        # Verbosity stuff
        # '-vvv',  # very verbose
        # '--show-deps',
        # '--show-raw-deps',
        # '--show-dot',

        # '--no-output',
        # '--show-cycles',
        # ' --noise-level INT',
        # '--max-bacon', '2',  # default,
        '--max-bacon', '3',  # default,

        '--pylib',
        # '--pylib-all',

        # '--include-missing',
        # --x PATTERN, --exclude PATTERN
        #  --xx MODULE, --exclude-exact MODULE
        #  --only MODULE_PATH
        '--cluster',
        '--min-cluster-size', '2',  # DEFAULT
        '--max-cluster-size', '10',  # DEFAULT
        # '--externals', #causes stuff to print but no svg
        # '--keep-target-cluster',
        # '--rmprefix PREFIX'
        # --reverse
        start
    ).interact()
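A call sketch (paths illustrative); note that '.svg' is appended to the output name when missing:

py_deps('mlib/__init__.py', '_figs/mlib_deps')  # renders _figs/mlib_deps.svg via pydeps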
Example #23
def FigureTable(*figs_captions,
                resources_root=None,
                exp_id=None,
                editable=False):
    children = [Script(js='''hotElements=[]''')]
    my_stacker = MagicTermLineLogger(FigureTable)
    for maybe_pair in [f for f in figs_captions if f]:
        was_tuple = istuple(maybe_pair)
        if was_tuple:
            fig, caption = maybe_pair
        else:
            fig = maybe_pair
            caption = None
        if is_file(fig):
            if not fig: continue
            fig = File(fig).copy_into(resources_root, overwrite=True)
            fig = HTMLImage(fig.abspath, fix_abs_path=True)
        if not caption:
            children.append(fig)
        else:
            the_id = f'{exp_id}.{".".join(File(fig).names(keepExtension=False)[-1:])}'
            log(f'creating figure: {the_id}', stacker=my_stacker)
            children.append(
                TableRow(
                    DataCell(fig),
                    DataCell(
                        HTML_P(
                            caption,
                            id=the_id,
                        ) if not editable else TextArea(
                            caption, id=the_id, **{'class': 'textcell'}),
                        Script(
                            js=
                            '''(() => {hotElements.push(document.currentScript.parentNode.childNodes[0])})()'''
                        ),
                        **{'class': 'parentcell'},
                    )))
    my_stacker.done = True
    return Table(
        *children,
        Script(js='''
   onload_funs.push(() => {
       hotElements.forEach((e)=>{
            original_value = apiGET(e.id).de_quote()
            e.setText(original_value)
            if (e.tagName === 'TEXTAREA') {
                $(e).on('input',  _=> {
                    apiFun(e.id,e.value)
                })
            }
        }
    )})
        '''))
Example #24
 def monitor():
     log('in monitor thread')
     while not self._detatch_monitor:
         data = f"\n\n{self.__class__.__name__} Monitor\n{str(self.p)}\n\n"
         if logfile is None:
             print(data)
         else:
             from mlib.file import File
             File(logfile).append(data)
         sleep(5)
     log('end of monitor thread')
Example #25
 def _load_weights(self):
     try:
         self.net.load_weights(self.weightsf())
     except Exception:
         import traceback
         print(traceback.format_exc())
         ww = File(self.weightsf()).load()  # DEBUG
         for k in listkeys(ww):
             for kk in listkeys(ww[k]):
                 print(f'{kk}: {ww[k][kk].shape}')
         err('could not load weights')
Example #26
def gc(*args, AUTO_LOGIN=False, RECURSE=False):
    SSH = len(args) <= 1
    arg = 'ssh' if SSH else args[1]
    STOPIN = arg == 'stopin'
    if STOPIN:
        SSH = True
        arg = 'ssh'
    STOP = arg == 'stop'
    START = arg == 'start'
    LIST = arg == 'list'
    PUT = arg == 'put'
    GET = arg == 'get'
    if PUT or GET: arg = 'scp'
    COMPUTE = ['/Users/matt/google-cloud-sdk/bin/gcloud', 'compute']
    if STOP or START or LIST:
        COMPUTE += ['instances']
    COMMAND = COMPUTE + [arg]
    if STOP or START or SSH:
        COMMAND += PROJECT
    if PUT or GET:
        FROM = ((PROJECT_NAME + ':') if GET else '') + abspath(args[2], remote=GET)
        TO = ((PROJECT_NAME + ':') if PUT else '') + abspath(args[3], remote=PUT)
        if File(FROM).isdir() or RECURSE:
            COMMAND.append('--recurse')
        COMMAND.extend([FROM, TO])
    if SSH:
        COMMAND.append('--ssh-flag="-vvv"')
        p = GCShell(COMMAND)
        if STOPIN:
            p.login()
            if args[2] == 'w':
                p.sendline(
                    './stopinw'
                )
                p.readline() # input line
                w = utf_decode(p.readline())
                if '1969' in w:
                    log('no shutdown is scheduled')
                else:
                    log(f'shutdown is scheduled for {w}')
            elif args[2] == 'c':
                p.sudo(['shutdown', '-c'])
                log('cancelled shutdown')
            else:
                p.sudo(['shutdown', '-h', args[2]])

                log(f'scheduled shutdown for {args[2]} mins')
            p.close()
            return None
    else:
        p = GCProcess(COMMAND)
    if AUTO_LOGIN: p.login()
    return p
Example #27
def getReal(
        image_HW,
        _class_label_map,
        normalize_single_ims,
        std_d,
        USING_STD_DIR
):
    real, HW = image_HW
    if GRAY_SCALE:
        real.data = Image.open(real.file.abspath)
    else:
        real.data = np.resize(
            arr(Image.open(real.file.abspath).getdata()),
            (20, 20, 3)
        )
    if normalize_single_ims and not USING_STD_DIR:
        # noinspection PyUnusedLocal
        def smallify():
            err('dev')
            files = glob.glob(sys.argv[1] + "/**/*.png", recursive=True)
            i = 0
            log('found ' + str(len(files)) + ' images')
            with Progress(len(files)) as prog:
                for f in files:
                    p = shell(['convert', f, '-resize', '20x20', f], silent=True)
                    p.interact()
                    i = i + 1
                    prog.tick()
            log('resized ' + str(i) + ' images')
            sys.exit()
        assert len(real.data.getdata()) == 20 * 20, 'dev: smallify if files are large but blocky'
        real.data = np.reshape(
            arr(
                real.data.getdata()
            ),
            (20, 20)
        )
        real.data = real.data / np.std(real.data)
        if SAVE_PREPROC_DATA:
            std_file = File(std_d).resolve(File(real.file).parentName).resolve(File(real.file).name)

            std_file.save(real.data, silent=True)

    real.data = make1(real.data)
    if normalize_single_ims:
        if GRAY_SCALE:
            real.data = real.data - np.mean(real.data)
        else:
            for chan in range(3):
                real.data[:, :, chan] = real.data[:, :, chan] - np.mean(real.data[:, :, chan])

    real.data = resampleim(real.data, HW, HW)

    if GRAY_SCALE:
        shape1 = real.data.shape[0]
        shape2 = real.data.shape[1]
        real.data = np.broadcast_to(real.data, (shape1, shape2, 3))
    real.label = _class_label_map[real.clazz]
    return real
Example #28
File: host.py Project: mgroth0/mlib
    def get(self,
            *files,
            destination=None,
            merge=None,
            overwrite=False,
            verbose=False):
        if len(files) > 0:
            self.host.get(*files, project_name=self.name, verbose=verbose)

            # breakpoint()
            # files = (File(File(f).name) for f in files) # gen expression! YOU CAN ONLY ITERATE IT ONCE!

            files = [File(File(f).name) for f in files]

            all_mb_files = []
            for f in files:
                mb_files = [(fi, si) for fi, si in f.files_recursive().map(
                    lambda fi: (fi, fi.size())) if si.mb() >= 1]
                all_mb_files.extend(mb_files)
                size = f.size(recursive=True)
                progress(f'Downloaded file {f.name} is {size}')

            the_len = len(all_mb_files)
            progress(f'{the_len} files were 1 MB or larger' +
                     (':' if the_len else ''))
            for mb_file, size in sorted(all_mb_files,
                                        key=lambda t: t[1].num_bytes):
                progress(f'\t{size}\t{mb_file.rel_to(pwd())}')

            if destination is not None and Folder(
                    destination).abspath != Folder(pwd()).abspath:
                Folder(destination).mkdirs()
                for f in files:
                    f.moveinto(destination, overwrite=overwrite)
            if merge is not None:
                for f in files:
                    f.mergeinto(Folder(merge), overwrite=overwrite)
                    f.deleteIfExists()
Example #29
class PermaDict(DefaultMutableMapping):
    def __init__(self, file):
        from mlib.file import File
        self.file = File(file)
        self.file.allow_autoload = True
        super().__init__(self.file)
        if not self.file.exists:
            self.file.save({})
        self.file.default_quiet = True
    def check(self):
        from mlib.proj.struct import GIT_DIR, GIT_IGNORE
        if not self.file.rel.startswith('_') and self.file.parent.name != 'data':
            err('PermaDicts should be private (start with _)')
        if GIT_DIR.exists and (not GIT_IGNORE.exists or '/_*' not in GIT_IGNORE.read()):
            err(f'{self.file} needs to be ignored')
        if not self.file.exists:
            self.file.save({})
    def __getitem__(self, val):
        self.check()
        return self._d[val]
    def __setitem__(self, key, value):
        self.check()
        self._d[key] = value
    def __delitem__(self, key): del self.file[key]
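A minimal usage sketch; it assumes the project's .gitignore already contains /_* (otherwise check() raises) and that DefaultMutableMapping writes through to the backing file:

state = PermaDict('_state.json')  # backing file is created holding {} if missing
state['last_run'] = 'exp42'       # check() validates naming/gitignore rules, then assigns via self._d
print(state['last_run'])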
Example #30
 def temp_map_filenames(self):
     indexs = []
     log('loading ims...')
     old_ims = [f.load() for f in Folder('_ImageNetTesting_old')]
     new_ims = [f.load() for f in Folder('_ImageNetTesting/unknown')]
     for oi, new_im in enum(new_ims):
         log(f'checking new im {oi}...')
         for i, old_im in enum(old_ims):
             if np.all(old_im == new_im):
                 log(f'\tfound! @ {i}')
                 indexs += [i]
                 break
         assert len(indexs) == oi + 1
     File('image_net_map.p').save(indexs)
     return None