# Example 1
def run(_):
    """Launch one training job per config whose loss is 'trivid'.

    Each selected config is run via ``main``: inline when
    ``cfgs.no_proc`` is set, otherwise in a subprocess. With
    ``cfgs.parallel`` the subprocesses are all joined at the end;
    without it each one is joined before the next is started.
    """
    cfgs = lz.load_cfg('./cfgs/single_ohnm.py')
    workers = []
    for args in cfgs.cfgs:
        # This driver only handles the 'trivid' loss variant.
        if args.loss != 'trivid':
            print(f'skip {args.loss} {args.logs_dir}')
            continue
        # Default logging schedule: dense early steps plus the last 8 epochs.
        if args.log_at is None:
            args.log_at = np.concatenate([
                range(0, 640, 31),
                range(args.epochs - 8, args.epochs, 1),
            ])
        args.logs_dir = lz.work_path + 'reid/work/' + args.logs_dir
        # Epoch-64 checkpoint already present -> nothing left to do here.
        if osp.exists(args.logs_dir) and osp.exists(args.logs_dir + '/checkpoint.64.pth'):
            print(os.listdir(args.logs_dir))
            continue

        # Pick free GPUs dynamically unless the config pinned them.
        if not args.gpu_fix:
            args.gpu = lz.get_dev(
                n=len(args.gpu),
                ok=args.gpu_range,
                mem_thresh=[0.09, 0.09],
                sleep=32.3,
            )
        lz.logging.info(f'use gpu {args.gpu}')

        if isinstance(args.gpu, int):
            args.gpu = [args.gpu]
        if not args.evaluate and not args.vis:
            # A fresh run must never wipe the checkpoint it resumes from.
            assert args.logs_dir != args.resume
            lz.mkdir_p(args.logs_dir, delete=True)
            lz.pickle_dump(args, args.logs_dir + '/conf.pkl')

        if cfgs.no_proc:
            main(args)
            continue
        worker = mp.Process(target=main, args=(args,))
        worker.start()
        lz.logging.info('next')
        # Stagger start-up so concurrent jobs don't race for the same GPUs.
        time.sleep(random.randint(39, 90))
        if cfgs.parallel:
            workers.append(worker)
        else:
            worker.join()

    if cfgs.parallel:
        for worker in workers:
            worker.join()
# Example 2
def run(_):
    """Spawn one training subprocess per 'tcx' config, all in parallel.

    Every selected config gets its own ``mp.Process`` running ``main``;
    the processes are started with a randomized stagger and joined
    together once all of them have been launched.
    """
    cfgs = lz.load_cfg('./cfgs/single_ohnm.py')
    launched = []
    for args in cfgs.cfgs:
        # This driver only handles the 'tcx' loss variant.
        if args.loss != 'tcx':
            print(f'skip {args.loss} {args.logs_dir}')
            continue
        args.logs_dir = 'work/' + args.logs_dir

        # Pick free GPUs dynamically unless the config pinned them.
        if not args.gpu_fix:
            args.gpu = lz.get_dev(
                n=len(args.gpu),
                ok=args.gpu_range,
                mem=[0.12, 0.07],
                sleep=32.3,
            )
        lz.logging.info(f'use gpu {args.gpu}')

        if isinstance(args.gpu, int):
            args.gpu = [args.gpu]
        if not args.evaluate:
            # A fresh run must never wipe the checkpoint it resumes from.
            assert args.logs_dir != args.resume
            lz.mkdir_p(args.logs_dir, delete=True)
            lz.pickle_dump(args, args.logs_dir + '/conf.pkl')

        proc = mp.Process(target=main, args=(args,))
        proc.start()
        lz.logging.info('next')
        # Stagger start-up so concurrent jobs don't race for the same GPUs.
        time.sleep(random.randint(39, 90))
        launched.append(proc)

    for proc in launched:
        proc.join()
# Example 3
import lz
from lz import *
from torch.nn import CrossEntropyLoss
from tools.vat import VATLoss
from torchvision import transforms as trans

# todo label smooth

# Distributed-training toggle: in distributed mode each process owns a
# single device and the launcher handles device placement; otherwise we
# claim free GPUs ourselves before anything touches CUDA.
dist = False
num_devs = 1 if dist else 3
if not dist:
    lz.init_dev(lz.get_dev(num_devs))

# Global training configuration.
conf = edict(
    num_workers=4,  # ndevs * 3
    num_devs=num_devs,
    no_eval=False,
    start_eval=False,
    loss='arcface',  # adacos softmax arcface arcfaceneg arcface2 cosface
    # Filled in at runtime rather than here:
    writer=None,
    local_rank=None,
    num_clss=None,
    dop=None,  # top_imp
    id2range_dop=None,  # sub_imp
    explored=None,
)
# Example 4
from easydict import EasyDict as edict
from pathlib import Path
import torch
import lz
from torch.nn import CrossEntropyLoss
from torchvision import transforms as trans

# Claim one free GPU at import time, before any CUDA work happens.
lz.init_dev(lz.get_dev(n=1))
# lz.init_dev((0, 1))


def get_config(training=True):
    """Assemble the run configuration as an :class:`edict`.

    NOTE(review): this function is truncated in the excerpt shown here
    (no ``return`` visible) — presumably it keeps populating ``conf``
    and returns it further down; confirm against the full file. The
    ``training`` flag is not used in the visible portion.
    """
    conf = edict()

    # lz.dbg selects a tiny smoke-test schedule instead of the real one.
    dbg = lz.dbg
    if dbg:
        # conf.num_steps_per_epoch = 38049
        conf.num_steps_per_epoch = 3  # just a few steps for fast iteration
        # conf.no_eval = False
        conf.no_eval = True  # skip evaluation while debugging
    else:
        conf.num_steps_per_epoch = 38049
        # conf.num_steps_per_epoch = 3
        conf.no_eval = False
        # conf.no_eval = True
    conf.loss = 'softmax'  # softmax arcface
    conf.fgg = ''  # g gg ''
    conf.fgg_wei = 0  # 1
    conf.start_eval = False

    conf.data_path = Path('/data2/share/')
# Example 5
        # path = paths[0]
        # NOTE(review): this is the interior of a loop over run directories
        # (`path`); the enclosing definition is outside this excerpt.
        # It relaunches evaluation for finished Market-1501 runs.
        if not osp.exists(path + '/conf.pkl'):
            continue
        args = pickle_load(path + '/conf.pkl')
        if args.logs_dir != path:
            print('not same', args.logs_dir)
        args.logs_dir = path + '/eval'
        args.gpu_range = range(4)
        # Skip runs already evaluated, and anything that is not a 'mkt' run.
        if osp.exists(args.logs_dir + '/res.json'): continue
        if 'mkt' not in args.logs_dir: continue
        # NOTE(review): this unconditional `continue` makes everything below
        # dead code — the evaluation relaunch is effectively disabled.
        # Confirm whether that is intentional before removing it.
        continue
        args.dataset_val = args.dataset
        args.eval_conf = 'market1501'

        # Wait for free GPUs matching the original run's device count.
        args.gpu = lz.get_dev(n=len(args.gpu),
                              ok=range(4),
                              mem=[0.12, 0.05],
                              sleep=32.3)

        args.evaluate = True
        args.resume = path + '/model_best.pth'
        if not osp.exists(args.resume):
            # NOTE(review): typo 'shoule', and logging.info is given two
            # positional args (the second is treated as a %-format
            # argument) — likely meant one formatted message.
            logging.info(args.logs_dir, 'shoule delete')
            # rm(args.logs_dir)
            continue

        # Run evaluation in a child process and wait for it; an in-process
        # call was tried before but did not release memory.
        # res = cmain(args)  # will not release mem
        proc = mp.Process(target=main, args=(args, ))
        proc.start()
        proc.join()

        # print(res)