Example No. 1
def create_array_job(loss, out_dir):
    run_one_job(script=__file__,
                queue='48h',
                cpu_only=False,
                memory=25,
                out_dir=out_dir,
                name='infer_{}'.format(loss),
                overwrite=True,
                hold_off=False,
                array=True,
                num_jobs=1)
Example No. 2
def create_array_job(ins_root, log_dir):
    run_one_job(script=__file__,
                queue='middle',
                cpu_only=True,
                memory=10,
                script_parameters=[],
                out_dir=log_dir,
                name='xy',
                overwrite=True,
                hold_off=True,
                num_jobs=len(os.listdir(ins_root)),
                array=True)
Example No. 3
def create_array_job(num_jobs, log_dir):
    run_one_job(script=__file__,
                queue='short',
                cpu_only=True,
                memory=40,
                script_parameters=[],
                out_dir=log_dir,
                name='parametrize',
                overwrite=True,
                hold_off=False,
                num_jobs=num_jobs,
                array=True)
Example No. 4
def create_array_job(loss, out_dir):
    run_one_job(script=__file__,
                queue='48h',
                cpu_only=False,
                memory=50,
                script_parameters=[('out_folder', os.path.basename(out_dir))],
                out_dir=out_dir,
                name='train_{}'.format(loss),
                overwrite=True,
                hold_off=False,
                array=True,
                num_jobs=1)
Example No. 5
        ref = references[query.split('_')[0]]

        t_n_file = os.path.join(top_n_root, setting, '{}_{}.pickle'.format(query, cp))

        if os.path.exists(os.path.join(fs_root(), 'grad_cam', os.path.splitext(os.path.basename(t_n_file))[0])):
            print('Skipping existing: {}'.format(t_n_file))
            continue

        parameters = list()
        parameters.append(('top_n_pickle', t_n_file))
        parameters.append(('query_csv', os.path.join(csv_root, '{}.csv'.format(query))))
        parameters.append(('ref_csv', os.path.join(csv_root, '{}.csv'.format(ref))))
        parameters.append(('checkpoint', os.path.join(fs_root(), 'checkpoints', cp)))

        if 'vl64' not in cp:
            parameters.append(('vlad_cores', 0))

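        # find the first output-folder index that is not taken yet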
        k = 0
        out_folder = '{}_{}_{:03}'.format(cp, query, k)
        log_dir = os.path.join(log_root, out_folder)
        while os.path.exists(log_dir):
            k = k + 1
            out_folder = '{}_{}_{:03}'.format(cp, query, k)
            log_dir = os.path.join(log_root, out_folder)

        parameters.append(('log_dir', log_dir))
        run_one_job(script=script, queue='2h', cpu_only=False, memory=30,
                    script_parameters=parameters, out_dir=log_dir,
                    name='grad_{}_{}'.format(cp, query), overwrite=True, hold_off=False, array=False)
Example No. 6
for s in sets:
    for c_name in checkpoints:

        c_path = os.path.join(fs_root(), 'checkpoints', c_name)

        if os.path.exists(os.path.join(out_root, '{}_{}.pickle'.format(s, c_name))):
            print('{}_{}.pickle already exists. Skipping.'.format(s, c_name))
            continue

        par = common_parameters.copy()
        par.append(('checkpoint', c_path))
        par.append(('set', s))
        par.append(('out_name', c_name))

        if 'vl0' in c_name:
            par.append(('vlad_cores', 0))

        k = 0
        out_folder = '{}_{}_{:03}'.format(s, c_name, k)
        log_dir = os.path.join(log_root, out_folder)
        while os.path.exists(log_dir):
            out_folder = '{}_{}_{:03}'.format(s, c_name, k)
            log_dir = os.path.join(log_root, out_folder)
            k = k + 1

        par.append(('log_dir', log_dir))
        run_one_job(script=script, queue='24h', cpu_only=False, memory=50,
                    script_parameters=par, out_dir=log_dir,
                    name='{}_{}'.format(s, c_name), overwrite=True, hold_off=False, array=False)
Example No. 7
    out_folder = '{}_{}_{:03}'.format(name, SERIES, k)
    out_dir = os.path.join(out_root, out_folder)
    while os.path.exists(out_dir):
        out_folder = '{}_{}_{:03}'.format(name, SERIES, k)
        out_dir = os.path.join(out_root, out_folder)
        k = k + 1

    parameters = [('out_root', os.path.dirname(out_dir)),
                  ('out_folder', os.path.basename(out_dir))]
    for key, value in setting.items():
        parameters.append((key, value))

    os.makedirs(out_dir)
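    # the first 'long' jobs of every (long + middle) block go to the 5-day queue, the rest to 48h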
    if i % (middle + long) < long:
        q = '5d'
    else:
        q = '48h'

    run_one_job(script=train_script,
                queue=q,
                cpu_only=False,
                memory=50,
                script_parameters=parameters,
                out_dir=out_dir,
                name='{}_{}'.format(name, SERIES),
                overwrite=True,
                hold_off=False,
                array=True,
                num_jobs=1)
Example No. 8
        log_dir = os.path.join(log_root, out_folder)
        while os.path.exists(log_dir):
            out_folder = '{}_{:03}'.format(name, k)
            log_dir = os.path.join(log_root, out_folder)
            k = k + 1

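        # default queue/memory; adjusted per dataset below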
        queue = 'middle'
        memory = 90

        if 'pittsburgh' == s[0]:
            queue = 'long'

        if 'freiburg' == s[0]:
            memory = 38

        if 'oxford' == s[0]:
            memory = 49

        parameters.append(('log_dir', log_dir))

        run_one_job(script=script,
                    queue=queue,
                    cpu_only=True,
                    memory=memory,
                    script_parameters=parameters,
                    out_dir=log_dir,
                    name='imp_{}_{:03}'.format(name, k),
                    overwrite=True,
                    hold_off=False,
                    array=False)
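
Examples No. 5 through No. 8 all inline the same idiom for choosing an output directory: a counter is bumped until a '<name>_NNN' folder that does not yet exist is found. A minimal sketch of that pattern factored into a helper (hypothetical; the original scripts inline the loop rather than defining such a function) could look like this:

import os


def next_free_dir(root, prefix):
    # Return the first '<prefix>_000', '<prefix>_001', ... path under
    # root that does not exist yet.
    k = 0
    path = os.path.join(root, '{}_{:03}'.format(prefix, k))
    while os.path.exists(path):
        k = k + 1
        path = os.path.join(root, '{}_{:03}'.format(prefix, k))
    return path

With such a helper, the loop in Example No. 5 would reduce to roughly log_dir = next_free_dir(log_root, '{}_{}'.format(cp, query)).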