Example #1
def serverconfig():
    '''Create server configuration files in the local directory'''
    from static import server_types
    env.path = ''
    server = server_types[env.server_type]
    utils.get_directories(release=False)
    server.config_files(env, dir='.')
Example #2
def info():
    '''Information regarding installation parameters'''
    from static import server_types
    utils.get_directories(release=False)
    data = env.copy()
    server = server_types[env.server_type]
    server.info(data)
    for k in sorted(data):
        print('%20s: %s' % (k, data[k]))
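These Fabric-style tasks (examples 1, 2, and 6) rely on names the snippets never import. A minimal header they appear to assume, based on the Fabric 1.x API (the utils module is project-local, so its contents are an assumption):

from fabric.api import env, run, put, local  # Fabric 1.x remote-execution helpers

import utils  # project-local module assumed to provide get_directories() and makedir()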
Example #3
def download(addon):
    # TODO: prevent multiple downloads.
    current = addon['current_version']
    for file_obj in current['files']:
        url = file_obj['url']
        filename = file_obj['id']  # note: unused; the id is reused directly below

        directories = get_directories(addon['id'])
        target = '{}.xpi'.format(
            os.path.join(directories['files'], str(file_obj['id'])),
        )

        if os.path.exists(target):
            log.info('{}: Skipping download'.format(addon['id']))
            continue

        res = requests.get(url)
        if res.status_code == 404:
            log.warning('{}: got a 404'.format(addon['id']))
            continue
        else:
            res.raise_for_status()

        with open(target, 'wb') as filehandle:
            for chunk in res.iter_content(10000):
                filehandle.write(chunk)

        log.info('{}: Downloaded file: {}'.format(addon['id'], file_obj['id']))
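Examples 3, 4, and 7 assume a project-local get_directories(addon_id) (and, in example 7, get_files(addon_id)) that map an add-on id to working paths. A minimal sketch of what such helpers could look like (the directory layout and key names are assumptions, not the project's actual code):

import os

def get_directories(addon_id, base='data'):
    # Hypothetical layout: one directory per add-on id, with a files/
    # subdirectory for the downloaded .xpi packages.
    root = os.path.join(base, str(addon_id))
    dirs = {'root': root, 'files': os.path.join(root, 'files')}
    for path in dirs.values():
        os.makedirs(path, exist_ok=True)
    return dirs

def get_files(addon_id):
    # Hypothetical JSON targets used by example 7; only the 'root' and
    # 'backup' keys appear in these snippets.
    root = get_directories(addon_id)['root']
    return {'root': os.path.join(root, 'root.json'),
            'backup': os.path.join(root, 'root.json.bak')}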
Example #4
def download(addon):
    # TODO: prevent multiple downloads.
    current = addon['current_version']
    for file_obj in current['files']:
        url = file_obj['url']
        filename = file_obj['id']  # note: unused; the id is reused directly below

        directories = get_directories(addon['id'])
        target = '{}.xpi'.format(
            os.path.join(directories['files'], str(file_obj['id'])),
        )

        if os.path.exists(target):
            log.info('{}: Skipping download'.format(addon['id']))
            continue

        res = requests.get(url)
        if res.status_code == 404:
            log.warning('{}: got a 404'.format(addon['id']))
            continue
        else:
            res.raise_for_status()

        with open(target, 'wb') as filehandle:
            for chunk in res.iter_content(10000):
                filehandle.write(chunk)

        log.info('{}: Downloaded file: {}'.format(addon['id'], file_obj['id']))
Example #5
def assemble_initial_menu(self):
    filepath = os.path.join("data", "scripts")
    menu_items = utils.get_directories(filepath)
    self.window_text_list = [
        "{}. {}".format(count + 1, i.replace(".txt", ""))
        for count, i in enumerate(menu_items)
    ]
Example #6
def upload(release=True):
    "Upload the ``project`` directory to the server"
    import time
    import os
    if release and not env.path.startswith('/'):
        result = run('pwd').split(' ')[0]
        env.path = os.path.join(result, env.path)

    release_name = time.strftime('%Y%m%d-%H%M%S')
    utils.get_directories(release_name, release)
    env.tarfile = archive(release)
    # put the tar package on the server and unpack it
    if release:
        utils.makedir(env.release_path)
        run('cd; mkdir %(logdir)s; mkdir %(confdir)s' % env)
        put(env.tarfile, '%(path)s' % env)
        run('cd %(release_path)s && tar zxf ../%(tarfile)s' % env)
        run('rm %(path)s/%(tarfile)s' % env)
        local('rm %(tarfile)s' % env)
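archive() and utils.makedir() in example 6 are project helpers that are not shown. One plausible archive(), consistent with the later ``tar zxf`` and the %(tarfile)s placeholder (every name here is an assumption):

import tarfile

def archive(release):
    # Hypothetical: pack the local project directory into a gzipped
    # tarball and return its filename for env.tarfile.
    name = 'project-%s.tar.gz' % ('release' if release else 'test')
    with tarfile.open(name, 'w:gz') as tar:
        tar.add('project')
    return name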
Example #7
def serialize_addon_result(addon_id, result, type='root'):
    directories = get_directories(addon_id)  # note: not used below
    files = get_files(addon_id)
    target_file = files[type]
    if os.path.exists(target_file):
        if file_hash(target_file) == json_hash(result):
            log.info('{}: No need to update, hashes match.'.format(addon_id))
        else:
            log.info('{}: Hashes differ.'.format(addon_id))
            shutil.move(target_file, files['backup'])
            with open(target_file, 'w') as filehandle:
                json.dump(result, filehandle)
    else:
        log.info('{}: Writing json for {}'.format(addon_id, type))
        with open(target_file, 'w') as filehandle:
            json.dump(result, filehandle)
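file_hash and json_hash are not shown either. For the hashes-match shortcut to work, json_hash has to digest the same bytes that json.dump writes; one pairing that satisfies that (a sketch, not the project's actual helpers):

import hashlib
import json

def file_hash(path):
    # Digest of the raw bytes currently on disk.
    with open(path, 'rb') as fh:
        return hashlib.sha256(fh.read()).hexdigest()

def json_hash(obj):
    # Digest of the bytes json.dump would write with default settings,
    # so the two values are directly comparable.
    return hashlib.sha256(json.dumps(obj).encode('utf-8')).hexdigest()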
Example #8
def assemble_menu(self):
    filepath = os.path.join("data", "quizes")
    menu_items = utils.get_directories(filepath)
    self.window_text_list = [
        "{}. {}".format(count + 1, i) for count, i in enumerate(menu_items)
    ]
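Examples 5 and 8 use utils.get_directories(filepath) as a plain directory listing. A plausible implementation, assuming it returns subdirectory names (the real utils module is not shown):

import os

def get_directories(filepath):
    # Return the names of the immediate subdirectories of filepath,
    # sorted so the generated menu numbering is stable.
    return sorted(entry for entry in os.listdir(filepath)
                  if os.path.isdir(os.path.join(filepath, entry)))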
Example #9
"""
Organize data for Direct Replication 2
from original data files (obtained from OSF)
into Psych-DS (ish) format:

study-2/
    study-2_sub-<subnum>_data.tsv
"""

import argparse
from utils import (get_directories, get_datafiles, load_and_resave_datafiles)


def get_command_line_arguments():
    parser = argparse.ArgumentParser(description='Reorganize replication data')
    parser.add_argument('-b',
                        '--basedir',
                        required=True,
                        help='base directory')
    parser.add_argument('--studynum', default=1, type=int, help='study number')
    parser.add_argument('--studyname',
                        default='wessel_replication_1_OSF',
                        help='study name')
    return parser.parse_args()


if __name__ == "__main__":
    args = get_command_line_arguments()
    args = get_directories(args)
    args = get_datafiles(args)
    load_and_resave_datafiles(args)
Example #10
def worker(args, q_ind, q_value, r_ind, r_value):
    fd = load_object_detector(args.object_detector)
    dataset = "data/TinyTLP/"
    all_directories = get_directories(dataset)
    results = {}
    for directory in all_directories:
        try:
            results_dir = results[directory] = []
            plotted = results[f"{directory}_plots"] = []
            iou = results[f"{directory}_iou"] = []
            errors = results[f"{directory}_errors"] = []
            ground_truth_file = dataset + directory + "/groundtruth_rect.txt"
            images_wildcard = dataset + directory + "/img/*.jpg"
            images_filelist = glob(images_wildcard)

            images_filelist = sort_images(images_filelist)

            de = None
            if args.extract_density:
                # de = DensityExtractor(images_filelist[0], args)
                # de.create_grid()
                de = GaussianDensityExtractor(images_filelist[0], args)
                de.create_grid()

            # Extract all ground truths
            ground_truth, gt_measurements = extract_all_groud_truths(
                ground_truth_file)

            # Create PF
            w, h = map(int, ground_truth[0][3:5])
            pf = create_particle_filter(ground_truth[0], q_value, r_value,
                                        images_filelist[0], args)

            # Create images iterator
            t = create_iterator(images_filelist, args)

            # Create data association
            da = MLPFAssociation(states=pf.S,
                                 R=r_value,
                                 H=pf.H,
                                 threshold=args.outlier_detector_threshold)

            # Iterate over every image
            features = None
            for i, im in enumerate(t):
                img = read_img(im)

                # Do prediction
                pf.predict()

                if i % args.resample_every == 0:

                    # Compute features
                    features = np.array(fd.compute_features(img))

                    if len(features) > 0:
                        # Do data association
                        if args.point_estimate:
                            features = [
                                np.array([i[0] + w / 2, i[1] + h / 2])
                                for i in features
                            ]
                        psi, outlier, c = da.associate(features)

                        pf.update(psi, outlier, c)
                    else:
                        pf.assign_predicted()

                else:
                    pf.assign_predicted()

                gt = list(map(int, ground_truth[i]))
                x = pf.get_x().T

                # Plot features
                if args.should_plot:
                    plot_result = plot(args, x, de, img, gt, features)
                    if not args.show_plots:
                        plotted.append(plot_result)

                if args.extract_density:
                if not args.should_plot:
                        de.estimate(x)
                    if args.point_estimate:
                        v = np.linalg.norm([
                            de.xtmean - gt[1] - gt[3] / 2,
                            de.ytmean - gt[2] - gt[4] / 2
                        ], axis=0)
                        results_dir.append(v)
                    else:
                        v = np.linalg.norm([
                            de.xtmean - gt[1], de.ytmean - gt[2],
                            (de.xbmean - de.xtmean) - gt[3],
                            (de.ybmean - de.ytmean) - gt[4]
                        ])
                    t.set_description(f"Last error: {v:3.4}")
                    errors.append(v)
                    if not args.point_estimate:
                        iou.append(
                            get_iou(
                                [de.xtmean, de.ytmean, de.xbmean, de.ybmean],
                                [gt[1], gt[2], gt[1] + gt[3], gt[2] + gt[4]]))
                        results_dir.append([
                            de.xtmean, de.ytmean, de.xbmean - de.xtmean,
                            de.ybmean - de.ytmean
                        ])
        except Exception as e:
            print(f"Crashed with error: {str(e)}. Q: {q_value}, R: {r_value}")
    if args.point_estimate:
        results_file = f"results/pf_R_{r_ind}_Q_{q_ind}_{args.object_detector}.pickle"
    else:
        results_file = f"results/pf_box_R_{r_ind}_Q_{q_ind}_{args.object_detector}.pickle"
    with open(results_file, 'wb') as fp:
        pickle.dump(results, fp)
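Examples 10 and 11 both score tracks with get_iou, called with [x1, y1, x2, y2] corner boxes. The helper is not shown; a standard intersection-over-union sketch consistent with those call sites:

def get_iou(box_a, box_b):
    # Boxes are [x1, y1, x2, y2] corner coordinates.
    ix1, iy1 = max(box_a[0], box_b[0]), max(box_a[1], box_b[1])
    ix2, iy2 = min(box_a[2], box_b[2]), min(box_a[3], box_b[3])
    inter = max(0.0, ix2 - ix1) * max(0.0, iy2 - iy1)
    area_a = (box_a[2] - box_a[0]) * (box_a[3] - box_a[1])
    area_b = (box_b[2] - box_b[0]) * (box_b[3] - box_b[1])
    union = area_a + area_b - inter
    return inter / union if union > 0 else 0.0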
Example #11
def test_bkf_Q(dataset, all_directories, args):
    results = {}
    r_ind = 2  # index of the fixed R_value, used in the results filename
    R_value = 1
    # Get the images list from the dataset (note: this overrides the
    # dataset and all_directories arguments)
    dataset = "data/TinyTLP/"

    all_directories = get_directories(dataset)
    fd = load_object_detector(args.object_detector)
    for k, Q_value in enumerate(Q_values):
        for directory in all_directories:
            iou = []
            results_dir = results[directory] = []
            ground_truth_file = dataset + directory + "/groundtruth_rect.txt"
            images_wildcard = dataset + directory + "/img/*.jpg"
            images_filelist = glob(images_wildcard)

            images_filelist = sort_images(images_filelist)

            # Extract all ground truths
            ground_truth, gt_measurements = extract_all_groud_truths(
                ground_truth_file)

            # Create KF
            kf = create_box_kalman_filter(ground_truth[0], Q_value, R_value)

            # Iterate over every image
            t = tqdm(images_filelist[1:], desc="Processing")
            da = DataAssociation(R=kf.R, H=kf.H, threshold=1)

            # Create plot if should_plot is true
            if args.should_plot:
                fig, ax = create_fig_ax()

            features = {}
            for i, im in enumerate(t):
                img = read_img(images_filelist[i])
                # Compute features
                features[i] = np.array(fd.compute_features(img))

                # Do prediction
                mu_bar, Sigma_bar = kf.predict()

                # Do data association
                da.update_prediction(mu_bar, Sigma_bar)
                m = da.associate(features[i])
                kf.update(m)
                gt = list(map(int, ground_truth[i]))
                kf_x = kf.get_x()

                if args.should_plot:
                    plot_box_kf_result(ax, i, img, m, kf_x, ground_truth)
                iou.append(
                    get_iou([kf_x[0], kf_x[1], kf_x[2], kf_x[3]],
                            [gt[1], gt[2], gt[1] + gt[3], gt[2] + gt[4]]))
                results_dir.append(
                    [kf_x[0], kf_x[1], kf_x[2] - kf_x[0], kf_x[3] - kf_x[1]])
            print(f"Dataset: {dir}, IoU: {np.mean(iou), np.std(iou)}")
        with open(
                f"results/kalman_filter_box_R_{r_ind}_Q_{k}_{args.object_detector}.pickle",
                'wb') as fp:
            pickle.dump(results, fp)