Example #1
import json

def main():
    args = parse_args()
    db = get_db()

    action = args.action
    if action == 'drop':
        # Remove the dataset from the database.
        drop_dataset(db)
    elif action == 'load':
        # Read the dataset from disk, make sure the indices exist,
        # then load the documents into the database.
        with open(args.dataset_path) as f:
            dataset = json.load(f)
        ensure_dataset_indices(db)
        load_dataset(db, dataset, normalize=args.normalize)
    elif action == 'export':
        # Dump the current database contents back out to a JSON file.
        dataset = export_dataset(db, denormalize=args.denormalize)
        with open(args.output_path, 'w') as f:
            json.dump(dataset, f)
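Example #1 only shows main(); parse_args() and the database helpers live elsewhere in the module. As a rough sketch, and purely as an assumption based on the attributes the code reads (action, dataset_path, output_path, normalize, denormalize), the parser could look like this:

import argparse

def parse_args():
    # Hypothetical CLI surface inferred from the attributes used in main().
    parser = argparse.ArgumentParser(description='Drop, load, or export the dataset.')
    parser.add_argument('action', choices=['drop', 'load', 'export'])
    parser.add_argument('--dataset-path', help='JSON file to load from')
    parser.add_argument('--output-path', help='JSON file to export to')
    parser.add_argument('--normalize', action='store_true')
    parser.add_argument('--denormalize', action='store_true')
    return parser.parse_args()

The next example stresses the same kind of MongoDB-backed setup from multiple worker processes: each worker randomly inserts, replaces, or deletes documents, and main() then checks that the ids the workers return match what actually ended up in the collection.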
def main():
    p = Pool(PROCS)
    t1 = time.time()
    # Run the insert/replace/delete workload in PROCS worker processes.
    outputs = p.map(process, range(PROCS))
    t2 = time.time()

    db = get_db()
    db_objects = db.test.find()
    print(N, t2 - t1)
    # Compare how many objects the workers report with how many are stored.
    print(sum(len(o) for o in outputs), db.test.count_documents({}))
    # The ids returned by the workers should match the ids in the collection.
    ids = {obj['id'] for o in outputs for obj in o}
    db_ids = {obj['id'] for obj in db_objects}
    print(ids == db_ids)
    db.drop_collection('test')

def process(proc_id):
    db = get_db()
    objects = list()
    for i in range(N):
        if np.random.random() < 0.7:
            # ~70% of the time: insert a new random document.
            obj = {'id': str(uuid.uuid4()), 'data': np.random.random([50]).tolist()}
            objects.append(obj)
            db.test.insert_one(obj)
        elif len(objects) > 5:
            # Otherwise, once enough documents exist, pick one at random
            # and either delete it or replace its data.
            index = np.random.randint(len(objects))
            obj = objects[index]
            if np.random.random() < 0.5:
                db.test.delete_one({'id': obj['id']})
                del objects[index]
            else:
                obj['data'] = np.random.random([50]).tolist()
                db.test.replace_one({'id': obj['id']}, obj)

    return objects
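The constants and helpers this benchmark relies on are not shown. A minimal sketch of the module-level setup, assuming pymongo and placeholder values for PROCS and N (both hypothetical), might be:

import time
import uuid
from multiprocessing import Pool

import numpy as np
from pymongo import MongoClient

PROCS = 4   # number of worker processes (placeholder value)
N = 1000    # iterations per worker (placeholder value)

def get_db():
    # Each worker builds its own client: a MongoClient must not be shared
    # across forked processes. The database name is a placeholder.
    return MongoClient('mongodb://localhost:27017')['benchmark_db']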
Example #4
import json

def main():
    args = parse_args()
    db = get_db()

    action = args.action
    if action == 'drop':
        # Remove the bbox collections from the database.
        drop_bbox_collections(db)
    elif action == 'load':
        # Read the task definitions from disk, make sure the indices exist,
        # then load the tasks into the database.
        with open(args.task_path) as f:
            task_data = json.load(f)
        ensure_bbox_indices(db)
        load_tasks(db, task_data)
    elif action == 'export':
        # The task file is optional for export; pass it through when provided.
        if args.task_path is not None:
            with open(args.task_path) as f:
                task_data = json.load(f)
        else:
            task_data = None
        results = export_task_results(db,
                                      task_data,
                                      denormalize=args.denormalize)
        with open(args.output_path, 'w') as f:
            json.dump(results, f)
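As in Example #1, the argument parsing and the index helpers are defined elsewhere. For illustration only, and with hypothetical collection and field names, ensure_bbox_indices could be a thin wrapper around pymongo's create_index:

from pymongo import ASCENDING

def ensure_bbox_indices(db):
    # A unique index on the task id keeps per-task lookups and upserts fast.
    # The collection and field names here are guesses, not from the source.
    db.bbox_tasks.create_index([('id', ASCENDING)], unique=True)
    db.bbox_results.create_index([('task_id', ASCENDING)])

create_index is effectively idempotent when an equivalent index already exists, so calling a helper like this on every load is safe.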