def upload_bag(obj, bag_file, couchdb_url=dbtools.DEFAULT_SERVER_URL):
    """Store *obj* in the object database and attach *bag_file* to a new Bag doc.

    @param obj: An instance of object_recognition.models.Object.
    @param bag_file: A file-like object containing ROS bag data.
    @param couchdb_url: URL of the CouchDB server to upload to.
    @return: The stored models.Bag document the attachment was added to.
    """
    server = couchdb.Server(couchdb_url)
    databases = dbtools.init_object_databases(server)
    object_db = databases['objects']
    bag_db = databases['bags']

    # Persist the object description first so the new bag can reference its id.
    obj.store(object_db)

    bag_doc = models.Bag(
        object_id=obj.id,
        author_name=obj.author_name,
        author_email=obj.author_email,
    )
    bag_doc.store(bag_db)
    bag_db.put_attachment(doc=bag_doc, content=bag_file,
                          filename='data.bag',
                          content_type='application/octet-stream')
    return bag_doc
session.bag_id = bag.id if args.commit: session.store(sessions) print "running graph" plasm = connect_observation_calc(sync, args.commit, str(session.object_id), str(session.id), args.visualize) sched = ecto.schedulers.Threadpool(plasm) sched.execute() return session finally: print "Removing tmp_file:", tmp_file.name os.remove(tmp_file.name) if "__main__" == __name__: args = parse_args() couch = couchdb.Server(args.db_root) dbs = dbtools.init_object_databases(couch) bags = dbs['bags'] if args.compute_all: models.sync_models(dbs) results = models.Bag.all(bags) for bag in results: existing_sessions = models.Session.by_bag_id(dbs['sessions'], key=bag.id) if(len(existing_sessions) == 0): obj = models.Object.load(dbs['objects'], bag.object_id) print "Computing session for:", obj.object_name, "\ndescription:", obj.description compute_for_bag(bag, bags, args) else: print "Skipping bag:", bag.id, "Already computed %d sessions" % len(existing_sessions) else: bag = models.Bag.load(bags, args.bag_id) if bag == None or bag.id == None:
type=str, help='Remote db to copy from.') parser.add_argument('--remote_collection', dest='remote_collection', type=str, help='Remote collection.') object_recognition.dbtools.add_db_options(parser) args = parser.parse_args() return args if __name__ == "__main__": args = parse_args() couch = couchdb.Server(args.db_root) remote_couch = couchdb.Server(args.remote) local_db = dbtools.init_object_databases(couch) remote_db = remote_couch[args.remote_collection] results = remote_db.view('_all_docs') total_docs = len(results) i = 1 for x in results: doc = remote_db.get(x.id) doc = doc.copy() attachments = doc.get('_attachments', False) if attachments: del doc['_attachments'] if x.id not in local_db: (doc_id, rev) = local_db.save(doc) if attachments: for key, val in attachments.iteritems():
def parse_args():
    # Build the command-line interface for this db-copy script.
    parser = argparse.ArgumentParser(description='Copies docs from one db to another.')
    parser.add_argument('--remote', dest='remote', type=str, help='Remote db to copy from.')
    parser.add_argument('--remote_collection', dest='remote_collection', type=str, help='Remote collection.')
    # Shared db options from object_recognition.dbtools — presumably adds
    # --db_root, since args.db_root is read below; confirm in dbtools.
    object_recognition.dbtools.add_db_options(parser)
    args = parser.parse_args()
    return args

if __name__ == "__main__":
    args = parse_args()
    # Destination (local) server and source (remote) server.
    couch = couchdb.Server(args.db_root)
    remote_couch = couchdb.Server(args.remote)
    local_db = dbtools.init_object_databases(couch)
    remote_db = remote_couch[args.remote_collection]
    # Enumerate every document id in the remote collection.
    results = remote_db.view('_all_docs')
    total_docs = len(results)
    i = 1  # progress counter — presumably printed further down; the chunk is truncated
    for x in results:
        doc = remote_db.get(x.id)
        doc = doc.copy()
        # Detach inline attachments before saving; .get default False means
        # "no attachments" when the key is absent.
        attachments = doc.get('_attachments',False)
        if attachments:
            del doc['_attachments']
        # Only copy documents that do not already exist locally.
        if x.id not in local_db:
            (doc_id,rev) = local_db.save(doc)
            if attachments:
                # NOTE(review): chunk ends mid-suite here — the body that
                # re-uploads each attachment lies outside this view.
                for key,val in attachments.iteritems():
from ecto_object_recognition import capture
from object_recognition import models, dbtools
import ecto_opencv
import os

def parse_args():
    # Command-line interface for the training script: one or more object ids.
    import argparse
    parser = argparse.ArgumentParser(description='Train MyAlgorithm on views from the database.')
    parser.add_argument('objects', metavar='OBJECT', type=str, nargs='+', help='Object ids to train.')
    # Shared database options (server URL etc.) from object_recognition.dbtools.
    dbtools.add_db_options(parser)
    args = parser.parse_args()
    return args

# NOTE(review): couchdb and ecto are referenced below but not imported in this
# chunk — presumably imported in a part of the file outside this view; confirm.
args = parse_args()
db = dbtools.init_object_databases(couchdb.Server(args.db_root))
for object_id in args.objects:
    # get a list of observation ids for a particular object id.
    obs_ids = models.find_all_observations_for_object(db, object_id)
    if not obs_ids:
        print 'No observations found for object %s.' % object_id
        continue
    plasm = ecto.Plasm()
    # the db_reader transforms an observation id into a set of image,depth,mask,K,R,T
    db_reader = capture.ObservationReader("db_reader", db_params=dbtools.args_to_db_params(args))
    # this iterates over all of the observation ids.
    # NOTE(review): the loop body appears to continue past the end of this chunk.
    observation_dealer = ecto.Dealer(tendril=db_reader.inputs.at('observation'), iterable=obs_ids)
import os
import math
import subprocess
import couchdb
import ecto
from ecto_opencv import highgui
import object_recognition
from object_recognition import dbtools, models, capture

# Connect to the default CouchDB server and open the object databases.
db_url = dbtools.DEFAULT_SERVER_URL
#database ritual
couch = couchdb.Server(db_url)
dbs = dbtools.init_object_databases(couch)
sessions = dbs['sessions']
observations = dbs['observations']

# Collect the observation ids belonging to every capture session on record.
results = models.Session.all(sessions)
obs_ids = []
for session in results:
    obs_ids += models.find_all_observations_for_session(observations, session.id)
# Fail fast: the reader below would have nothing to stream.
if len(obs_ids) == 0:
    raise RuntimeError("There are no observations available.")

# Reader cell that streams the collected observations out of the database.
# NOTE(review): ObjectDbParameters is not imported in this chunk — confirm it
# comes from an import outside this view. The chunk is truncated mid-call below.
db_reader = capture.ObservationReader('db_reader', db_params=ObjectDbParameters({'type':
emit(null,doc) } """, )
    # NOTE(review): the line above is the tail of a couchdb ViewField
    # definition (a JavaScript map function inside a triple-quoted string)
    # whose start — and the enclosing model class header — lie outside this
    # chunk. Indentation of the methods below is reconstructed; confirm
    # against the full file.

    @classmethod
    def sync(cls, db):
        # Publish this model's view definitions ('all' and 'by_object_id')
        # to the given couchdb database so they can be queried.
        cls.all.sync(db)
        cls.by_object_id.sync(db)

def sync_models(dbs):
    # Push every model type's view definitions into its respective database.
    # dbs: mapping of database name -> couchdb database, as produced by
    # init_object_databases (presumably — confirm against dbtools).
    Bag.sync(dbs["bags"])
    Object.sync(dbs["objects"])
    Session.sync(dbs["sessions"])
    Observation.sync(dbs["observations"])

if __name__ == "__main__":
    # Smoke test: store a throwaway Object in the objects database and dump it.
    couch = couchdb.Server(DEFAULT_SERVER_URL)
    dbs = init_object_databases(couch)
    obj = Object(
        object_id="TestObject",
        object_name="A test object.",
        description="test objects are fun.",
        tags=["test", "object", "tod"],
    )
    obj.store(dbs["objects"])
    print obj
    print dbs