def processor(cls, *args, **kwargs):
    """Build a TransparentObjectsProcessor cell from pipeline parameters.

    Expects in ``kwargs``: 'object_db', 'subtype', and optionally
    'object_id'.  When an object id is given, the camera model and the
    object meshes are read from 'pipeline_params' and the object DB;
    otherwise empty calibration and a default 640x480 image size are used.
    """
    object_db = kwargs['object_db']
    object_id = kwargs.get('object_id', None)
    subtype = kwargs['subtype']
    if object_id:
        # TODO these should be loaded from the database?
        pipeline_params = kwargs['pipeline_params']
        json_K = pipeline_params['K']
        json_D = pipeline_params['D']
        imageWidth = pipeline_params['imageWidth']
        imageHeight = pipeline_params['imageHeight']
        # fetch every 'mesh' model document for that object
        document_ids = find_model_for_object(db_params_to_db(object_db.parameters()),
                                             object_id, model_type='mesh')
        print(document_ids)
        db_models = Documents(object_db, document_ids)
        print('Found %d meshes:' % len(db_models))
    else:
        # TODO these should be loaded from the database?
        json_K = []
        json_D = []
        imageWidth = 640
        imageHeight = 480
        db_models = []
    return TransparentObjectsProcessor(json_subtype=obj_to_cpp_json_str(subtype),
                                       json_K=obj_to_cpp_json_str(json_K),
                                       json_D=obj_to_cpp_json_str(json_D),
                                       imageWidth=imageWidth,
                                       imageHeight=imageHeight,
                                       db_models=db_models)
def processor(cls, *args, **kwargs):
    """Build a TransparentObjectsProcessor cell from pipeline parameters.

    Expects in ``kwargs``: 'object_db', 'submethod', and optionally
    'object_id'.  When an object id is given, the camera model and the
    object meshes are read from 'pipeline_params' and the object DB;
    otherwise empty calibration and a default 640x480 image size are used.
    """
    object_db = kwargs['object_db']
    object_id = kwargs.get('object_id', None)
    submethod = kwargs['submethod']
    if object_id:
        # TODO these should be loaded from the database?
        pipeline_params = kwargs['pipeline_params']
        json_K = pipeline_params['K']
        json_D = pipeline_params['D']
        imageWidth = pipeline_params['imageWidth']
        imageHeight = pipeline_params['imageHeight']
        # fetch every 'mesh' model document for that object
        document_ids = find_model_for_object(db_params_to_db(object_db.parameters()),
                                             object_id, model_type='mesh')
        print(document_ids)
        db_models = Documents(object_db, document_ids)
        print('Found %d meshes:' % len(db_models))
    else:
        # TODO these should be loaded from the database?
        json_K = []
        json_D = []
        imageWidth = 640
        imageHeight = 480
        db_models = []
    return TransparentObjectsProcessor(json_submethod=dict_to_cpp_json_str(submethod),
                                       json_K=dict_to_cpp_json_str(json_K),
                                       json_D=dict_to_cpp_json_str(json_D),
                                       imageWidth=imageWidth,
                                       imageHeight=imageHeight,
                                       db_models=db_models)
def common_interpret_object_ids(pipeline_param_full, args=None):
    """
    Given command line arguments and the parameters of the pipeline, clean the
    'object_ids' field to be a list of object ids.

    Reads 'object_ids'/'object_names' from the command-line args first (when
    given) and from the pipeline parameters second; the first source that
    yields ids wins.  The special values 'all' and 'missing' select,
    respectively, every object in the DB and every object without a model.
    On return, pipeline_param_full['parameters']['object_ids'] is always a
    list (possibly empty).
    """
    pipeline_param = pipeline_param_full['parameters']

    # read the object_ids; stays None until a usable source is found
    object_ids = None

    # command-line args take precedence over the pipeline parameters
    if args:
        objs = [args.__dict__, pipeline_param]
    else:
        objs = [pipeline_param]

    for obj in objs:
        ids = obj.get('object_ids', None)
        names = obj.get('object_names', None)

        # this source has nothing to say about objects: try the next one
        if ids is None and names is None:
            continue

        # only DB types known to the core can be queried below
        db_params = pipeline_param.get('db', {})
        db_type = db_params.get('type', '')
        if db_type.lower() not in core_db_types():
            continue

        # initialize the DB
        # NOTE(review): eval() on strings coming from the command line will
        # execute arbitrary expressions — presumably the inputs are trusted
        # list literals like "['id1','id2']"; consider ast.literal_eval.
        if isinstance(ids, str) and ids != 'all' and ids != 'missing':
            ids = eval(ids)
        if isinstance(names, str) and names != 'all' and names != 'missing':
            names = eval(names)

        # both selectors empty after parsing: give up (don't try next source)
        if not ids and not names:
            break

        if object_ids is None:
            object_ids = set()

        db = dbtools.db_params_to_db(ObjectDbParameters(db_params))

        # 'all': take every object id in the DB and stop looking
        if 'all' in (ids, names):
            object_ids = set([ str(x.id) for x in models.Object.all(db) ])  # unicode without the str()
            break
        # 'missing': objects that have no Model document yet
        if 'missing' in (ids, names):
            tmp_object_ids = set([ str(x.id) for x in models.Object.all(db) ])
            tmp_object_ids_from_names = set([ str(x.object_id) for x in models.Model.all(db) ])
            object_ids.update(tmp_object_ids.difference(tmp_object_ids_from_names))
        # explicit ids are added as-is
        if ids and ids != 'missing':
            object_ids.update(ids)
        # names are resolved to ids through the DB
        if names and names != 'missing':
            for object_name in names:
                object_ids.update([str(x.id) for x in models.objects_by_name(db, object_name)])
        # if we got some ids through the command line, just stop here
        if object_ids:
            break
    if isinstance(object_ids, set):
        pipeline_param['object_ids'] = list(object_ids)
    else:
        # no usable source at all: normalize to an empty list
        pipeline_param['object_ids'] = []
def processor(cls, *args, **kwargs):
    """Build a TransparentObjectsProcessor cell from pipeline parameters.

    Expects in ``kwargs``: 'object_db', 'submethod', and optionally
    'object_id'.  When an object id is given, the camera model and the
    object meshes are read from 'pipeline_params' and the object DB;
    otherwise empty calibration and a default 640x480 image size are used.
    """
    object_db = kwargs["object_db"]
    object_id = kwargs.get("object_id", None)
    submethod = kwargs["submethod"]
    if object_id:
        # TODO these should be loaded from the database?
        pipeline_params = kwargs["pipeline_params"]
        json_K = pipeline_params["K"]
        json_D = pipeline_params["D"]
        imageWidth = pipeline_params["imageWidth"]
        imageHeight = pipeline_params["imageHeight"]
        # fetch every 'mesh' model document for that object
        document_ids = find_model_for_object(db_params_to_db(object_db.parameters()),
                                             object_id, model_type="mesh")
        print(document_ids)
        print("Found %d meshes:" % len(document_ids))
        db_models = Documents(object_db, document_ids)
    else:
        # TODO these should be loaded from the database?
        json_K = []
        json_D = []
        imageWidth = 640
        imageHeight = 480
        db_models = []
    return TransparentObjectsProcessor(
        json_submethod=dict_to_cpp_json_str(submethod),
        json_K=dict_to_cpp_json_str(json_K),
        json_D=dict_to_cpp_json_str(json_D),
        imageWidth=imageWidth,
        imageHeight=imageHeight,
        db_models=db_models,
    )
def find_all_sessions_for_object(db_params, object_id):
    """Return the ids of all sessions for an object, ordered by date added.

    Each session document's 'added' field is fetched from the DB and used as
    the sort key.  Returns a tuple of session ids, or an empty list when the
    object has no sessions (NOTE: the two return types differ — kept for
    backward compatibility with existing callers).
    """
    db = dbtools.db_params_to_db(db_params)
    sessions = Session.by_object_id(db, key=object_id)
    sessions_by_date_added = []
    for session in sessions:
        # the view result only carries the id; fetch the full document
        # to read its 'added' timestamp
        session_doc = db[session.id]
        sessions_by_date_added.append((session_doc['added'], session.id))
    sessions_by_date_added.sort()
    if not sessions_by_date_added:
        return []
    # list() keeps the subscript working on Python 3, where zip() is lazy;
    # it is a no-op on Python 2
    return list(zip(*sessions_by_date_added))[1]
def find_all_observations_for_session(db_params, session_id):
    '''
    Finds all of the observations associated with a session, and returns a list
    of their ids. These are sorted by the frame number,
    so they should be in chronological ordering.
    '''
    db = dbtools.db_params_to_db(db_params)
    # run the view, keyed on the session id.
    results = Observation.by_session_id(db, key=session_id)
    if len(results) == 0:
        return []
    # create a list of tuples, so that they can be sorted by frame number
    obs_tuples = [(obs.frame_number, obs.id) for obs in results]
    # sort by frame_number, helps preserve chronological order; list() keeps
    # the subscript working on Python 3 (zip() is lazy there) and is a no-op
    # on Python 2
    obs_ids = list(zip(*sorted(obs_tuples, key=lambda obs: obs[0])))[1]
    return obs_ids
def find_all_observations_for_session(db_params, session_id):
    '''
    Finds all of the observations associated with a session, and returns a list
    of their ids. These are sorted by the frame number,
    so they should be in chronological ordering.
    '''
    db = dbtools.db_params_to_db(db_params)
    # run the view, keyed on the session id.
    results = Observation.by_session_id(db, key=session_id)
    if len(results) == 0:
        return []
    # create a list of tuples, so that they can be sorted by frame number
    obs_tuples = [(obs.frame_number, obs.id) for obs in results]
    # sort by frame_number, helps preserve chronological order; list() keeps
    # the subscript working on Python 3 (zip() is lazy there) and is a no-op
    # on Python 2
    obs_ids = list(zip(*sorted(obs_tuples, key=lambda obs: obs[0])))[1]
    return obs_ids
# Smoke test: for each core DB type, build an ObjectDb from raw parameters and
# check that the parameters survive the round trip unchanged.
print('Existing core types: ' + str(core_db_types()))

# maps the raw 'type' string to its ObjectDbTypes enum value
str_to_enum = {'CouchDB': ObjectDbTypes.COUCHDB,
               'filesystem': ObjectDbTypes.FILESYSTEM,
               'empty': ObjectDbTypes.EMPTY}

# test default parameters
for db_params_raw in [{'type': 'CouchDB', 'root': 'http://localhost:5984',
                       'collection': 'object_recognition'},
                      {'path': '/tmp', 'type': 'filesystem',
                       'collection': 'object_recognition'},
                      {'type': 'empty'}]:
    type_str = db_params_raw['type']
    print('starting type ' + type_str)
    db_params = ObjectDbParameters(db_params_raw)
    db = ObjectDb(db_params)
    db_params_new = db.parameters().raw
    # compare in both directions so extra keys on either side are caught
    for dic1, dic2 in [(db_params_raw, db_params_new), (db_params_new, db_params_raw)]:
        for k, v in dic1.items():
            if k not in dic2 or dic2[k] != v:
                raise RuntimeError('Key "%s" in %s but not in %s' % (str(k), str(dic1), str(dic2)))
    if str_to_enum[type_str] != db_params.type:
        raise RuntimeError('The "type" argument in db_params are wrong for db of type %s' % type_str)
    print('ending type ' + type_str)

# test that we can convert a JSON string to an ObjectDbParameters type
db_params_to_db(db_params)

print('all good')