def train(cls, object_id, session_ids, observation_ids, submethod, pipeline_params, db_params, args=None):
    '''
    Assemble and return a training plasm; it is meant to be executed exactly once.

    :param object_id: The object id to train up.
    :param session_ids: A list of session ids this model should be based on.
    :param observation_ids: A list of observation ids that will be dealt to the
        incremental model builder.
    :param submethod: A dictionary of discriminative parameters used to
        initialize the training pipeline.
    :param pipeline_params: A dictionary of non-discriminative parameters used
        to initialize the training pipeline.
    :param db_params: A DB parameters object specifying where to save the model.
    :param args: General command line args (e.g. visualization flags).
    :returns: A plasm; execute it only once.
    '''
    from ecto_object_recognition.object_recognition_db import ModelWriter

    # TODO: make this depend on the pipeline specification or something...
    observation_source = ObservationDealer(db_params=db_params,
                                           observation_ids=observation_ids)

    trainer = cls()
    builder_cell = trainer.incremental_model_builder(submethod, pipeline_params, args)
    # niter=0: run until a quit condition occurs.
    aggregator = ModelBuilder(source=observation_source,
                              incremental_model_builder=builder_cell,
                              niter=0,
                              )
    post = trainer.post_processor(submethod, pipeline_params, args)

    plasm = ecto.Plasm()
    # Wire every output of the model builder into the matching post-processor input.
    shared_keys = set(aggregator.outputs.keys()) & set(post.inputs.keys())
    for key in shared_keys:
        plasm.connect(aggregator[key] >> post[key])

    writer = ModelWriter(db_params=db_params,
                         object_id=object_id,
                         session_ids=list_to_cpp_json_str(session_ids),
                         method=cls.type_name(),
                         json_submethod=dict_to_cpp_json_str(submethod),
                         json_parameters=dict_to_cpp_json_str(pipeline_params),
                         )
    plasm.connect(post["db_document"] >> writer["db_document"])
    return plasm
def configure(self, p, _i, _o):
    '''
    Instantiate the TOD detection cells from the parameters stored on self.

    Requires a "feature" entry in self._parameters; raises RuntimeError
    otherwise. Side effects: creates self.feature_descriptor,
    self.descriptor_matcher, self.message_cvt and self.guess_generator.
    '''
    feature_params = self._parameters.get("feature", False)
    if not feature_params:
        raise RuntimeError("You must supply feature_descriptor parameters for TOD.")

    # Combine the feature/descriptor sections, then overlay the subtype values.
    combined_params = {'feature': feature_params,
                       'descriptor': self._parameters.get('descriptor', {})}
    from object_recognition.tod import merge_dict
    combined_params = merge_dict(combined_params, self._submethod)

    self.feature_descriptor = FeatureDescriptor(
        json_params=json_helper.dict_to_cpp_json_str(combined_params))
    self.descriptor_matcher = tod_detection.DescriptorMatcher(
        "Matcher",
        search_json_params=json_helper.dict_to_cpp_json_str(self._parameters['search']),
        model_documents=self._model_documents)
    self.message_cvt = ecto_ros.Mat2Image()

    # Copy so the stored parameters are not mutated by the visualize flag.
    guess_params = self._parameters['guess'].copy()
    guess_params['visualize'] = self._visualize
    self.guess_generator = tod_detection.GuessGenerator("Guess Gen", **guess_params)
def train(cls, object_id, session_ids, observation_ids, submethod, pipeline_params, db_params, args=None):
    '''
    Create the one-shot training plasm for one object.

    :param object_id: The object id to train up.
    :param session_ids: A list of session ids this model should be based on.
    :param observation_ids: A list of observation ids that will be dealt to the
        incremental model builder.
    :param submethod: A dictionary of discriminative parameters used to
        initialize the training pipeline.
    :param pipeline_params: A dictionary of non-discriminative parameters used
        to initialize the training pipeline.
    :param db_params: A DB parameters object specifying where to save the model.
    :param args: General command line args (e.g. visualization flags).
    :returns: A plasm; execute it only once.
    '''
    from ecto_object_recognition.object_recognition_db import ModelWriter

    # TODO: make this depend on the pipeline specification or something...
    dealer = ObservationDealer(db_params=db_params,
                               observation_ids=observation_ids)
    pipeline = cls()
    incremental = pipeline.incremental_model_builder(submethod, pipeline_params, args)
    # niter=0: keep executing until a quit condition occurs.
    model_builder = ModelBuilder(source=dealer,
                                 incremental_model_builder=incremental,
                                 niter=0,
                                 )
    post_process = pipeline.post_processor(submethod, pipeline_params, args)

    plasm = ecto.Plasm()
    # Connect every builder output that the post-processor accepts as input.
    common = set(model_builder.outputs.keys()).intersection(post_process.inputs.keys())
    for key in common:
        plasm.connect(model_builder[key] >> post_process[key])

    writer = ModelWriter(db_params=db_params,
                         object_id=object_id,
                         session_ids=list_to_cpp_json_str(session_ids),
                         method=cls.type_name(),
                         json_submethod=dict_to_cpp_json_str(submethod),
                         json_parameters=dict_to_cpp_json_str(pipeline_params),
                         )
    plasm.connect(post_process["db_document"] >> writer["db_document"])
    return plasm
def db_models(self, object_ids, submethod, db_params):
    '''Return the persisted models for the given object ids from the DB.'''
    submethod_json = json_helper.dict_to_cpp_json_str(submethod)
    return DbModels(db_params, object_ids, self.type_name(), submethod_json)
def db_models(self, object_ids, submethod, db_params):
    '''Fetch the stored models matching these object ids and submethod.'''
    return DbModels(db_params,
                    object_ids,
                    self.type_name(),
                    json_helper.dict_to_cpp_json_str(submethod))