Example #1
    def build_keyframe_graph(self, demonstrations, bandwidth, vectorizor=None):
        self.G = KeyframeGraph()
        keyframe_clustering = KeyframeClustering()
        """
        Generate clusters using labeled observations, build the models, graphs, and attributes for each
        cluster in the KeyFrameGraph
        """
        clusters = keyframe_clustering.get_clusters(demonstrations)
        for cluster_id in clusters.keys():
            self.G.add_node(cluster_id)
            self.G.nodes[cluster_id]["observations"] = clusters[cluster_id][
                "observations"]
            self.G.nodes[cluster_id]["keyframe_type"] = clusters[cluster_id][
                "keyframe_type"]

            ####################################################
            # NEED TO BE ABLE TO SUPPORT CC-LFD eventually!
            # graph.nodes[cluster_id]["applied_constraints"] = [clusters[cluster_id]["applied_constraints"]]
            self.G.nodes[cluster_id]["applied_constraints"] = []
            ####################################################
            self.G.nodes[cluster_id]["autoconstraints"] = {}
            # Used to track changes in the autoconstraint assignment according to segmentation and proximity style constraint assignment.
            self.G.nodes[cluster_id]["autoconstraint_transitions"] = []
            self.G.nodes[cluster_id]["model"] = KDEModel(kernel='gaussian',
                                                         bandwidth=bandwidth)
        self.G.add_path(self.G.nodes())
        if vectorizor is not None:
            self.G.fit_models(vectorizor)
        else:
            self.G.fit_models(get_observation_joint_vector)
        self.G.identify_primal_observations(get_observation_joint_vector)
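
A minimal calling sketch for the method above, to show how the optional vectorizor argument is used; the lfd object, the demos list, and my_custom_vectorizor are hypothetical names, not part of the example.

# Hypothetical usage; `lfd` exposes build_keyframe_graph and `demos` is a list of Demonstration objects.
lfd.build_keyframe_graph(demos, bandwidth=0.025)
# Supplying a vectorizor overrides the default get_observation_joint_vector featurizer when fitting models.
lfd.build_keyframe_graph(demos, bandwidth=0.025, vectorizor=my_custom_vectorizor)
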
Example #2
    def build_keyframe_graph(self, demonstrations, bandwidth):
        self.G = KeyframeGraph()
        keyframe_clustering = KeyframeClustering()
        """
        Generate clusters using labeled observations, build the models, graphs, and attributes for each
        cluster in the KeyFrameGraph
        """
        clusters = keyframe_clustering.get_clusters(demonstrations)
        for cluster_id in clusters.keys():
            self.G.add_node(cluster_id)
            self.G.nodes[cluster_id]["observations"] = clusters[cluster_id][
                "observations"]
            self.G.nodes[cluster_id]["keyframe_type"] = clusters[cluster_id][
                "keyframe_type"]
            self.G.nodes[cluster_id]["applied_constraints"] = []
            self.G.nodes[cluster_id]["model"] = KDEModel(kernel='gaussian',
                                                         bandwidth=bandwidth)
        self.G.add_path(self.G.nodes())
        self.G.fit_models(get_observation_joint_vector)
        self.G.identify_primal_observations(get_observation_joint_vector)
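
The per-keyframe model above is a Gaussian kernel density estimate over vectorized observations. A self-contained sketch of what the bandwidth parameter controls, assuming (only for illustration) that KDEModel behaves roughly like scikit-learn's KernelDensity:

import numpy as np
from sklearn.neighbors import KernelDensity

# Stand-in data: 50 seven-dimensional joint-configuration observations for one keyframe.
observations = np.random.rand(50, 7)

# Assumption for illustration: KDEModel(kernel='gaussian', bandwidth=...) wraps something like this.
kde = KernelDensity(kernel='gaussian', bandwidth=0.025).fit(observations)
log_likelihoods = kde.score_samples(observations)  # per-observation log-likelihoods, as used for ranking/culling
raw_samples = kde.sample(10)                       # drawing raw samples, analogous to keyframe waypoint sampling
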
Example #3
def main():
    arg_fmt = argparse.RawDescriptionHelpFormatter
    parser = argparse.ArgumentParser(formatter_class=arg_fmt,
                                     description=main.__doc__)
    required = parser.add_argument_group('required arguments')

    required.add_argument(
        '-c',
        '--config',
        dest='config',
        required=True,
        help='the file path of the configuration config.json file')

    required.add_argument(
        '-d',
        '--directory',
        dest='directory',
        required=True,
        help=
        'the directory from which to input labeled demonstration .json files')

    parser.add_argument('-b',
                        '--bandwidth',
                        type=float,
                        default=.025,
                        metavar='BANDWIDTH',
                        help='gaussian kernel density bandwidth')

    parser.add_argument('-t',
                        '--threshold',
                        type=int,
                        default=-1200,
                        metavar='THRESHOLD',
                        help='log-likelihood threshold value')

    parser.add_argument(
        '-n',
        '--number_of_samples',
        type=int,
        default=50,
        metavar='NUMBEROFSAMPLES',
        help='the number of samples to validate for each keyframe')

    args = parser.parse_args(rospy.myargv()[1:])

    # Import the data
    importer = DataImporter()
    labeled_demonstrations = importer.load_json_files(args.directory +
                                                      "/*.json")

    # Convert imported data into Demonstrations and Observations
    demonstrations = []
    for datum in labeled_demonstrations["data"]:
        observations = []
        for entry in datum:
            observations.append(Observation(entry))
        demonstrations.append(Demonstration(observations))

    if len(demonstrations) == 0:
        rospy.logwarn("No demonstration data to model!!")
        return 0

    rospy.init_node("graph_traverse")
    """ Create the Cairo LfD environment """
    config_filepath = args.config
    configs = import_configuration(config_filepath)
    items = ItemFactory(configs).generate_items()
    constraints = ConstraintFactory(configs).generate_constraints()
    # We only have just the one robot...for now.......
    environment = Environment(items=items['items'],
                              robot=items['robots'][0],
                              constraints=constraints,
                              triggers=None)
    """ Create the moveit_interface """
    moveit_interface = SawyerMoveitInterface()
    moveit_interface.set_velocity_scaling(.35)
    moveit_interface.set_acceleration_scaling(.25)
    """ Create KeyframeGraph object. """
    graph = KeyframeGraph()
    cluster_generator = ObservationClusterer()
    """
    Generate clusters using labeled observations, build the models, graphs, and attributes for each
    cluster in the KeyFrameGraph
    """
    clusters = cluster_generator.generate_clusters(demonstrations)
    for cluster_id in clusters.keys():
        graph.add_node(cluster_id)
        graph.nodes[cluster_id]["observations"] = clusters[cluster_id][
            "observations"]
        graph.nodes[cluster_id]["keyframe_type"] = clusters[cluster_id][
            "keyframe_type"]
        graph.nodes[cluster_id]["applied_constraints"] = clusters[cluster_id][
            "applied_constraints"]
        graph.nodes[cluster_id]["meta_constraints"] = {}
        graph.nodes[cluster_id]["model"] = KDEModel(kernel='gaussian',
                                                    bandwidth=args.bandwidth)
    graph.add_path(graph.nodes())
    graph.fit_models(get_observation_joint_vector)
    graph._identify_primal_observations(get_observation_joint_vector)
    rospy.loginfo(graph.get_keyframe_sequence())
    for node in graph.get_keyframe_sequence():
        print("KEYFRAME: {}".format(node))
        print(graph.nodes[node]["keyframe_type"])
        print(graph.nodes[node]["applied_constraints"])
        print("")

    # Create height segmentation and heuristic model
    position_vectorizor = partial(vectorize_demonstration,
                                  vectorizors=[vectorize_robot_position])
    position_vectors = np.array(map(position_vectorizor, demonstrations))
    # stack all observation vectors
    X = np.vstack(position_vectors)
    height_segment_model = BayesianGMMSegmentModel(X, n_components=10)
    height_heuristic = HeightHeuristicModel(height_segment_model)
    height_heuristic.fit()
    height_static_parameters = {
        "item_id": 1,
        "reference_height": 0,
        "direction": "positive"
    }
    height_metaconstraint_builder = HeightMetaconstraintBuilder(
        height_heuristic, height_static_parameters)
    metaconstraint_assigner = MetaconstraintAssigner(
        environment, graph, [height_metaconstraint_builder])
    metaconstraint_assigner.assign_metaconstraints()
    """ Build a ConstraintAnalyzer and KeyframeGraphAnalyzer """
    constraint_analyzer = ConstraintAnalyzer(environment)
    graph_analyzer = KeyframeGraphAnalyzer(graph, moveit_interface,
                                           get_observation_joint_vector)

    sample_to_obsv_converter = SawyerSampleConverter(moveit_interface)
    sampler = KeyframeSampler(constraint_analyzer, sample_to_obsv_converter)
    """ Generate raw_samples from graph for each keyframe """
    for node in graph.get_keyframe_sequence():

        n_samples = args.number_of_samples
        constraints = [
            meta.constraints[4]
            for meta in graph.nodes[node]["metaconstraints"]
        ]
        for constraint in constraints:
            print(constraint)
        attempts, samples, matched_ids = sampler.generate_n_valid_samples(
            graph.nodes[node]["model"],
            graph.nodes[node]["primal_observation"],
            constraints,
            n=n_samples)

        rospy.loginfo("Keyframe %d: %s valid of %s attempts", node,
                      len(samples), attempts)
        if len(samples) < n_samples:
            rospy.loginfo("Keyframe %d: only %s of %s waypoints provided",
                          node, len(samples), n_samples)
        if len(samples) == 0:
            # TODO: DOWN SAMPLE METACONSTRAINTS AND KEEP TESTING
            rospy.loginfo("Keyframe %d has no valid sample observations", node)
            rospy.loginfo("Sampling with no meta constraints")
            attempts, samples, matched_ids = sampler.generate_n_valid_samples(
                graph.nodes[node]["model"],
                graph.nodes[node]["primal_observation"], [],
                n=n_samples)

        # Order sampled points based on their intra-model log-likelihood
        ranked_samples = sampler.rank_samples(graph.nodes[node]["model"],
                                              samples)

        # Use the converter object to convert raw sample vectors into LfD observations
        graph.nodes[node]["samples"] = [
            sample_to_obsv_converter.convert(sample, run_fk=True)
            for sample in ranked_samples
        ]
    """ Clear occluded points (points in collision etc,.) """
    for node in graph.get_keyframe_sequence():
        samples = graph.nodes[node]["samples"]
        free_samples, trash = graph_analyzer.evaluate_keyframe_occlusion(
            samples)
        if free_samples == []:
            rospy.loginfo(
                "Keyframe {} has no free samples and will be culled.".format(
                    node))
            graph.cull_node(node)
        else:
            graph.nodes[node]["free_samples"] = free_samples
    """ Cull/remove keyframes/nodes that via change point estimation using log-likelihood """
    graph_analyzer.cull_keyframes(threshold=args.threshold)

    # """ Order sampled points based on their intra-model log-likelihood """
    # for node in graph.get_keyframe_sequence():
    #     graph.rank_waypoint_samples(node)

    output = []
    """ Create a sequence of keyframe way points and execute motion plans to reconstruct skill """
    joint_config_array = []
    for node in graph.get_keyframe_sequence():
        output.append((node, graph.nodes[node]["applied_constraints"]))
        sample = graph.nodes[node]["free_samples"][0]
        joints = sample.get_joint_angle()
        joint_config_array.append(joints)

    print(output)

    # moveit_interface.move_to_joint_targets(joint_config_array)

    return 0
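
One easy-to-misread step above is the construction of X for BayesianGMMSegmentModel: each demonstration is vectorized into an (n_observations, d) array and all of them are stacked row-wise. A small sketch with invented shapes (the 3-D position dimensionality is an assumption suggested by vectorize_robot_position):

import numpy as np

# Hypothetical per-demonstration position arrays; shapes are invented for illustration.
position_vectors = [
    np.random.rand(120, 3),  # demonstration 1: 120 observations of x, y, z
    np.random.rand(95, 3),   # demonstration 2
    np.random.rand(110, 3),  # demonstration 3
]
X = np.vstack(position_vectors)  # shape (325, 3): one row per observation across all demonstrations
print(X.shape)
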
Example #4
def main():
    arg_fmt = argparse.RawDescriptionHelpFormatter
    parser = argparse.ArgumentParser(formatter_class=arg_fmt,
                                     description=main.__doc__)
    required = parser.add_argument_group('required arguments')

    required.add_argument(
        '-c', '--config', dest='config', required=True,
        help='the file path of the configuration config.json file'
    )

    required.add_argument(
        '-d', '--directory', dest='directory', required=True,
        help='the directory from which to input labeled demonstration .json files'
    )

    parser.add_argument(
        '-b', '--bandwidth', type=float, default=.025, metavar='BANDWIDTH',
        help='gaussian kernel density bandwidth'
    )

    parser.add_argument(
        '-t', '--threshold', type=int, default=None, metavar='THRESHOLD',
        help='Kullback-Leibler divergence threshold value - optional'
    )

    parser.add_argument(
        '-n', '--number_of_samples', type=int, default=50, metavar='NUMBEROFSAMPLES',
        help='the number of samples to validate for each keyframe'
    )

    args = parser.parse_args(rospy.myargv()[1:])

    # Import the data
    importer = DataImporter()
    labeled_demonstrations = importer.load_json_files(args.directory + "/*.json")

    # Convert imported data into Demonstrations and Observations
    demonstrations = []
    for datum in labeled_demonstrations["data"]:
        observations = []
        for entry in datum:
            observations.append(Observation(entry))
        demonstrations.append(Demonstration(observations))

    if len(demonstrations) == 0:
        rospy.logwarn("No demonstration data to model!!")
        return 0

    rospy.init_node("graph_traverse")

    """ Create the Cairo LfD environment """
    config_filepath = args.config
    configs = import_configuration(config_filepath)
    items = ItemFactory(configs).generate_items()
    constraints = ConstraintFactory(configs).generate_constraints()
    # We only have just the one robot...for now.......
    environment = Environment(items=items['items'], robot=items['robots'][0], constraints=constraints, triggers=None)

    """ Create the moveit_interface """
    moveit_interface = SawyerMoveitInterface()
    moveit_interface.set_velocity_scaling(.35)
    moveit_interface.set_acceleration_scaling(.25)

    """ Create KeyframeGraph object. """
    graph = KeyframeGraph()
    cluster_generator = ObservationClusterer()

    """
    Generate clusters using labeled observations, build the models, graphs, and attributes for each
    cluster in the KeyFrameGraph
    """
    clusters = cluster_generator.generate_clusters(demonstrations)
    for cluster_id in clusters.keys():
        graph.add_node(cluster_id)
        graph.nodes[cluster_id]["observations"] = clusters[cluster_id]["observations"]
        graph.nodes[cluster_id]["keyframe_type"] = clusters[cluster_id]["keyframe_type"]
        graph.nodes[cluster_id]["applied_constraints"] = clusters[cluster_id]["applied_constraints"]
        graph.nodes[cluster_id]["model"] = KDEModel(kernel='gaussian', bandwidth=args.bandwidth)
    graph.add_path(graph.nodes())
    graph.fit_models(get_observation_joint_vector)
    graph._identify_primal_observations(get_observation_joint_vector)
    rospy.loginfo(graph.get_keyframe_sequence())
    for node in graph.get_keyframe_sequence():
        print("KEYFRAME: {}".format(node))
        print(graph.nodes[node]["keyframe_type"])
        print(graph.nodes[node]["applied_constraints"])
        print("")

    """ Build a ConstraintAnalyzer and KeyframeGraphAnalyzer """
    constraint_analyzer = ConstraintAnalyzer(environment)
    graph_analyzer = KeyframeGraphAnalyzer(graph, moveit_interface, get_observation_joint_vector)

    sample_to_obsv_converter = SawyerSampleConverter(moveit_interface)
    sampler = KeyframeSampler(constraint_analyzer, sample_to_obsv_converter)
    model_score_ranker = ModelScoreSampleRanker()
    configuration_ranker = ConfigurationSpaceSampleRanker()

    """ Generate raw_samples from graph for each keyframe """
    prior_sample = None
    for node in graph.get_keyframe_sequence():
        print "Keyframe {}".format(node)
        # Keep sampling 
        if graph.nodes[node]["keyframe_type"] == "constraint_transition":
            rospy.loginfo("Sampling from a constraint transition keyframe.")
            constraints = [environment.get_constraint_by_id(constraint_id) for constraint_id in graph.nodes[node]["applied_constraints"]]
            attempts, samples, matched_ids = sampler.generate_n_valid_samples(graph.nodes[node]["model"], graph.nodes[node]["primal_observation"], constraints, n=n_samples)
            if len(samples) == 0:
                # Some constraints couldn't be sampled successfully, so using best available samples.
                diff = list(set(graph.nodes[node]["applied_constraints"]).difference(set(matched_ids)))
                if len(matched_ids) > 0:
                    rospy.logwarn("Constraints {} couldn't be met so attempting to find valid samples with constraints {}.".format(diff, matched_ids))
                    constraints = [environment.get_constraint_by_id(constraint_id) for constraint_id in graph.nodes[node]["applied_constraints"]]
                    attempts, samples, matched_ids = sampler.generate_n_valid_samples(graph.nodes[node]["model"], graph.nodes[node]["primal_observation"], constraints, n=n_samples)
                else:
                    rospy.logwarn("Constraints {} couldn't be met so. Cannot meet any constraints.".format(diff))
        else:
            constraints = [environment.get_constraint_by_id(constraint_id) for constraint_id in graph.nodes[node]["applied_constraints"]]
            attempts, samples, matched_ids = sampler.generate_n_valid_samples(graph.nodes[node]["model"], graph.nodes[node]["primal_observation"], constraints, n=n_samples)

        rospy.loginfo("Keyframe %d: %s valid of %s attempts", node, len(samples), attempts)
        if len(samples) < n_samples:
            rospy.loginfo("Keyframe %d: only %s of %s waypoints provided", node, len(samples), n_samples)
        if len(samples) == 0:
            rospy.loginfo("Keyframe %d has no valid sample observations", node)
            graph.cull_node(node)
        else:
            # Order sampled points based on their intra-model log-likelihood
            if prior_sample is None:
                ranked_samples = model_score_ranker.rank(graph.nodes[node]["model"], samples)
            else:
                ranked_samples = configuration_ranker.rank(graph.nodes[node]["model"], samples, prior_sample)
            prior_sample = ranked_samples[0]

            # Use the converter object to convert raw sample vectors into LfD observations
            graph.nodes[node]["samples"] = [sample_to_obsv_converter.convert(sample, run_fk=True) for sample in ranked_samples]

    """ Clear occluded points (points in collision etc,.) """
    for node in graph.get_keyframe_sequence():
        samples = graph.nodes[node]["samples"]
        free_samples, trash = graph_analyzer.evaluate_keyframe_occlusion(samples)
        if free_samples == []:
            rospy.loginfo("Keyframe {} has no free samples and will be culled.".format(node))
            graph.cull_node(node)
        else:
            graph.nodes[node]["free_samples"] = free_samples

    """ Cull/remove keyframes/nodes that via change point estimation using log-likelihood """
    if "automated_culling_threshold" in config['settings']:
        automated_threshold = config['settings']['automated_culling_threshold']
    else:
        automated_culling = True
    graph_analyzer.cull_keyframes(threshold=args.threshold, automated=automated_threshold)

    # """ Order sampled points based on their intra-model log-likelihood """
    # for node in graph.get_keyframe_sequence():
    #     graph.rank_waypoint_samples(node)

    output = []
    """ Create a sequence of keyframe way points and execute motion plans to reconstruct skill """
    joint_config_array = []
    for node in graph.get_keyframe_sequence():
        output.append((node, graph.nodes[node]["applied_constraints"]))
        sample = graph.nodes[node]["free_samples"][0]
        joints = sample.get_joint_angle()
        joint_config_array.append(joints)

    print(output)

    moveit_interface.move_to_joint_targets(joint_config_array)

    return 0
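
For reference, a sketch of how the arguments defined above would be consumed; it reuses the parser built in main(), and the file paths are placeholders, not real project files.

# Hypothetical invocation of the parser constructed in main() above.
args = parser.parse_args([
    '-c', '/path/to/config.json',     # placeholder configuration path
    '-d', '/path/to/labeled_demos',   # placeholder directory of labeled demonstration .json files
    '-b', '0.025',
    '-n', '50',
])
print(args.bandwidth, args.threshold, args.number_of_samples)
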
Example #5
class LFD():
    def __init__(self, configs, robot_interface):
        self.configs = configs
        self.robot_interface = robot_interface

    def build_environment(self):
        robots = RobotFactory(self.configs['robots']).generate_robots()
        items = ItemFactory(self.configs['items']).generate_items()
        triggers = TriggerFactory(self.configs['triggers']).generate_triggers()
        constraints = ConstraintFactory(
            self.configs['constraints']).generate_constraints()
        # We only have just the one robot...for now.......
        self.environment = Environment(items=items,
                                       robot=robots[0],
                                       constraints=constraints,
                                       triggers=triggers)

    def build_keyframe_graph(self, demonstrations, bandwidth):
        self.G = KeyframeGraph()
        keyframe_clustering = KeyframeClustering()
        """
        Generate clusters using labeled observations, build the models, graphs, and attributes for each
        cluster in the KeyFrameGraph
        """
        clusters = keyframe_clustering.get_clusters(demonstrations)
        for cluster_id in clusters.keys():
            self.G.add_node(cluster_id)
            self.G.nodes[cluster_id]["observations"] = clusters[cluster_id][
                "observations"]
            self.G.nodes[cluster_id]["keyframe_type"] = clusters[cluster_id][
                "keyframe_type"]
            self.G.nodes[cluster_id]["applied_constraints"] = []
            self.G.nodes[cluster_id]["model"] = KDEModel(kernel='gaussian',
                                                         bandwidth=bandwidth)
        self.G.add_path(self.G.nodes())
        self.G.fit_models(get_observation_joint_vector)
        self.G.identify_primal_observations(get_observation_joint_vector)

    def sample_keyframes(self,
                         number_of_samples,
                         automate_threshold=False,
                         culling_threshold=-1000):
        sample_to_obsv_converter = SawyerSampleConversion(self.robot_interface)

        keyframe_sampler = KeyframeModelSampler(sample_to_obsv_converter,
                                                self.robot_interface)

        prior_sample = None
        for node in self.G.get_keyframe_sequence():
            rospy.loginfo("")
            rospy.loginfo("KEYFRAME: {}".format(node))
            attempts, samples, matched_ids, constraints = self._generate_samples(
                node, keyframe_sampler, number_of_samples)
            rospy.loginfo("Initial sampling: %s valid of %s attempts",
                          len(samples), attempts)
            if len(samples) < number_of_samples:
                rospy.loginfo("Keyframe %d: only %s of %s waypoints provided",
                              node, len(samples), number_of_samples)
            if len(samples) == 0:
                self.G.cull_node(node)
                continue
            self.G.nodes[node]["samples"] = [
                sample_to_obsv_converter.convert(sample, run_fk=True)
                for sample in samples
            ]
            attempts, samples, matched_ids = self._refit_node_model(
                node, keyframe_sampler, constraints, number_of_samples)
            rospy.loginfo("Refitted keyframe: %s valid of %s attempts",
                          len(samples), attempts)
            if len(samples) < number_of_samples:
                rospy.loginfo("Keyframe %d: only %s of %s waypoints provided",
                              node, len(samples), number_of_samples)
            if len(samples) == 0:
                self.G.cull_node(node)
                continue
            ranked_samples = self._rank_node_valid_samples(
                node, samples, prior_sample)
            self.G.nodes[node]["samples"] = [
                sample_to_obsv_converter.convert(sample, run_fk=True)
                for sample in ranked_samples
            ]
            prior_sample = ranked_samples[0]

        # Cull candidate keyframes.
        for node in get_culling_candidates(self.G, automate_threshold,
                                           culling_threshold):
            self.G.cull_node(node)

    def _generate_samples(self, node, sampler, number_of_samples):

        if self.G.nodes[node]["keyframe_type"] == "constraint_transition":
            rospy.loginfo("Sampling from a constraint transition keyframe.")
            constraints = [
                self.environment.get_constraint_by_id(constraint_id)
                for constraint_id in self.G.nodes[node]["applied_constraints"]
            ]
            attempts, samples, matched_ids = sampler.sample(
                self.environment,
                self.G.nodes[node]["model"],
                self.G.nodes[node]["primal_observation"],
                constraints,
                n=number_of_samples)
            if len(samples) == 0:
                # Some constraints couldn't be sampled successfully, so using best available samples.
                diff = list(
                    set(self.G.nodes[node]["applied_constraints"]).difference(
                        set(matched_ids)))
                if len(matched_ids) > 0:
                    rospy.logwarn(
                        "Constraints {} couldn't be met, so attempting to find valid samples with constraints {}."
                        .format(diff, matched_ids))
                    constraints = [
                        self.environment.get_constraint_by_id(constraint_id)
                        for constraint_id in matched_ids
                    ]
                    attempts, samples, matched_ids = sampler.sample(
                        self.environment,
                        self.G.nodes[node]["model"],
                        self.G.nodes[node]["primal_observation"],
                        constraints,
                        n=number_of_samples)
                else:
                    rospy.logwarn(
                        "Constraints {} couldn't be met so. Cannot meet any constraints."
                        .format(diff))
        else:
            constraints = [
                self.environment.get_constraint_by_id(constraint_id)
                for constraint_id in self.G.nodes[node]["applied_constraints"]
            ]
            attempts, samples, matched_ids = sampler.sample(
                self.environment,
                self.G.nodes[node]["model"],
                self.G.nodes[node]["primal_observation"],
                constraints,
                n=number_of_samples)
        return attempts, samples, matched_ids, constraints

    def _refit_node_model(self, node, sampler, constraints, number_of_samples):
        # refit models
        self.G.fit_models_on_valid_samples(node, get_observation_joint_vector)
        attempted_count, samples, matched_ids = sampler.sample(
            self.environment,
            self.G.nodes[node]["model"],
            self.G.nodes[node]["primal_observation"],
            constraints,
            n=number_of_samples)
        return attempted_count, samples, matched_ids

    def _rank_node_valid_samples(self, node, samples, prior_sample=None):
        model_score_ranker = ModelScoreRanking()
        configuration_ranker = ConfigurationSpaceRanking()
        # Order sampled points by their intra-model log-likelihood when there is no prior sample (i.e. the first keyframe).
        if prior_sample is None:
            ranked_samples = model_score_ranker.rank(
                self.G.nodes[node]["model"], samples)
        # Otherwise rank by closeness in configuration space to the prior sample.
        else:
            ranked_samples = configuration_ranker.rank(
                self.G.nodes[node]["model"], samples, prior_sample)
        return ranked_samples

    def perform_skill(self):
        """ Create a sequence of keyframe way points and execute motion plans to reconstruct skill """

        # Create publisher for node information
        # time_pub = rospy.Publisher('/lfd/node_time', NodeTime, queue_size=10)
        constraint_pub = rospy.Publisher('/lfd/applied_constraints',
                                         AppliedConstraints,
                                         queue_size=10)
        rospy.sleep(5)
        for i in range(len(self.G.get_keyframe_sequence()) - 1):
            cur_node = self.G.get_keyframe_sequence()[i]
            constraints = self.G.nodes[cur_node]["applied_constraints"]
            rospy.loginfo("Keyframe: {}; Constraints: {}".format(
                cur_node, constraints))
            rospy.loginfo("")

        for i in range(len(self.G.get_keyframe_sequence()) - 1):

            # Grab nodes, samples, and joints
            cur_node = self.G.get_keyframe_sequence()[i]
            next_node = self.G.get_keyframe_sequence()[i + 1]
            cur_sample = self.G.nodes[cur_node]["samples"][0]
            next_sample = self.G.nodes[next_node]["samples"][0]
            cur_joints = cur_sample.get_joint_angle()
            next_joints = next_sample.get_joint_angle()

            # Build and publish node data
            # time_msg = NodeTime()
            # time_msg.cur_node = int(cur_node)
            # time_msg.next_node = int(next_node)
            # time_msg.timestamp = rospy.Time.now()
            # time_pub.publish(time_msg)

            constraints = self.G.nodes[cur_node]["applied_constraints"]
            constraints_msg = AppliedConstraints()
            constraints_msg.constraints = constraints
            constraint_pub.publish(constraints_msg)
            rospy.loginfo(
                "LFD: Moving to a new point from keyframe {}".format(cur_node))
            # Execute movement using MoveIt!
            self.robot_interface.move_to_joint_targets(
                [cur_joints, next_joints])

    def generate_representation(self):
        keyframe_data = {}
        for node in self.G.get_keyframe_sequence():
            data = {}
            data["applied_constraints"] = self.G.nodes[node][
                "applied_constraints"]
            data["observation"] = self.G.nodes[node]["samples"][0].data
            keyframe_data[node] = data
        return keyframe_data
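
A plausible end-to-end driver for the LFD class above, to make the intended call order explicit; the configs dictionary and demos list are assumed to exist, and the Sawyer interface name is borrowed from the earlier examples rather than required by this class.

# Hypothetical driver; assumes a ROS node is already initialized and configs/demos exist.
lfd = LFD(configs, robot_interface=SawyerMoveitInterface())
lfd.build_environment()
lfd.build_keyframe_graph(demos, bandwidth=0.025)
lfd.sample_keyframes(number_of_samples=50, automate_threshold=True)
lfd.perform_skill()
keyframe_data = lfd.generate_representation()
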
Example #6
class ACC_LFD():
    def __init__(self, configs, modeling_settings, robot_interface):
        self.configs = configs
        self.settings = modeling_settings
        self.robot_interface = robot_interface

    def build_environment(self):
        items = ItemFactory(self.configs["robots"],
                            self.configs['items']).generate_items()
        constraints = ConstraintFactory(
            self.configs["constraints"]).generate_constraints()
        # We only have just the one robot...for now.......
        self.environment = Environment(items=items['items'],
                                       robot=items['robots'][0],
                                       constraints=constraints,
                                       triggers=None)

    def build_keyframe_graph(self, demonstrations, bandwidth, vectorizor=None):
        self.G = KeyframeGraph()
        keyframe_clustering = KeyframeClustering()
        """
        Generate clusters using labeled observations, build the models, graphs, and attributes for each
        cluster in the KeyFrameGraph
        """
        clusters = keyframe_clustering.get_clusters(demonstrations)
        for cluster_id in clusters.keys():
            self.G.add_node(cluster_id)
            self.G.nodes[cluster_id]["observations"] = clusters[cluster_id][
                "observations"]
            self.G.nodes[cluster_id]["keyframe_type"] = clusters[cluster_id][
                "keyframe_type"]

            ####################################################
            # NEED TO BE ABLE TO SUPPORT CC-LFD eventually!
            # graph.nodes[cluster_id]["applied_constraints"] = [clusters[cluster_id]["applied_constraints"]]
            self.G.nodes[cluster_id]["applied_constraints"] = []
            ####################################################
            self.G.nodes[cluster_id]["autoconstraints"] = {}
            # Used to track changes in the autoconstraint assignment according to segmentation and proximity style constraint assignment.
            self.G.nodes[cluster_id]["autoconstraint_transitions"] = []
            self.G.nodes[cluster_id]["model"] = KDEModel(kernel='gaussian',
                                                         bandwidth=bandwidth)
        self.G.add_path(self.G.nodes())
        if vectorizor is not None:
            self.G.fit_models(vectorizor)
        else:
            self.G.fit_models(get_observation_joint_vector)
        self.G.identify_primal_observations(get_observation_joint_vector)

    def generate_autoconstraints(self, demonstrations):
        rospy.loginfo("Building autoconstraints.")
        # Encapsulates all segmentation, heuristic modeling:
        autoconstraint_builders = AutoconstraintFactory(
            self.configs).generate_autoconstraint_builders(demonstrations)
        # Assigns built autosconstraints to keyframes nodes.
        assign_autoconstraints(self.G, autoconstraint_builders)
        self._apply_autoconstraint_transitions()

    def _apply_autoconstraint_transitions(self):
        prev_set = set()
        for node in self.G.get_keyframe_sequence():
            curr_set = set(self.G.nodes[node]["autoconstraint_transitions"])
            diff = prev_set.symmetric_difference(curr_set)
            if len(diff) > 0:
                self.G.nodes[node]["keyframe_type"] = "constraint_transition"
            prev_set = curr_set

    def sample_keyframes(self,
                         number_of_samples,
                         automate_threshold=False,
                         culling_threshold=-1000):
        number_of_samples = self.settings.get("number_of_samples",
                                              number_of_samples)
        sample_to_obsv_converter = SawyerSampleConversion(self.robot_interface)

        keyframe_sampler = KeyframeModelSampler(sample_to_obsv_converter,
                                                self.robot_interface)

        prior_sample = None
        for node in self.G.get_keyframe_sequence():
            rospy.loginfo("")
            rospy.loginfo("KEYFRAME: {}".format(node))
            attempts, samples, validated_set, autoconstraint_attempts, constraints = self._generate_samples(
                node, keyframe_sampler, number_of_samples)
            if autoconstraint_attempts == 10:
                rospy.logwarn(
                    "Not able to sample enough points for autoconstrained keyframe {}."
                    .format(node))
                self.G.cull_node(node)
                continue
            rospy.loginfo("Validated autoconstraints: {}".format(
                list(validated_set)))
            rospy.loginfo("Keyframe %d: %s valid of %s attempts", node,
                          len(samples), attempts)
            if len(samples) < number_of_samples:
                rospy.loginfo("Keyframe %d: only %s of %s waypoints provided",
                              node, len(samples), number_of_samples)
            if len(samples) == 0:
                self.G.cull_node(node)
                continue
            self.G.nodes[node]["samples"] = [
                sample_to_obsv_converter.convert(sample, run_fk=True)
                for sample in samples
            ]
            attempts, samples, matched_ids = self._refit_node_model(
                node, keyframe_sampler, constraints, number_of_samples)
            rospy.loginfo("Refitted keyframe %d: %s valid of %s attempts",
                          node, len(samples), attempts)
            if len(samples) < number_of_samples:
                rospy.loginfo("Keyframe %d: only %s of %s waypoints provided",
                              node, len(samples), number_of_samples)
            if len(samples) == 0:
                self.G.cull_node(node)
                continue
            ranked_samples = self._rank_node_valid_samples(
                node, samples, prior_sample)
            self.G.nodes[node]["samples"] = [
                sample_to_obsv_converter.convert(sample, run_fk=True)
                for sample in ranked_samples
            ]
            prior_sample = ranked_samples[0]

        # Cull candidate keyframes.
        for node in get_culling_candidates(self.G, automate_threshold,
                                           culling_threshold):
            self.G.cull_node(node)

    def _generate_samples(self,
                          node,
                          keyframe_sampler,
                          number_of_samples,
                          min_samples=5,
                          constraint_attempts=10):
        validated_set = set()
        attempted_count = 0
        autoconstraint_sampler = AutoconstraintSampler(
            self.G.nodes[node]["autoconstraints"])
        valid_samples = []
        constraints = []
        autoconstraint_attempts = 0

        while autoconstraint_sampler.validate(validated_set) is False or len(
                valid_samples) < min_samples:
            if autoconstraint_attempts == constraint_attempts:
                break
            autoconstraint_attempts += 1
            constraints = autoconstraint_sampler.sample(validated_set)
            attempted_count, samples, validated_set = keyframe_sampler.sample(
                self.environment,
                self.G.nodes[node]["model"],
                self.G.nodes[node]["primal_observation"],
                constraints,
                n=number_of_samples)
            valid_samples.extend(samples)
        return attempted_count, valid_samples, validated_set, autoconstraint_attempts, constraints

    def _refit_node_model(self, node, sampler, constraints, number_of_samples):
        # refit models
        self.G.fit_models_on_valid_samples(node, get_observation_joint_vector)
        attempted_count, samples, matched_ids = sampler.sample(
            self.environment,
            self.G.nodes[node]["model"],
            self.G.nodes[node]["primal_observation"],
            constraints,
            n=number_of_samples)
        return attempted_count, samples, matched_ids

    def _rank_node_valid_samples(self, node, samples, prior_sample=None):
        model_score_ranker = ModelScoreRanking()
        configuration_ranker = ConfigurationSpaceRanking()
        # Order sampled points by their intra-model log-likelihood when there is no prior sample (i.e. the first keyframe).
        if prior_sample is None:
            ranked_samples = model_score_ranker.rank(
                self.G.nodes[node]["model"], samples)
        # Otherwise rank by closeness in configuration space to the prior sample.
        else:
            ranked_samples = configuration_ranker.rank(
                self.G.nodes[node]["model"], samples, prior_sample)
        return ranked_samples

    def perform_skill(self):
        """ Create a sequence of keyframe way points and execute motion plans to reconstruct skill """
        joint_config_array = []
        for node in self.G.get_keyframe_sequence():
            sample = self.G.nodes[node]["samples"][0]
            joints = sample.get_joint_angle()
            joint_config_array.append(joints)

        self.robot_interface.move_to_joint_targets(joint_config_array)
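
For contrast with the plain LFD class, a sketch of the ACC_LFD call order; the key difference is the autoconstraint generation step before sampling. As above, configs, demos, and robot_interface are assumed placeholders.

# Hypothetical driver; all inputs are assumed to exist.
acc_lfd = ACC_LFD(configs, modeling_settings={"number_of_samples": 50}, robot_interface=robot_interface)
acc_lfd.build_environment()
acc_lfd.build_keyframe_graph(demos, bandwidth=0.025)
acc_lfd.generate_autoconstraints(demos)  # assigns autoconstraints and marks constraint-transition keyframes
acc_lfd.sample_keyframes(number_of_samples=50, automate_threshold=True)
acc_lfd.perform_skill()
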