def get_package_version(packagename):
    """Look up the version of a ROS package.

    packagename -- name of the package to query.

    Returns the version as a tuple of ints (e.g. (1, 2, 3)), or an
    empty tuple when the package cannot be located.
    """
    rospack = RosPack()
    try:
        manifest = rospack.get_manifest(packagename)
        # Split "major.minor.patch" into a comparable tuple of ints.
        return tuple(int(part) for part in manifest.version.split('.'))
    except ResourceNotFound:
        # Unknown package: signal with an empty tuple rather than raising.
        return tuple()
def collect_plugins(root_scan_dir):
    """
    Scan directories starting in the designated root to locate all
    packages that depend on pluginlib. This will indirectly tell us
    which packages potentially export plugins. Then we search for the
    plugin manifest file and we parse it to obtain all the exported
    plugin classes.

    root_scan_dir indicates the starting point for the scan
    returns the collected plugin classes
    """
    rp = RosPack([root_scan_dir])
    packages = rp.list()
    debug_print("Found packages:\n")
    #print packages
    debug_print()

    # Find all packages that depend on pluginlib and nodelet explicitely
    pluginlib_users = rp.get_depends_on('pluginlib', implicit=False)
    nodelet_users = rp.get_depends_on('nodelet', implicit=False)
    image_transport_users = rp.get_depends_on('image_transport', implicit=False)
    # Concatenate the lists, preserving order and removing duplicates.
    pluginlib_users += list(set(nodelet_users) - set(pluginlib_users))
    pluginlib_users += list(set(image_transport_users) - set(pluginlib_users))
    debug_print("Packages that depend on pluginlib:\n")
    debug_print(pluginlib_users)
    debug_print()

    # Within the packages that require pluginlib, search all their
    # dependencies for plugins
    plugin_classes = []
    for p in pluginlib_users:
        path = rp.get_path(p)
        debug_print(p + ": ")
        debug_print(path)
        exports = rp.get_manifest(p).exports
        debug_print("Exports: ")
        for e in exports:
            s = e.get("plugin")
            if s:
                # The manifest refers to the plugin XML relative to the
                # package root via the ${prefix} placeholder.
                # NOTE: was string.replace(s, ...) — the `string` module
                # function was removed in Python 3; use the str method.
                plugin_xml_path = s.replace("${prefix}", path)
                debug_print(plugin_xml_path)
                # `with` guarantees the file is closed even if the XML
                # escaping/parsing below raises (the old code leaked the
                # handle in that case).
                with open(plugin_xml_path, 'r') as f:
                    xml_str = f.read()
                xml_str = escape_xml(xml_str)
                plugin_classes += parse_plugin_xml(xml_str, p, path)
                #plugin_classes += parse_plugin_xml(s2)
                debug_print(plugin_classes)
                debug_print()
    return plugin_classes