Example #1
0
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json

# Script setup for the "rosnode_example" demo: creates the ROS node, sets its
# spin rate, and builds an RGB camera proxy from a calibration file.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
# Spin rate comes from the "hz" ROS parameter (default 30 Hz).
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter("camera_topic",
                                   "/camera/rgb/image_raw/compressed")
camera_file = node.getFileInPackage('roars',
                                    'data/camera_calibrations/asus_xtion.yml')
# Heuristic: a topic name containing "compressed" is treated as a
# compressed-image stream.
camera = CameraRGB(configuration_file=camera_file,
                   rgb_topic=camera_topic,
                   compressed_image="compressed" in camera_topic)

#⬢⬢⬢⬢⬢➤ ARP
# Path to the ARP (augmented-reality pen) prototype configuration JSON.
arp_configuration = node.getFileInPackage(
    'roars', 'data/arp_configurations/prototype_configuration.json')
Example #2
0
import os
import pprint
import cv2
import sys
import numpy as np
import math
from roars.rosutils.rosnode import RosNode
import roars.geometry.transformations as transformations
import roars.vision.colors as colors
from visualization_msgs.msg import MarkerArray, Marker
from geometry_msgs.msg import Point
from std_msgs.msg import ColorRGBA
import random
import tools

# Fix: this script calls glob.glob below but never imported the glob module,
# which raised a NameError at runtime.
import glob

# Node setup for the cluster-visualization script.
node = RosNode("ciao", disable_signals=True)
node.setHz(10)

#⬢⬢⬢⬢⬢➤ Visualization topic
# Publisher used to show clusters as RViz markers.
vis_topic = node.createPublisher('/showmap/clusters', MarkerArray)

map_file = node.setupParameter("map_file", "")

instance_folder = node.setupParameter("instance_folder", "")

# All instance description files found in the configured folder.
files = glob.glob(os.path.join(instance_folder, "*.txt"))

# Accumulators filled by the code that follows (outside this view).
box_file = ''
box_opt_file = ''
instance_files = []
instances = []
Example #3
0
import glob
import os
import pprint
import cv2
import sys
import numpy as np
import math
from roars.rosutils.rosnode import RosNode
import roars.geometry.transformations as transformations
import roars.vision.colors as colors
from visualization_msgs.msg import MarkerArray, Marker
from geometry_msgs.msg import Point
from std_msgs.msg import ColorRGBA
import random
from scipy.cluster.hierarchy import dendrogram, linkage, fcluster, centroid, fclusterdata
# from sklearn.decomposition import TruncatedSVD
from sklearn.decomposition import PCA

import pymesh

# Node setup for the box-matching script: loads two point/box files and a mesh.
node = RosNode("match_boxes", disable_signals=True)

# Ground-truth vs. estimated instance files to compare.
file1 = node.setupParameter("file1", "/tmp/daniele/instances_02.map_0_GT.txt")
file2 = node.setupParameter("file2",
                            "/tmp/daniele/instances_scene_02.map_200.txt")

d1 = np.loadtxt(file1)
d2 = np.loadtxt(file2)

# NOTE(review): pymesh.form_mesh normally expects numpy arrays for vertices
# and faces; passing plain empty lists may not be accepted — verify.
mesh = pymesh.form_mesh([], [])
Example #4
0
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.augmentereality import VirtualObject
import roars.geometry.transformations as transformations
from roars.detections.prediction import prediction
from roars.datasets.datasetutils import TrainingClass
import cv2
import numpy as np
import os
import glob

# Script setup: pairs sorted image files with sorted prediction files to
# render a video of detections (continues past this view).
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("make_video_from_predictions")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

dataset_folder = node.setupParameter("dataset_folder", "")
predictions_name = node.setupParameter("predictions_name", "")

images_folder = node.setupParameter("images_folder", "")
predictions_folder = node.setupParameter("predictions_folder", "")

# Python 2 print statement (this file is Python 2 throughout).
print "Loading", images_folder

# Sorting keeps images and predictions aligned by their zero-padded names.
images_files = sorted(glob.glob(images_folder + "/*.jpg"))
predictions_files = sorted(glob.glob(predictions_folder + "/*.txt"))
# Minimum confidence threshold for keeping a prediction.
min_th = 0.2
Example #5
0
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

# Script setup: mirrors a raw RGB dataset into the roars folder layout
# (images/, depth/, pose and camera files) under out_folder/<scene name>.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rgb_dataset_to_roars")

scene_folder = node.setupParameter("scene_folder", "")
out_folder = node.setupParameter("out_folder", "")
# Zero-padding width for generated file names.
zeros = node.setupParameter("zeros", 5)

basename = os.path.basename(scene_folder)

# Target layout rooted at out_folder/<scene basename>.
output_folder = os.path.join(out_folder, basename)
rgb_path = os.path.join(output_folder, "images")
depth_path = os.path.join(output_folder, "depth")
pose_path = os.path.join(output_folder, "robot_poses.txt")
extr_path = os.path.join(output_folder, "camera_extrinsics.txt")
intr_path = os.path.join(output_folder, "camera_intrinsics.txt")
# The except clause for this try lies beyond this view.
try:
    os.makedirs(output_folder)
Example #6
0
            # Write the instance id for this frame to its own file.
            # NOTE(review): the file handle is never closed here — presumably
            # relies on GC; confirm against the full function.
            f = open(output_id, 'w')
            f.write(id + "\n")

            # Count how many frames each scene contributed.
            if scene_name not in id_map:
                id_map[scene_name] = 0
            id_map[scene_name] = id_map[scene_name] + 1

            #print(counter_string, img, label, id)
            counter = counter + 1

    # Return the per-scene counts sorted by scene name.
    return collections.OrderedDict(sorted(id_map.items()))


# Script setup for fusing manually-created labels into train/test datasets.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("manual_labels_fusion")

dataset_folder = node.setupParameter(
    "dataset_folder",
    "/home/daniele/Desktop/datasets/roars_manual_2017/original_tolabel")

# NOTE(review): both calls below register the SAME parameter key
# "output_folder" with different defaults — the two variables cannot be
# configured independently; likely the keys were meant to be
# "output_folder_test" / "output_folder_train". Verify before changing.
output_folder_test = node.setupParameter(
    "output_folder",
    "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_test")
output_folder_train = node.setupParameter(
    "output_folder",
    "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_train")

# Class-name -> numeric-label mapping (literal continues past this view).
class_map = {
    "comp1": 0,
    "comp2": 1,
Example #7
0
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.augmentereality import VirtualObject
import roars.geometry.transformations as transformations
import cv2
import numpy as np
import os
import glob
from shutil import copyfile
import shutil
import collections


# Script setup for matching manual labels to roars dataset frames.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("manual_labels_matcher")

roars_dataset_folder = node.setupParameter(
    "roars_dataset_folder", "/home/daniele/Desktop/datasets/roars_2017/indust")

# NOTE(review): both calls below use the SAME parameter key "output_folder"
# with different defaults, so test and train outputs cannot be configured
# independently — likely a copy-paste slip; verify intended keys.
output_folder_test = node.setupParameter(
    "output_folder", "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_test")
output_folder_train = node.setupParameter(
    "output_folder", "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_train")

elements = []
# Fix: 'dataset_folder' is never defined in this script; the parameter set up
# above is 'roars_dataset_folder', so the original line raised a NameError.
for file in glob.glob(roars_dataset_folder + "/*.txt"):
    # Each manifest <name>.txt sits next to <name>_images and <name>_labels.
    name = os.path.basename(file).split(".")[0]
    img_folder = os.path.join(os.path.dirname(file), name + "_images")
    labels_folder = os.path.join(os.path.dirname(file), name + "_labels")
    f = open(file, 'r')
Example #8
0
    # Split 'frames' into train / test / validation partitions by percentage.
    all_count = len(frames)
    test_count = int(all_count * test_percentage)
    val_count = int(all_count * validity_percentage)
    # Training gets whatever remains after test and validation.
    train_count = all_count - test_count - val_count

    trains = frames[:train_count]
    remains = frames[train_count:]

    tests = remains[:test_count]
    vals = remains[test_count:]
    return {"train": trains, "test": tests, "val": vals}


# Script setup for merging every raw dataset under datasets_folder.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_merge_all")

datasets_folder = node.setupParameter("datasets_folder", "")
output_folder = node.setupParameter("output_folder", "")
test_percentage = node.setupParameter("test_percentage", 0.1)
val_percentage = node.setupParameter("val_percentage", 0.05)

# Abort early when no output folder was supplied.
if len(output_folder) == 0:
    print("Error in outputfolder")
    sys.exit(0)

# NOTE(review): os.mkdir raises OSError if the folder already exists —
# presumably intentional to avoid clobbering a previous merge; confirm.
os.mkdir(output_folder)

# Deterministic processing order across runs.
raw_dataset_folders = sorted(os.listdir(datasets_folder))

print raw_dataset_folders
Example #9
0
        # Aggregate error over the accumulated cluster points; echoed to
        # stdout when the class-level debug flag is enabled.
        error = self.computeClusterError(cumulation_points)
        if ArpBuilder.DEBUG_OPTIMIZITION_OUTPUT:
            print error
        return error

    def extractOnlyId(self, whole_data, id):
        """Collect the values stored under key *id* across all frame dicts.

        Frames that do not contain *id* are skipped; order of *whole_data*
        is preserved in the returned list.
        """
        return [entry[id] for entry in whole_data if id in entry]


# Script setup for testing the ARP optimizer against recorded detections.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("arp_optimizer_test")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", False)

# Camera model loaded from the package calibration file (no live stream).
camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/microsoft_hd.yml'
)
camera = Camera(configuration_file=camera_file)


# Folder with previously recorded marker detections to optimize over.
detections_folder = node.setupParameter(
    "detections_folder", "/home/daniele/Desktop/tmp/arp_calibration/markers_detections/")
Example #10
0
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

# Script setup: copies up to max_counter entries of a raw dataset into a new
# one, renumbering files with zero-padded counters.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_subset")

dataset_folder = node.setupParameter("dataset_folder", "")
output_folder = node.setupParameter("output_folder", "")
max_counter = node.setupParameter("max_counter", 1000)

dataset_in = RawDataset(dataset_folder)
# create=True builds the output dataset folder structure.
dataset = RawDataset(output_folder, create=True)
ZERO_PADDING_SIZE = 5
counter = 0
# Loop body continues past this view.
for data in dataset_in.data_list:

    counter_string = '{}'.format(str(counter).zfill(ZERO_PADDING_SIZE))

    img_file = os.path.join(dataset.img_folder, counter_string + ".jpg")
    label_file = os.path.join(dataset.label_folder, counter_string + ".txt")
Example #11
0
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json

# Script setup for the augmented-reality object visualizer.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("ar_object_visualizer")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter("camera_topic",
                                   "/camera/rgb/image_raw/compressed")
camera_file = node.getFileInPackage('roars',
                                    'data/camera_calibrations/asus_xtion.yml')
# "compressed" in the topic name selects the compressed-image transport.
camera = CameraRGB(configuration_file=camera_file,
                   rgb_topic=camera_topic,
                   compressed_image="compressed" in camera_topic)

#⬢⬢⬢⬢⬢➤ Robot pose
robot_ee_tf_name = node.setupParameter("robot_ee_tf_name",
Example #12
0
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

# Script setup: filters raw datasets by a list of ids from filter_file
# ('negative' presumably inverts the match — confirm in the later code).
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_filter")

datasets_folder = node.setupParameter("datasets_folder", "")
output_folder = node.setupParameter("output_folder", "")
filter_file = node.setupParameter("filter_file", "")
negative = node.setupParameter("negative", False)

# NOTE(review): file handle is never closed (relies on GC).
filter_ids = open(filter_file, 'r').read().splitlines()

raw_dataset_folders = sorted(os.listdir(datasets_folder))

#⬢⬢⬢⬢⬢➤ Groups all data
# Loop body continues past this view.
whole_data = {}
for d in raw_dataset_folders:
    dataset_path = os.path.join(datasets_folder, d)
    dataset = RawDataset(dataset_path)
Example #13
0

def asSpherical(p):
    """Convert a cartesian 3D point to spherical coordinates.

    Returns [r, theta, phi] where theta is the polar angle from the +z
    axis and phi the azimuth from the +x axis, both in degrees. Raises
    ZeroDivisionError for the zero vector.
    """
    x, y, z = np.ravel(p)[:3]
    radius = math.sqrt(x * x + y * y + z * z)
    polar_deg = math.acos(z / radius) * 180 / math.pi
    azimuth_deg = math.atan2(y, x) * 180 / math.pi
    return [radius, polar_deg, azimuth_deg]


# Script setup: loads every scene manifest found in roars_datasets_path and
# keeps the valid ones, keyed by their scene path.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_dataset_export")


roars_datasets_path = node.setupParameter("roars_datasets_path", '')
raw_dataset_path = node.setupParameter("raw_dataset_path", "")
output_folder = node.setupParameter("output_folder", "/tmp")

files = glob.glob(os.path.join(roars_datasets_path, "*.txt"))
scenes = {}
for f in files:
    # Manifests that fail to load or are invalid are silently skipped.
    scene = TrainingScene.loadFromFile(f)
    if scene != None and scene.isValid():
        name = scene.getScenePath()
        scenes[name] = scene

# Python 2 dict iteration; loop body continues past this view.
for name, scene in scenes.iteritems():
Example #14
0
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.augmentereality import BoundingBoxGenerator
from roars.datasets.datasetutils import TrainingInstance, JSONHelper
import roars.geometry.transformations as transformations
import os
import json
import sys

# Script setup: converts ARP-collected object points into bounding boxes.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("convert_arp_points")

#⬢⬢⬢⬢⬢➤ Output File
output_file = node.setupParameter('output_file', '')
if output_file == '':
    # Fix: the original format string had no '{}' placeholder, so the
    # .format(output_file) argument was silently dropped.
    print("Invalid output file '{}'".format(output_file))
    sys.exit(0)

#⬢⬢⬢⬢⬢➤ Object Points
objects_points_file = node.setupParameter('objects_points_file', '')

# Only proceed when the input points file actually exists; the body of the
# inner 'if' continues past this view.
if os.path.exists(objects_points_file):
    objects_data = JSONHelper.loadFromFile(objects_points_file)

    #⬢⬢⬢⬢⬢➤ Creates a Box Generator based on number of points
    # The generator variant depends on how many points describe each object.
    number_of_points = objects_data["points_per_objects"]
    box_generator = BoundingBoxGenerator.getGenerator(number_of_points)

    if box_generator != None:
Example #15
0
        help="path to the pbtxt containing the label definition",
        required=True)
    parser.add_argument('-v',
                        '--visualization',
                        help="flag to enable visualization (only for debug)",
                        action='store_true')
    args = parser.parse_args()

    #create tensorflow wrapper class
    detector = tensorflow_detector_wrapper(args.graph, args.labelMap)
    # Color map is only needed when drawing debug visualizations.
    if args.visualization:
        classes = detector.getClassDictionary()
        c_map = cv_show_detection.getColorMap(classes)

    #node creation
    node = RosNode("tf_obj_detector")

    #⬢⬢⬢⬢⬢➤ Sets HZ from parameters
    node.setHz(node.setupParameter("hz", 60))

    #create publisher
    # Detections are published as a flat float array message.
    prediction_topic = node.setupParameter("prediction_topic", "/detections")
    publisher = node.createPublisher(prediction_topic, numpy_msg(Floats))

    #⬢⬢⬢⬢⬢➤ Creates Camera Proxy
    camera_topic = node.setupParameter("camera_topic",
                                       "/camera/rgb/image_raw/compressed")
    camera_file = node.getFileInPackage(
        'roars', 'data/camera_calibrations/asus_xtion.yml')
    camera = CameraRGB(configuration_file=camera_file,
                       rgb_topic=camera_topic,
Example #16
0
from roars.gui.widgets.WInstanceEditor import WInstanceEditor
from roars.gui.widgets.WSceneFrameVisualizer import WSceneFrameVisualizer
from roars.gui.widgets.WInstanceCreator import WInstanceCreator
from roars.gui.widgets.WInstancesList import WInstancesList
from roars.gui.widgets.WAxesEditor import WAxesEditor
from PyQt4 import QtCore
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtGui import QFileDialog
import PyQt4.QtGui as QtGui
import sys
import math
import os

# GUI entry script for the scene explorer; window geometry and the scene
# manifest come from ROS parameters.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_scene_explorer")
# "1400;900" is parsed into [1400, 900] via array_type=int.
window_size = node.setupParameter("window_size", "1400;900", array_type=int)
scene_manifest_file = node.setupParameter("manifest", '')

WBaseWidget.DEFAULT_UI_WIDGETS_FOLDER = node.getFileInPackage(
    'roars', 'data/gui_forms/widgets')


class MainWindow(PyQtWindow):
    # Main explorer window; class body continues past this view.
    def __init__(self, uifile):
        """Build the window from *uifile* and fix its size."""
        super(MainWindow, self).__init__(uifile)

        # self.showMaximized()
        self.setFixedSize(window_size[0], window_size[1])
        #⬢⬢⬢⬢⬢➤ Scene Management
        self.scene_filename = ''
Example #17
0
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import random
from collections import OrderedDict
# Script setup: exports a raw dataset to YOLO format, with seedable shuffling.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_to_yolo")


# A non-negative seed makes the run reproducible; otherwise use system seed.
seed = node.setupParameter("seed", -1)
if seed >= 0:
    random.seed(seed)
else:
    random.seed()


# Prefix prepended to generated YOLO files.
prefix = node.setupParameter("prefix", "yolo_")

#⬢⬢⬢⬢⬢➤ dataset
dataset_folder = node.setupParameter("dataset_folder", "")
dataset = RawDataset(dataset_folder)
Example #18
0

def asSpherical(p):
    """Return [r, theta, phi] (theta/phi in degrees) for cartesian point *p*.

    theta is the inclination measured from the +z axis, phi the azimuth
    from the +x axis. A zero vector raises ZeroDivisionError.
    """
    coords = np.ravel(p)
    x, y, z = coords[0], coords[1], coords[2]
    norm = math.sqrt(x * x + y * y + z * z)
    theta = math.acos(z / norm) * 180 / math.pi
    phi = math.atan2(y, x) * 180 / math.pi
    return [norm, theta, phi]


# Script setup: voxelizes frames of a scene for one target label.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_dataset_segmenter")

manifest_file = node.setupParameter("manifest_file", '')

#⬢⬢⬢⬢⬢➤ Create Scenes
scene = TrainingScene.loadFromFile(manifest_file)
# Hard-coded class label this segmentation run targets.
target_label = 3

# 64^3 voxel grid, 5 mm per voxel, centered at a fixed workspace point.
grid = VoxelGrid(center=np.array([0.819, 0.125, -0.484]),
                 side=64,
                 resolution=0.005)

if scene.isValid():
    frames = scene.getAllFrames()

    # Sample every 50th frame; loop body continues past this view.
    for i in range(0, len(frames), 50):
Example #19
0
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.datasets.datasetutils import TrainingScene, TrainingClassesMap
from roars.rosutils.rosnode import RosNode
import sys

# Script setup: gathers the parameters needed to build a scene manifest.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("generate_roars_dataset_manifest")

scene_path = node.setupParameter("scene_path", '')
images_path = node.setupParameter("images_path", 'images')
# Fix: the original re-used the "images_path" parameter key here (copy-paste
# slip), making the depth folder impossible to configure independently.
depth_path = node.setupParameter("depth_path", 'depth')
robot_pose_name = node.setupParameter("robot_pose_name", 'robot_poses.txt')
camera_intrisics_file = node.setupParameter("camera_intrisics_file", '')
camera_extrinsics_file = node.setupParameter("camera_extrinsics_file", '')
output_manifest_file = node.setupParameter("output_manifest_file", '')
force_relative = node.setupParameter("force_relative", True)
classes = node.setupParameter("classes", "", array_type=str)
force_no_classes = node.setupParameter("force_no_classes", False)

# A class list is mandatory unless explicitly disabled.
if not force_no_classes and len(classes) == 0:
    print("No classes found!")
    sys.exit(0)

#⬢⬢⬢⬢⬢➤ Create Scenes
# Build the scene manifest object (call continues past this view).
scene = TrainingScene(
    scene_path=scene_path,
    images_path=images_path,
    images_depth_path=depth_path,
    robot_pose_name=robot_pose_name,
Example #20
0
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json
import glob
import sys
import PyKDL

# Script setup: validates the optimized ARP configuration against a camera.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("arp_optimizer_validation")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", True)


camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/microsoft_hd.yml'
)
camera = Camera(configuration_file=camera_file)

#⬢⬢⬢⬢⬢➤ ARP
# Call continues past this view.
arp_configuration = node.getFileInPackage(
    'roars',
Example #21
0
from roars.vision.cameras import Camera
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json
import glob
import sys

# Script setup: batch-detects markers and writes results into output_folder.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("batch_marker_detector")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", False)
output_folder = node.setupParameter(
    "output_folder", "/home/daniele/Desktop/tmp/arp_calibration/markers_detections")

# The output folder must already exist; abort otherwise.
if not os.path.exists(output_folder):
    # Fix: the message carried a stray trailing quote ("...exist'").
    print("Path doesn't exist")
    sys.exit(0)

camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/microsoft_hd.yml'
)
Example #22
0
        # Current ARP tip position as a 3D numpy point.
        point_3d = np.array([
            current_arp_pose.p.x(),
            current_arp_pose.p.y(),
            current_arp_pose.p.z()
        ])
        collected_points.append(point_3d)
        # NOTE(review): under Python 2 this prints a tuple repr.
        print("Collected points", len(collected_points))
        # Every 4 collected points form one bounding box, then restart.
        if len(collected_points) == 4:
            box = BoundingBoxFromFourPoints(collected_points)
            collected_boxes.append(box)
            collected_points = []



# Script setup for the live ARP viewer: new-point events loop back through a
# private topic so external tools can trigger point collection too.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("arp_live_viewer")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", True)

# Publisher and subscriber share the same private topic; publishing a Bool
# triggers newPointCallback (defined elsewhere in this file).
new_point_publisher = node.createPublisher("~new_point_event", Bool)
new_point_publisher_callback = node.createSubscriber(
    "~new_point_event", Bool, newPointCallback)


#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
    "camera_topic",
    "/usb_cam/image_raw/compressed"
)
Example #23
0
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys

# Script setup: lists images, labels and ids of a raw dataset side by side.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_dataset_ids_list")

dataset_folder = node.setupParameter("dataset_folder", "")

img_folder = os.path.join(dataset_folder, "images")
label_folder = os.path.join(dataset_folder, "labels")
ids_folder = os.path.join(dataset_folder, "ids")

if not os.path.exists(img_folder):
    # Fix: corrected the "Invalud" typo in the error message.
    print("Invalid path '{}'".format(img_folder))
    sys.exit(0)


# Sorting keeps the three parallel lists aligned by file name.
image_files = sorted(glob.glob(img_folder + "/*.jpg"))
label_files = sorted(glob.glob(label_folder + "/*.txt"))
id_files = sorted(glob.glob(ids_folder + "/*.txt"))
Example #24
0
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

# Script setup: merges two raw datasets into a freshly created output dataset.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_merge")

dataset_folder_1 = node.setupParameter("dataset_folder_1", "")
dataset_folder_2 = node.setupParameter("dataset_folder_2", "")
output_folder = node.setupParameter("output_folder", "")

dataset1 = RawDataset(dataset_folder_1)
dataset2 = RawDataset(dataset_folder_2)

# create=True builds the output dataset folder structure.
dataset = RawDataset(output_folder, create=True)

# Fix: the original aliased dataset1.data_list and then extended it in
# place, silently mutating dataset1's own list. Copy before extending.
data_list = list(dataset1.data_list)
data_list.extend(dataset2.data_list)

ZERO_PADDING_SIZE = 5
counter = 0
Example #25
0
from roars.vision.augmentereality import VirtualObject
from PyQt4 import QtCore
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtGui import QFileDialog
import PyQt4.QtGui as QtGui
import PyKDL
import sys
import cv2
import numpy as np
import functools
import random
import sys

# Script setup: exports a single scene manifest to a raw dataset.
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_scene_export")

scene_manifest_file = node.setupParameter("scene_manifest_file", '')

output_folder = node.setupParameter(
    "output_folder", "")


scene = TrainingScene.loadFromFile(scene_manifest_file)

# A one-scene training dataset fed into the raw-dataset builder.
dataset = TrainingDataset([scene])
dataset_builder = RawDatasetBuilder(dataset, output_folder)

# The else branch continues past this view.
if dataset_builder.build():
    pass
else: