Example #1
from roars.vision.cameras import Camera
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json
import glob
import sys

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("batch_marker_detector")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", False)
output_folder = node.setupParameter(
    "output_folder", "/home/daniele/Desktop/tmp/arp_calibration/markers_detections")

if not os.path.exists(output_folder):
    print ("Path doesn't exist'")
    sys.exit(0)

camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/microsoft_hd.yml'
)
Example #2
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import random
#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_to_yolo")


seed = node.setupParameter("seed", -1)
if seed >= 0:
    random.seed(seed)
else:
    random.seed()


prefix = node.setupParameter("prefix", "yolo_")

#⬢⬢⬢⬢⬢➤ dataset
dataset_folder = node.setupParameter("dataset_folder", "")
dataset = RawDataset(dataset_folder)
Example #3
###########################
ap.add_argument("--manifest",
                required=True,
                type=str,
                help="Manifest file")
###########################
ap.add_argument("--window_size",
                default="1400x900",
                type=str,
                help="Window resolution ('x' separated) e.g.: '800x600'")

args = vars(ap.parse_args())


# ⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_scene_explorer")
window_size = map(int, args['window_size'].split("x"))
scene_manifest_file = args['manifest']

# Base Widget Folder
WBaseWidget.DEFAULT_UI_WIDGETS_FOLDER = os.path.join(
    os.path.dirname(roars.__file__),
    '../../data/gui_forms/widgets'
)

# Window UI File
gui_file = os.path.join(
    os.path.dirname(roars.__file__),
    '../../data/gui_forms/roars_labeler_window.ui'
)
collected_objects = [{
    'rf': [
        -0.0615924, 0.06422103, 0.64401128, 0.85372638, 0.39145536,
        -0.12949947, 0.31525226
    ],
    'size': [0.05621781, 0.04149845, 0.07425442]
}, {
    'rf': [
        0.04838199, 0.06189513, 0.6455786, 0.9376154, 0.11573905, -0.03412503,
        0.32530516
    ],
    'size': [0.06787839, 0.04252589, 0.07461602]
}]

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("arp_live_viewer")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", True)

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter("camera_topic",
                                   "/usb_cam/image_raw/compressed")
camera_file = node.getFileInPackage(
    'roars', 'data/camera_calibrations/microsoft_hd_focus2.yml')
camera = CameraRGB(configuration_file=camera_file,
                   rgb_topic=camera_topic,
                   compressed_image="compressed" in camera_topic)
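# Editorial note: with the default topic "/usb_cam/image_raw/compressed" above,
# the "compressed" substring test evaluates to True, so the camera proxy
# subscribes to the compressed-image transport.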

#⬢⬢⬢⬢⬢➤ ARP
Example #5
        help="path to the pbtxt containing the label definition",
        required=True)
    parser.add_argument('-v',
                        '--visualization',
                        help="flag to enable visualization (only for debug)",
                        action='store_true')
    args = parser.parse_args()

    #create tensorflow wrapper class
    detector = tensorflow_detector_wrapper(args.graph, args.labelMap)
    if args.visualization:
        classes = detector.getClassDictionary()
        c_map = cv_show_detection.getColorMap(classes)

    #node creation
    node = RosNode("tf_obj_detector")

    #⬢⬢⬢⬢⬢➤ Sets HZ from parameters
    node.setHz(node.setupParameter("hz", 60))

    #create publisher
    prediction_topic = node.setupParameter("prediction_topic", "/detections")
    publisher = node.createPublisher(prediction_topic, numpy_msg(Floats))

    #⬢⬢⬢⬢⬢➤ Creates Camera Proxy
    camera_topic = node.setupParameter("camera_topic",
                                       "/camera/rgb/image_raw/compressed")
    camera_file = node.getFileInPackage(
        'roars', 'data/camera_calibrations/asus_xtion.yml')
    camera = CameraRGB(configuration_file=camera_file,
                       rgb_topic=camera_topic,
Example #6
        point_3d = np.array([
            current_arp_pose.p.x(),
            current_arp_pose.p.y(),
            current_arp_pose.p.z()
        ])
        collected_points.append(point_3d)
        print("Collected points", len(collected_points))
        if len(collected_points) == 4:
            box = BoundingBoxFromFourPoints(collected_points)
            collected_boxes.append(box)
            collected_points = []



#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("arp_live_viewer")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", True)

new_point_publisher = node.createPublisher("~new_point_event", Bool)
new_point_publisher_callback = node.createSubscriber(
    "~new_point_event", Bool, newPointCallback)


#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
    "camera_topic",
    "/usb_cam/image_raw/compressed"
)
Example #7
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arp import ARP
import roars.geometry.transformations as transformations
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter("camera_topic",
                                   "/camera/rgb/image_raw/compressed")
camera_file = node.getFileInPackage(
    'roars', 'data/camera_calibrations/asus_xtion_short_focus.yml')
camera = CameraRGB(configuration_file=camera_file,
                   rgb_topic=camera_topic,
                   compressed_image="compressed" in camera_topic)

#⬢⬢⬢⬢⬢➤ ARP
arp_configuration = node.getFileInPackage(
    'roars', 'data/arp_configurations/prototype_configuration.json')
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.augmentereality import VirtualObject
import roars.geometry.transformations as transformations
from roars.detections.prediction import prediction
from roars.datasets.datasetutils import TrainingClass
import cv2
import numpy as np
import os
import glob

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("make_video_from_predictions")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

dataset_folder = node.setupParameter("dataset_folder", "")
predictions_name = node.setupParameter("predictions_name", "")

images_folder = node.setupParameter("images_folder", "")
predictions_folder = node.setupParameter("predictions_folder", "")

print "Loading", images_folder

images_files = sorted(glob.glob(images_folder + "/*.jpg"))
predictions_files = sorted(glob.glob(predictions_folder + "/*.txt"))
min_th = 0.2
Example #9
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arp import ARP
import roars.geometry.transformations as transformations
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
    "camera_topic",
    "/camera/rgb/image_raw/compressed"
)
camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/asus_xtion_short_focus.yml'
)
camera = CameraRGB(
    configuration_file=camera_file,
    rgb_topic=camera_topic,
Example #10

def asSpherical(p):
    r = np.ravel(p)
    x = r[0]
    y = r[1]
    z = r[2]
    # takes list xyz (single coord)
    r = math.sqrt(x * x + y * y + z * z)
    theta = math.acos(z / r) * 180 / math.pi  # to degrees
    phi = math.atan2(y, x) * 180 / math.pi
    return [r, theta, phi]
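    # Editorial check values (pure math, no ROS needed):
    #   asSpherical([0.0, 0.0, 1.0]) -> [1.0, 0.0, 0.0]   (r=1, theta=0 deg, phi=0 deg)
    #   asSpherical([1.0, 0.0, 0.0]) -> [1.0, 90.0, 0.0]  (point on the +x axis)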


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_dataset_segmenter")


manifest_file = node.setupParameter("manifest_file", '')

#⬢⬢⬢⬢⬢➤ Create Scenes
scene = TrainingScene.loadFromFile(manifest_file)
target_label = 3

grid = VoxelGrid(center=np.array(
    [0.819, 0.125, -0.484]), side=64, resolution=0.005)


if scene.isValid():
    frames = scene.getAllFrames()
Example #11
        pp = Point()
        pp.x = p[0]
        pp.y = p[1]
        pp.z = p[2]
        marker.points.append(pp)

        cc = ColorRGBA()
        cc.r = color[0]
        cc.g = color[1]
        cc.b = color[2]
        cc.a = 1.0
        marker.colors.append(cc)
    return marker


node = RosNode("ciao", disable_signals=True)
node.setHz(10)

#⬢⬢⬢⬢⬢➤ Visualization topic
vis_topic = node.createPublisher('/showmap/clusters', MarkerArray)

map_file = node.setupParameter("map_file", "")

bench_folder = node.setupParameter("instance_folder", "")

folders = glob.glob(os.path.join(bench_folder, "*/"))

for instance_folder in folders:
    print("Converting: ", instance_folder)
    files = glob.glob(os.path.join(instance_folder, "*.txt"))
Example #12
from roars.vision.augmentereality import VirtualObject
from PyQt4 import QtCore
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtGui import QFileDialog
import PyQt4.QtGui as QtGui
import PyKDL
import sys
import cv2
import numpy as np
import functools
import random

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_scene_export")

scene_manifest_file = node.setupParameter("scene_manifest_file", '')

output_folder = node.setupParameter(
    "output_folder", "")


scene = TrainingScene.loadFromFile(scene_manifest_file)

dataset = TrainingDataset([scene])
dataset_builder = RawDatasetBuilder(dataset, output_folder)

if dataset_builder.build():
    pass
else:
Example #13
        pp = Point()
        pp.x = p[0]
        pp.y = p[1]
        pp.z = p[2]
        marker.points.append(pp)

        cc = ColorRGBA()
        cc.r = color[0]
        cc.g = color[1]
        cc.b = color[2]
        cc.a = 1.0
        marker.colors.append(cc)
    return marker


node = RosNode("ciao", disable_signals=True)
node.setHz(10)

#⬢⬢⬢⬢⬢➤ Visualization topic
vis_topic = node.createPublisher('/showmap/clusters', MarkerArray)


map_file = node.setupParameter("map_file", "")

instance_folder = node.setupParameter("instance_folder", "")

test = '/media/psf/Home/Desktop/remote_Temp/rgb-dataset-masks-squared-maps/benchs/scene_01_w0.01/instance_000_0.txt'

points = np.loadtxt(test)

volume_marker = createMarkerVolume(
Example #14
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 1))

while node.isActive():
    node.getLogger().log("Time: {}".format(node.getCurrentTimeInSecs()))
    node.tick()
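
# --- Editorial sketch (not one of the scraped examples) ---------------------
# Shows how the minimal loop above is usually combined with the publisher API
# seen in Examples #5, #6 and #11. Assumptions: std_msgs.msg.String is
# available and createPublisher() returns a rospy-style publisher exposing
# publish().
from std_msgs.msg import String
from roars.rosutils.rosnode import RosNode

sketch_node = RosNode("heartbeat_example")
sketch_node.setHz(sketch_node.setupParameter("hz", 1))
status_pub = sketch_node.createPublisher("~status", String)

while sketch_node.isActive():
    msg = String()
    msg.data = "alive at {}".format(sketch_node.getCurrentTimeInSecs())
    status_pub.publish(msg)  # assumed rospy-style publish()
    sketch_node.tick()
# -----------------------------------------------------------------------------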
Example #15
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_dataset_ids_list")

dataset_folder = node.setupParameter("dataset_folder", "")

img_folder = os.path.join(dataset_folder, "images")
label_folder = os.path.join(dataset_folder, "labels")
ids_folder = os.path.join(dataset_folder, "ids")

if not os.path.exists(img_folder):
    print("Invalud path '{}'".format(img_folder))
    sys.exit(0)


image_files = sorted(glob.glob(img_folder + "/*.jpg"))
label_files = sorted(glob.glob(label_folder + "/*.txt"))
id_files = sorted(glob.glob(ids_folder + "/*.txt"))
Example #16
                # if l == -1:
                return colors.getColor(MultiLabelVoxel.colorFromLabel(-1))
                # else:
                #     return colors.getColor(MultiLabelVoxel.colorFromLabel(l))
            else:
                return (0, 0, 0)

    @staticmethod
    def colorFromLabel(label):
        try:
            return MultiLabelVoxel.CLASSES_COLOR_MAP[label]
        except:
            return 'black'


node = RosNode("ciao", disable_signals=True)
node.setHz(1)
map_file = node.setupParameter("map_file", "")
min_w = node.setupParameter("min_w", 0)
min_cluster_size = node.setupParameter("min_cluster_size", 0)
debug = node.setupParameter("debug", True)
is_gt = node.setupParameter("is_gt", False)
output_path = node.setupParameter("output_path", "/tmp/daniele/")
output_name = node.setupParameter("output_name", "")
#⬢⬢⬢⬢⬢➤ Map Rototranslation
roll = node.setupParameter("roll", 0.0)
pitch = node.setupParameter("pitch", 0.0)
yaw = node.setupParameter("yaw", 0.0)
map_transform = PyKDL.Frame()
map_transform.M = PyKDL.Rotation.RPY(roll, pitch, yaw)
Example #17
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
    "camera_topic",
    "/camera/rgb/image_raw/compressed"
)
camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/asus_xtion.yml'
)
camera = CameraRGB(
    configuration_file=camera_file,
    rgb_topic=camera_topic,
Example #18
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("ar_object_visualizer")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
    "camera_topic",
    "/camera/rgb/image_raw/compressed"
)
camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/asus_xtion.yml'
)
camera = CameraRGB(
    configuration_file=camera_file,
Example #19
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
import cv2
import numpy as np
import os

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
    "camera_topic",
    "/camera/rgb/image_raw/compressed"
)
camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/asus_xtion.yml'
)
camera = CameraRGB(
    configuration_file=camera_file,
    rgb_topic=camera_topic,
    compressed_image="compressed" in camera_topic
)


def asSpherical(p):
    r = np.ravel(p)
    x = r[0]
    y = r[1]
    z = r[2]
    # takes list xyz (single coord)
    r = math.sqrt(x * x + y * y + z * z)
    theta = math.acos(z / r) * 180 / math.pi  # to degrees
    phi = math.atan2(y, x) * 180 / math.pi
    return [r, theta, phi]


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_dataset_export")


roars_datasets_path = node.setupParameter("roars_datasets_path", '')
raw_dataset_path = node.setupParameter("raw_dataset_path", "")
output_folder = node.setupParameter("output_folder", "/tmp")

files = glob.glob(os.path.join(roars_datasets_path, "*.txt"))
scenes = {}
for f in files:
    scene = TrainingScene.loadFromFile(f)
    if scene != None and scene.isValid():
        name = scene.getScenePath()
        scenes[name] = scene

for name, scene in scenes.iteritems():

Example #22
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_filter")

datasets_folder = node.setupParameter("datasets_folder", "")
output_folder = node.setupParameter("output_folder", "")
filter_file = node.setupParameter("filter_file", "")
negative = node.setupParameter("negative", False)

filter_ids = open(filter_file, 'r').read().splitlines()

raw_dataset_folders = sorted(os.listdir(datasets_folder))

#⬢⬢⬢⬢⬢➤ Groups all data
whole_data = {}
for d in raw_dataset_folders:
    dataset_path = os.path.join(datasets_folder, d)
    dataset = RawDataset(dataset_path)
Example #23
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.augmentereality import VirtualObject
import roars.geometry.transformations as transformations
import cv2
import numpy as np
import os
import glob
from shutil import copyfile
import shutil
import collections


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("manual_labels_matcher")

roars_dataset_folder = node.setupParameter(
    "roars_dataset_folder", "/home/daniele/Desktop/datasets/roars_2017/indust")

output_folder_test = node.setupParameter(
    "output_folder_test", "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_test")
output_folder_train = node.setupParameter(
    "output_folder_train", "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_train")

elements = []
for file in glob.glob(roars_dataset_folder + "/*.txt"):
    name = os.path.basename(file).split(".")[0]
    img_folder = os.path.join(os.path.dirname(file), name + "_images")
    labels_folder = os.path.join(os.path.dirname(file), name + "_labels")
    f = open(file, 'r')
Example #24
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_merge_all")

datasets_folder = node.setupParameter("datasets_folder", "")
output_folder = node.setupParameter("output_folder", "")
include_tags = node.setupParameter("include_tags", "gt_", array_type=str)
exclude_tags = node.setupParameter("exclude_tags", "4_", array_type=str)
debug = node.setupParameter("debug", True)
raw_dataset_folders = sorted(os.listdir(datasets_folder))

print("Include tags", include_tags, len(include_tags))

#⬢⬢⬢⬢⬢➤ Filter
folders = []
for d in os.listdir(datasets_folder):

    if any(w in d for w in include_tags):
Example #25
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.augmentereality import VirtualObject
import roars.geometry.transformations as transformations
from roars.detections.prediction import prediction
from roars.datasets.datasetutils import TrainingClass
import cv2
import numpy as np
import os
import glob

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("make_video_from_predictions")


#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

dataset_folder = node.setupParameter("dataset_folder", "")
predictions_name = node.setupParameter("predictions_name", "")


images_folder = node.setupParameter("images_folder", "")
predictions_folder = node.setupParameter("predictions_folder", "")

print "Loading", images_folder

images_files = sorted(glob.glob(images_folder + "/*.jpg"))
Example #26
from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import json

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("ar_object_visualizer")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter("camera_topic",
                                   "/camera/rgb/image_raw/compressed")
camera_file = node.getFileInPackage('roars',
                                    'data/camera_calibrations/asus_xtion.yml')
camera = CameraRGB(configuration_file=camera_file,
                   rgb_topic=camera_topic,
                   compressed_image="compressed" in camera_topic)

#⬢⬢⬢⬢⬢➤ Robot pose
robot_ee_tf_name = node.setupParameter("robot_ee_tf_name",
Example #27
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.datasets.datasetutils import TrainingScene, TrainingClassesMap
from roars.rosutils.rosnode import RosNode
import sys

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("generate_roars_dataset_manifest")

scene_path = node.setupParameter("scene_path", '')
images_path = node.setupParameter("images_path", 'images')
depth_path = node.setupParameter("images_path", 'depth')
robot_pose_name = node.setupParameter("robot_pose_name", 'robot_poses.txt')
camera_intrisics_file = node.setupParameter("camera_intrisics_file", '')
camera_extrinsics_file = node.setupParameter("camera_extrinsics_file", '')
output_manifest_file = node.setupParameter("output_manifest_file", '')
force_relative = node.setupParameter("force_relative", True)
classes = node.setupParameter("classes", "", array_type=str)
force_no_classes = node.setupParameter("force_no_classes", False)

if not force_no_classes and len(classes) == 0:
    print("No classes found!")
    sys.exit(0)

#⬢⬢⬢⬢⬢➤ Create Scenes
scene = TrainingScene(
    scene_path=scene_path,
    images_path=images_path,
    images_depth_path=depth_path,
    robot_pose_name=robot_pose_name,
Example #28
        error = self.computeClusterError(cumulation_points)
        if ArpBuilder.DEBUG_OPTIMIZITION_OUTPUT:
            print error
        return error

    def extractOnlyId(self, whole_data, id):
        frames = []
        for data in whole_data:
            if id in data:
                frames.append(data[id])
        return frames
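        # Editorial example (hypothetical data): with
        #   whole_data = [{7: pose_a}, {7: pose_b, 9: pose_c}]
        # extractOnlyId(whole_data, 7) collects [pose_a, pose_b], i.e. every
        # per-frame entry that contains the requested marker id.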


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("arp_optimizer_test")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", False)

camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/microsoft_hd.yml'
)
camera = Camera(configuration_file=camera_file)


detections_folder = node.setupParameter(
    "detections_folder", "/home/daniele/Desktop/tmp/arp_calibration/markers_detections/")
Example #29
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_merge_all")

datasets_folder = node.setupParameter("datasets_folder", "")
output_folder = node.setupParameter("output_folder", "")
include_tags = node.setupParameter("include_tags", "gt_", array_type=str)
exclude_tags = node.setupParameter("exclude_tags", "4_", array_type=str)
debug = node.setupParameter("debug", True)
raw_dataset_folders = sorted(os.listdir(datasets_folder))


print("Include tags", include_tags, len(include_tags))

#⬢⬢⬢⬢⬢➤ Filter
folders = []
for d in os.listdir(datasets_folder):

collected_objects = [
    {
        'rf': [-0.0615924,   0.06422103,  0.64401128, 0.85372638, 0.39145536, -0.12949947, 0.31525226],
        'size': [0.05621781,  0.04149845,  0.07425442]
    },
    {
        'rf': [0.04838199,  0.06189513,  0.6455786,  0.9376154, 0.11573905, -0.03412503,  0.32530516],
        'size': [0.06787839,  0.04252589,  0.07461602]
    }
]


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("arp_live_viewer")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))
debug = node.setupParameter("debug", True)


#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter(
    "camera_topic",
    "/usb_cam/image_raw/compressed"
)
camera_file = node.getFileInPackage(
    'roars',
    'data/camera_calibrations/microsoft_hd_focus2.yml'
)
Example #31
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.cameras import CameraRGB
import cv2
import numpy as np
import os

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rosnode_example")

#⬢⬢⬢⬢⬢➤ Sets HZ from parameters
node.setHz(node.setupParameter("hz", 30))

#⬢⬢⬢⬢⬢➤ Creates Camera Proxy
camera_topic = node.setupParameter("camera_topic",
                                   "/camera/rgb/image_raw/compressed")
camera_file = node.getFileInPackage('roars',
                                    'data/camera_calibrations/asus_xtion.yml')
camera = CameraRGB(configuration_file=camera_file,
                   rgb_topic=camera_topic,
                   compressed_image="compressed" in camera_topic)


#⬢⬢⬢⬢⬢➤ Camera Callback
def cameraCallback(frame):
    #⬢⬢⬢⬢⬢➤ Grabs image from Frame
    img = frame.rgb_image.copy()

    #⬢⬢⬢⬢⬢➤ Show
Example #32
            f = open(output_id, 'w')
            f.write(id + "\n")

            if scene_name not in id_map:
                id_map[scene_name] = 0
            id_map[scene_name] = id_map[scene_name] + 1

            #print(counter_string, img, label, id)
            counter = counter + 1

    return collections.OrderedDict(sorted(id_map.items()))


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("manual_labels_fusion")

dataset_folder = node.setupParameter(
    "dataset_folder", "/home/daniele/Desktop/datasets/roars_manual_2017/original_tolabel")

output_folder_test = node.setupParameter(
    "output_folder_test", "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_test")
output_folder_train = node.setupParameter(
    "output_folder_train", "/home/daniele/Desktop/datasets/roars_manual_2017/manual_dataset_train")


class_map = {
    "comp1": 0,
    "comp2": 1,
    "comp3": 2,
    "comp4": 3,
Example #33
from roars.gui.widgets.WSceneFrameVisualizer import WSceneFrameVisualizer
from roars.gui.widgets.WInstanceCreator import WInstanceCreator
from roars.gui.widgets.WInstancesList import WInstancesList
from roars.gui.widgets.WAxesEditor import WAxesEditor
from PyQt4 import QtCore
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtGui import QFileDialog
import PyQt4.QtGui as QtGui
import sys
import math
import os


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("roars_scene_explorer")
window_size = node.setupParameter("window_size", "1400;900", array_type=int)
scene_manifest_file = node.setupParameter("manifest", '')

WBaseWidget.DEFAULT_UI_WIDGETS_FOLDER = node.getFileInPackage(
    'roars', 'data/gui_forms/widgets'
)


class MainWindow(PyQtWindow):

    def __init__(self, uifile):
        super(MainWindow, self).__init__(uifile)

        # self.showMaximized()
        self.setFixedSize(window_size[0], window_size[1])
Example #34
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_subset")

dataset_folder = node.setupParameter("dataset_folder", "")
output_folder = node.setupParameter("output_folder", "")
max_counter = node.setupParameter("max_counter", 1000)

dataset_in = RawDataset(dataset_folder)
dataset = RawDataset(output_folder, create=True)
ZERO_PADDING_SIZE = 5
counter = 0
for data in dataset_in.data_list:

    counter_string = '{}'.format(str(counter).zfill(ZERO_PADDING_SIZE))
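    # e.g. with ZERO_PADDING_SIZE = 5 and counter = 7, counter_string == "00007"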

    img_file = os.path.join(dataset.img_folder, counter_string + ".jpg")
    label_file = os.path.join(dataset.label_folder, counter_string + ".txt")
Example #35
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from roars.rosutils.rosnode import RosNode
from roars.vision.augmentereality import BoundingBoxGenerator
from roars.datasets.datasetutils import TrainingInstance, JSONHelper
import roars.geometry.transformations as transformations
import os
import json
import sys

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("convert_arp_points")

#⬢⬢⬢⬢⬢➤ Output File
output_file = node.setupParameter('output_file', '')
if output_file == '':
    print("Invalid output file ''".format(output_file))
    sys.exit(0)

#⬢⬢⬢⬢⬢➤ Object Points
objects_points_file = node.setupParameter('objects_points_file', '')

if os.path.exists(objects_points_file):
    objects_data = JSONHelper.loadFromFile(objects_points_file)

    #⬢⬢⬢⬢⬢➤ Creates a Box Generator based on number of points
    number_of_points = objects_data["points_per_objects"]
    box_generator = BoundingBoxGenerator.getGenerator(number_of_points)

    if box_generator != None:
Example #36
from roars.vision.cameras import CameraRGB
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
import roars.geometry.transformations as transformations

import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("rgb_dataset_to_roars")

scene_folder = node.setupParameter("scene_folder", "")
out_folder = node.setupParameter("out_folder", "")
zeros = node.setupParameter("zeros", 5)

basename = os.path.basename(scene_folder)

output_folder = os.path.join(out_folder, basename)
rgb_path = os.path.join(output_folder, "images")
depth_path = os.path.join(output_folder, "depth")
pose_path = os.path.join(output_folder, "robot_poses.txt")
extr_path = os.path.join(output_folder, "camera_extrinsics.txt")
intr_path = os.path.join(output_folder, "camera_intrinsics.txt")
try:
    os.makedirs(output_folder)
Example #37
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_merge")

dataset_folder_1 = node.setupParameter("dataset_folder_1", "")
dataset_folder_2 = node.setupParameter("dataset_folder_2", "")
output_folder = node.setupParameter("output_folder", "")

dataset1 = RawDataset(dataset_folder_1)
dataset2 = RawDataset(dataset_folder_2)

dataset = RawDataset(output_folder, create=True)

data_list = dataset1.data_list
data_list.extend(dataset2.data_list)

ZERO_PADDING_SIZE = 5
counter = 0
Example #38
from roars.vision.arucoutils import MarkerDetector
from roars.vision.arp import ARP
from roars.vision.augmentereality import BoundingBoxFromSixPoints, VirtualObject
from roars.datasets.datasetutils import RawDataset
import roars.geometry.transformations as transformations
from collections import OrderedDict
import roars.vision.cvutils as cvutils
import cv2
import numpy as np
import os
import glob
import sys
import shutil

#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_merge_all")

datasets_folder = node.setupParameter("datasets_folder", "")
output_folder = node.setupParameter("output_folder", "")

raw_dataset_folders = sorted(os.listdir(datasets_folder))

data_list = []
#⬢⬢⬢⬢⬢➤ Groups all data
whole_data = {}
for d in raw_dataset_folders:
    dataset_path = os.path.join(datasets_folder, d)
    dataset = RawDataset(dataset_path)
    data_list.extend(dataset.data_list)

dataset = RawDataset(output_folder, create=True)
Example #39
    all_count = len(frames)
    test_count = int(all_count * test_percentage)
    val_count = int(all_count * validity_percentage)
    train_count = all_count - test_count - val_count

    trains = frames[:train_count]
    remains = frames[train_count:]

    tests = remains[:test_count]
    vals = remains[test_count:]
    return {"train": trains, "test": tests, "val": vals}


#⬢⬢⬢⬢⬢➤ NODE
node = RosNode("raw_datasets_merge_all")

datasets_folder = node.setupParameter("datasets_folder", "")
output_folder = node.setupParameter("output_folder", "")
test_percentage = node.setupParameter("test_percentage", 0.1)
val_percentage = node.setupParameter("val_percentage", 0.05)

if len(output_folder) == 0:
    print("Error in outputfolder")
    sys.exit(0)

os.mkdir(output_folder)

raw_dataset_folders = sorted(os.listdir(datasets_folder))

print raw_dataset_folders
Example #40
if __name__=='__main__':
    parser = argparse.ArgumentParser(description="Load an inference graph and use it on the input images")
    parser.add_argument('-g','--graph',help="path to the pb file with the graph and weight definition",required=True)
    parser.add_argument('-l','--labelMap',help="path to the pbtxt containing the label definition",required=True)
    parser.add_argument('-v','--visualization',help="flag to enable visualization (only for debug)",action='store_true')
    args = parser.parse_args()

    #create tensorflow wrapper class
    detector = tensorflow_detector_wrapper(args.graph,args.labelMap)
    if args.visualization:
        classes = detector.getClassDictionary()
        c_map = cv_show_detection.getColorMap(classes)

    #node creation
    node = RosNode("tf_obj_detector")

    #⬢⬢⬢⬢⬢➤ Sets HZ from parameters
    node.setHz(node.setupParameter("hz", 60))

    #create publisher
    prediction_topic = node.setupParameter(
        "prediction_topic",
        "/detections"
    )
    publisher = node.createPublisher(prediction_topic,numpy_msg(Floats))


    #⬢⬢⬢⬢⬢➤ Creates Camera Proxy
    camera_topic = node.setupParameter(
        "camera_topic",