Code Example #1
File: main.py  Project: DaniFoldi/Python-Gomoku
    def __init__(self):
        if len(sys.argv) > 1:
            self.locale = Localization(sys.argv[1])
        else:
            self.locale = Localization("en_US")
        self.version = "V1.2"
        self.window = tk.Tk()
        self.center_window()
        self.display_gui(0)
        self.local_ip = socket.gethostbyname(socket.gethostname())
        self.local_name = socket.gethostname().replace("-", " ")

        self.discovery = Bidirectional_discovery()
        self.communication = Bidirectional_communication()
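Here the locale code for the Localization object is taken from the first command-line argument, with "en_US" as the fallback; launching the game as, say, python main.py de_DE (the locale code is a hypothetical example) would construct Localization with that locale instead.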
Code Example #2
    def __init__(self):

        # Create Marker Follower Object
        self.marker_follower = Marker_Follower()
        self.marker_follower.initialize_gains()  # Initialize PID with Default Gains
        # Create a Localization object
        self.localization = Localization()
        # Create a Color Evaluation object
        self.color_evaluation = Color_Evaluation()
        # Create a Limits Exploration object
        self.limits_exploration = Limits_Exploration()

        # FIXME: Parameters from YAML File
        self.height_down = rospy.get_param('low_height')  # Used in fly_to() Function
        self.height_up = rospy.get_param('exploration_height')  # Used in fly_to() Function
        self.drone_speed_topic = rospy.get_param('drone_speed_topic')
        self.drone_takeoff_topic = rospy.get_param('drone_takeoff_topic')
        self.drone_land_topic = rospy.get_param('drone_land_topic')
        self.debug = rospy.get_param('debug')

        # Service Responsible for Controlling the Drone
        self.drone_control_srv_name = rospy.get_param('drone_control_srv')
        self.drone_control_service = rospy.Service(
            self.drone_control_srv_name, ControlDrone,
            self.drone_control_srv_function)
        # Drone Speed/Takeoff/Land Publish Topics
        self.pub_cmd = rospy.Publisher(self.drone_speed_topic,
                                       Twist,
                                       queue_size=10)
        self.takeoff = rospy.Publisher(self.drone_takeoff_topic,
                                       Empty,
                                       queue_size=100)
        self.land = rospy.Publisher(self.drone_land_topic,
                                    Empty,
                                    queue_size=100)

        # Flags Needed for Drone Control
        self.marker_follow_flag = False  # Flag to start/stop PID to Follow Marker
        self.fly_flag = False  # Flag to Control Drone to fly Up/Down
        self.recovery_behavior = False  # Used in recovery() Function if Marker is Lost
        self.fly_direction = ''  # Holds the Direction that Drone is Flying (Up/Down)
        self.state = False  # Flag: True if Drone has Taken Off the Ground, False if Drone has Landed
        self.take_image = False  # True if Needed to Take Image from Drone Bottom Camera

        # Parameters
        self.empty = Twist()
        self.taken_image = Image()  # Holds the Image Taken from Drone Bottom Camera
Code Example #3
File: localmodel.py  Project: sethlee0111/MobilityHMM
def loadModel_eval():
    venueData = pd.read_csv("./VenueID_data.csv")
    l = Localization(venueData)
    trajectorydata = pd.read_csv("./trainTrajectory_final.csv")
    #print(trajectorydata["VenueID"].describe())
    t = Trajectory(trajectorydata)
    usersgroup = l.grouping(groupNum)
    models = []
    models = load_models(models)
    testtrajectorydata = pd.read_csv("./testTrajectory_smaller.csv")
    testTrajectory = Trajectory(testtrajectorydata)
    for i in range(0, len(models)):
        print(str(datetime.datetime.now()) + "   eval model " + str(i))
        data, length, prob, dic = testTrajectory.get(usersgroup[i])
        print(models[i].score(data, length) / len(length))
    eval_loc_model(models, usersgroup)
Code Example #4
File: backendServer.py  Project: PKehnel/ENSE
    def do_POST(self):
        """
        Handling of the Post Request. First determines the sight against which to match.
        Then start the matching process and send the computed pose as answer.
        Afterwards the matching is visualized, this can be turned of for production.
        """
        print("Got a Post Request")
        self._set_headers()
        content_length = int(self.headers["Content-Length"])
        json_data = self.rfile.read(content_length)
        project_name = allocate_sight(json_data)
        try:
            getattr(self, project_name)
        except AttributeError:
            setattr(self, project_name, Localization(project_name))

        answer = getattr(self, project_name).localize_image(json_data)

        json_answer = to_json(answer)
        self.wfile.write(bytes(json_answer, "utf-8"))
        self.current_name = project_name
        self.request_over = True
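The handler above instantiates Localization for a sight only the first time it is requested, storing it as an attribute named after the project. Below is a minimal standalone sketch of that getattr/setattr lazy-instantiation pattern; the Localization stand-in and the Server class are illustrative placeholders, not part of the project above.

class Localization:
    # Illustrative stand-in for the project's Localization class.
    def __init__(self, project_name):
        self.project_name = project_name


class Server:
    def get_localizer(self, project_name):
        # Build the Localization object only on the first request for this
        # project, then keep it as an attribute named after the project.
        try:
            return getattr(self, project_name)
        except AttributeError:
            setattr(self, project_name, Localization(project_name))
            return getattr(self, project_name)


server = Server()
first = server.get_localizer("sight_a")   # creates a new Localization
second = server.get_localizer("sight_a")  # returns the cached attribute
assert first is second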
Code Example #5
File: localmodel.py  Project: sethlee0111/MobilityHMM
def main():
    venueData = pd.read_csv("./VenueID_data.csv")
    l = Localization(venueData)
    trajectorydata = pd.read_csv("./trainTrajectory_final.csv")
    #print(trajectorydata["VenueID"].describe())
    t = Trajectory(trajectorydata)
    usersgroup = l.grouping(groupNum)

    testtrajectorydata = pd.read_csv("./testTrajectory_final.csv")
    testTrajectory = Trajectory(testtrajectorydata)

    models, dics = train(usersgroup=usersgroup, trajectory=t)
    for i in range(0, len(models)):
        with open(
                './LocalizationModel/' + str(groupNum) + 'model_state' +
                str(states) + "_" + str(i) + '.pkl', 'wb') as output:
            pickle.dump(models[i], output)

    for i in range(0, len(models)):
        eval_loc_model(testTrajectory=testTrajectory,
                       model=models[i],
                       users=usersgroup[i],
                       dic=dics[i])
Code Example #6
File: main.py  Project: Aenteas/localisation
from map import Map
from map import Robot
import numpy as np
import random
from localization import Localization
import math

#####made by David

num_features = 30
map_size = (500, 600)
min_timediff_sec = 1.0/50.0
#generate features
features1 = []
for i in range(num_features):
        pos_x = np.random.rand() * map_size[1]
        pos_y = np.random.rand() * map_size[0]
        features1.append((pos_x, pos_y))
pos = (300, 250)

features2 = [(200, 300),(250, 300),(300, 300),(350, 300),(400, 300),(450, 300)]
pos = (50, 300)

map = Map(Robot(pos=pos), features2, Localization(min_timediff_sec))
map.simulate()
Code Example #7
import rospy
import argparse
import matplotlib.pyplot as plt
import matplotlib.animation as animation

from modules.planning.proto import planning_pb2
from subplot_st_main import StMainSubplot
from subplot_path import PathSubplot
from subplot_sl_main import SlMainSubplot
from subplot_st_speed import StSpeedSubplot
from subplot_speed import SpeedSubplot
from localization import Localization
from planning import Planning

planning = Planning()
localization = Localization()


def update(frame_number):
    #st_main_subplot.show(planning)
    #st_speed_subplot.show(planning)
    map_path_subplot.show(planning, localization)
    dp_st_main_subplot.show(planning)
    qp_st_main_subplot.show(planning)
    speed_subplot.show(planning)
    sl_main_subplot.show(planning)
    st_speed_subplot.show(planning)


def planning_callback(planning_pb):
    planning.update_planning_pb(planning_pb)
Code Example #8
File: create_model.py  Project: PKehnel/ENSE
import argparse

from localization import Localization

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--project_name", type=str, required=True)
    args = parser.parse_args()
    loc = Localization(project_name=args.project_name)
    loc.create_model()
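Assuming the script is run directly as create_model.py (the file name shown above), a typical invocation would pass the project name on the command line, e.g. python create_model.py --project_name my_sight, where my_sight is a hypothetical placeholder for an actual sight name.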
Code Example #9
        print('\nVOX coords not available\n')
        exit(1)
    leads = build_leads(files_mother, args.add_fs)
    show_leads(leads)
    leads_as_dict = leads_to_dict(leads)
    clean_json_dump(leads_as_dict,
                    open(args.output, 'w'),
                    indent=2,
                    sort_keys=True)

    print('\nStage 0: json file saved with vox coords\n')
    """
    Add test for fs coordinate
    """

    leads_fs = Localization(args.output)
    files_fs = file_locations_fs(args.subject)
    if not os.path.isfile(files_fs["coord_t1"]) or not os.path.isfile(
            files_fs["fs_orig_t1"]):
        print('\n\nCoregistration not available\n')
        exit(1)
    leads_fs = build_leads_fs(files_fs, leads_fs)

    leads_fs.to_json(args.output + '_fs')
    print('\n\nStage 1: json file saved with freesurfer space coordinates\n')
    """
    Add test for localization info
    """
    leads_loc = Localization(args.output + '_fs')
    files_loc = file_locations_loc(args.subject)
    if not os.path.isfile(files_loc["native_loc"]):
Code Example #10
File: main.py  Project: StarkitRobots/kondo_mv
# robotHeight = 0.37 #[m]
ROBOT_HEIGHT = 0.42

clock = time.clock()
sensor.reset()
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QVGA)
sensor.set_auto_exposure(False)
sensor.set_auto_whitebal(False)
sensor.skip_frames(time=2000)
sensor.set_auto_gain(False, gain_db=0)
sensor.set_auto_whitebal(False, (-6.02073, -5.11, 1.002))
sensor.set_auto_exposure(False, 2000)

vision = Vision({})
loc = Localization(-0.7, -1.3, math.pi / 2, side)
strat = Strategy()
motion = Motion()
model = Model()
vision_postprocessing = Vision_postprocessing()
vision.load_detectors("vision/detectors_config.json")
with open("calibration/cam_col.json") as f:
    calib = json.load(f)

# setting model parameters
mass1 = [0, 0, 0, 0, 0, 0]
mass2 = [0, 0]
model.setParams(calib["cam_col"], ROBOT_HEIGHT, mass1, mass2)

#motion.move_head()
#model.updateCameraPanTilt(0, -math.pi/6)
Code Example #11
    :param triangles: Nx3 matrix containing the indices of the connected vertices
    """
    mayavi.mlab.triangular_mesh(points[:, 0],
                                points[:, 1],
                                points[:, 2],
                                triangles,
                                opacity=.4,
                                color=(.7, .7, .7))


subject = sys.argv[1]
loc_file = sys.argv[2]

files = file_locations(subject)

loc = Localization(loc_file)
contacts = loc.get_contacts()

coords = loc.get_contact_coordinates('fs', contacts)
coords_corrected = loc.get_contact_coordinates('fs', contacts, 'corrected')

plot_coordinates(coords)
plot_coordinates(coords_corrected, (0, 0, 1))

p, f = load_surface(files['surface_l'])
p2, f2 = load_surface(files['surface_r'])
plot_surface(p2, f2)
plot_surface(p, f)

mayavi.mlab.show()
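This snippet reads the subject ID and the localization file path from sys.argv, so a hypothetical invocation would look like python plot_localization.py R1001 R1001_localization.json; both the script name and the argument values here are illustrative, not taken from the source project.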