Example No. 1
    def evaluate(self):
        self.max_alt = 0
        self.max_sun_alt = -2**30
        self.min_range = 2**30
        self.eclipsed = 0
        self.measure_points = 0
        # Step through the pass in 10-second increments (10 / 86400 days).
        for date, obj in tracker.sat_stepper(self.observer, self.obj, self.start, self.end, 10.0 / 86400):
            sun.compute(self.observer)

            self.measure_points += 1

            if obj.eclipsed:
                self.eclipsed += 1

            # Track the peak satellite altitude, peak sun altitude and closest range.
            if obj.alt > self.max_alt:
                self.max_alt = obj.alt
            if sun.alt > self.max_sun_alt:
                self.max_sun_alt = sun.alt
            if obj.range < self.min_range:
                self.min_range = obj.range

        # Percentage of sampled points during which the satellite was eclipsed.
        self.shadow_ratio = self.eclipsed * 100 / self.measure_points

        total = criteria.Score()

        total.add(Pass.shadow_crit, self.shadow_ratio)
        total.add(Pass.max_alt_crit, rad2deg(self.max_alt))
        total.add(Pass.max_sun_alt_crit, rad2deg(self.max_sun_alt))

        self.score = total.score()
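
For context, the rad2deg helper used throughout these examples is just the radians-to-degrees conversion (numpy ships one as numpy.rad2deg); a minimal sketch, assuming the project-specific helpers behave the same way:

import math

def rad2deg(radians):
    # Convert an angle from radians to degrees.
    return radians * 180.0 / math.pi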
Example No. 2
def inverse_kinematics(x, y, l1, l2):
    solve = []
    # theta2 has two solutions (elbow-up and elbow-down)
    D = (x**2 + y**2 - l1**2 - l2**2) / (2 * l1 * l2)
    S1 = np.sqrt(1 - D**2)
    S2 = -S1
    a_theta2 = np.arctan2(S1, D)
    b_theta2 = np.arctan2(S2, D)
    # compute theta1 for each theta2 solution
    kesi = np.arctan2(y, x)
    a_theta1 = kesi - np.arcsin(l2 * np.sin(np.pi - a_theta2) / np.sqrt(x**2 + y**2))
    b_theta1 = kesi - np.arcsin(l2 * np.sin(np.pi - b_theta2) / np.sqrt(x**2 + y**2))
    solve.append((utils.rad2deg(a_theta1), utils.rad2deg(a_theta2)))
    solve.append((utils.rad2deg(b_theta1), utils.rad2deg(b_theta2)))
    return solve
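
# A minimal self-check sketch, assuming the usual two-link planar-arm convention
# and that utils.rad2deg is a plain radians-to-degrees conversion (numpy as np,
# as above): both returned solutions should map back onto the requested point.
def forward_kinematics(theta1_deg, theta2_deg, l1, l2):
    # Map the two joint angles (in degrees) back to the end-effector position.
    t1, t2 = np.deg2rad(theta1_deg), np.deg2rad(theta2_deg)
    x = l1 * np.cos(t1) + l2 * np.cos(t1 + t2)
    y = l1 * np.sin(t1) + l2 * np.sin(t1 + t2)
    return x, y

# Example check (kept commented out so the module stays import-safe):
# for t1, t2 in inverse_kinematics(1.0, 1.0, l1=1.0, l2=1.0):
#     print(forward_kinematics(t1, t2, 1.0, 1.0))  # roughly (1.0, 1.0) both times
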
def main():
    # points to be followed
    pts = [(-6, -7), (-6, 0), (-4, 6), (0, 5), (0, -2), (-2, -6), (3, -5),
           (3, 6), (6, 4)]

    # generate PATH so the vectors are pointing at each other
    PATH = []
    for i in range(len(pts) - 1):
        dx = pts[i + 1][0] - pts[i][0]
        dy = pts[i + 1][1] - pts[i][1]
        theta = math.atan2(dy, dx)
        PATH.append((pts[i][0], pts[i][1], utils.rad2deg(theta)))
    PATH.append((pts[-1][0], pts[-1][1], 0))

    # or you can also manually set the angles:
    # PATH = [(-5,5,90),(-5,5,-90),(1,4,180), (5,4,0), (6,-3,90), (4,-4,-40),(-2,0,240), \
    #         (-6, -7, 160), (-7,-1,80)]

    # or just generate a random route:
    # PATH = []
    # for _ in range(10):
    #     PATH.append((rd.randint(-7,7), rd.randint(-7,7), rd.randint(0,359)))

    # init turtle
    tesla = turtle.Turtle()
    tesla.speed(0)  # 0: fast; 1: slow, 8.4: cool
    tesla.shape('arrow')
    tesla.resizemode('user')
    tesla.shapesize(1, 1)

    # draw vectors representing points in PATH
    for pt in PATH:
        draw.goto(tesla, pt)
        draw.vec(tesla)

    # draw all routes found
    tesla.speed(0)
    for i in range(len(PATH) - 1):
        paths = rs.get_all_paths(PATH[i], PATH[i + 1])

        for path in paths:
            draw.set_random_pencolor(tesla)
            draw.goto(tesla, PATH[i])
            draw.draw_path(tesla, path)

    # draw shortest route
    tesla.pencolor(1, 0, 0)
    tesla.pensize(3)
    tesla.speed(10)
    draw.goto(tesla, PATH[0])
    path_length = 0
    for i in range(len(PATH) - 1):
        path = rs.get_optimal_path(PATH[i], PATH[i + 1])
        path_length += rs.path_length(path)
        draw.draw_path(tesla, path)

    print("Shortest path length: {} px.".format(int(draw.scale(path_length))))

    turtle.done()
Example No. 4
def meters_and_degrees_error(xy, theta, xy_predicted, theta_predicted):
    # Quaternion-based alternative, kept for reference:
    # q1 = q / np.linalg.norm(q)
    # q2 = q_predicted / np.linalg.norm(q_predicted)
    # d = np.abs(np.sum(np.multiply(q1, q2)))
    # orientation_error = 2 * np.arccos(d) * 180 / np.pi

    # Angular error in degrees, computed modulo pi (headings 180 degrees apart
    # count as equal), and positional error in meters.
    diff = np.abs(theta - theta_predicted)
    orientation_error = rad2deg(min(np.pi - diff, diff))
    position_error = np.linalg.norm(xy - xy_predicted)
    return position_error, orientation_error
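
A minimal usage sketch, assuming the positions are numpy arrays, the headings are in radians, and rad2deg stands in for numpy's rad2deg:

import numpy as np
from numpy import rad2deg

xy_true = np.array([2.0, 3.0])
xy_pred = np.array([2.5, 3.0])
pos_err, ori_err = meters_and_degrees_error(xy_true, 0.0, xy_pred, np.pi / 6)
print(pos_err, ori_err)  # approximately 0.5 m and 30 degrees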
Example No. 5
    def joint_command_cb(self, joint_desired):
        """Transform a subscribed joint command into Dynamixel byte information."""
        for i in range(4):
            self.q_desired[i] = joint_desired.data[i]
            # Convert the desired joint angle (rad) to a Dynamixel position command.
            dxl_command = int(
                rad2deg(self.q_desired[i]) / self.cfg["DXL_RESOLUTION"] +
                self.cfg["DXL_POS_OFFSET"])
            # Clamp the command to the configured rotation limits.
            if dxl_command > self.cfg["CW_LIMIT"]:
                dxl_command = self.cfg["CW_LIMIT"]
            elif dxl_command < self.cfg["CCW_LIMIT"]:
                dxl_command = self.cfg["CCW_LIMIT"]

            # Split the 32-bit goal position into the 4 bytes expected by the SDK.
            self.dxl_goal_position[i] = [
                DXL_LOBYTE(DXL_LOWORD(dxl_command)),
                DXL_HIBYTE(DXL_LOWORD(dxl_command)),
                DXL_LOBYTE(DXL_HIWORD(dxl_command)),
                DXL_HIBYTE(DXL_HIWORD(dxl_command)),
            ]
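
The DXL_LOBYTE/DXL_HIBYTE/DXL_LOWORD/DXL_HIWORD helpers are typically provided by the Dynamixel SDK; a rough sketch of the equivalent plain bit operations (an illustration, not the SDK's own code):

def lo_word(value):
    return value & 0xFFFF          # lower 16 bits of a 32-bit value

def hi_word(value):
    return (value >> 16) & 0xFFFF  # upper 16 bits of a 32-bit value

def lo_byte(word):
    return word & 0xFF             # lower 8 bits of a 16-bit word

def hi_byte(word):
    return (word >> 8) & 0xFF      # upper 8 bits of a 16-bit word

# A goal position of 2048 splits into the little-endian byte list [0, 8, 0, 0].
goal = 2048
print([lo_byte(lo_word(goal)), hi_byte(lo_word(goal)),
       lo_byte(hi_word(goal)), hi_byte(hi_word(goal))])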
Example No. 6
    def __str__(self, form: str = "r") -> str:
        # Magnitude of the imaginary part and its sign, kept separately.
        i = float(re.sub(r"^[+\-]", "", str(self.i)))
        s = "-" if self.i < 0 else "+"

        d = hideDecimal(round(self.distance, 2))
        a = hideDecimal(round(rad2deg(self.angle), 2))

        if form == "r":
            # Rectangular form: real part followed by the signed imaginary part.
            space = " " if self.r and i else ""
            r = (hideDecimal(self.r) if self.r else "") + space
            i = (s if (r or s == "-") and i else
                 "") + space + (hideDecimal(i) + self.sign if i else "")
            return r + i
        elif form == "a":
            # Angle (phasor) form.
            return f"{d} / {a}°"
        elif form == "p":
            # Polar (trigonometric) form.
            return f"{d} * (cos({a}°) + {self.sign} * sin({a}°))"
        elif form == "e":
            # Exponential form.
            return f"{d} * e ^ ({self.sign} * {a}°)"
        else:
            raise TypeError(
                "Unknown number form. Supported: r = rectangular, a = angle, p = polar, e = exponential"
            )
Example No. 7
        frame = np.int0(frame)

        # Get the 2 main angles of the frame
        angles = ut.get_angles(frame)

        rect2 = (rect[0], rect[1], angles[-1])
        img_cropped = ut.crop_min_area_rect(color, rect2)
        cv2.imshow("Cropped", img_cropped)
        cv2.waitKey(0)

        # As we scale the image, we need to scale back the contour to the original image
        frame_orig = list(
            map(lambda x: ut.rescale(x, size_ini, size_end), frame))

        # Save data to export
        frames.append([ut.rad2deg(angles[-1]), frame_orig])

    # Save the data in a pickle file
    ut.bbox_to_pkl(frames, fname='frames', folder='pkl')
    """
    ##################################  TASK 3: Filter keypoints  ####################################
    """
    # Check for training database file
    if not os.path.exists(PICKLE_MUSEUM_DATASET):
        logger.info("Creating pickle database for museum dataset...")
        db_museum = ut.create_db(TRAIN_MUSEUM_DIR, FEATURES, candidates)
        ut.save_db(db_museum, PICKLE_MUSEUM_DATASET)
    else:
        logger.info("Reading pickle database for museum dataset...")
        db_museum = ut.get_db(PICKLE_MUSEUM_DATASET)
Example No. 8
            
        self.shadow_ratio = self.eclipsed * 100 / self.measure_points
        
        total = criteria.Score()
        
        total.add(Pass.shadow_crit, self.shadow_ratio)
        total.add(Pass.max_alt_crit, rad2deg(self.max_alt))
        total.add(Pass.max_sun_alt_crit, rad2deg(self.max_sun_alt))

        self.score = total.score()
    
    def shadow_ratio(self):
        # Unused stub; evaluate() stores the ratio in an attribute of the same name.
        return

if __name__ == '__main__':
    from data import reto, iss
    sun = ephem.Sun()

    observer = reto
    sat = iss

    # Score every pass over the next 14 days and print the promising ones.
    for start, end in risings(observer, sat, ephem.now(), ephem.now() + 14):
        p = Pass(sat, observer, start, end)

        if p.score > 70:
            start = localtime(p.start)
            end = localtime(p.end)
            print("%s - %s: sh=%i alt=%i sun=%i score=%i%%" %
                  (start.strftime("%Y-%m-%d %H:%M:%S"), end.strftime("%H:%M:%S"),
                   p.shadow_ratio, rad2deg(p.max_alt), rad2deg(p.max_sun_alt), p.score))
Example No. 9
import ephem
import tracker
from utils import rad2deg

from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np

from data import reto, iss

lats = []
longs = []

# Sample the satellite's sub-point for the next hour (1/24 day).
start = ephem.now()
for date, o in tracker.sat_stepper(reto, iss, start, start + 1. / 24):
    lats.append(rad2deg(o.sublat))
    longs.append(rad2deg(o.sublong))



# set up a Robinson projection map centered on longitude 100W.
# use low resolution coastlines.
# don't plot features that are smaller than 1000 square km.
map = Basemap(projection='robin', lat_0=50, lon_0=-100,
              resolution='l', area_thresh=1000.)
# draw coastlines, country boundaries, fill continents.
map.drawcoastlines()
map.drawcountries()
map.fillcontinents(color='coral')
# draw the edge of the map projection region (the projection limb)
map.drawmapboundary()
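
The snippet stops before the track itself is drawn; a minimal continuation sketch, assuming the standard Basemap API (a Basemap instance is callable and projects longitude/latitude sequences into map coordinates):

# project the sampled sub-points onto the map and draw the ground track
x, y = map(longs, lats)
map.plot(x, y, linewidth=2)
plt.title("Satellite ground track over the next hour")
plt.show()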
Example No. 10
    def update(self, id, x, y, theta):
        lat, lng = PosePlotter.utm2latlng(x, y)
        
        pose = self.poses[id]
        lats, lngs = pose["lats"], pose["lngs"]
        data = pose["data"]
        # label = pose["label"]
        data.set_visible(True)

        if self.trajectory:
            # Append the new fix and redraw the trajectory accumulated so far.
            lats.append(lat)
            lngs.append(lng)
            i = len(lats)
            data.set_xdata([lngs[: (i + 1)]])
            data.set_ydata([lats[: (i + 1)]])
        else:
            # Rotate the marker to the current heading before moving it to the new fix.
            marker_reference = pose["marker"]
            new_marker = marker_reference.transformed(
                matplotlib.transforms.Affine2D().rotate_deg(rad2deg(theta)))
            data.set_marker(new_marker)
            data.set_xdata([lng])
            data.set_ydata([lat])
Example No. 11
        default = 1
    )

    args = args_parser.parse_args()
    UPDATE_INTERVAL = args.update_interval


    plotter = PosePlotter(update_interval = UPDATE_INTERVAL, trajectory = False)
    plotter.register("GT", "red")
    # plotter.register("x", "blue") # TEST

    data = PerceptionCarDatasetMerged(
        "PerceptionCarDataset",
        "PerceptionCarDataset2",
        mode = "visualize",
        preprocess = None,
        augment = False
    )

    # Replay the dataset: print each ground-truth pose and show the camera frame.
    for image, pose in data:
        x, y, theta = PerceptionCarDataset.unnormalize(*pose)
        lat, lng = PosePlotter.utm2latlng(x, y)
        azimuth = rad2deg(-theta.item())
        print(lat, lng, azimuth)
        plotter.update("GT", x, y, theta)
        plotter.draw()
        # plotter.update("x", x + 10, y + 10, theta + 3.14) # TEST
        image_cv = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR)
        cv2.imshow("front/center", image_cv)
        cv2.waitKey(1)
Example No. 12
plotter = PosePlotter(update_interval=0.05, trajectory=True)

dataset_path = "PerceptionCarDataset"
os.makedirs(dataset_path, exist_ok=True)

# Write origin to a file
origin_path = os.path.join(dataset_path, "origin.txt")
with open(origin_path, "wt", encoding="utf-8") as origin_file:
    x, y, z = images_db.origin.numpy()
    print(x, y, z, file=origin_file)

# Either visualize each pose on the map or export the images as JPEG files.
for i, (image, R_camera, T_camera, R_final_inv, T_final_inv, origin,
        image_path) in enumerate(images_loader):
    # The third Euler angle of the camera rotation is used as the heading.
    a1, a2, theta = euler_from_matrix(R_camera[0][0])
    if visualize:
        azimuth = rad2deg(-theta)
        X = origin + T_camera
        x, y, z = X[0][0][0].numpy()
        lat, lng = PosePlotter.utm2latlng(x, y)
        print(lat, lng, azimuth)
        plotter.update(x, y)
        show_image(image, "Image")
        cv2.waitKey(1)
    else:
        # Save the image as JPEG
        directory, image_name = os.path.split(image_path[0])
        images_directory, _ = os.path.split(directory)
        final_directory, _ = os.path.split(images_directory)
        jpeg_image_name = image_name.replace(".png", ".jpeg")
        dest_path = os.path.join(dataset_path, final_directory)
        os.makedirs(dest_path, exist_ok=True)