Example #1
def test_ecef_lla_consistency():
    lla_before = [46.5274109, 6.5722075, 402.16]
    ecef = geo.ecef_from_lla(lla_before[0],
                             lla_before[1],
                             lla_before[2])
    lla_after = geo.lla_from_ecef(ecef[0],
                                  ecef[1],
                                  ecef[2])
    assert np.allclose(lla_after, lla_before)
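
For reference, the round trip above exercises the standard WGS84 geodetic-to-ECEF conversion. Below is a minimal, illustrative sketch of the forward direction using the textbook formula and WGS84 constants; it is not opensfm's implementation, but for the same inputs it should agree with geo.ecef_from_lla up to floating-point error, assuming opensfm also uses the WGS84 ellipsoid.

import numpy as np

WGS84_A = 6378137.0          # semi-major axis [m]
WGS84_E2 = 6.69437999014e-3  # first eccentricity squared

def ecef_from_lla_sketch(lat_deg, lon_deg, alt):
    # Textbook geodetic (lat, lon, alt) -> ECEF (x, y, z) conversion.
    lat, lon = np.radians(lat_deg), np.radians(lon_deg)
    n = WGS84_A / np.sqrt(1.0 - WGS84_E2 * np.sin(lat) ** 2)  # prime vertical radius
    x = (n + alt) * np.cos(lat) * np.cos(lon)
    y = (n + alt) * np.cos(lat) * np.sin(lon)
    z = (n * (1.0 - WGS84_E2) + alt) * np.sin(lat)
    return x, y, z

# e.g. ecef_from_lla_sketch(46.5274109, 6.5722075, 402.16)
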
Example #2
File: odm_sfm.py  Project: CosmosHua/GLD
def dedup_gcp(GPS, EPS=0.01):
    from opensfm.geo import ecef_from_lla
    res = {}
    with open(GPS + '/gcp_list.txt') as f:
        gcp = f.readlines()
    for i in gcp[1:]:  # skip 1st-row
        *lla, u, v, im = i.split()
        k = (u, v, im)
        lla = np.float64(lla)[[1, 0, 2]]  # lat,lon,alt
        res.setdefault(k, [])
        res[k].append(lla)
    new = [gcp[0]]  # retain 1st-row
    for k in sorted(res, key=lambda x: x[-1]):  # sort keys by image name
        m = np.mean(res[k], axis=0)  # mean lat,lon,alt of the duplicate observations
        d = np.float64([ecef_from_lla(*i) for i in res[k]])
        d = np.linalg.norm(d - ecef_from_lla(*m), axis=1)  # spread around the mean, in metres
        #d = res[k]-m; d[:,:2] *= 1E5 # degree->meter
        if np.all(d < EPS):  # keep only GCPs whose observations agree within EPS metres
            new += [GCF([*m[[1, 0, 2]], *k])]  # GCF() formats a gcp_list line (defined elsewhere in odm_sfm.py)
    with open(GPS + '/gcp_list.txt', 'w') as f:
        f.writelines(new)
    INFO(f'Dedup_GCPs: {len(gcp)} -> {len(new)}')
    return new
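
To make the EPS threshold above concrete: duplicate observations of a GCP are kept only when their ECEF positions all lie within EPS metres (1 cm by default) of the mean position. The standalone sketch below isolates that criterion; the function name is illustrative, and only opensfm.geo.ecef_from_lla is assumed.

import numpy as np
from opensfm.geo import ecef_from_lla

def observations_agree(lla_list, eps=0.01):
    # True if all (lat, lon, alt) observations of one GCP lie within
    # eps metres of their mean position, measured in ECEF space.
    lla = np.float64(lla_list)
    ecef = np.float64([ecef_from_lla(*p) for p in lla])
    dist = np.linalg.norm(ecef - ecef_from_lla(*lla.mean(axis=0)), axis=1)
    return bool(np.all(dist < eps))

# Two observations roughly 1 m apart in latitude fail the default 1 cm threshold:
# observations_agree([[46.5274109, 6.5722075, 402.16],
#                     [46.5274199, 6.5722075, 402.16]])  # -> False
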
Example #3
File: photo.py  Project: originlake/ODM
    def compute_opk(self):
        if self.has_ypr() and self.has_geo():
            y, p, r = math.radians(self.yaw), math.radians(
                self.pitch), math.radians(self.roll)

            # Ref: New Calibration and Computing Method for Direct
            # Georeferencing of Image and Scanner Data Using the
            # Position and Angular Data of an Hybrid Inertial Navigation System
            # by Manfred Bäumker

            # YPR rotation matrix
            cnb = np.array([
                [
                    math.cos(y) * math.cos(p),
                    math.cos(y) * math.sin(p) * math.sin(r) -
                    math.sin(y) * math.cos(r),
                    math.cos(y) * math.sin(p) * math.cos(r) +
                    math.sin(y) * math.sin(r)
                ],
                [
                    math.sin(y) * math.cos(p),
                    math.sin(y) * math.sin(p) * math.sin(r) +
                    math.cos(y) * math.cos(r),
                    math.sin(y) * math.sin(p) * math.cos(r) -
                    math.cos(y) * math.sin(r)
                ],
                [
                    -math.sin(p),
                    math.cos(p) * math.sin(r),
                    math.cos(p) * math.cos(r)
                ],
            ])

            # Convert between image and body coordinates
            # Top of image pixels point to flying direction
            # and camera is looking down.
            # We might need to change this if we want different
            # camera mount orientations (e.g. backward or sideways)

            # (Swap X/Y, flip Z)
            cbb = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]])

            delta = 1e-7

            alt = self.altitude if self.altitude is not None else 0.0
            p1 = np.array(
                ecef_from_lla(self.latitude + delta, self.longitude, alt))
            p2 = np.array(
                ecef_from_lla(self.latitude - delta, self.longitude, alt))
            xnp = p1 - p2
            m = np.linalg.norm(xnp)

            if m == 0:
                log.ODM_WARNING("Cannot compute OPK angles, divider = 0")
                return

            # Unit vector pointing north
            xnp /= m

            znp = np.array([0, 0, -1]).T
            ynp = np.cross(znp, xnp)

            cen = np.array([xnp, ynp, znp]).T

            # OPK rotation matrix
            ceb = cen.dot(cnb).dot(cbb)

            self.omega = math.degrees(math.atan2(-ceb[1][2], ceb[2][2]))
            self.phi = math.degrees(math.asin(ceb[0][2]))
            self.kappa = math.degrees(math.atan2(-ceb[0][1], ceb[0][0]))
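
The last three lines recover omega, phi and kappa from a rotation matrix factored as Rx(omega) @ Ry(phi) @ Rz(kappa). The short round-trip check below (plain NumPy, illustrative only) makes that convention explicit by rebuilding the matrix from known angles and extracting them back with the same formulas used in compute_opk().

import numpy as np

def opk_matrix(omega, phi, kappa):
    # Rx(omega) @ Ry(phi) @ Rz(kappa), the factorization implied by the
    # asin/atan2 extraction above (angles in radians).
    rx = np.array([[1, 0, 0],
                   [0, np.cos(omega), -np.sin(omega)],
                   [0, np.sin(omega), np.cos(omega)]])
    ry = np.array([[np.cos(phi), 0, np.sin(phi)],
                   [0, 1, 0],
                   [-np.sin(phi), 0, np.cos(phi)]])
    rz = np.array([[np.cos(kappa), -np.sin(kappa), 0],
                   [np.sin(kappa), np.cos(kappa), 0],
                   [0, 0, 1]])
    return rx @ ry @ rz

omega, phi, kappa = np.radians([10.0, -5.0, 30.0])
r = opk_matrix(omega, phi, kappa)
assert np.allclose([np.arctan2(-r[1][2], r[2][2]),   # omega
                    np.arcsin(r[0][2]),              # phi
                    np.arctan2(-r[0][1], r[0][0])],  # kappa
                   [omega, phi, kappa])
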
Example #4
def space_next_point(a, b, last, dx):
    A = geo.ecef_from_lla(a[1], a[2], 0.0)
    B = geo.ecef_from_lla(b[1], b[2], 0.0)
    C = geo.ecef_from_lla(last[1], last[2], 0.0)
    alpha = segment_sphere_intersection(A, B, C, dx)
    return gpx_lerp(alpha, a, b)
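
segment_sphere_intersection is not shown on this page. Geometrically it needs to return the fraction alpha along the ECEF segment A -> B at which the interpolated point sits dx metres from C, which reduces to a quadratic in alpha. The sketch below is purely illustrative of that geometry and is not the actual opensfm helper, whose edge-case handling may differ.

import numpy as np

def segment_sphere_intersection_sketch(A, B, C, dx):
    # Solve |A + alpha * (B - A) - C|^2 = dx^2 for alpha and return the
    # largest real root clipped to [0, 1] (illustrative behaviour only).
    A, B, C = (np.float64(p) for p in (A, B, C))
    d = B - A
    f = A - C
    a = d @ d
    b = 2.0 * (f @ d)
    c = f @ f - dx * dx
    disc = b * b - 4.0 * a * c
    if a == 0.0 or disc < 0.0:
        return 1.0  # degenerate segment or no intersection: jump to the end point
    alpha = (-b + np.sqrt(disc)) / (2.0 * a)
    return float(np.clip(alpha, 0.0, 1.0))
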
Example #5
    def extract_opk(self, geo):
        opk = None

        if self.has_xmp() and geo and "latitude" in geo and "longitude" in geo:
            ypr = np.array([None, None, None])

            try:
                # YPR conventions (assuming nadir camera)
                # Yaw: 0 --> top of image points north
                # Yaw: 90 --> top of image points east
                # Yaw: 270 --> top of image points west
                # Pitch: 0 --> nadir camera
                # Pitch: 90 --> camera is looking forward
                # Roll: 0 (assuming gimbal)

                if ("@Camera:Yaw" in self.xmp[0]
                        and "@Camera:Pitch" in self.xmp[0]
                        and "@Camera:Roll" in self.xmp[0]):
                    ypr = np.array([
                        float(self.xmp[0]["@Camera:Yaw"]),
                        float(self.xmp[0]["@Camera:Pitch"]),
                        float(self.xmp[0]["@Camera:Roll"]),
                    ])
                elif ("@drone-dji:GimbalYawDegree" in self.xmp[0]
                      and "@drone-dji:GimbalPitchDegree" in self.xmp[0]
                      and "@drone-dji:GimbalRollDegree" in self.xmp[0]):
                    ypr = np.array([
                        float(self.xmp[0]["@drone-dji:GimbalYawDegree"]),
                        float(self.xmp[0]["@drone-dji:GimbalPitchDegree"]),
                        float(self.xmp[0]["@drone-dji:GimbalRollDegree"]),
                    ])
                    ypr[1] += 90  # DJI reports gimbal pitch -90 at nadir; offset to match the convention above
            except ValueError:
                logger.debug(
                    'Invalid yaw/pitch/roll tag in image file "{0:s}"'.format(
                        self.fileobj_name))

            if all(v is not None for v in ypr):  # proceed only if all three angles were found
                ypr = np.radians(ypr)

                # Convert YPR --> OPK
                # Ref: New Calibration and Computing Method for Direct
                # Georeferencing of Image and Scanner Data Using the
                # Position and Angular Data of an Hybrid Inertial Navigation System
                # by Manfred Bäumker
                y, p, r = ypr

                # YPR rotation matrix
                cnb = np.array([
                    [
                        np.cos(y) * np.cos(p),
                        np.cos(y) * np.sin(p) * np.sin(r) -
                        np.sin(y) * np.cos(r),
                        np.cos(y) * np.sin(p) * np.cos(r) +
                        np.sin(y) * np.sin(r),
                    ],
                    [
                        np.sin(y) * np.cos(p),
                        np.sin(y) * np.sin(p) * np.sin(r) +
                        np.cos(y) * np.cos(r),
                        np.sin(y) * np.sin(p) * np.cos(r) -
                        np.cos(y) * np.sin(r),
                    ],
                    [-np.sin(p),
                     np.cos(p) * np.sin(r),
                     np.cos(p) * np.cos(r)],
                ])

                # Convert between image and body coordinates
                # Top of image pixels point to flying direction
                # and camera is looking down.
                # We might need to change this if we want different
                # camera mount orientations (e.g. backward or sideways)

                # (Swap X/Y, flip Z)
                cbb = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]])

                delta = 1e-7

                p1 = np.array(
                    ecef_from_lla(
                        geo["latitude"] + delta,
                        geo["longitude"],
                        geo.get("altitude", 0),
                    ))
                p2 = np.array(
                    ecef_from_lla(
                        geo["latitude"] - delta,
                        geo["longitude"],
                        geo.get("altitude", 0),
                    ))
                xnp = p1 - p2
                m = np.linalg.norm(xnp)

                if m == 0:
                    logger.debug("Cannot compute OPK angles, divider = 0")
                    return opk

                # Unit vector pointing north
                xnp /= m

                znp = np.array([0, 0, -1]).T
                ynp = np.cross(znp, xnp)

                cen = np.array([xnp, ynp, znp]).T

                # OPK rotation matrix
                ceb = cen.dot(cnb).dot(cbb)

                opk = {}
                opk["omega"] = np.degrees(np.arctan2(-ceb[1][2], ceb[2][2]))
                opk["phi"] = np.degrees(np.arcsin(ceb[0][2]))
                opk["kappa"] = np.degrees(np.arctan2(-ceb[0][1], ceb[0][0]))

        return opk
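
As a quick sanity check on the cnb matrix used above (and in Example #3): its entries are exactly the product Rz(yaw) @ Ry(pitch) @ Rx(roll), so it should always be a proper rotation, i.e. orthonormal with determinant +1. A short, illustrative NumPy check:

import numpy as np

def ypr_matrix(y, p, r):
    # Rz(yaw) @ Ry(pitch) @ Rx(roll); multiplying these out reproduces the
    # entries of cnb written above (angles in radians).
    rz = np.array([[np.cos(y), -np.sin(y), 0],
                   [np.sin(y), np.cos(y), 0],
                   [0, 0, 1]])
    ry = np.array([[np.cos(p), 0, np.sin(p)],
                   [0, 1, 0],
                   [-np.sin(p), 0, np.cos(p)]])
    rx = np.array([[1, 0, 0],
                   [0, np.cos(r), -np.sin(r)],
                   [0, np.sin(r), np.cos(r)]])
    return rz @ ry @ rx

cnb = ypr_matrix(*np.radians([35.0, -2.0, 1.5]))
assert np.allclose(cnb.T @ cnb, np.eye(3))   # orthonormal
assert np.isclose(np.linalg.det(cnb), 1.0)   # proper rotation (det = +1)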