def getValues(self):
    """
    Geometry representing the values taken by the temporal value.
    """
    values = [inst._value for inst in self._instantList]
    result = values[0] if len(values) == 1 else LineString(values)
    return result
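# Hedged usage sketch (relies on the same TGeomPointSeq/TGeomPointInst imports as
# the tests below; not part of the library itself): a sequence with a single
# instant collapses to that instant's Point, otherwise getValues yields a
# LineString over all instant values.
#
#   TGeomPointSeq('[Point(10.0 10.0)@2019-09-01 00:00:00+01]').getValues
#   # -> Point(10.0, 10.0)
#   TGeomPointSeq('[Point(10.0 10.0)@2019-09-01 00:00:00+01, '
#                 'Point(20.0 20.0)@2019-09-02 00:00:00+01]').getValues
#   # -> LineString([Point(10.0, 10.0), Point(20.0, 20.0)])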
def test_tgeompointseq_accessors(cursor, expected_tgeompointseq):
    assert TGeomPointSeq(expected_tgeompointseq).srid == 4326
    assert TGeomPointSeq(expected_tgeompointseq).tempSubtype() == 'Sequence'
    assert TGeomPointSeq(expected_tgeompointseq).getValues == LineString(
        [Point(10.0, 10.0), Point(20.0, 20.0), Point(10.0, 10.0)])
    assert TGeomPointSeq(expected_tgeompointseq).startValue == Point(10.0, 10.0)
    assert TGeomPointSeq(expected_tgeompointseq).endValue == Point(10.0, 10.0)
    assert TGeomPointSeq(expected_tgeompointseq).getTime == PeriodSet(
        '{[2019-09-01 00:00:00+01, 2019-09-03 00:00:00+01]}')
    assert TGeomPointSeq(expected_tgeompointseq).duration == timedelta(2)
    assert TGeomPointSeq(expected_tgeompointseq).timespan == timedelta(2)
    assert TGeomPointSeq(expected_tgeompointseq).period == Period(
        '[2019-09-01 00:00:00+01, 2019-09-03 00:00:00+01]')
    assert TGeomPointSeq(expected_tgeompointseq).numInstants == 3
    assert TGeomPointSeq(expected_tgeompointseq).startInstant == TGeomPointInst(
        'Point(10.0 10.0)@2019-09-01 00:00:00+01')
    assert TGeomPointSeq(expected_tgeompointseq).endInstant == TGeomPointInst(
        'Point(10.0 10.0)@2019-09-03 00:00:00+01')
    assert TGeomPointSeq(expected_tgeompointseq).instantN(2) == TGeomPointInst(
        'Point(20.0 20.0)@2019-09-02 00:00:00+01')
    assert TGeomPointSeq(expected_tgeompointseq).instants == [
        TGeomPointInst('Point(10.0 10.0)@2019-09-01 00:00:00+01'),
        TGeomPointInst('Point(20.0 20.0)@2019-09-02 00:00:00+01'),
        TGeomPointInst('Point(10.0 10.0)@2019-09-03 00:00:00+01')]
    assert TGeomPointSeq(expected_tgeompointseq).numTimestamps == 3
    assert TGeomPointSeq(expected_tgeompointseq).startTimestamp == parse(
        '2019-09-01 00:00:00+01')
    assert TGeomPointSeq(expected_tgeompointseq).endTimestamp == parse(
        '2019-09-03 00:00:00+01')
    assert TGeomPointSeq(expected_tgeompointseq).timestampN(2) == parse(
        '2019-09-02 00:00:00+01')
    assert TGeomPointSeq(expected_tgeompointseq).timestamps == [
        parse('2019-09-01 00:00:00+01'),
        parse('2019-09-02 00:00:00+01'),
        parse('2019-09-03 00:00:00+01')
    ]
    assert TGeomPointSeq(expected_tgeompointseq).intersectsTimestamp(
        parse('2019-09-01 00:00:00+01')) == True
    assert TGeomPointSeq(expected_tgeompointseq).intersectsTimestamp(
        parse('2019-09-04 00:00:00+01')) == False
    assert TGeomPointSeq(expected_tgeompointseq).intersectsTimestampSet(
        TimestampSet('{2019-09-01 00:00:00+01, 2019-09-02 00:00:00+01}')) == True
    assert TGeomPointSeq(expected_tgeompointseq).intersectsTimestampSet(
        TimestampSet('{2019-09-04 00:00:00+01, 2019-09-05 00:00:00+01}')) == False
    assert TGeomPointSeq(expected_tgeompointseq).intersectsPeriod(
        Period('[2019-09-01 00:00:00+01, 2019-09-02 00:00:00+01]')) == True
    assert TGeomPointSeq(expected_tgeompointseq).intersectsPeriod(
        Period('[2019-09-04 00:00:00+01, 2019-09-05 00:00:00+01]')) == False
    assert TGeomPointSeq(expected_tgeompointseq).intersectsPeriodSet(
        PeriodSet('{[2019-09-01 00:00:00+01, 2019-09-02 00:00:00+01]}')) == True
    assert TGeomPointSeq(expected_tgeompointseq).intersectsPeriodSet(
        PeriodSet('{[2019-09-04 00:00:00+01, 2019-09-05 00:00:00+01]}')) == False
async def test_linestring_should_round(connection, expected):
    geom = LineString(expected, srid=4326)
    await connection.execute(
        'INSERT INTO linestring_async (geom) VALUES ($1)', geom)
    geom = await connection.fetchval(
        'SELECT geom FROM linestring_async WHERE geom=$1', geom, column=0)
    assert geom.coords == expected
async def get_relation(**tags):
    if "iso" in tags:
        tags["ISO3166-1:alpha2"] = tags.pop("iso")
    tags = "".join(f'["{k}"="{v}"]' for k, v in tags.items())
    path = Path("tmp/boundary")
    path.mkdir(parents=True, exist_ok=True)
    file_ = (
        tags.replace("/", "_")
        .replace("][", "_")
        .replace('"', "")
        .replace(":", "_")
        .replace("[", "")
        .replace("]", "")
        + ".json"
    )
    path = path / file_
    if not path.exists():
        print(f"Downloading {path}")
        params = {"data": f"[out:json];relation{tags};(._;>;);out body;"}
        try:
            resp = requests.get(OVERPASS, params=params)
            resp.raise_for_status()
        except requests.exceptions.ConnectionError:
            print("\nError: Network problem retrieving data")
            sys.exit(1)
        except requests.exceptions.HTTPError as err:
            print(f"\nHTTPError: {err}")
            sys.exit(1)
        data = resp.content
        with path.open("wb") as f:
            f.write(data)
        data = data.decode()
    else:
        with path.open() as f:
            data = f.read()
    try:
        relation = overpy.Result.from_json(json.loads(data)).relations[0]
    except IndexError:
        raise ValueError(f"Cannot find relation for {tags}")
    collection = []
    for member in relation.members:
        coords = []
        # Nepal disputed way without outer role:
        # http://www.openstreetmap.org/way/202061325
        if member.role != "outer" and member.ref != 202_061_325:
            continue
        way = member.resolve()
        for node in way.nodes:
            coords.append((float(node.lon), float(node.lat)))
        collection.append(LineString(coords))
    shape = await make_polygon(MultiLineString(collection))
    return shape
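# Hedged usage sketch: with iso="NP" the function above builds the Overpass filter
# '["ISO3166-1:alpha2"="NP"]', caches the response as
# tmp/boundary/ISO3166-1_alpha2=NP.json (quotes and brackets stripped, ':' and '/'
# replaced by '_'), and returns the polygon stitched together by make_polygon.
# OVERPASS and make_polygon are assumed to be defined elsewhere in this module.
#
#   shape = await get_relation(iso="NP")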
def process_surface(ride, accelerations):
    raw_coords = ride.raw_coords
    timestamps = ride.timestamps
    first_five_seconds = []
    for X, Y, Z, ts, coords in accelerations:
        if ts - accelerations[0][3] > timedelta(seconds=5):
            break
        first_five_seconds.append((X, Y, Z))
    # Average of accelerations in the first 5 seconds of the ride, used as the
    # reference direction for the projection below.
    A5 = [sum(y) / len(y) for y in zip(*first_five_seconds)]

    sliding_window_size = 5  # seconds per direction
    every_x_seconds = 2.5
    current_max_idx = 0
    IRI = []
    ride_sections = []
    in_window = []
    for i in range(len(raw_coords)):
        current = raw_coords[i]
        ls = raw_coords[i:i + 2]
        ts = timestamps[i]
        # Drop acceleration samples that have fallen out of the sliding window.
        while in_window and ts - in_window[0][3] > timedelta(
                seconds=sliding_window_size):
            in_window.remove(in_window[0])
        # Pull in acceleration samples within the window ahead of the current point.
        while current_max_idx < len(accelerations) and accelerations[
                current_max_idx][3] - ts < timedelta(seconds=sliding_window_size):
            A = accelerations[current_max_idx][:-2]
            d = math.sqrt(A5[0] ** 2 + A5[1] ** 2 + A5[2] ** 2)
            if d == 0:
                d = 1
            # Project the sample onto the reference direction A5.
            avs = (A[0] * A5[0] + A[1] * A5[1] + A[2] * A5[2]) / d
            if len(in_window) > 1:
                dt = (in_window[-1][3] - in_window[-2][3]).total_seconds()
            else:
                dt = 0.1
            in_window.append(accelerations[current_max_idx] + (abs(avs * dt ** 2 / 2),))
            current_max_idx += 1
        if len(in_window) < 2:
            continue
        Sh = great_circle(in_window[0][4], in_window[-1][4]).meters
        if Sh > 0:
            iri = sum(list(zip(*in_window))[5]) / Sh
            if not (IRI and (ts - IRI[-1][1]).total_seconds() < every_x_seconds):
                IRI.append((iri, ts, current, Sh))
            if len(ls) == 2:
                ride_sections.append((LineString(ls, srid=4326), iri))
    return IRI, ride_sections
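# Hedged illustration of the projection step above: every acceleration sample is
# projected onto the reference vector A5 (the mean acceleration of the first five
# seconds), i.e. dot(A, A5) / |A5|.  Plain-tuple example, no SimRa-specific types
# assumed; the helper name is illustrative only.
import math


def project_onto_reference(sample, reference):
    # Length of the reference vector; fall back to 1 to avoid division by zero,
    # mirroring the d == 0 guard above.
    norm = math.sqrt(sum(c ** 2 for c in reference)) or 1.0
    return sum(a * r for a, r in zip(sample, reference)) / norm


# e.g. project_onto_reference((0.1, 9.8, 0.2), (0.0, 9.81, 0.0)) is roughly 9.8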
def test_tgeompointseqset_accessors(cursor, expected_tgeompointseqset):
    assert TGeomPointSeqSet(expected_tgeompointseqset).srid == 4326
    assert TGeomPointSeqSet(expected_tgeompointseqset).tempSubtype() == 'SequenceSet'
    assert TGeomPointSeqSet(expected_tgeompointseqset).getValues == \
        GeometryCollection([Point(10.0, 10.0),
                            LineString([Point(20.0, 20.0), Point(30.0, 30.0)])])
    assert TGeomPointSeqSet(expected_tgeompointseqset).startValue == Point(10.0, 10.0)
    assert TGeomPointSeqSet(expected_tgeompointseqset).endValue == Point(30.0, 30.0)
    # assert TGeomPointSeqSet(expected_tgeompointseqset).valueRange == \
    #     geompointrange(Point(10.0, 10.0), Point(30.0, 30.0), upper_inc=True)
    assert TGeomPointSeqSet(expected_tgeompointseqset).getTime == PeriodSet(
        '{[2019-09-01 00:00:00+01, 2019-09-01 00:00:00+01],'
        '[2019-09-02 00:00:00+01, 2019-09-03 00:00:00+01]}')
    assert TGeomPointSeqSet(expected_tgeompointseqset).duration == timedelta(1)
    assert TGeomPointSeqSet(expected_tgeompointseqset).timespan == timedelta(2)
    assert TGeomPointSeqSet(expected_tgeompointseqset).period == Period(
        '[2019-09-01 00:00:00+01, 2019-09-03 00:00:00+01]')
    assert TGeomPointSeqSet(expected_tgeompointseqset).numInstants == 3
    assert TGeomPointSeqSet(expected_tgeompointseqset).startInstant == TGeomPointInst(
        'Point(10.0 10.0)@2019-09-01 00:00:00+01')
    assert TGeomPointSeqSet(expected_tgeompointseqset).endInstant == TGeomPointInst(
        'Point(30.0 30.0)@2019-09-03 00:00:00+01')
    assert TGeomPointSeqSet(expected_tgeompointseqset).instantN(2) == TGeomPointInst(
        'Point(20.0 20.0)@2019-09-02 00:00:00+01')
    assert TGeomPointSeqSet(expected_tgeompointseqset).instants == [
        TGeomPointInst('Point(10.0 10.0)@2019-09-01 00:00:00+01'),
        TGeomPointInst('Point(20.0 20.0)@2019-09-02 00:00:00+01'),
        TGeomPointInst('Point(30.0 30.0)@2019-09-03 00:00:00+01')]
    assert TGeomPointSeqSet(expected_tgeompointseqset).numTimestamps == 3
    assert TGeomPointSeqSet(expected_tgeompointseqset).startTimestamp == parse(
        '2019-09-01 00:00:00+01')
    assert TGeomPointSeqSet(expected_tgeompointseqset).endTimestamp == parse(
        '2019-09-03 00:00:00+01')
    assert TGeomPointSeqSet(expected_tgeompointseqset).timestampN(2) == parse(
        '2019-09-02 00:00:00+01')
    assert TGeomPointSeqSet(expected_tgeompointseqset).timestamps == [
        parse('2019-09-01 00:00:00+01'),
        parse('2019-09-02 00:00:00+01'),
        parse('2019-09-03 00:00:00+01')]
    assert TGeomPointSeqSet(expected_tgeompointseqset).numSequences == 2
    assert TGeomPointSeqSet(expected_tgeompointseqset).startSequence == TGeomPointSeq(
        '[Point(10.0 10.0)@2019-09-01 00:00:00+01]')
    assert TGeomPointSeqSet(expected_tgeompointseqset).endSequence == TGeomPointSeq(
        '[Point(20.0 20.0)@2019-09-02 00:00:00+01, Point(30.0 30.0)@2019-09-03 00:00:00+01]')
    assert TGeomPointSeqSet(expected_tgeompointseqset).sequenceN(2) == TGeomPointSeq(
        '[Point(20.0 20.0)@2019-09-02 00:00:00+01, Point(30.0 30.0)@2019-09-03 00:00:00+01]')
    assert TGeomPointSeqSet(expected_tgeompointseqset).sequences == [
        TGeomPointSeq('[Point(10.0 10.0)@2019-09-01 00:00:00+01]'),
        TGeomPointSeq('[Point(20.0 20.0)@2019-09-02 00:00:00+01, '
                      'Point(30.0 30.0)@2019-09-03 00:00:00+01]')]
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsTimestamp(
        parse('2019-09-01 00:00:00+01')) == True
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsTimestamp(
        parse('2019-09-04 00:00:00+01')) == False
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsTimestampSet(
        TimestampSet('{2019-09-01 00:00:00+01, 2019-09-02 00:00:00+01}')) == True
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsTimestampSet(
        TimestampSet('{2019-09-04 00:00:00+01, 2019-09-05 00:00:00+01}')) == False
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsPeriod(
        Period('[2019-09-01 00:00:00+01, 2019-09-02 00:00:00+01]')) == True
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsPeriod(
        Period('[2019-09-04 00:00:00+01, 2019-09-05 00:00:00+01]')) == False
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsPeriodSet(
        PeriodSet('{[2019-09-01 00:00:00+01, 2019-09-02 00:00:00+01]}')) == True
    assert TGeomPointSeqSet(expected_tgeompointseqset).intersectsPeriodSet(
        PeriodSet('{[2019-09-04 00:00:00+01, 2019-09-05 00:00:00+01]}')) == False
async def get_relation(conn, **tags):
    if 'iso' in tags:
        tags['ISO3166-1:alpha2'] = tags.pop('iso')
    tags = "".join(f'["{k}"="{v}"]' for k, v in tags.items())
    path = Path('tmp/boundary')
    path.mkdir(parents=True, exist_ok=True)
    file_ = tags.replace('/', '_').replace('][', '_').replace('"', '').replace(
        ':', '_').replace('[', '').replace(']', '') + '.json'
    path = path / file_
    if not path.exists():
        params = {'data': f'[out:json];relation{tags};(._;>;);out body;'}
        try:
            resp = requests.get(OVERPASS, params=params)
            resp.raise_for_status()
        except requests.exceptions.ConnectionError:
            print('\nError: Network problem retrieving data')
            sys.exit(1)
        except requests.exceptions.HTTPError as err:
            print(f'\nHTTPError: {err}')
            sys.exit(1)
        data = resp.content
        with path.open('wb') as f:
            f.write(data)
        data = data.decode()
    else:
        with path.open() as f:
            data = f.read()
    try:
        relation = overpy.Result.from_json(json.loads(data)).relations[0]
    except IndexError:
        raise ValueError(f'Cannot find relation for {tags}')
    collection = []
    for member in relation.members:
        coords = []
        # Nepal disputed way without outer role:
        # http://www.openstreetmap.org/way/202061325
        if member.role != 'outer' and member.ref != 202061325:
            continue
        way = member.resolve()
        for node in way.nodes:
            coords.append((float(node.lon), float(node.lat)))
        collection.append(LineString(coords))
    shape = await make_polygon(conn, MultiLineString(collection))
    return shape
from postgis import Point, LineString, Polygon, GeometryCollection

POLYGON = Polygon((
    ((1, 2), (3, 4), (5, 6), (1, 2)),
    ((2, 3), (4, 5), (6, 7), (2, 3))
))
COLLECTION = [
    Point(1, 2),
    LineString(((1, 2), (3, 4))),
    POLYGON
]


def test_geometrycollection_geojson():
    collection = GeometryCollection(COLLECTION)
    assert collection.geojson == {
        "type": "GeometryCollection",
        "geometries": [
            {'type': 'Point', 'coordinates': (1, 2)},
            {'type': 'LineString', 'coordinates': ((1, 2), (3, 4))},
            {'type': 'Polygon', 'coordinates': (
                ((1, 2), (3, 4), (5, 6), (1, 2)),
                ((2, 3), (4, 5), (6, 7), (2, 3))
            )},
        ]
    }


def test_geometrycollection_get_item():
    collection = GeometryCollection(COLLECTION)
    # Presumed assertion, mirroring the other __getitem__ tests in this suite.
    assert collection[0] == Point(1, 2)
import pytest
from postgis import Point, LineString, Polygon, GeometryCollection

pytestmark = pytest.mark.asyncio

POLYGON = Polygon(
    (((1, 2), (3, 4), (5, 6), (1, 2)), ((2, 3), (4, 5), (6, 7), (2, 3))))
COLLECTION = [Point(1, 2), LineString(((1, 2), (3, 4))), POLYGON]


async def test_geometrycollection_should_round(connection):
    geom = GeometryCollection(COLLECTION, srid=4326)
    await connection.execute(
        'INSERT INTO geometrycollection_async (geom) VALUES ($1)', geom)
    geom = await connection.fetchval(
        'SELECT geom FROM geometrycollection_async WHERE geom=$1',
        geom, column=0)
    assert geom == COLLECTION
def test_linestring_geojson():
    line = LineString(((1, 2), (3, 4)))
    assert line.geojson == {
        "type": "LineString",
        "coordinates": ((1, 2), (3, 4))
    }
def test_linestring_get_item():
    line = LineString(((30, 10), (10, 30), (40, 40)))
    assert line[0] == (30, 10)
def test_linestring_geojson_as_string():
    line = LineString(((1, 2), (3, 4)))
    geojson = str(line.geojson)
    assert '"type": "LineString"' in geojson
    assert '"coordinates": [[1.0, 2.0], [3.0, 4.0]]' in geojson
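# Hedged sketch building on the assertion above: str(geom.geojson) already yields a
# JSON object, so a GeoJSON FeatureCollection document can be assembled by plain
# string composition without assuming anything else about the geojson type.  The
# helper name is illustrative, not part of the postgis package.
def feature_collection(geoms):
    features = ",".join(
        '{"type": "Feature", "properties": {}, "geometry": %s}' % str(g.geojson)
        for g in geoms)
    return '{"type": "FeatureCollection", "features": [%s]}' % features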
def test_linestring_should_round(cursor, expected):
    params = [LineString(expected, srid=4326)]
    cursor.execute('INSERT INTO linestring (geom) VALUES (%s)', params)
    cursor.execute('SELECT geom FROM linestring WHERE geom=%s', params)
    geom = cursor.fetchone()[0]
    assert geom.coords == expected
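# Hedged sketch of the fixtures the psycopg2 round-trip test above assumes; the
# connection string, table DDL and coordinate values are illustrative, not taken
# from the project's actual conftest.
import psycopg2
import pytest
from postgis.psycopg import register


@pytest.fixture
def cursor():
    conn = psycopg2.connect("dbname=test")
    cur = conn.cursor()
    register(cur)
    cur.execute("CREATE TABLE IF NOT EXISTS linestring "
                "(geom geometry(LineString, 4326))")
    yield cur
    conn.rollback()
    conn.close()


@pytest.fixture
def expected():
    return ((1.0, 2.0), (3.0, 4.0))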
def test_multilinestring_get_item():
    multi = MultiLineString((((30, 10), (10, 30)), ((40, 10), (10, 40))))
    assert multi[0] == LineString(((30, 10), (10, 30)))
def run(self):
    print("my_thread_id:", self.threadID)
    # conn = psycopg2.connect(PSQL_URI)
    # cur = conn.cursor()
    register(self.cur)
    for car_id in self.car_list:
        file_name = self.file_path + 'one_car_traj_' + str(car_id) + '.geojson'
        if os.path.exists(file_name):
            if car_id % 100 == 0:
                print("car_id:", car_id, ",")
            with open(file_name) as f:
                data = json.load(f)
            insert_data = []
            insert_string_block = "INSERT INTO " + table_name + \
                "(unix_time, car_id, time_string, osm_id, gid, percent, unix_time_pre, gid_pre, pick_or_drop, speed, date, the_geom) " + \
                "VALUES "
            for feature in data['features']:
                if len(feature['geometry']['coordinates']) > 1:
                    try:
                        dist = feature['properties']['Dist']
                    except:
                        continue
                    time_current = feature['properties']['ABSTIME']
                    time_current_string = feature['properties']['TIME']
                    osm_id = feature['properties']['OSM_ID']
                    gid_current = feature['properties']['GID']
                    percent_current = feature['properties']['Percent']
                    # percent_pre = feature['properties']['Percent_pre']
                    gid_pre = feature['properties']['GID_pre']
                    pick_or_drop = feature['properties']['Pick_or_drop']
                    time_pre = feature['properties']["ABS_TIME_PRE"]
                    coors = feature['geometry']['coordinates']
                    line_seg = []
                    for item in coors:
                        line_seg.append(tuple(item))
                    insert_data.append(
                        (time_current, car_id, time_current_string, osm_id,
                         gid_current, percent_current, time_pre, gid_pre,
                         pick_or_drop, dist / (time_current - time_pre), date,
                         LineString(line_seg, srid=4326)))
            if len(insert_data) > 0:
                try:
                    # A single multi-row INSERT built with mogrify() replaces the
                    # previous per-row execute()/commit() approach; mogrify()
                    # returns bytes under Python 3, hence the decode().
                    args_str = ','.join(
                        self.cur.mogrify(
                            "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", x).decode()
                        for x in insert_data)
                    self.cur.execute(insert_string_block + args_str)
                    self.conn.commit()
                except:
                    # Roll back in case there is any error.
                    e, t, tb = sys.exc_info()
                    print("caught", e, t, tb)
                    self.conn.rollback()
    print("Insert done! Thread id =", self.threadID)
    self.cur.close()
    self.conn.close()
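# Hedged, generic sketch of the bulk-insert pattern used in run() above: mogrify()
# renders one "(%s, %s)" tuple per row and the rendered rows are joined into a
# single multi-row INSERT.  Table and column names here are placeholders, not the
# real schema.
def bulk_insert(cur, rows):
    values = ",".join(
        cur.mogrify("(%s, %s)", row).decode() for row in rows)
    cur.execute("INSERT INTO example_table (a, b) VALUES " + values)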
def test_geom_should_compare_with_coords():
    assert ((30, 10), (10, 30), (40, 40)) == LineString(
        ((30, 10), (10, 30), (40, 40)))  # noqa
def handle_ride(data, filename, cur, phone_loc, incident_locs):
    data = csv.DictReader(data[1:], delimiter=",")

    raw_coords = []
    accuracies = []
    timestamps = []
    accelerations = []
    for row in data:
        if row["lat"]:
            raw_coords.append([float(row["lon"]), float(row["lat"])])
            try:
                if row["acc"]:
                    if float(row["acc"]) > 100.0:  # ride goes to trash
                        print("Ride is filtered due to accuracies > 100m")
                        return
                    accuracies.append(float(row["acc"]))
            except KeyError:
                return
            timestamps.append(datetime.utcfromtimestamp(
                int(row["timeStamp"]) / 1000))  # timeStamp is in Java TS Format
        try:
            if row["X"]:
                accelerations.append((
                    float(row["X"]),
                    float(row["Y"]),
                    float(row["Z"]),
                    datetime.utcfromtimestamp(int(row["timeStamp"]) / 1000),
                    raw_coords[-1]))
        except TypeError:
            return

    ride = Ride(raw_coords, accuracies, timestamps)

    if len(ride.raw_coords) == 0:
        print("Ride is filtered due to len(coords) == 0")
        return
    if is_teleportation(ride.timestamps):
        print("Ride is filtered due to teleportation")
        return

    IRI, ride_sections_surface = surface_quality_service.process_surface(ride, accelerations)
    ride_sections_velocity = velocity_service.process_velocity(ride)

    ride = filters.apply_smoothing_filters(ride)
    if filters.apply_removal_filters(ride):
        return

    map_matched = map_match_service.map_match(ride)
    if len(map_matched) == 0:
        return
    legs = leg_service.determine_legs(map_matched, cur)
    leg_service.update_legs(ride, legs, cur, IRI, phone_loc,
                            ride_sections_velocity, incident_locs)
    stop_service.process_stops(ride, legs, cur)

    ls = LineString(ride.raw_coords_filtered, srid=4326)
    filename = filename.split("/")[-1]
    start = Point(ride.raw_coords_filtered[0], srid=4326)
    end = Point(ride.raw_coords_filtered[-1], srid=4326)

    if phone_loc == 1 or phone_loc == "1":  # Handlebar
        print("Phone is on Handlebar, finding road surface quality")
        try:
            cur.executemany("""
                INSERT INTO public."SimRaAPI_ridesegmentsurface" (geom, score)
                VALUES (%s, %s)
            """, ride_sections_surface)
        except Exception as e:
            print("Can't create surface ride segments.")
            raise e
    try:
        cur.executemany("""
            INSERT INTO public."SimRaAPI_ridesegmentvelocity" (geom, velocity)
            VALUES (%s, %s)
        """, list(map(lambda x: (LineString(x[0], srid=4326), x[2]),
                      ride_sections_velocity)))
    except Exception as e:
        print("Can't create velocity ride segments.")
        raise e
    try:
        cur.execute("""
            INSERT INTO public."SimRaAPI_ride" (geom, timestamps, legs, filename, "start", "end")
            VALUES (%s, %s, %s, %s, %s, %s) RETURNING id;
        """, [ls, timestamps, [i[0] for i in legs], filename, start, end])
        ride_id = cur.fetchone()[0]
        incidents.update_ride_ids([i[2] for i in incident_locs], ride_id, cur)
    except:
        print(f"Problem parsing {filename}")
        raise Exception("Can not parse ride!")