def transform_coverage_to_coordinates(coverage_list: list) -> list:
    """
    Takes a list of read depths where the list index is equal to the read
    position + 1 and returns a list of (x, y) coordinates.

    The coordinates will be simplified using the Visvalingham-Wyatt algorithm
    if the list exceeds 100 pairs.

    :param coverage_list: a list of position-indexed depth values
    :return: a list of (x, y) coordinates
    """
    # Empty coverage yields no coordinates (previously raised IndexError).
    if not coverage_list:
        return []

    previous_depth = coverage_list[0]
    coordinates = {(0, previous_depth)}

    last = len(coverage_list) - 1

    for i, depth in enumerate(coverage_list):
        if depth != previous_depth or i == last:
            # Close off the preceding run of equal depths, but never emit a
            # negative x position (the original produced (-1, depth) for a
            # single-element list because i - 1 == -1 when i == last == 0).
            if i > 0:
                coordinates.add((i - 1, previous_depth))
            coordinates.add((i, depth))
            previous_depth = depth

    coordinates = sorted(coordinates, key=lambda point: point[0])

    # Simplify large polylines to keep the payload small.
    if len(coordinates) > 100:
        return vw.simplify(coordinates, ratio=0.4)

    return coordinates
def testMinimumRingSize(self):
    """
    Polygon rings with < 4 points are invalid in geojson.
    """
    coordinates = [[0.0, 0.0], [1.1, 0], [2.1, 3], [4.1, 5], [1.1, 2],
                   [5.1, 2], [0.0, 0.0]]
    a = vw.simplify(coordinates, threshold=1.5)
    # The ring must stay closed after simplification.
    assert a[0] == [0, 0]
    assert a[-1] == [0, 0]
    assert len(a) <= len(coordinates)
    # Bug fix: the original asserted `len(coordinates) >= 4`, which checks
    # the *input* (always 7 points) and can never fail. Per the docstring the
    # simplified ring itself must remain GeoJSON-valid, so check the output.
    assert len(a) >= 4
def testSimplifyIntegerCoords(self):
    """Interior points below the area threshold are dropped.

    The points form the shape::

          c
        b   d
      a       e

    so b and d are eliminated.
    """
    pts = [(0, 0), (1, 1), (2, 2), (3, 1), (4, 0)]
    simplified = vw.simplify(pts, threshold=0.001)
    expected = np.array([pts[0], pts[2], pts[4]])
    self.assertTrue(np.array_equal(simplified, expected))
def initialize(overwriteLinks=False):
    """Download RKI district (Landkreis) data and seed the database.

    Fetches district geometries and attributes from the ArcGIS feature
    service, simplifies each polygon ring, stores districts that do not
    exist yet, creates district links and the default categories.

    :param overwriteLinks: passed through to ``mainLinks`` to overwrite
        existing district links
    """
    print("Lade Landkreise herunter. Das kann etwas dauern.")
    openURL = urllib.request.urlopen(
        'https://services7.arcgis.com/mOBPykOjAyBO2ZKk/arcgis/rest/services/RKI_Landkreisdaten/FeatureServer/0/query?where=1%3D1&outFields=*&outSR=4326&f=json'
    )
    if openURL.getcode() == 200:
        data = json.loads(openURL.read())
        neueKreise = []
        for kreis in data["features"]:
            coordinates = []
            for ring in kreis["geometry"]["rings"]:
                simplified = vw.simplify(ring, ratio=0.1)
                # Swap each coordinate pair in place (presumably lon/lat ->
                # lat/lon for the map frontend -- TODO confirm). Tuple
                # unpacking replaces the original three-line tmp swap.
                for pair in simplified:
                    pair[0], pair[1] = pair[1], pair[0]
                coordinates.append(simplified)
            if not districts.query.get(int(kreis["attributes"]["RS"])):
                dnew = districts(
                    int(kreis["attributes"]["RS"]), kreis["attributes"]["GEN"],
                    int(kreis["attributes"]["cases7_per_100k"]),
                    coloring(int(kreis["attributes"]["cases7_per_100k"])),
                    coordinates, kreis["attributes"]["BEZ"])
                dnew.region = createRegionIfNotExists(
                    kreis["attributes"]["BL"])
                db.session.add(dnew)
                db.session.flush()
                neueKreise.append(int(kreis["attributes"]["RS"]))
        # Create links for the newly added districts.
        mainLinks(neueKreise, overwrite=overwriteLinks)
        # Create the default categories (weight controls display order).
        getOrmakeCategory("Kontaktbestimmungen",
                          is_OFP=True,
                          weight=-300,
                          force=True)
        getOrmakeCategory("Geschäfte", is_OFP=True, weight=-200, force=True)
        getOrmakeCategory("Bußgelder", is_OFP=True, weight=200, force=True)
        getOrmakeCategory("Impf-Informationen",
                          is_OFP=True,
                          weight=300,
                          force=True)
        db.session.flush()
    else:
        print("ERROR loading Landkreise")
def initialize():
    """Download RKI district (Landkreis) data and seed the database.

    Fetches district geometries and attributes from the ArcGIS feature
    service, simplifies each polygon ring, stores districts that do not
    exist yet, attaches homepage links from links.json and ensures a
    default CMS user exists.
    """
    print("Lade Landkreise herunter. Das kann etwas dauern.")
    openURL = urllib.request.urlopen(
        'https://services7.arcgis.com/mOBPykOjAyBO2ZKk/arcgis/rest/services/RKI_Landkreisdaten/FeatureServer/0/query?where=1%3D1&outFields=GEN,Shape__Length,cases7_per_100k,RS,BL,BEZ&outSR=4326&f=json'
    )
    if openURL.getcode() == 200:
        data = json.loads(openURL.read())
        neueKreise = []
        for kreis in data["features"]:
            coordinates = []
            for ring in kreis["geometry"]["rings"]:
                simplified = vw.simplify(ring, ratio=0.1)
                # Swap each coordinate pair in place (presumably lon/lat ->
                # lat/lon for the map frontend -- TODO confirm). Tuple
                # unpacking replaces the original three-line tmp swap.
                for pair in simplified:
                    pair[0], pair[1] = pair[1], pair[0]
                coordinates.append(simplified)
            if not districts.query.get(int(kreis["attributes"]["RS"])):
                dnew = districts(
                    int(kreis["attributes"]["RS"]), kreis["attributes"]["GEN"],
                    int(kreis["attributes"]["cases7_per_100k"]),
                    coloring(int(kreis["attributes"]["cases7_per_100k"])), [],
                    coordinates, kreis["attributes"]["BEZ"])
                dnew.region = createRegionIfNotExists(
                    kreis["attributes"]["BL"])
                db.session.add(dnew)
                db.session.flush()
                neueKreise.append(int(kreis["attributes"]["RS"]))
        # Attach a homepage link to every newly created district.
        with open('links.json') as f:
            data = json.load(f)
            for d in data:
                if d["id"] in neueKreise:
                    dist = districts.query.get(d["id"])
                    dist.links = [{
                        "href": d["link"],
                        "title": "Zur Webseite des Kreises"
                    }]
        if not users.query.filter(users.username == "cms").first(
        ):  # TODO: REMOVE THIS FOR PRODUCTION!!!! (and add hashing)
            u = users("cms", "pw")
            db.session.add(u)
        # Commit unconditionally so the link updates above are persisted even
        # when the cms user already exists.
        db.session.commit()
    else:
        print("ERROR loading Landkreise")
def step_processCoordinates(self, _ZONES):
    """Simplify each zone boundary and project it to map vectors.

    Appends per-zone ids/classes to ``self.IDS``/``self.CLASSES``, the
    projected point lists to ``self.countyBoundaries``, and updates the
    running minimum ``self.minXY`` in place.

    :param _ZONES: iterable of GeoJSON-like features with "coordinates"
        and "properties" (postalcode, region)
    """
    print('Processing coordinates...')
    for idg, ZONE_BOUNDARIES in enumerate(_ZONES):
        lonLat = []
        coordinates = ZONE_BOUNDARIES["coordinates"]
        self.IDS.append(ZONE_BOUNDARIES["properties"]["postalcode"])
        className = "region__" + ZONE_BOUNDARIES["properties"][
            "region"] + "--" + str(idg)
        self.CLASSES.append(className)
        # Pick a simplification ratio by point count: small rings keep a
        # fixed fraction, large rings get a ratio proportional to size.
        # len() is hoisted and the redundant lower-bound re-checks of the
        # original elif chain (already implied by elif) are dropped.
        n = len(coordinates)
        if n <= 70:
            _ratio = 0.25
        elif n < 150:
            _ratio = 0.15
        elif n < 500:
            _ratio = 0.05
        elif n < 1000:
            _ratio = round(Decimal(float(n / 2) / float(10000)), 4)
        else:
            _ratio = round(Decimal(float(n) / float(100000)), 4)
        ZONE_BOUNDARIES_REDUCED = vw.simplify(coordinates, ratio=_ratio)
        for idc, countyBoundary in enumerate(ZONE_BOUNDARIES_REDUCED):
            xy = self.mapCoorToVector(countyBoundary)
            # -1 marks "unset"; afterwards keep the running minimum.
            self.minXY.x = xy.x if (self.minXY.x == -1) else min(
                self.minXY.x, xy.x)
            self.minXY.y = xy.y if (self.minXY.y == -1) else min(
                self.minXY.y, xy.y)
            lonLat.append(xy)
        self.countyBoundaries.append(lonLat)
    print('Processing coordinates ready')
def coverage_to_coordinates(coverage_list):
    """Convert position-indexed depths into a sparse list of (x, y) points.

    Runs of equal depth are collapsed to their endpoints; the result is
    simplified with Visvalingam-Wyatt when it exceeds 100 points.

    :param coverage_list: list of depth values indexed by position
    :return: sorted list of (position, depth) tuples
    """
    # Empty coverage yields no coordinates (previously raised IndexError).
    if not coverage_list:
        return []

    previous_depth = coverage_list[0]
    coordinates = {(0, previous_depth)}
    last = len(coverage_list) - 1

    for i, depth in enumerate(coverage_list):
        if depth != previous_depth or i == last:
            # Close off the preceding run of equal depths, but never emit a
            # negative x position (the original produced (-1, depth) for a
            # single-element list because i - 1 == -1 when i == last == 0).
            if i > 0:
                coordinates.add((i - 1, previous_depth))
            coordinates.add((i, depth))
            previous_depth = depth

    coordinates = sorted(coordinates, key=lambda point: point[0])

    if len(coordinates) > 100:
        return vw.simplify(coordinates, ratio=0.4)

    return coordinates
def testSimplifyTupleLike(self):
    """Tuple-like inputs (namedtuples) are accepted like plain tuples.

    The points form the shape::

          c
        b   d
      a       e

    so b and d fall below the threshold and are eliminated.
    """
    Point = namedtuple("Point", ("x", "y"))
    pts = [Point(0, 0), Point(1, 1), Point(2, 2), Point(3, 1), Point(4, 0)]
    simplified = vw.simplify(pts, threshold=0.001)
    expected = np.array([pts[0], pts[2], pts[4]])
    self.assertTrue(np.array_equal(simplified, expected))
def step_processCoordinates(self, _GEOMETRIES):
    # Simplify each zone boundary and project it to map vectors.
    # Appends per-zone ids/classes to self.IDS/self.CLASSES, the projected
    # point lists to self.countyBoundaries, and updates self.minXY in place.
    # NOTE(review): Python 2 code (print statement below).
    for idg, ZONE_BOUNDARIES in enumerate(_GEOMETRIES):
        lonLat = []
        print 'Geometries ' + str(idg)
        coordinates = ZONE_BOUNDARIES["coordinates"]
        self.IDS.append(ZONE_BOUNDARIES["properties"]["postalcode"])
        className = "region__" + ZONE_BOUNDARIES["properties"][
            "region"] + "--" + str(idg)
        self.CLASSES.append(className)
        ##------------------------------------------------------------------ Reduce Method 2
        # Pick a simplification ratio by point count: small rings keep a
        # fixed fraction, large rings get a ratio proportional to size.
        if len(coordinates) < 50:
            _ratio = 0.15
        elif len(coordinates) >= 50 and len(coordinates) < 200:
            _ratio = 0.05
        elif len(coordinates) >= 200 and len(coordinates) < 1000:
            _ratio = round(
                Decimal(float(len(coordinates) / 2) / float(10000)),
                4)  # (714, 0.0714, 51) -> (714, 0.014, x)
        else:
            _ratio = round(
                Decimal(float(len(coordinates)) / float(100000)), 4)
        ZONE_BOUNDARIES_REDUCED = vw.simplify(coordinates,
                                              ratio=_ratio)  # 0.10
        # ----------------------------------------------------------------------------------- Start Calculate Map
        for idc, countyBoundary in enumerate(ZONE_BOUNDARIES_REDUCED):
            xy = self.mapCoorToVector(countyBoundary)
            # -1 marks "unset"; afterwards keep the running minimum.
            self.minXY.x = xy.x if (self.minXY.x == -1) else min(
                self.minXY.x, xy.x)
            self.minXY.y = xy.y if (self.minXY.y == -1) else min(
                self.minXY.y, xy.y)
            lonLat.append(xy)
        self.countyBoundaries.append(lonLat)
def __init__(self, IMAGE_FILE_PATH, IMAGE_FILE_PATH_PNG, IMAGE_WIDTH_IN_PX,
             IMAGE_HEIGHT_IN_PX, GEOMETRIES):
    # Render the given geometries as an SVG map at IMAGE_FILE_PATH:
    # simplify each boundary, Mercator-project it, normalise/scale to the
    # image size and draw one <path> per zone.
    # NOTE(review): Python 2 code (print statements); IMAGE_FILE_PATH_PNG is
    # accepted but never used in this block -- TODO confirm intent.
    MINIMUM_IMAGE_PADDING_IN_PX = 50
    QUARTERPI = pi / 4.0
    dwg = svgwrite.Drawing(IMAGE_FILE_PATH,
                           profile='tiny',
                           size=(IMAGE_WIDTH_IN_PX, IMAGE_HEIGHT_IN_PX))
    # (-1, -1) marks "unset"; running extrema are maintained below.
    minXY = Vector(-1, -1)
    maxXY = Vector(-1, -1)
    countyBoundaries = []
    IDS = []
    CLASSES = []
    for idg, ZONE_BOUNDARIES in enumerate(GEOMETRIES):
        lonLat = []
        print 'Geometries ' + str(idg)
        coordinates = ZONE_BOUNDARIES["coordinates"]
        IDS.append(ZONE_BOUNDARIES["properties"]["postalcode"])
        className = "region__" + ZONE_BOUNDARIES["properties"][
            "region"] + "--" + str(idg)
        CLASSES.append(className)
        ##------------------------------------------------------------------ Reduce Method 1
        # numPoints = len( ZONE_BOUNDARIES )
        # if numPoints <= 20:
        #     jump = 1
        # elif numPoints > 20 and numPoints <= 50:
        #     jump = 4
        # elif numPoints > 50 and numPoints <= 100:
        #     jump = 8
        # elif numPoints > 100 and numPoints <= 250:
        #     jump = 12
        # elif numPoints > 250 and numPoints <= 500:
        #     jump = 20
        # elif numPoints > 500 and numPoints <= 750:
        #     jump = 75
        # elif numPoints > 750 and numPoints <= 1200:
        #     jump = 90
        # else:
        #     jump = 110
        #ZONE_BOUNDARIES_REDUCED = [ ZONE_BOUNDARIES[e] for e in range( 0, numPoints, jump )]
        # print( numPoints, jump, len( ZONE_BOUNDARIES_REDUCED ) )
        ##------------------------------------------------------------------ Reduce Method 2
        # Pick a simplification ratio by point count: small rings keep a
        # fixed fraction, large rings get a ratio proportional to size.
        if len(coordinates) < 50:
            _ratio = 0.15
        elif len(coordinates) >= 50 and len(coordinates) < 200:
            _ratio = 0.05
        elif len(coordinates) >= 200 and len(coordinates) < 1000:
            _ratio = round(
                Decimal(float(len(coordinates) / 2) / float(10000)),
                4)  # (714, 0.0714, 51) -> (714, 0.014, x)
        else:
            _ratio = round(
                Decimal(float(len(coordinates)) / float(100000)), 4)
        ZONE_BOUNDARIES_REDUCED = vw.simplify(coordinates,
                                              ratio=_ratio)  # 0.10
        print(len(coordinates), _ratio, len(ZONE_BOUNDARIES_REDUCED))
        # ----------------------------------------------------------------------------------- Start Calculate Map
        for idc, countyBoundary in enumerate(ZONE_BOUNDARIES_REDUCED):
            # Degrees -> radians, then Mercator projection
            # (y = ln(tan(pi/4 + lat/2))).
            longitude = countyBoundary[0] * pi / 180
            latitude = countyBoundary[1] * pi / 180
            xy = Vector(0, 0)
            xy.x = longitude
            xy.y = log(tan(QUARTERPI + 0.5 * latitude))
            minXY.x = xy.x if (minXY.x == -1) else min(minXY.x, xy.x)
            minXY.y = xy.y if (minXY.y == -1) else min(minXY.y, xy.y)
            lonLat.append(xy)
        countyBoundaries.append(lonLat)
    # Shift all points so the minimum becomes the origin and track the
    # resulting maximum extent.
    for lonLatList in countyBoundaries:
        for point in lonLatList:
            point.x = point.x - minXY.x
            point.y = point.y - minXY.y
            maxXY.x = point.x if (maxXY.x == -1) else max(
                maxXY.x, point.x)
            maxXY.y = point.y if (maxXY.y == -1) else max(
                maxXY.y, point.y)
    # Fit the projected extent into the image, preserving aspect ratio and
    # centring with the leftover padding.
    paddingBothSides = MINIMUM_IMAGE_PADDING_IN_PX * 2
    mapWidth = IMAGE_WIDTH_IN_PX - paddingBothSides
    mapHeight = IMAGE_HEIGHT_IN_PX - paddingBothSides
    mapWidthRatio = mapWidth / maxXY.x
    mapHeightRatio = mapHeight / maxXY.y
    globalRatio = min(mapWidthRatio, mapHeightRatio)
    heightPadding = (IMAGE_HEIGHT_IN_PX - (globalRatio * maxXY.y)) / 2
    widthPadding = (IMAGE_WIDTH_IN_PX - (globalRatio * maxXY.x)) / 2

    def drawShape(pointsList, polygonConfig):
        # Build an SVG path ("M x,y L x,y ... Z") for one polygon.
        print ' Drawing shape'
        pathStr = "M " + str(int(pointsList[0][0])) + ',' + str(
            int(pointsList[0][1])) + " "
        for idx, coor in enumerate(pointsList):
            if idx == 0:
                continue
            pathStr += "L "
            pathStr += str(int(coor[0])) + ',' + str(int(coor[1])) + " "
        pathStr += "Z"
        objPath = dwg.path(d=pathStr,
                           fill='#ffffff',
                           stroke='#000000',
                           stroke_width=1,
                           id=polygonConfig["id"],
                           class_=polygonConfig["class"])
        #objPath.stroke( opacity=0.4 )
        return objPath

    #DOCU: https://svgwrite.readthedocs.io/en/latest/classes/base.html
    for idl, lonLatList in enumerate(countyBoundaries):
        polygonPoints = []
        polygonConfig = {
            "id": "region_" + str(IDS[idl]),
            "class": CLASSES[idl]
        }
        for point in lonLatList:
            # Scale into pixel space; y is flipped because SVG's origin is
            # the top-left corner.
            adjustedX = int(widthPadding + (point.x * globalRatio))
            adjustedY = int(IMAGE_HEIGHT_IN_PX - heightPadding -
                            (point.y * globalRatio))
            polygonPoints.append([adjustedX, adjustedY])
        shape = drawShape(polygonPoints, polygonConfig)
        dwg.add(shape)
    dwg.save()
def test3dCoords(self):
    """3-D coordinates are accepted; the first point survives simplification."""
    pts = [[0.0, 0.0, 0.0], [1.1, 0, 1], [2.1, 3, 0], [4.1, 5, 10],
           [1.1, 2, 0], [5.1, 2, 0]]
    simplified = vw.simplify(pts)
    assert simplified[0] == [0, 0, 0]
    assert len(simplified) <= len(pts)
def line_simplify(coordinates: list, area_threshold_im_meters: float):
    """Simplify a polyline using the Visvalingam-Whyatt algorithm.

    Points whose effective triangle area falls below the threshold are
    dropped by ``vw.simplify``.

    NOTE(review): the parameter name looks like a typo for
    ``area_threshold_in_meters``; renaming would break keyword-argument
    callers, so it is left as-is. The unit implied by the name (meters)
    depends on the coordinates' CRS -- TODO confirm against callers.

    :param coordinates: sequence of coordinate pairs
    :param area_threshold_im_meters: minimum triangle area a point must
        span to be kept
    :return: the simplified list of coordinates
    """
    # https://github.com/Permafacture/Py-Visvalingam-Whyatt/blob/master/polysimplify.py
    # https://pypi.org/project/visvalingamwyatt/
    # https://hull-repository.worktribe.com/preview/376364/000870493786962263.pdf
    return vw.simplify(coordinates, threshold=area_threshold_im_meters)
def test3dCoords(self):
    """3-D coordinates are accepted; the first point survives simplification."""
    pts = [[0.0, 0.0, 0.0], [1.1, 0, 1], [2.1, 3, 0], [4.1, 5, 10],
           [1.1, 2, 0], [5.1, 2, 0]]
    simplified = vw.simplify(pts)
    self.assertEqual(simplified[0], [0, 0, 0])
    self.assertLessEqual(len(simplified), len(pts))
def load_data(self):
    """Load the observed platform track and matching model data.

    Populates observation attributes (points, times, data, depth,
    distances) from the database; if model variables were requested,
    interpolates the model fields onto the observed track and populates
    the ``model_*`` attributes.
    """
    platform = db.session.query(Platform).get(self.platform)
    self.name = platform.unique_id

    # First get the variable
    st0 = db.session.query(Station).filter(
        Station.platform == platform).first()
    datatype_keys = db.session.query(db.func.distinct(
        Sample.datatype_key)).filter(Sample.station == st0).all()
    datatypes = db.session.query(DataType).filter(
        DataType.key.in_(datatype_keys)).order_by(DataType.key).all()

    # self.trackvariables holds indices into the ordered datatype list.
    variables = [datatypes[int(x)] for x in self.trackvariables]
    self.data_names = [dt.name for dt in variables]
    self.data_units = [dt.unit for dt in variables]
    self.track_cmaps = [
        colormap.find_colormap(dt.name) for dt in variables
    ]

    d = []
    for v in variables:
        d.append(
            get_platform_variable_track(
                db.session,
                platform,
                v.key,
                self.track_quantum,
                starttime=self.starttime,
                endtime=self.endtime,
            ))
    # d appears to be (variable, sample, column) with columns
    # [time, ?, ?, depth, value]; columns 1:3 are treated as the track
    # positions -- TODO confirm column order against
    # get_platform_variable_track.
    d = np.array(d)
    self.points = d[0, :, 1:3].astype(float)
    add_tz_utc = np.vectorize(lambda x: x.replace(tzinfo=pytz.UTC))
    self.times = add_tz_utc(d[0, :, 0])
    self.data = d[:, :, 4].astype(float)
    self.depth = d[0, :, 3].astype(float)

    # Cumulative along-track distance in km, starting at 0.
    d_delta = [
        distance(p0, p1).km
        for p0, p1 in zip(self.points[0:-1], self.points[1:])
    ]
    d_delta.insert(0, 0)
    self.distances = np.cumsum(d_delta)

    start = int(
        datetime_to_timestamp(self.times[0],
                              self.dataset_config.time_dim_units))
    end = int(
        datetime_to_timestamp(self.times[-1],
                              self.dataset_config.time_dim_units))

    # Cap the track at 100 points for the model queries below.
    points_simplified = self.points
    if len(self.points) > 100:
        points_simplified = np.array(vw.simplify(self.points, number=100))

    if len(self.variables) > 0:
        with open_dataset(self.dataset_config,
                          timestamp=start,
                          endtime=end,
                          variable=self.variables,
                          nearest_timestamp=True) as dataset:
            # Make distance -> time function
            dist_to_time = interp1d(
                self.distances,
                [time.mktime(t.timetuple()) for t in self.times],
                assume_sorted=True,
                bounds_error=False,
            )

            output_times = dist_to_time(
                np.linspace(0, self.distances[-1], 100))
            model_times = sorted([
                time.mktime(t.timetuple())
                for t in dataset.nc_data.timestamps
            ])
            self.model_depths = dataset.depths
            # d is reused here: it now collects one model array per
            # requested variable.
            d = []
            depth = 0
            for v in self.variables:
                if len(np.unique(self.depth)) > 1:
                    # Observations span multiple depths: pull a full depth
                    # profile along the (possibly simplified) track.
                    pts, dist, md, dep = dataset.get_path_profile(
                        points_simplified,
                        v,
                        int(
                            datetime_to_timestamp(
                                dataset.nc_data.timestamps[0],
                                self.dataset_config.time_dim_units)),
                        endtime=int(
                            datetime_to_timestamp(
                                dataset.nc_data.timestamps[-1],
                                self.dataset_config.time_dim_units)),
                    )
                    if len(model_times) > 1:
                        # Interpolate model output in time onto the
                        # observation times implied by distance.
                        f = interp1d(
                            model_times,
                            md.filled(np.nan),
                            assume_sorted=True,
                            bounds_error=False,
                        )
                        ot = dist_to_time(dist)
                        od = f(ot).diagonal(0, 0, 2).copy()
                    else:
                        od = md

                    # Clear model data beneath observed data
                    od[np.where(self.model_depths > max(self.depth)
                                )[0][1:], :] = np.nan

                    d.append(od)
                    mt = [
                        int(
                            datetime_to_timestamp(
                                t, self.dataset_config.time_dim_units))
                        for t in dataset.nc_data.timestamps
                    ]
                    model_dist = dist
                else:
                    # Single observed depth: sample the model along the
                    # original track at that depth.
                    pts, dist, mt, md = dataset.get_path(
                        self.points,
                        depth,
                        v,
                        datetime_to_timestamp(
                            dataset.nc_data.timestamps[0],
                            self.dataset_config.time_dim_units),
                        endtime=datetime_to_timestamp(
                            dataset.nc_data.timestamps[-1],
                            self.dataset_config.time_dim_units),
                        times=output_times)
                    model_dist = dist
                    if len(model_times) > 1:
                        f = interp1d(
                            model_times,
                            md,
                            assume_sorted=True,
                            bounds_error=False,
                        )
                        d.append(np.diag(f(mt)))
                    else:
                        d.append(md)

            model_data = np.ma.array(d)

            # Collect per-variable display metadata and apply each
            # variable's scale factor to its model data.
            variable_units = []
            variable_names = []
            scale_factors = []
            cmaps = []
            for v in self.variables:
                vc = self.dataset_config.variable[v]
                variable_units.append(vc.unit)
                variable_names.append(vc.name)
                scale_factors.append(vc.scale_factor)
                cmaps.append(colormap.find_colormap(vc.name))

            for idx, sf in enumerate(scale_factors):
                model_data[idx, :] = np.multiply(model_data[idx, :], sf)

            self.model_data = model_data
            self.model_dist = model_dist
            self.model_times = list(
                map(datetime.datetime.utcfromtimestamp, model_times))
            self.variable_names = variable_names
            self.variable_units = variable_units
            self.cmaps = cmaps