def updateSubId(subId, verifytoken):
    """
    Updates webhook subscriptions table with the new subscription id provided
    by Strava then updates all athletes with the new subID foreign key.

    @param subId: Int. Webhook subscription ID provided by Strava API
    @param verifytoken: String. Script generated verification token
    @return: Nothing. Database is updated
    """
    session = Session()
    try:
        application.logger.debug(
            f"Updating record with the following token: {verifytoken}")
        # Update recently created record which only has the verify token populated
        session.query(webhook_subs).filter(
            webhook_subs.verify_token == verifytoken).update({
                webhook_subs.sub_id: subId,
                webhook_subs.activesub: "Yes"
            })
        session.commit()
        # Get the primary key from the new webhook subscription
        record = session.query(webhook_subs).filter(
            webhook_subs.verify_token == verifytoken).first()
        # Update all athletes with the new subscription entry foreign key
        session.query(athletes).update({athletes.sub_id: record.id})
        session.commit()
    except Exception as e:
        application.logger.debug(
            f"Update Strava athlete sub Id failed with the exception: {e}")
        errorEmail.sendErrorEmail(script=updateSubId.__name__,
                                  exceptiontype=e.__class__.__name__,
                                  body=e)
    finally:
        # Always release the connection; the original only closed the
        # session on the success path, leaking it whenever an exception hit
        session.close()
def deleteVerifyTokenRecord(token):
    """
    Deletes a script generated Strava webhook verification token from the
    database. Called when an existing webhook is removed.

    @param token: String. Strava verify token generated in script
    @return: Nothing
    """
    session = Session()
    # Remove every record carrying this verify token
    matching = session.query(webhook_subs).filter(
        webhook_subs.verify_token == token)
    matching.delete()
    session.commit()
    session.close()
def getAuth():
    """
    Loads Strava client authentication details from Postgres and creates an
    authorized client instance. Checks if the access token is expired; if so
    it is refreshed and the stored record updated.

    Returns
    -------
    client. Stravalib model client instance. Contains access token to Strava
    API for the athlete, ID is hard coded for now.
    """
    client = Client()
    session = Session()
    # Athlete id is hard coded for now
    athleteID = 7170058

    # Pull tokens and expiration time for the athlete out of Postgres
    tokenDetails = {}
    for row in session.query(athletes).filter(
            athletes.athlete_id == athleteID):
        tokenDetails["Access_Token"] = row.access_token
        tokenDetails["Expiration"] = row.access_token_exp
        tokenDetails["Refresh_Token"] = row.refresh_token
    application.logger.debug(f"Auth token details are: {tokenDetails}")

    if time.time() <= tokenDetails["Expiration"]:
        application.logger.debug("Access token is fresh, no refresh required")
        # Token still valid, hand the stored credentials straight to the client
        client.access_token = tokenDetails["Access_Token"]
        client.refresh_token = tokenDetails["Refresh_Token"]
        client.token_expires_at = tokenDetails["Expiration"]
    else:
        application.logger.debug("Access token has expired, refreshing")
        refresh_response = client.refresh_access_token(
            client_id=int(os.environ.get('STRAVA_CLIENT_ID')),
            client_secret=os.environ.get('STRAVA_CLIENT_SECRET'),
            refresh_token=tokenDetails["Refresh_Token"])
        # Persist the refreshed access token and expiration date
        session.query(athletes).filter(
            athletes.athlete_id == athleteID).update({
                athletes.access_token: refresh_response['access_token'],
                athletes.access_token_exp: refresh_response['expires_at']
            })
        session.commit()
        # Hand the refreshed credentials to the client
        client.access_token = refresh_response['access_token']
        client.refresh_token = tokenDetails["Refresh_Token"]
        client.token_expires_at = refresh_response['expires_at']

    # Close out session
    session.close()
    return client
def setWebhookInactive(subID):
    """
    Sets the provided webhook subscription to inactive status.

    @param subID: Int.
    @return: Nothing
    """
    session = Session()
    # Flip the subscription's active flag to "No"
    (session.query(webhook_subs)
        .filter(webhook_subs.sub_id == subID)
        .update({webhook_subs.activesub: "No"}))
    session.commit()
    session.close()
def getHashPass(username):
    """
    Get the hashed password from the PostgreSQL database using the username
    supplied by HTTP Basic Auth.

    Parameters
    ----------
    username : String
        Username as provided by user in the auth prompt.

    Returns
    -------
    res_dict : dictionary
        keys: username (string, parameter)
        value: hashed password (string)
    None (if no matching user in table)

    A dictionary is returned to maintain the connection between the username
    supplied and the password.
    """
    session = Session()
    rows = session.query(User.hashpass).filter(User.user == username).all()
    session.close()
    # Map the supplied username onto each returned hash (at most one expected)
    matches = {username: row.hashpass for row in rows}
    return matches if matches else None
def getDist(coordinate1, coordinate2):
    """
    Get the distance between the newest incoming point and the most recent
    previously recorded point. Distance is reported as meters by default by
    ST_DistanceSphere.

    Parameters
    ----------
    coordinate1 : String
        WKT representation of the most recently recorded GPS point coordinates.
    coordinate2 : String
        WKT representation of the incoming GPS point coordinates.

    Returns
    -------
    dist : Float
        Distance in meters between the newest and most recent recorded points.
    """
    session = Session()
    # GeoAlchemy ORM expression: sphere distance between the two WKT points
    distQuery = session.query(
        sqlfunc.ST_DistanceSphere(sqlfunc.ST_GeomFromText(coordinate1),
                                  sqlfunc.ST_GeomFromText(coordinate2)))
    dist = None
    for row in distQuery:
        # Coerce the ORM result to a plain float; round off since high
        # precision isn't needed
        dist = round(float(row[0]), 1)
    session.close()
    return dist
def checkmd5(hash, pdfDate):
    """
    Checks if the downloaded PDF's MD5 hash is already in Postgres and
    returns the result.

    :param hash: String
        New MD5 hash
    :param pdfDate: String
        New PDF Date
    :return: String
        "Exists" - Hash is already in Postgres
        "New" - Hash is not in Postgres and no other hashes exist for the PDF result week
        "Update" - Hash is not in Postgres but other hashes exist for the PDF result week
    """
    session = Session()
    application.logger.debug(
        f"Querying water quality MD5 hashes with the date {pdfDate}")
    # All hash records already stored for this PDF's result week
    rows = session.query(waterQualityMD5).filter(
        waterQualityMD5.pdfdate == pdfDate).all()
    application.logger.debug(f"Iterating ")
    storedHashes = [row.md5 for row in rows]
    session.close()
    if hash in storedHashes:
        return "Exists"
    if not storedHashes:
        return "New"
    return "Update"
def getBeachWaterQual():
    """
    Queries Postgres AWS RDS to return the most recent water quality report
    data for each beach that is tested in SB County. Data are spread across
    tables with mapped relationships. This query joins the relevant tables
    and uses "distinct" on the waterQuality beach ID field, selecting only
    one record per beach, then "order_by" is used on the joined MD5 table to
    grab only the most recent record per beach.

    :return: List: Nested lists containing SQL Alchemy query results:
        3 query result objects: waterQuality, waterqualityMD5 beaches
        1 string: geometry type of associated beach
        2 floats: x and y coordinates of the associated beach
    """
    session = Session()
    latestPerBeach = (
        session.query(waterQuality, waterQualityMD5, beaches,
                      sqlfunc.ST_GeometryType(beaches.geom),
                      sqlfunc.st_x(beaches.geom),
                      sqlfunc.st_y(beaches.geom))
        .join(waterQualityMD5)
        .join(beaches)
        .distinct(waterQuality.beach_id)
        .order_by(waterQuality.beach_id, waterQualityMD5.insdate.desc())
        .all())
    # Session intentionally left open: the results are consumed by another
    # function before the session may be closed
    return latestPerBeach
def getroles(username):
    """
    Queries PostgreSQL database for user roles using the username supplied by
    Flask HTTP Basic Auth. Parses roles into a list.

    Parameters
    ----------
    username : String
        Username as provided by user in the auth prompt.

    Returns
    -------
    res : List
        List of strings containing user roles. All roles need to be returned.
    None if empty results, user not in database.
    """
    session = Session()
    query = session.query(Roles.roles).filter(Roles.user == username).all()
    session.close()
    # Guard against unknown users: the original indexed query[0] first, which
    # raised IndexError on an empty result before the documented None could
    # ever be returned
    if not query:
        return None
    # Roles are stored as comma separated strings
    # Convert result tuple into a list of strings split by commas
    return query[0][0].split(",")
def updateExistingActivity(update):
    """
    Updates existing activity in database, currently only handles activity
    title updates.

    @param update: Stravalib update instance
    @return: Nothing.
    """
    # Get object ID
    objectID = update.object_id
    # Not every update event carries a new title ("if applicable"); bail out
    # instead of raising KeyError when it is absent
    # NOTE(review): assumes update.updates is dict-like — original subscripted
    # it with ['title']
    newTitle = update.updates.get('title')
    if newTitle is None:
        return
    session = Session()
    # Use SQL Alchemy to update the existing feature's title
    session.query(strava_activities).filter(
        strava_activities.actID == objectID).update(
            {strava_activities.name: newTitle})
    session.commit()
    session.close()
def update(job_id, name, url):
    """Update the stored name and url of an existing job."""
    session = Session()
    record = session.query(Job).get(job_id)
    record.name = name
    record.url = url
    session.commit()
    session.close()
def removeActivityFromDB(actID):
    """
    Removes an activity from the original and public activities database
    tables.

    @param actID: Int. Strava Activity ID
    @return: Nothing.
    """
    session = Session()
    # Masked (public) copy is deleted first, then the original record
    for model in (strava_activities_masked, strava_activities):
        session.query(model).filter(model.actID == actID).delete()
    session.commit()
    session.close()
def getActiveSubID():
    """
    Gets the active Strava webhook subscription ID from the database.

    @return: Int. Active subscription ID, or None when no subscription is
        active.
    """
    session = Session()
    activeSub = session.query(webhook_subs).filter(
        webhook_subs.activesub == "Yes").first()
    session.close()
    return int(activeSub.sub_id) if activeSub else None
def getIntersectingPoints(wktStr):
    """
    Takes an EWKT string of a Strava Activity Stream's latlngs and returns a
    list of float points which reside within the privacy areas.

    @param wktStr: String. EWKT representation of Strava Activity Stream latlngs
    @return: List of strings. Points are returned as WGS 1984 coordinate
        strings in the format lon,lat
    """
    # ST_CollectionExtract type 3 pulls polygons out of a geometry collection
    collectionExtract = 3
    # Open session
    session = Session()
    # Defined before the try block so the name always exists
    coordinateList = []
    try:
        # Labeled common table expression collecting all privacy zone
        # geometries into a single multi-polygon
        privacy_cte = session.query(
            sqlfunc.ST_CollectionExtract(sqlfunc.ST_Collect(
                AOI.geom), collectionExtract).label("ctelab")).filter(
                    AOI.privacy == "Yes").cte()

        # Intersect the activity linestring with the privacy zones and dump
        # the resulting vertices. ST_DumpPoints provides a point geometry per
        # iterative loop which ST_AsText converts to a text representation
        pointQuery = session.query(
            sqlfunc.ST_AsText(
                sqlfunc.ST_DumpPoints(
                    sqlfunc.ST_Intersection(sqlfunc.ST_GeomFromEWKT(wktStr),
                                            privacy_cte.c.ctelab)).geom))

        for i in pointQuery:
            # Strip out the WKT parts of the coordinates, only want a list
            # of [lon,lat]
            coordinateList.append(formatPointResponse(i))
    finally:
        session.close()
    return coordinateList
def checkathleteID(athID):
    """
    Checks if the provided athlete ID is already within the database.

    @param athID: Int. Strava athlete ID
    @return: Athletes model instance if the record exists, None otherwise
    """
    # Open session
    session = Session()
    # Filter on the athlete_id column. The original queried the bare boolean
    # expression (session.query(athletes.athlete_id == athID)), which selects
    # an arbitrary True/False row for every athlete instead of looking up
    # this one — compare with checkAthleteAndSub below
    record = session.query(athletes).filter(
        athletes.athlete_id == athID).first()
    session.close()
    return record
def getSubIdList():
    """
    Gets list of subscription webhook IDs from database. Check if used,
    likely can delete.

    Returns
    -------
    List. Subscription webhook IDs (Int) stored in database.
    """
    session = Session()
    subIdList = [row.sub_id for row in session.query(athletes).all()]
    session.close()
    return subIdList
def getAthleteList():
    """
    Gets list of athlete IDs from database.

    Returns
    -------
    List. Athlete IDs (int) stored in database.
    """
    session = Session()
    athleteList = [row.athlete_id for row in session.query(athletes).all()]
    session.close()
    return athleteList
def checkVerificationToken(token):
    """
    Verifies that the provided verification token is in the database. Used as
    part of the Strava Webhook subscription callback verification and setup
    process. Only needed on setup, further POST requests won't contain the
    token.

    @param token: String. Strava verify token generated in script
    @return: Instance of webhook sub model if exists, None otherwise
    """
    # Open session
    session = Session()
    # Query database, get most recent record in case the token is in the
    # database multiple times. Filter on the verify_token column — the
    # original queried the bare boolean expression
    # (session.query(webhook_subs.verify_token == token)), which does not
    # restrict the result set to matching tokens
    record = session.query(webhook_subs).filter(
        webhook_subs.verify_token == token).order_by(
            webhook_subs.id.desc()).first()
    session.close()
    if record:
        return record
def checkAthleteAndSub(athID, subID):
    """
    Checks if the provided athlete and subscription ID are in the database
    with an active subscription status.

    @param athID: Int. Strava athlete ID
    @param subID: Int. Strava Webhook Subscription ID
    @return: Object Instance. Instance of Athletes Model with results
    """
    session = Session()
    match = (session.query(athletes)
             .join(webhook_subs)
             .filter(athletes.athlete_id == athID,
                     webhook_subs.sub_id == subID,
                     webhook_subs.activesub == "Yes")
             .first())
    session.close()
    return match
def AOIIntersection(geomdat):
    """
    Issues a SQLAlchemy/GeoAlchemy intersection query against the AOI PostGIS
    table and returns a string of results. The AOI table contains hand
    digitized locations of interest. Currently only single POI results are
    returned, however this will change as POIs are added.

    Parameters
    ----------
    geomdat : String
        WKT representation of the incoming GPS point coordinates.

    Returns
    -------
    result : String
        AOI results as a string with individual results comma seperated or
        None in case of empty result. A comma seperated string is returned
        for easier processing and insertion into the dictionary created by
        the handleTrackerQueries function.
    """
    session = Session()
    # SQLAlchemy and GeoAlchemy intersection query
    query = session.query(AOI).filter(AOI.geom.ST_Intersects(geomdat))
    # Only the location column of each intersecting POI is wanted
    locations = [POI.location for POI in query]
    session.close()
    if not locations:
        return None
    # Comma separate when multiple POIs intersect
    return ",".join(locations)
def getStandards():
    """
    Get the state health standards for ocean water quality tests.

    Returns
    -------
    recDict : Dictionary
        Dict of State health standards, with the standard name as the keys
        and values as values.
    """
    session = Session()
    standards = session.query(stateStandards).all()
    session.close()
    return {rec.Name: rec.StandardMPN for rec in standards}
def getPathPointRecords():
    """
    Gets the most recently recorded GPS point, used to check for movement and
    to generate a GPS track. This entire function may not be needed and can
    likely be combined with "getrecords". Returns a dictionary with a single
    top level key and a nested dictionary of record details; logic for
    multiple top level keys is kept in case this function needs to be built
    out.

    Returns
    -------
    res_dict : Dictionary
        Results of gps point query with details as keys, or None when no
        points are stored.
    """
    session = Session()
    # Newest point only, ordered by recorded UTC time
    latest = session.query(gpsPointModel.id, gpsPointModel.lat,
                           gpsPointModel.lon, gpsPointModel.geom,
                           gpsPointModel.timeutc, gpsPointModel.date) \
        .order_by(gpsPointModel.timeutc.desc()).limit(1).all()
    session.close()
    if not latest:
        return None
    # Keyed by record id to allow for multiple top level keys later
    return {
        row.id: {
            "id": row.id,
            "lat": row.lat,
            "lon": row.lon,
            "utc": row.timeutc,
            "date": row.date,
            "geom": row.geom
        }
        for row in latest
    }
def countyIntersection(geomdat):
    """
    Issues a SQLAlchemy/GeoAlchemy intersection query against the county
    PostGIS table and returns a string of results.

    Parameters
    ----------
    geomdat : String
        WKT representation of the incoming GPS point coordinates.

    Returns
    -------
    TYPE
        String of intersecting county or "Out of State!" for an empty
        result. Should only ever be a single result but multiple records are
        comma joined in case they are returned.
    """
    session = Session()
    query = session.query(CACounty).filter(
        CACounty.geom.ST_Intersects(geomdat))
    countyNames = [county.name for county in query]
    session.close()
    if not countyNames:
        # No intersection: the point fell outside California's counties
        return "Out of State!"
    return ",".join(countyNames)
def getNullBeaches(pdfDate):
    """
    Returns list of beaches with null values for the given PDF test week.
    Only called when an update/re-sample PDF is downloaded.

    :param pdfDate: String
        Date of new weekly PDF results
    :return: List[Strings,]
        Names of beaches with null test results
    """
    session = Session()
    incomplete = session.query(waterQuality) \
        .join(waterQualityMD5) \
        .join(beaches) \
        .filter(waterQualityMD5.pdfdate == pdfDate) \
        .filter(or_(waterQuality.FecColi == None,
                    waterQuality.Entero == None,
                    waterQuality.TotColi == None)) \
        .all()
    # Pull each beach name through the mapped relationship while the session
    # is still open
    nullbeaches = [rec.beach_rel.BeachName for rec in incomplete]
    session.close()
    return nullbeaches
def getBeachResults(beach):
    """
    Gets the ten most recent water quality results for a single beach.

    @param beach: Beach name used to filter the joined beaches table
    @return: Dictionary keyed by waterQuality record id, each value holding
        the beach status, test date (ISO format), and a pre-signed S3 URL to
        the result PDF
    """
    session = Session()
    records = session.query(waterQuality, waterQualityMD5) \
        .join(waterQualityMD5) \
        .join(beaches) \
        .filter(beaches.BeachName == beach) \
        .order_by(waterQuality.id.desc()) \
        .limit(10)
    resultDict = {}
    for qualRec, md5Rec in records:
        resultDict[qualRec.id] = {
            "status": qualRec.BeachStatus,
            "date": md5Rec.pdfdate.isoformat(),
            "s3PDFURL": create_presigned_url(md5Rec.pdfName)
        }
    session.close()
    return resultDict
def read(job_id):
    """Fetch a single job record by primary key."""
    session = Session()
    record = session.query(Job).get(job_id)
    session.close()
    return record
def get_list():
    """Return all jobs, newest first."""
    session = Session()
    all_jobs = session.query(Job).order_by(Job.create_time.desc()).all()
    session.close()
    return all_jobs
def getTrackerFeatCollection(datatype, reclimit):
    """
    handleTrackerQueries PostgreSQL using SQLAlchemy and GeoAlchemy functions, returns data formatted as a geoJSON
    feature collection. All stored attribute information are returned along with the geometries. gpstracks are
    returned for the current day, using the pytz library to set the today date to the US/Pacific timezone, instead
    of using the system clock. This enables the map to be accurate for the west coast, where most of the usage of
    the app will take place.

    Parameters
    ----------
    datatype: String. Type of data being queried, points or tracks
    reclimit: Int. Number of gpspoints to return

    Returns
    -------
    GeoJSON Feature Collection of datatype parameter containing all stored attribute information.
    """
    session = Session()
    if datatype == "gpspoints":
        # Query using GeoAlchemy PostGIS function to get geojson representation of geometry and regular query to get
        # tabular data
        query = session.query(sqlfunc.ST_AsGeoJSON(gpsPointModel.geom),
                              gpsPointModel).order_by(gpsPointModel.id.desc()).\
            limit(reclimit)
    elif datatype == "gpstracks":
        # Get timezone and datetime(UTC) from the most recently recorded gpstrack record
        newestRecordTime = session.query(gpsPointModel.timezone, gpsPointModel.timeutc). \
            order_by(gpsPointModel.timeutc.desc()).limit(1).all()
        recTZ = []
        recDateTime = []
        for row in newestRecordTime:
            recTZ.append(row.timezone)
            recDateTime.append(row.timeutc)
        # Stored timeutc is naive; stamp it as UTC before converting
        # NOTE(review): raises IndexError if the table is empty — confirm callers guarantee data
        utcTime = recDateTime[0].replace(tzinfo=pytz.utc)
        # Convert from utc time to localtime using the record's own stored timezone name
        localTime = utcTime.astimezone(pytz.timezone(recTZ[0]))
        # Set local time to start of day
        startofDayLocal = localTime.replace(hour=0, minute=0, second=0, microsecond=0)
        # Convert startofDayLocal to UTC time so it can be compared against stored UTC timestamps
        startofDayUTC = startofDayLocal.astimezone(pytz.utc)
        # Query using GeoAlchemy PostGIS function to get geojson representation of geometry and regular query to get
        # tabular data records from the start of day in localtime and later are returned
        query = session.query(
            sqlfunc.ST_AsGeoJSON(gpstracks.geom),
            gpstracks).filter(gpstracks.timeutc >= startofDayUTC)
    # NOTE(review): if datatype matches neither branch, "query" is unbound and
    # the loop below raises NameError — confirm callers only pass the two values
    features = []
    for row in query:
        # Build a dictionary of the attribute information
        prop_dict = row[1].builddict()
        # Take ST_AsGeoJSON() result and load as geojson object
        geojson_geom = geojson.loads(row[0])
        # Build the feature and add to feature list
        features.append(Feature(geometry=geojson_geom, properties=prop_dict))
    session.close()
    # Build the feature collection result
    feature_collection = FeatureCollection(features)
    return feature_collection
def createStravaPublicActTopoJSON():
    """
    Creates an in memory TopoJSON file containing all database stored Strava
    Activities. This file will be uploaded to a S3 Bucket, replacing the
    existing file. A pre-generated file is used to speed up response time, as
    generating the file may take a few seconds. This function is called
    whenever a new subscription update adds a new activity to the database or
    when triggered on the admin page.

    Returns
    -------
    In memory TopoJSON file.
    """
    # Create Postgres connection
    session = Session()
    # Query geom as GeoJSON (masked/privacy-clipped geometry, 5 decimal places)
    # and other attribute information; gear join is an outer join since not
    # every activity has gear assigned
    query = session.query(sqlfunc.ST_AsGeoJSON(strava_activities_masked.geom, 5),
                          strava_activities.name, strava_activities.actID,
                          strava_activities.type, strava_activities.distance,
                          strava_activities.private, strava_activities.calories,
                          strava_activities.start_date, strava_activities.elapsed_time,
                          strava_activities.moving_time, strava_activities.average_watts,
                          strava_activities.start_date_local,
                          strava_activities.total_elevation_gain,
                          strava_activities.average_speed, strava_activities.max_speed,
                          strava_activities.type_extended, strava_activities.has_heartrate,
                          strava_activities.average_cadence, strava_activities.max_heartrate,
                          strava_activities.average_heartrate, strava_gear.gear_name) \
        .join(strava_activities_masked.act_rel) \
        .join(strava_activities.gear_rel, isouter=True) \
        .order_by(strava_activities.start_date.desc())
    features = []
    for row in query:
        # Build a dictionary of the attribute information; durations are
        # exposed in whole seconds and rounded values keep the payload small
        propDict = {
            "name": row.name,
            "actID": row.actID,
            "type": row.type,
            "distance": round(row.distance),
            "private": row.private,
            "calories": round(row.calories),
            "startDate": row.start_date_local.isoformat(),
            "elapsed_time": row.elapsed_time.seconds,
            "total_elevation_gain": round(row.total_elevation_gain),
            "average_speed": round(row.average_speed, 1),
            "max_speed": row.max_speed,
            "gear_name": row.gear_name,
            "type_extended": row.type_extended,
            "moving_time": row.moving_time.seconds,
            "average_watts": row.average_watts,
            "has_heartrate": row.has_heartrate,
            "average_cadence": row.average_cadence,
            "max_heartrate": row.max_heartrate,
            "average_heartrate": row.average_heartrate
        }
        # Take ST_AsGeoJSON() result and load as geojson object
        geojsonGeom = geojson.loads(row[0])
        # Build the feature and add to feature list
        features.append(
            Feature(geometry=MultiLineString(geojsonGeom),
                    properties=propDict))
    session.close()
    # Build the feature collection result
    feature_collection = FeatureCollection(features)
    # Create local topoJSON file of geoJSON Feature Collection. Don't create a topology, doesn't matter for a polyline
    # and prequantize the data, this reduces file size at the cost of processing time.
    # prequantize 1e7 is used over default, 1e6, to avoid errors in which data were placed in the South Pacific Ocean
    return tp.Topology(feature_collection, topology=False,
                       prequantize=10000000).to_json()
def processActivitiesPublic(recordID):
    """
    Processes Strava activity by simplifying geometry and removing private
    areas. This prepares the activity to be shared publicly on a Leaflet map.
    These functions greatly reduce the number of vertices, reducing JSON file
    size, and process the data to be topoJSON friendly, preventing geometries
    from failing to be converted.

    SQLAlchemy and GeoAlchemy2 ORM queries are used to do the following:

    1. Create a common table expression(CTE) to select privacy zones geometry.
       This expression selects AOI polygons flagged as privacy zones, combines
       them into a single multi-part polygon contained inside a geometry
       collection(ST_Collect), extracts the multi-polygon from the
       collection(ST_CollectionExtract), and transforms (ST_transform) the
       geometry to the projected coordinate system geometricProj. This CTE
       ensures that ST_Difference only calculates the difference between each
       activity and the privacy zones once; un-combined zones would produce
       duplicated results. Using a projected coordinate system allows for
       faster geometric calculations and lets meters be used in PostGIS
       function parameters which use the geometry's units.
    2. Select strava_activities activity linestring geometry based on Record
       ID and transform(ST_Transform) to geometricProj.
    3. Snap activity linestrings to a 0.0001m grid (ST_SnapToGrid, variant 3).
       This solves a non-node intersection error when running ST_Difference.
       See https://gis.stackexchange.com/q/50399 for the problem and solution.
    4. Calculate difference(ST_Difference) between activity linestring and
       privacy zone CTE result, removing vertices within the zones and
       segments that touch them.
    5. Snap activity linestring vertices to a 5m grid(ST_SnapToGrid, variant
       3). This removes some messy areas by combining and removing excess
       vertices while reducing geometry memory/file size, and solves geometric
       errors when exporting to topoJSON. Resulting linestrings have a
       step-shaped appearance resembling the grid.
    6. Simplify activity linestring with a 15m tolerance(ST_Simplify), further
       removing messy areas and bends, greatly reducing size, and mitigating
       the step-shaped results of ST_SnapToGrid.
    7. Convert linestrings to multi-linestrings(ST_Multi). strava_activities
       stores linestrings, but ST_Difference may create multi-linestrings, so
       all geometries are converted to that format.
    8. Fix any invalid activity linestring geometries(ST_MakeValid) generated
       during prior processing.
    9. Transform activity linestring geometry(ST_Transform) back into WGS
       1984, SRID 4326 — best for database storage and required by Leaflet.
    10. Convert the geometry to Extended Well Known Binary(ST_AsEWKB) so it
        can be inserted into the strava_activities_masked table.
    11. Query Activity ID of the strava_activities record, inserted as a
        foreign key in the strava_activities_masked table.

    Parameters
    ----------
    recordID. Int. Strava activity record ID, or the string "All" to process
        every activity.

    Returns
    -------
    Nothing. Data are processed and committed to PostgresSQL/PostGIS database.
    """
    session = Session()
    # Simplification tolerance in geometry's units, which is meters here.
    # Higher values more aggressively simplify geometries
    simplifyFactor = 15
    # Projected coordinate system SRID to transform geometries into. WGS84 UTM 10N is used since most
    # activities are in within its zone in California.
    geometricProj = 32610
    # SRID of final data product, WGS 1984, to be used in Leaflet
    webSRID = 4326
    # Grid snapping grid size geometry's units, which is meters here. Larger values mean larger cells and greater
    # vertex snapping
    gridSnap = 5
    # See https://gis.stackexchange.com/a/90271, fixes non-noded intersection error
    nonNodedSnap = 0.0001
    # Extract polygons from geometry collection
    collectionExtract = 3
    # Create CTE to query privacy zone polygons, combine them, extract polygons, and transform to geometricProj
    privacy_cte = session.query(
        sqlfunc.ST_Transform(
            sqlfunc.ST_CollectionExtract(sqlfunc.ST_Collect(AOI.geom),
                                         collectionExtract),
            geometricProj).label("priv_aoi")).filter(
                AOI.privacy == "Yes").cte("privacy_aoi")
    if recordID == "All":
        # Process every stored activity through the steps documented above
        privacyClipQuery = session.query(
            strava_activities.actID,
            sqlfunc.ST_AsEWKB(
                sqlfunc.ST_Transform(
                    sqlfunc.ST_MakeValid(
                        sqlfunc.ST_Multi(
                            sqlfunc.ST_Simplify(
                                sqlfunc.ST_SnapToGrid(
                                    sqlfunc.ST_Difference(
                                        sqlfunc.ST_SnapToGrid(
                                            sqlfunc.ST_Transform(
                                                strava_activities.geom,
                                                geometricProj),
                                            nonNodedSnap),
                                        privacy_cte.c.priv_aoi),
                                    gridSnap),
                                simplifyFactor), )), webSRID)))
    else:
        # Same processing chain restricted to the single requested activity
        privacyClipQuery = session.query(strava_activities.actID,
                                         sqlfunc.ST_AsEWKB(
                                             sqlfunc.ST_Transform(
                                                 sqlfunc.ST_MakeValid(
                                                     sqlfunc.ST_Multi(
                                                         sqlfunc.ST_Simplify(
                                                             sqlfunc.ST_SnapToGrid(
                                                                 sqlfunc.ST_Difference(
                                                                     sqlfunc.ST_SnapToGrid(sqlfunc.ST_Transform(strava_activities.geom,
                                                                                                                geometricProj),
                                                                                           nonNodedSnap),
                                                                     privacy_cte.c.priv_aoi)
                                                                 , gridSnap),
                                                             simplifyFactor), )), webSRID))) \
            .filter(strava_activities.actID == recordID)
    # Iterate over query to process data, add data to strava_activities_masked instance, and add instance to session
    for i in privacyClipQuery:
        session.add(strava_activities_masked(actID=i[0], geom=i[1]))
    session.commit()
    session.close()