def remove_all_team_members(team_id):
    """Remove teamId attribute for all users of the team."""
    fb_db = firebaseDB()  # noqa E841
    try:
        # check if team exists in firebase
        if not fb_db.reference(f"v2/teams/{team_id}").get():
            raise CustomError(f"can't find team in firebase: {team_id}")

        # get team name from firebase
        team_name = fb_db.reference(f"v2/teams/{team_id}/teamName").get()

        # get all users of the team
        team_members = (
            fb_db.reference("v2/users/")
            .order_by_child("teamId")
            .equal_to(team_id)
            .get()
        )

        # remove teamId attribute for each member
        if not team_members:
            logger.info(
                f"there are no members of the team {team_id} - '{team_name}'"
            )
        else:
            for user_id in team_members.keys():
                # update data in firebase
                ref = fb_db.reference(f"v2/users/{user_id}/")
                ref.update({"teamId": None})
                logger.info(
                    f"removed teamId {team_id} - '{team_name}' for user {user_id}"
                )
            logger.info(
                f"removed all team members from team: {team_id} - '{team_name}'"
            )
    except Exception as e:
        logger.info(f"could not remove team members of team: {team_id}")
        raise CustomError(e)


def delete_team(team_id):
    """Delete team in Firebase."""
    # TODO: What is the consequence of this on projects and users?
    # Do we expect that the teamId is removed there as well?
    # teamId is removed for users, but not for projects at the moment.
    fb_db = firebaseDB()  # noqa E841
    try:
        # check if team exists in firebase
        if not fb_db.reference(f"v2/teams/{team_id}").get():
            raise CustomError(f"can't find team in firebase: {team_id}")

        # remove all team members
        remove_all_team_members(team_id)

        # get team name from firebase
        team_name = fb_db.reference(f"v2/teams/{team_id}/teamName").get()

        # check if reference path is valid, e.g. if team_id is None
        ref = fb_db.reference(f"v2/teams/{team_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )

        # delete team in firebase
        ref.delete()
        logger.info(f"deleted team: {team_id} - '{team_name}'")
    except Exception as e:
        logger.info(f"could not delete team: {team_id}")
        raise CustomError(e)


def renew_team_token(team_id):
    """Renew the token of a team in Firebase and return the new token."""
    fb_db = firebaseDB()  # noqa E841
    try:
        # check if team exists in firebase
        if not fb_db.reference(f"v2/teams/{team_id}").get():
            raise CustomError(f"can't find team in firebase: {team_id}")

        # get team name from firebase
        team_name = fb_db.reference(f"v2/teams/{team_id}/teamName").get()

        # check if reference path is valid
        ref = fb_db.reference(f"v2/teams/{team_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )

        # generate new uuid4 token
        new_team_token = str(uuid.uuid4())

        # set team token in firebase
        ref.update({"teamToken": new_team_token})
        logger.info(
            f"renewed team token: {team_id} - '{team_name}' - {new_team_token}"
        )
        return new_team_token
    except Exception as e:
        logger.info(f"could not renew team token for team: {team_id}")
        raise CustomError(e)


def save_tutorial(self):
    """Save the tutorial in Firebase."""
    tutorial = vars(self)
    groups = self.groups
    tasks = self.tasks

    tutorial.pop("groups", None)
    tutorial.pop("tasks", None)
    tutorial.pop("raw_tasks", None)
    tutorial.pop("examplesFile", None)
    tutorial.pop("tutorial_tasks", None)

    fb_db = auth.firebaseDB()
    ref = fb_db.reference("")

    if not self.projectId or self.projectId == "":
        raise CustomError(
            f"""Given argument resulted in invalid Firebase Realtime Database reference.
            Project Id is invalid: {self.projectId}"""
        )

    ref.update(
        {
            f"v2/projects/{self.projectId}": tutorial,
            f"v2/groups/{self.projectId}": groups,
            f"v2/tasks/{self.projectId}": tasks,
        }
    )
    logger.info(f"uploaded tutorial data to firebase for {self.projectId}")

    ref = fb_db.reference(f"v2/tutorialDrafts/{self.tutorialDraftId}")
    ref.set({})


def __init__(self, tile_server_dict):
    self.name = tile_server_dict.get("name", "bing")

    # set base url
    self.url = tile_server_dict.get(
        "url", auth.get_tileserver_url(tile_server_dict.get("name", "bing"))
    )
    if self.url == "":
        self.url = auth.get_tileserver_url(tile_server_dict.get("name", "bing"))

    # check if url contains the right placeholders
    if not self.check_imagery_url():
        raise CustomError(
            f"The imagery url {self.url} must contain {{x}}, {{y}} (or {{-y}}) "
            f"and {{z}} or the {{quad_key}} placeholders."
        )

    # set api key
    self.apiKey = tile_server_dict.get(
        "apiKey", auth.get_api_key(tile_server_dict.get("name", "bing"))
    )
    if self.apiKey == "":
        self.apiKey = auth.get_api_key(tile_server_dict.get("name", "bing"))

    # only needed if tile server is a WMS
    self.wmtsLayerName = tile_server_dict.get("wmtsLayerName", None)
    if self.wmtsLayerName == "":
        self.wmtsLayerName = None

    self.credits = tile_server_dict.get("credits", "")

    # currently not used in client and project creation
    self.captions = tile_server_dict.get("caption", None)
    self.date = tile_server_dict.get("date", None)


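# The check_imagery_url() method used above is not defined in this excerpt.
# The following module-level function is only an illustrative sketch of such a
# check, derived from the placeholders named in the error message
# ({x}, {y} or {-y}, {z}, {quad_key}); the real method may differ.
def check_imagery_url_sketch(url: str) -> bool:
    """Return True if the tile server url contains the expected placeholders."""
    has_xyz = "{x}" in url and ("{y}" in url or "{-y}" in url) and "{z}" in url
    has_quad_key = "{quad_key}" in url
    return has_xyz or has_quad_key

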
def query_osm(changeset_ids: list, changeset_results):
    """Get data from changesetId."""
    id_string = ",".join(map(str, changeset_ids))

    url = OSM_API_LINK + f"changesets?changesets={id_string}"
    response = retry_get(url)
    if response.status_code != 200:
        err = f"osm request failed: {response.status_code}"
        logger.warning(f"{err}")
        logger.warning(response.json())
        raise CustomError(err)

    tree = ElementTree.fromstring(response.content)
    for changeset in tree.iter("changeset"):
        changeset_id = changeset.attrib["id"]
        username = changeset.attrib["user"]
        userid = changeset.attrib["uid"]
        comment = created_by = None
        for tag in changeset.iter("tag"):
            if tag.attrib["k"] == "comment":
                comment = tag.attrib["v"]
            if tag.attrib["k"] == "created_by":
                created_by = tag.attrib["v"]
        changeset_results[int(changeset_id)] = {
            "username": remove_troublesome_chars(username),
            "userid": userid,
            "comment": remove_troublesome_chars(comment),
            "editor": remove_troublesome_chars(created_by),
        }

    return changeset_results


def create_user(email, username, password):
    fb_db = firebaseDB()
    try:
        user = auth.create_user(
            email=email, display_name=username, password=password
        )
        ref = fb_db.reference(f"v2/users/{user.uid}/")
        ref.update(
            {
                "username": username,
                "taskContributionCount": 0,
                "groupContributionCount": 0,
                "projectContributionCount": 0,
                # store current datetime in milliseconds
                "created": datetime.datetime.utcnow().isoformat()[0:-3] + "Z",
            }
        )
        logger.info(f"created new user: {user.uid}")
        return user
    except Exception as e:
        logger.info(f"could not create new user {email}.")
        raise CustomError(e)


def delete_user(email):
    fb_db = firebaseDB()
    try:
        user = auth.get_user_by_email(email)
        ref = fb_db.reference("v2/users/")
        ref.update({user.uid: None})
        auth.delete_user(user.uid)
        logger.info(f"deleted user {email}")
    except Exception as e:
        logger.info(f"could not find user {email} in firebase to delete.")
        raise CustomError(e)


def update_username(email, username):
    fb_db = firebaseDB()
    try:
        user = auth.get_user_by_email(email)
        auth.update_user(user.uid, display_name=username)
        ref = fb_db.reference(f"v2/users/{user.uid}/username")
        ref.set(username)
        logger.info(f"updated username for user {email}: {username}")
    except Exception as e:
        logger.info(f"could not find user {email} in firebase to update username.")
        raise CustomError(e)


def set_project_manager_rights(email):
    fb_db = firebaseDB()  # noqa E841
    try:
        user = auth.get_user_by_email(email)
        auth.set_custom_user_claims(user.uid, {"projectManager": True})
        logger.info(f"user {email} has project manager rights.")
    except Exception as e:
        logger.info(
            f"could not find user {email} in firebase to set project manager rights."
        )
        raise CustomError(e)


def tear_down_team_member(user_id):
    fb_db = firebaseDB()

    # check if reference path is valid, e.g. if user_id is None
    ref = fb_db.reference(f"v2/users/{user_id}")
    if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
        raise CustomError(
            f"""Given argument resulted in invalid Firebase Realtime Database reference.
            {ref.path}"""
        )

    # delete user in firebase
    ref.delete()


def remove_project_manager_rights(email):
    fb_db = firebaseDB()  # noqa E841
    try:
        user = auth.get_user_by_email(email)
        auth.update_user(user.uid, custom_claims=auth.DELETE_ATTRIBUTE)
        logger.info(f"user {email} has no project manager rights.")
    except Exception as e:
        logger.info(
            f"could not find user {email} in firebase to remove project manager rights."
        )
        raise CustomError(e)


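# Illustrative usage of the user management helpers above. This is only a
# sketch: it assumes the helpers are available in the same module, that
# Firebase credentials are configured, and the email/username/password values
# are placeholders.
def example_user_lifecycle():
    email = "new.user@example.org"
    create_user(email, "new_user", "change-this-example-password")
    update_username(email, "renamed_user")
    set_project_manager_rights(email)
    remove_project_manager_rights(email)
    delete_user(email)

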
def remove_user_from_team(email):
    """Remove teamId attribute for user."""
    try:
        fb_db = firebaseDB()  # noqa E841

        # get user by email
        try:
            user = auth.get_user_by_email(email)
        except auth.UserNotFoundError:
            raise CustomError(f"can't find user in firebase: {email}")

        # get teamId from firebase
        team_id = fb_db.reference(f"v2/users/{user.uid}/teamId").get()

        # remove teamId attribute for user in firebase
        ref = fb_db.reference(f"v2/users/{user.uid}")
        ref.update({"teamId": None})  # deletes the teamId attribute in firebase
        logger.info(f"removed teamId {team_id} for user {email} - {user.uid}.")
    except Exception as e:
        logger.info(f"could not remove teamId attribute for user {email}.")
        raise CustomError(e)


def add_user_to_team(email, team_id):
    """Add teamId attribute for user."""
    try:
        fb_db = firebaseDB()  # noqa E841

        # check if team exists in firebase
        if not fb_db.reference(f"v2/teams/{team_id}").get():
            raise CustomError(f"can't find team in firebase: {team_id}")

        # get user by email
        try:
            user = auth.get_user_by_email(email)
        except auth.UserNotFoundError:
            raise CustomError(f"can't find user in firebase: {email}")

        # set teamId attribute for user in firebase
        ref = fb_db.reference(f"v2/users/{user.uid}")
        ref.update({"teamId": team_id})
        logger.info(f"added teamId {team_id} for user {email} - {user.uid}.")
    except Exception as e:
        logger.info(f"could not add teamId attribute for user {email}.")
        raise CustomError(e)


def create_team(team_name):
    """Create new team in Firebase."""
    fb_db = firebaseDB()  # noqa E841
    try:
        # generate random uuid4 token
        team_token = str(uuid.uuid4())

        # set data in firebase
        ref = fb_db.reference("v2/teams/")
        team_ref = ref.push()
        team_ref.set({"teamName": team_name, "teamToken": team_token})
        logger.info(f"created team: {team_ref.key} - '{team_name}' - {team_token}")
        return team_ref.key, team_token
    except Exception as e:
        logger.info(f"could not create team: {team_name}")
        raise CustomError(e)


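# Illustrative usage of the team helpers above. This is only a sketch: it
# assumes Firebase credentials are configured and that the given user already
# exists; the email value is a placeholder.
def example_team_lifecycle():
    team_id, team_token = create_team("example mapping team")
    add_user_to_team("mapper@example.org", team_id)
    renew_team_token(team_id)  # invalidates the previous team token
    remove_user_from_team("mapper@example.org")
    remove_all_team_members(team_id)
    delete_team(team_id)

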
def delete_old_groups(project_id):
    """Delete old groups for a project."""
    fb_db = auth.firebaseDB()

    ref = fb_db.reference(f"/groups/{project_id}")
    if not re.match(r"/\w+/[-a-zA-Z0-9]+", ref.path):
        raise CustomError(
            f"""Given argument resulted in invalid Firebase Realtime Database reference.
            {ref.path}"""
        )
    try:
        ref.delete()
    except exceptions.InvalidArgumentError:
        # Data to write exceeds the maximum size that can be modified
        # with a single request. Delete chunks of data instead.
        childs = ref.get(shallow=True)
        for chunk in chunks(list(childs.keys())):
            ref.update({key: None for key in chunk})
        ref.delete()


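# delete_old_groups() above as well as archive_project() and delete_project()
# below rely on a chunks() helper that is not shown in this excerpt. A minimal
# sketch of what such a helper could look like (the actual chunk size used may
# differ):
def chunks_sketch(data: list, size: int = 250):
    """Yield successive slices of `data` with at most `size` elements each."""
    for i in range(0, len(data), size):
        yield data[i : i + size]

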
def save_tutorial(self):
    """Save the tutorial in Firebase."""
    tutorial = vars(self)
    groups = self.groups
    tasks = self.tasks

    tutorial.pop("groups", None)
    tutorial.pop("tasks", None)
    tutorial.pop("raw_tasks", None)
    tutorial.pop("examplesFile", None)
    tutorial.pop("tutorial_tasks", None)

    fb_db = auth.firebaseDB()
    ref = fb_db.reference("")

    if not self.projectId or self.projectId == "":
        raise CustomError(
            f"""Given argument resulted in invalid Firebase Realtime Database reference.
            Project Id is invalid: {self.projectId}"""
        )

    if self.projectType in [ProjectType.FOOTPRINT.value]:
        # we compress tasks for the footprint project type using gzip
        compressed_tasks = gzip_str.compress_tasks(tasks)
        tasks = {"101": compressed_tasks}

    ref.update(
        {
            f"v2/projects/{self.projectId}": tutorial,
            f"v2/groups/{self.projectId}": groups,
            f"v2/tasks/{self.projectId}": tasks,
        }
    )
    logger.info(f"uploaded tutorial data to firebase for {self.projectId}")

    ref = fb_db.reference(f"v2/tutorialDrafts/{self.tutorialDraftId}")
    ref.set({})


def ohsome(request: dict, area: str, properties=None) -> dict:
    """Request data from the Ohsome API."""
    url = OHSOME_API_LINK + request["endpoint"]
    data = {"bpolys": area, "filter": request["filter"]}
    if properties:
        data["properties"] = properties

    logger.info("Target: " + url)
    logger.info("Filter: " + request["filter"])

    response = requests.post(url, data=data)
    if response.status_code != 200:
        err = f"ohsome request failed: {response.status_code}"
        logger.warning(
            f"{err} - check for errors in filter or geometries - {request['filter']}"
        )
        logger.warning(response.json())
        raise CustomError(err)
    else:
        logger.info("Query successful.")

    response = response.json()
    if properties:
        response = remove_noise_and_add_user_info(response)
    return response


def query_osmcha(changeset_ids: list, changeset_results):
    """Get data from changesetId."""
    id_string = ",".join(map(str, changeset_ids))

    url = OSMCHA_API_LINK + f"changesets/?ids={id_string}"
    response = retry_get(url, to_osmcha=True)
    if response.status_code != 200:
        err = f"osmcha request failed: {response.status_code}"
        logger.warning(f"{err}")
        logger.warning(response.json())
        raise CustomError(err)
    response = response.json()

    for feature in response["features"]:
        changeset_results[int(feature["id"])] = {
            "username": remove_troublesome_chars(feature["properties"]["user"]),
            "userid": feature["properties"]["uid"],
            "comment": remove_troublesome_chars(feature["properties"]["comment"]),
            "editor": remove_troublesome_chars(feature["properties"]["editor"]),
        }

    return changeset_results


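# query_osm() and query_osmcha() use a retry_get() helper that is not defined
# in this excerpt. A minimal sketch of what such a helper could look like,
# assuming simple fixed-delay retries and an OSMCHA_API_KEY constant for the
# osmcha Authorization header; the real implementation may differ.
def retry_get_sketch(url, retries=3, delay=10, to_osmcha=False):
    """GET a url, retrying on connection errors; return the response."""
    import time

    import requests

    # OSMCHA_API_KEY is an assumed module-level constant holding the osmcha token.
    headers = {"Authorization": f"Token {OSMCHA_API_KEY}"} if to_osmcha else {}
    for attempt in range(retries):
        try:
            return requests.get(url, headers=headers, timeout=60)
        except requests.exceptions.RequestException:
            if attempt == retries - 1:
                raise
            time.sleep(delay)

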
def archive_project(project_ids: list) -> bool:
    """
    Archive projects.

    Deletes groups, tasks and results from Firebase.
    Sets status = archived for each project in Firebase and Postgres.
    """
    for project_id in project_ids:
        logger.info(f"Archive project with the id {project_id}")
        fb_db = auth.firebaseDB()

        ref = fb_db.reference(f"v2/results/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        try:
            ref.delete()
        except exceptions.InvalidArgumentError:
            # Data to write exceeds the maximum size that can be modified
            # with a single request. Delete chunks of data instead.
            childs = ref.get(shallow=True)
            for chunk in chunks(list(childs.keys())):
                ref.update({key: None for key in chunk})
            ref.delete()

        ref = fb_db.reference(f"v2/tasks/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        try:
            ref.delete()
        except exceptions.InvalidArgumentError:
            # Data to write exceeds the maximum size that can be modified
            # with a single request. Delete chunks of data instead.
            childs = ref.get(shallow=True)
            for chunk in chunks(list(childs.keys())):
                ref.update({key: None for key in chunk})
            ref.delete()

        ref = fb_db.reference(f"v2/groups/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        ref.delete()

        fb_db.reference(f"v2/projects/{project_id}/status").set("archived")

        pg_db = auth.postgresDB()
        sql_query = """
            UPDATE projects SET status = 'archived'
            WHERE project_id = %(project_id)s;
        """
        pg_db.query(sql_query, {"project_id": project_id})
    return True


def validate_geometries(self):
    raw_input_file = '{}/input_geometries/raw_input_{}.geojson'.format(
        DATA_PATH, self.import_key)
    valid_input_file = '{}/input_geometries/valid_input_{}.geojson'.format(
        DATA_PATH, self.import_key)

    if not os.path.isdir('{}/input_geometries'.format(DATA_PATH)):
        os.mkdir('{}/input_geometries'.format(DATA_PATH))

    # download file from given url
    url = self.info['inputGeometries']
    urllib.request.urlretrieve(url, raw_input_file)
    logging.warning(
        '%s - __init__ - downloaded input geometries from url and saved as file: %s'
        % (self.import_key, raw_input_file))
    self.info['inputGeometries'] = raw_input_file

    # open the raw input file and get layer
    driver = ogr.GetDriverByName('GeoJSON')
    datasource = driver.Open(raw_input_file, 0)
    try:
        layer = datasource.GetLayer()
        LayerDefn = layer.GetLayerDefn()
    except AttributeError:
        raise CustomError('Value error in input geometries file')

    # create layer for valid_input_file to store all valid geometries
    outDriver = ogr.GetDriverByName("GeoJSON")
    # Remove output geojson if it already exists
    if os.path.exists(valid_input_file):
        outDriver.DeleteDataSource(valid_input_file)
    outDataSource = outDriver.CreateDataSource(valid_input_file)
    outLayer = outDataSource.CreateLayer("geometries",
                                         geom_type=ogr.wkbMultiPolygon)
    for i in range(0, LayerDefn.GetFieldCount()):
        fieldDefn = LayerDefn.GetFieldDefn(i)
        outLayer.CreateField(fieldDefn)
    outLayerDefn = outLayer.GetLayerDefn()

    # check if raw_input_file layer is empty
    if layer.GetFeatureCount() < 1:
        err = 'empty file. No geometries provided'
        logging.warning("%s - check_input_geometry - %s" %
                        (self.import_key, err))
        raise Exception(err)

    # check if the input geometry is a valid polygon
    for feature in layer:
        feat_geom = feature.GetGeometryRef()
        geom_name = feat_geom.GetGeometryName()
        if not feat_geom.IsValid():
            # remove invalid geometry from layer
            layer.DeleteFeature(feature.GetFID())
            logging.warning(
                "%s - check_input_geometries - deleted invalid feature %s" %
                (self.import_key, feature.GetFID()))
        # we accept only POLYGON or MULTIPOLYGON geometries
        elif geom_name != 'POLYGON' and geom_name != 'MULTIPOLYGON':
            # remove non polygon geometry from layer
            layer.DeleteFeature(feature.GetFID())
            logging.warning(
                "%s - check_input_geometries - deleted non polygon feature %s" %
                (self.import_key, feature.GetFID()))
        else:
            # Create output Feature
            outFeature = ogr.Feature(outLayerDefn)
            # Add field values from input Layer
            for i in range(0, outLayerDefn.GetFieldCount()):
                outFeature.SetField(
                    outLayerDefn.GetFieldDefn(i).GetNameRef(),
                    feature.GetField(i))
            outFeature.SetGeometry(feat_geom)
            outLayer.CreateFeature(outFeature)
            outFeature = None

    # check if layer is empty
    if layer.GetFeatureCount() < 1:
        err = 'no geometries left after checking validity and geometry type.'
        logging.warning("%s - check_input_geometry - %s" %
                        (self.import_key, err))
        raise Exception(err)

    del datasource
    del outDataSource
    del layer

    self.info['validInputGeometries'] = valid_input_file

    logging.warning(
        '%s - check_input_geometry - filtered correct input geometries and created file: %s'
        % (self.import_key, valid_input_file))

    return True


def save_project(self):
    """
    Creates a project with groups and tasks
    and saves it in firebase and postgres.

    Returns
    -------
    Boolean: True = Successful
    """

    logger.info(f"{self.projectId}" f" - start creating a project")

    # Convert object attributes to dictionaries
    # for saving it to firebase and postgres
    project = vars(self)
    groups = dict()
    groupsOfTasks = dict()
    for group in self.groups:
        group = vars(group)
        tasks = list()
        for task in group["tasks"]:
            tasks.append(vars(task))
        groupsOfTasks[group["groupId"]] = tasks
        del group["tasks"]
        groups[group["groupId"]] = group
    del project["groups"]
    project.pop("inputGeometries", None)
    project.pop("validInputGeometries", None)

    # Convert Date object to ISO Datetime:
    # https://www.w3.org/TR/NOTE-datetime
    project["created"] = self.created.strftime("%Y-%m-%dT%H:%M:%S.%fZ")

    # logger.info(
    #     f'{self.projectId}'
    #     f' - size of all tasks: '
    #     f'{sys.getsizeof(json.dumps(groupsOfTasks))/1024/1024} MB'
    # )

    # Make sure projects get saved in Postgres and Firebase successfully
    try:
        self.save_to_postgres(
            project,
            groups,
            groupsOfTasks,
        )
        logger.info(
            f"{self.projectId}" f" - the project has been saved" f" to postgres"
        )
    except Exception as e:
        logger.exception(
            f"{self.projectId}"
            f" - the project could not be saved"
            f" to postgres and will therefore not be"
            f" saved to firebase"
        )
        raise CustomError(e)

    # if the project can't be saved to files, delete it in postgres as well
    try:
        self.save_to_files(project)
        logger.info(
            f"{self.projectId}" f" - the project has been saved" f" to files"
        )
    except Exception as e:
        self.delete_from_postgres()
        logger.exception(
            f"{self.projectId}" f" - the project could not be saved" f" to files."
        )
        logger.info(
            f"{self.projectId} deleted project data from files and postgres"
        )
        raise CustomError(e)

    try:
        self.save_to_firebase(
            project,
            groups,
            groupsOfTasks,
        )
        logger.info(
            f"{self.projectId}" f" - the project has been saved" f" to firebase"
        )
    # if the project can't be saved to firebase,
    # delete it in postgres and files as well
    except Exception as e:
        self.delete_from_postgres()
        self.delete_from_files()
        logger.exception(
            f"{self.projectId}"
            f" - the project could not be saved"
            f" to firebase."
        )
        logger.info(
            f"{self.projectId} deleted project data from postgres and files"
        )
        raise CustomError(e)

    return True


def validate_geometries(self):
    raw_input_file = (
        f"{DATA_PATH}/" f"input_geometries/raw_input_{self.projectId}.geojson"
    )
    valid_input_file = (
        f"{DATA_PATH}/" f"input_geometries/valid_input_{self.projectId}.geojson"
    )

    if not os.path.isdir("{}/input_geometries".format(DATA_PATH)):
        os.mkdir("{}/input_geometries".format(DATA_PATH))

    # download file from given url
    url = self.inputGeometries
    urllib.request.urlretrieve(url, raw_input_file)
    logger.info(
        f"{self.projectId}"
        f" - __init__ - "
        f"downloaded input geometries from url and saved as file: "
        f"{raw_input_file}"
    )
    self.inputGeometries = raw_input_file

    # open the raw input file and get layer
    driver = ogr.GetDriverByName("GeoJSON")
    datasource = driver.Open(raw_input_file, 0)
    try:
        layer = datasource.GetLayer()
        LayerDefn = layer.GetLayerDefn()
    except AttributeError:
        raise CustomError("Value error in input geometries file")

    # create layer for valid_input_file to store all valid geometries
    outDriver = ogr.GetDriverByName("GeoJSON")
    # Remove output geojson if it already exists
    if os.path.exists(valid_input_file):
        outDriver.DeleteDataSource(valid_input_file)
    outDataSource = outDriver.CreateDataSource(valid_input_file)
    outLayer = outDataSource.CreateLayer(
        "geometries", geom_type=ogr.wkbMultiPolygon
    )
    for i in range(0, LayerDefn.GetFieldCount()):
        fieldDefn = LayerDefn.GetFieldDefn(i)
        outLayer.CreateField(fieldDefn)
    outLayerDefn = outLayer.GetLayerDefn()

    # check if raw_input_file layer is empty
    if layer.GetFeatureCount() < 1:
        err = "empty file. No geometries provided"
        # TODO: How to use logger and exceptions?
        logger.warning(f"{self.projectId} - check_input_geometry - {err}")
        raise Exception(err)

    # get the bounding box / extent of the layer as wkt geometry
    extent = layer.GetExtent()
    # Create a Polygon from the extent tuple
    ring = ogr.Geometry(ogr.wkbLinearRing)
    ring.AddPoint(extent[0], extent[2])
    ring.AddPoint(extent[1], extent[2])
    ring.AddPoint(extent[1], extent[3])
    ring.AddPoint(extent[0], extent[3])
    ring.AddPoint(extent[0], extent[2])
    poly = ogr.Geometry(ogr.wkbPolygon)
    poly.AddGeometry(ring)
    wkt_geometry = poly.ExportToWkt()

    # check if the input geometry is a valid polygon
    for feature in layer:
        feat_geom = feature.GetGeometryRef()
        geom_name = feat_geom.GetGeometryName()
        fid = feature.GetFID()
        if not feat_geom.IsValid():
            layer.DeleteFeature(fid)
            logger.warning(
                f"{self.projectId}"
                f" - check_input_geometries - "
                f"deleted invalid feature {fid}"
            )
        # we accept only POLYGON or MULTIPOLYGON geometries
        elif geom_name != "POLYGON" and geom_name != "MULTIPOLYGON":
            layer.DeleteFeature(fid)
            logger.warning(
                f"{self.projectId}"
                f" - check_input_geometries - "
                f"deleted non polygon feature {fid}"
            )
        else:
            # Create output Feature
            outFeature = ogr.Feature(outLayerDefn)
            # Add field values from input Layer
            for i in range(0, outLayerDefn.GetFieldCount()):
                outFeature.SetField(
                    outLayerDefn.GetFieldDefn(i).GetNameRef(), feature.GetField(i)
                )
            outFeature.SetGeometry(feat_geom)
            outLayer.CreateFeature(outFeature)
            outFeature = None

    # check if layer is empty
    if layer.GetFeatureCount() < 1:
        err = "no geometries left after checking validity and geometry type."
        logger.warning(f"{self.projectId} - check_input_geometry - {err}")
        raise Exception(err)

    del datasource
    del outDataSource
    del layer

    self.validInputGeometries = valid_input_file

    logger.info(
        f"{self.projectId}"
        f" - check_input_geometry - "
        f"filtered correct input geometries and created file: "
        f"{valid_input_file}"
    )

    return wkt_geometry


def validate_geometries(self):
    raw_input_file = (
        f"{DATA_PATH}/input_geometries/" f"raw_input_{self.projectId}.geojson"
    )

    # check if a 'data' folder exists and create one if not
    if not os.path.isdir("{}/input_geometries".format(DATA_PATH)):
        os.mkdir("{}/input_geometries".format(DATA_PATH))

    # write string to geom file
    with open(raw_input_file, "w") as geom_file:
        json.dump(self.geometry, geom_file)

    driver = ogr.GetDriverByName("GeoJSON")
    datasource = driver.Open(raw_input_file, 0)
    try:
        layer = datasource.GetLayer()
    except AttributeError:
        logger.warning(
            f"{self.projectId}"
            f" - validate geometry - "
            f"Could not get layer for datasource"
        )
        raise CustomError("could not get layer for datasource")

    # check if layer is empty
    if layer.GetFeatureCount() < 1:
        logger.warning(
            f"{self.projectId}"
            f" - validate geometry - "
            f"Empty file. "
            f"No geometry is provided."
        )
        raise CustomError("Empty file. ")
    # check if more than 1 geometry is provided
    elif layer.GetFeatureCount() > MAX_INPUT_GEOMETRIES:
        logger.warning(
            f"{self.projectId}"
            f" - validate geometry - "
            f"Input file contains more than {MAX_INPUT_GEOMETRIES} geometries. "
            f"Make sure to provide less than {MAX_INPUT_GEOMETRIES} geometries."
        )
        raise CustomError(
            f"Input file contains more than {MAX_INPUT_GEOMETRIES} geometries. "
        )

    project_area = 0
    geometry_collection = ogr.Geometry(ogr.wkbMultiPolygon)

    # check if the input geometry is a valid polygon
    for feature in layer:
        feat_geom = feature.GetGeometryRef()
        geom_name = feat_geom.GetGeometryName()

        # add geometry to geometry collection
        if geom_name == "MULTIPOLYGON":
            for singlepart_polygon in feat_geom:
                geometry_collection.AddGeometry(singlepart_polygon)
        if geom_name == "POLYGON":
            geometry_collection.AddGeometry(feat_geom)

        if not feat_geom.IsValid():
            logger.warning(
                f"{self.projectId}"
                f" - validate geometry - "
                f"Geometry is not valid: {geom_name}. "
                f"Tested with IsValid() ogr method. "
                f"Probably self-intersections."
            )
            raise CustomError(f"Geometry is not valid: {geom_name}. ")

        # we accept only POLYGON or MULTIPOLYGON geometries
        if geom_name != "POLYGON" and geom_name != "MULTIPOLYGON":
            logger.warning(
                f"{self.projectId}"
                f" - validate geometry - "
                f"Invalid geometry type: {geom_name}. "
                f'Please provide "POLYGON" or "MULTIPOLYGON"'
            )
            raise CustomError(f"Invalid geometry type: {geom_name}. ")

        # check the size of the project to make sure it stays below the
        # maximum allowed area; for doing this we transform the geometry
        # into Mollweide projection (EPSG Code 54009)
        source = feat_geom.GetSpatialReference()
        target = osr.SpatialReference()
        target.ImportFromProj4(
            "+proj=moll +lon_0=0 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs"
        )
        transform = osr.CoordinateTransformation(source, target)
        feat_geom.Transform(transform)
        project_area += feat_geom.GetArea() / 1000000

    # calculate max area based on zoom level
    # for zoom level 18 this will be 5000 square kilometers
    # max zoom level is 22
    if self.zoomLevel > 22:
        raise CustomError(f"zoom level is too large (max: 22): {self.zoomLevel}.")
    max_area = (23 - int(self.zoomLevel)) * (23 - int(self.zoomLevel)) * 200

    if project_area > max_area:
        logger.warning(
            f"{self.projectId}"
            f" - validate geometry - "
            f"Project is too large: {project_area} sqkm. "
            f"Please split your projects into smaller sub-projects and resubmit."
        )
        raise CustomError(
            f"Project is too large: {project_area} sqkm. "
            f"Max area for zoom level {self.zoomLevel} = {max_area} sqkm"
        )

    del datasource
    del layer

    self.validInputGeometries = raw_input_file

    logger.info(
        f"{self.projectId}" f" - validate geometry - " f"input geometry is correct."
    )

    dissolved_geometry = geometry_collection.UnionCascaded()
    wkt_geometry_collection = dissolved_geometry.ExportToWkt()

    return wkt_geometry_collection


def delete_project(project_ids: list) -> bool:
    """Deletes project, groups, tasks and results from Firebase and Postgres."""
    for project_id in project_ids:
        logger.info(
            f"Delete project, groups, tasks and results of project: {project_id}"
        )

        fb_db = auth.firebaseDB()

        ref = fb_db.reference(f"v2/results/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        try:
            ref.delete()
        except exceptions.InvalidArgumentError:
            # Data to write exceeds the maximum size that can be modified
            # with a single request. Delete chunks of data instead.
            childs = ref.get(shallow=True)
            for chunk in chunks(list(childs.keys())):
                ref.update({key: None for key in chunk})
            ref.delete()

        ref = fb_db.reference(f"v2/tasks/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        try:
            ref.delete()
        except exceptions.InvalidArgumentError:
            # Data to write exceeds the maximum size that can be modified
            # with a single request. Delete chunks of data instead.
            childs = ref.get(shallow=True)
            for chunk in chunks(list(childs.keys())):
                ref.update({key: None for key in chunk})
            ref.delete()

        ref = fb_db.reference(f"v2/groupsUsers/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        ref.delete()

        time.sleep(5)  # Wait for Firebase Functions to complete

        ref = fb_db.reference(f"v2/groups/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        ref.delete()

        ref = fb_db.reference(f"v2/projects/{project_id}")
        if not re.match(r"/v2/\w+/[-a-zA-Z0-9]+", ref.path):
            raise CustomError(
                f"""Given argument resulted in invalid Firebase Realtime Database reference.
                {ref.path}"""
            )
        ref.delete()

        pg_db = auth.postgresDB()
        sql_query = "DELETE FROM results WHERE project_id = %(project_id)s;"
        pg_db.query(sql_query, {"project_id": project_id})
        sql_query = "DELETE FROM tasks WHERE project_id = %(project_id)s;"
        pg_db.query(sql_query, {"project_id": project_id})
        sql_query = "DELETE FROM groups WHERE project_id = %(project_id)s;"
        pg_db.query(sql_query, {"project_id": project_id})
        sql_query = "DELETE FROM projects WHERE project_id = %(project_id)s;"
        pg_db.query(sql_query, {"project_id": project_id})
    return True