class Route(db.Model):
    """A climbing route, with its location stored both as lat/lon columns
    and as a PostGIS POINT geometry for spatial queries."""

    __tablename__ = "routes"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    mp_id = db.Column(db.Text, nullable=False)  # external (Mountain Project) id — TODO confirm
    name = db.Column(db.Text, nullable=False)
    # Integer index into boulder_levels / sport_levels (see add_route).
    difficulty = db.Column(db.Integer, nullable=False)
    image_url = db.Column(
        db.Text,
        default="/static/images/default-pic.png",
    )
    stars = db.Column(db.Integer, nullable=False)
    location = db.Column(db.Text)
    location2 = db.Column(db.Text)
    lat = db.Column(db.Numeric, nullable=False)
    lon = db.Column(db.Numeric, nullable=False)
    # WKT order is POINT(lon lat).
    geo = db.Column(Geometry(geometry_type="POINT"))
    description = db.Column(db.Text, nullable=True)
    route_type = db.Column(db.Text, nullable=False)

    project = db.relationship("Project", primaryjoin=(Project.route_id == id))
    send = db.relationship(
        "Send",
        primaryjoin=(Send.route_id == id),
    )

    def __repr__(self):
        """Show information about a route."""
        return f'<Route {self.id}, {self.name}>'

    def serialize(self):
        """Return a JSON-serializable dict of this route (lat/lon as strings
        because db.Numeric maps to Decimal)."""
        return {
            'id': self.id,
            'name': self.name,
            'difficulty': self.difficulty,
            'image_url': self.image_url,
            'stars': self.stars,
            'location': self.location,
            'location2': self.location2,
            'lat': str(self.lat),
            'lon': str(self.lon),
            'route_type': self.route_type
        }

    @staticmethod
    def get_routes_within_radius(lat, lon, radius):
        """Return all routes within `radius` miles of the given point.

        Note: the 1609.344 factor converts miles to the meters that
        ST_DistanceSphere works in.
        """
        geo = func.ST_GeomFromText('POINT({} {})'.format(lon, lat))
        return Route.query.filter(
            func.ST_DistanceSphere(Route.geo, geo) < radius * 1609.344).all()

    @staticmethod
    def get_routes_within_radius_count(lat, lon, radius, count):
        """Return up to `count` routes within `radius` miles of the point,
        ordered nearest-first.

        NOTE(review): offset(1) skips the single nearest result — presumably
        to exclude the route at the query point itself; confirm against callers.
        """
        geo = func.ST_GeomFromText('POINT({} {})'.format(lon, lat))
        return Route.query.filter(
            func.ST_DistanceSphere(Route.geo, geo) < radius * 1609.344
        ).order_by(
            func.ST_DistanceSphere(Route.geo, geo)
        ).limit(count).offset(1).all()

    @staticmethod
    def get_routes_within_radius_count_for_feed(lat, lon, radius, count):
        """Return up to `count` routes within `radius` miles of the point,
        ordered nearest-first (no offset — includes the nearest route)."""
        geo = func.ST_GeomFromText('POINT({} {})'.format(lon, lat))
        return Route.query.filter(
            func.ST_DistanceSphere(Route.geo, geo) < radius * 1609.344
        ).order_by(
            func.ST_DistanceSphere(Route.geo, geo)
        ).limit(count).all()

    @staticmethod
    def get_routes_search_all():
        """Return all routes."""
        # Bug fix: the original had only a docstring and implicitly returned None.
        return Route.query.all()

    @staticmethod
    def _grade_index(difficulty, levels, prefix_len):
        """Map a grade label to its index in `levels`.

        Falls back to the first `prefix_len` characters (stripped) when the
        full label is unknown; returns 0 when no match is found.
        """
        if difficulty in levels:
            return levels.index(difficulty)
        trimmed = difficulty[0:prefix_len].strip()
        if trimmed in levels:
            return levels.index(trimmed)
        return 0

    @classmethod
    def add_route(cls, mp_id, name, difficulty, image_url, stars, location,
                  location2, lat, lon, route_type):
        """Add a route to db.

        `difficulty` is a textual grade label; it is converted to an index
        into boulder_levels (V-scale) or sport_levels (YDS) — 0 if unknown.
        """
        geo = 'POINT({} {})'.format(lon, lat)
        new_route = cls(mp_id=mp_id,
                        name=name,
                        image_url=image_url,
                        stars=stars,
                        location=location,
                        location2=location2,
                        lat=lat,
                        lon=lon,
                        route_type=route_type,
                        geo=geo)
        # Boulder grades start with "V"; difficulty[:1] (not difficulty[0])
        # avoids an IndexError on an empty grade string.
        if route_type == 'Boulder' or difficulty[:1] == "V":
            new_route.difficulty = cls._grade_index(difficulty, boulder_levels, 1)
        else:
            new_route.difficulty = cls._grade_index(difficulty, sport_levels, 4)

        db.session.add(new_route)
        db.session.commit()

    @classmethod
    def update_geometries(cls):
        """Using each route lon and lat, add geometry data to db."""
        routes = Route.query.all()
        for route in routes:
            point = 'POINT({} {})'.format(route.lon, route.lat)
            route.geo = point
        db.session.commit()
class Ndvi(db.Model): __tablename__ = 'ndvi_filtre' gid = db.Column(db.Integer, primary_key=True) geom = db.Column(Geometry(geometry_type='MULTIPOLYGON', srid=32631))
def calculate_summary(self, product_name: str, time: Range) -> TimePeriodOverview:
    """
    Create a summary of the given product/time range.

    Builds a per-SRID spatial aggregate of all matching datasets, merges the
    SRID groups into one overall row, then computes per-day and per-region
    dataset counts and returns everything as a TimePeriodOverview.
    """
    log = self.log.bind(product_name=product_name, time=time)
    log.debug("summary.query")

    begin_time, end_time, where_clause = self._where(product_name, time)
    # Datasets can carry footprints in different SRIDs; group by SRID first so
    # ST_Union only ever merges geometries of a single SRID, reprojecting each
    # group's union to the target SRID.
    select_by_srid = select((
        func.ST_SRID(DATASET_SPATIAL.c.footprint).label('srid'),
        func.count().label("dataset_count"),
        func.ST_Transform(
            func.ST_Union(DATASET_SPATIAL.c.footprint),
            self._target_srid(),
            type_=Geometry()
        ).label("footprint_geometry"),
        func.sum(DATASET_SPATIAL.c.size_bytes).label('size_bytes'),
        func.max(DATASET_SPATIAL.c.creation_time).label("newest_dataset_creation_time"),
    )).where(where_clause).group_by('srid').alias('srid_summaries')

    # Union all srid groups into one summary.
    result = self._engine.execute(
        select((
            func.sum(select_by_srid.c.dataset_count).label("dataset_count"),
            func.array_agg(select_by_srid.c.srid).label("srids"),
            func.sum(select_by_srid.c.size_bytes).label('size_bytes'),
            func.ST_Union(
                select_by_srid.c.footprint_geometry,
                type_=Geometry(srid=self._target_srid())
            ).label("footprint_geometry"),
            func.max(select_by_srid.c.newest_dataset_creation_time).label("newest_dataset_creation_time"),
            func.now().label("summary_gen_time"),
        ))
    )

    rows = result.fetchall()
    log.debug("summary.query.done", srid_rows=len(rows))

    # The outer select has no GROUP BY, so exactly one row is expected.
    assert len(rows) == 1
    row = dict(rows[0])
    # SUM over zero rows yields NULL; normalise to 0.
    row['dataset_count'] = int(row['dataset_count']) if row['dataset_count'] else 0
    if row['footprint_geometry'] is not None:
        row['footprint_crs'] = self._get_srid_name(row['footprint_geometry'].srid)
        # Convert the WKB element to a shapely geometry.
        row['footprint_geometry'] = geo_shape.to_shape(row['footprint_geometry'])
    else:
        row['footprint_crs'] = None
    # Replace the raw srid array with a set of human-readable CRS names.
    row['crses'] = None
    if row['srids'] is not None:
        row['crses'] = {self._get_srid_name(s) for s in row['srids']}
    del row['srids']

    # Convert from Python Decimal
    if row['size_bytes'] is not None:
        row['size_bytes'] = int(row['size_bytes'])

    has_data = row['dataset_count'] > 0

    log.debug("counter.calc")

    # Initialise all requested days as zero, so days with no datasets still
    # appear in the timeline; closed='left' excludes the end day itself.
    day_counts = Counter({
        d.date(): 0 for d in pd.date_range(begin_time, end_time, closed='left')
    })
    region_counts = Counter()
    if has_data:
        # Overlay actual counts (grouped by day in the configured time zone)
        # on top of the zero-initialised days.
        day_counts.update(
            Counter({
                day.date(): count
                for day, count in self._engine.execute(
                    select([
                        func.date_trunc(
                            'day',
                            DATASET_SPATIAL.c.center_time.op('AT TIME ZONE')(self.grouping_time_zone)
                        ).label('day'),
                        func.count()
                    ]).where(where_clause).group_by('day')
                )
            })
        )
        region_counts = Counter(
            {
                item: count
                for item, count in self._engine.execute(
                    select([
                        DATASET_SPATIAL.c.region_code.label('region_code'),
                        func.count()
                    ]).where(where_clause).group_by('region_code')
                )
            }
        )

    summary = TimePeriodOverview(
        **row,
        timeline_period='day',
        time_range=Range(begin_time, end_time),
        timeline_dataset_counts=day_counts,
        region_dataset_counts=region_counts,
        # TODO: filter invalid from the counts?
        footprint_count=row['dataset_count'] or 0,
    )

    log.debug(
        "summary.calc.done",
        dataset_count=summary.dataset_count,
        footprints_missing=summary.dataset_count - summary.footprint_count
    )
    return summary
class User(UserMixin, db.Model):
    """User - UserMixin gives access to is_active, is_authenticated,
    is_anonymous, and get_id."""

    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    username = db.Column(db.Text, nullable=False, unique=True)
    password = db.Column(db.Text,
                         #nullable=False
                         )
    email = db.Column(
        db.Text,
        nullable=False,
        unique=True,
    )
    first_name = db.Column(db.Text, nullable=True)
    last_name = db.Column(db.Text, nullable=True)
    image_url = db.Column(
        db.Text,
        default="/static/images/default-pic.png",
    )
    b_skill_level = db.Column(
        db.Text,
        nullable=True,
    )
    tr_skill_level = db.Column(db.Text, nullable=True)
    ld_skill_level = db.Column(db.Text, nullable=True)
    #How will I process User location ?
    location = db.Column(db.Text, nullable=True)
    geo = db.Column(Geometry(geometry_type="POINT"))
    lat = db.Column(db.Numeric, nullable=True)
    lon = db.Column(db.Numeric, nullable=True)
    #About me section - but labelled goals for climbing
    goals = db.Column(db.Text, nullable=True)

    followers = db.relationship(
        "User",
        secondary="follows",
        primaryjoin=(Follows.user_being_followed_id == id),
        secondaryjoin=(Follows.user_following_id == id))

    following = db.relationship(
        "User",
        secondary="follows",
        primaryjoin=(Follows.user_following_id == id),
        secondaryjoin=(Follows.user_being_followed_id == id))

    #Email confirmation
    confirmed = db.Column(db.Boolean, nullable=False, default=False)
    confirmed_on = db.Column(db.DateTime, nullable=True)

    def __repr__(self):
        """Show info about a user."""
        return f"<User {self.id}, {self.username}, {self.email}>"

    def serialize(self):
        """Return a JSON-serializable dict of this user (lat/lon as strings
        because db.Numeric maps to Decimal)."""
        return {
            'id': self.id,
            'username': self.username,
            'email': self.email,
            'first_name': self.first_name,
            'last_name': self.last_name,
            'image_url': self.image_url,
            'b_skill_level': self.b_skill_level,
            'tr_skill_level': self.tr_skill_level,
            'ld_skill_level': self.ld_skill_level,
            'location': self.location,
            'lat': str(self.lat),
            'lon': str(self.lon),
            'goals': self.goals
        }

    #Is methods for checking information about a user

    def is_followed_by(self, other_user):
        """Is this user followed by `other_user`?"""
        found_user_list = [
            user for user in self.followers if user == other_user
        ]
        return len(found_user_list) == 1

    def is_following(self, other_user):
        """Is this user following `other_user`?"""
        found_user_list = [
            user for user in self.following if user == other_user
        ]
        return len(found_user_list) == 1

    #Class Methods for Authenticating a User

    @classmethod
    def signup(cls, username, email, password):
        """Sign up user. Hashes password and adds user to session.

        Returns the new (uncommitted) User, or False when the username or
        email contains profanity. Caller is responsible for committing.
        """
        # Reject profane input before doing the (expensive) bcrypt hash.
        if profanity.contains_profanity(
                username) or profanity.contains_profanity(email):
            return False

        hashed_pwd = bcrypt.generate_password_hash(password).decode('UTF-8')
        user = User(username=username, email=email, password=hashed_pwd)
        db.session.add(user)
        return user

    @classmethod
    def authenticate(cls, email, password):
        """Find user with `email` and `password`.

        This is a class method (call it on the class, not an individual user.)
        It searches for a user whose password hash matches this password
        and, if it finds such a user, returns that user object.

        If can't find matching user (or if password is wrong), returns False.
        """
        user = cls.query.filter_by(email=email).first()
        if user:
            is_auth = bcrypt.check_password_hash(user.password, password)
            if is_auth:
                return user
        return False

    @classmethod
    def change_password(cls, email, password):
        """Already found user will do one more check for email and change password"""
        user = cls.query.filter_by(email=email).first()
        if user:
            hashed_pwd = bcrypt.generate_password_hash(password).decode(
                'UTF-8')
            user.password = hashed_pwd
            # Bug fix: original used undefined `sess`; every sibling method
            # uses db.session.
            db.session.add(user)
            db.session.commit()
        return user
from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import select from sqlalchemy import Table from geoalchemy2 import Geometry from geoalchemy2.shape import to_shape engine = create_engine('postgresql://*****:*****@localhost/gis', echo=True) metadata = MetaData(engine) table = Table( "inserts", metadata, Column("id", Integer, primary_key=True), Column("geom", Geometry("LINESTRING", 4326)), Column("distance", Float), ) class TestLengthAtInsert(): def setup(self): self.conn = engine.connect() metadata.drop_all(checkfirst=True) metadata.create_all() def teardown(self): self.conn.close() metadata.drop_all() def test_query(self):
def add_geometry_column(cls): if not hasattr(cls, 'geom'): cls.geom = deferred( Column(Geometry(geometry_type='LINESTRING', srid=config.SRID)))
class Task(db.Model):
    """ Describes an individual mapping Task """

    __tablename__ = "tasks"

    # Table has composite PK on (id and project_id)
    id = db.Column(db.Integer, primary_key=True)
    project_id = db.Column(
        db.Integer, db.ForeignKey("projects.id"), index=True, primary_key=True
    )
    # Tile coordinates of the task when generated from a grid.
    x = db.Column(db.Integer)
    y = db.Column(db.Integer)
    zoom = db.Column(db.Integer)
    # JSON-encoded string of arbitrary extra feature properties (see from_geojson_feature).
    extra_properties = db.Column(db.Unicode)
    # Tasks need to be split differently if created from an arbitrary grid or were clipped to the edge of the AOI
    is_square = db.Column(db.Boolean, default=True)
    geometry = db.Column(Geometry("MULTIPOLYGON", srid=4326))
    task_status = db.Column(db.Integer, default=TaskStatus.READY.value)
    locked_by = db.Column(
        db.BigInteger, db.ForeignKey("users.id", name="fk_users_locked"), index=True
    )
    mapped_by = db.Column(
        db.BigInteger, db.ForeignKey("users.id", name="fk_users_mapper"), index=True
    )
    validated_by = db.Column(
        db.BigInteger, db.ForeignKey("users.id", name="fk_users_validator"), index=True
    )

    # Mapped objects
    task_history = db.relationship(
        TaskHistory, cascade="all", order_by=desc(TaskHistory.id)
    )
    task_annotations = db.relationship(TaskAnnotation, cascade="all")
    lock_holder = db.relationship(User, foreign_keys=[locked_by])
    mapper = db.relationship(User, foreign_keys=[mapped_by])

    def create(self):
        """ Creates and saves the current model to the DB """
        db.session.add(self)
        db.session.commit()

    def update(self):
        """ Updates the DB with the current state of the Task """
        db.session.commit()

    def delete(self):
        """ Deletes the current model from the DB """
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def from_geojson_feature(cls, task_id, task_feature):
        """
        Constructs and validates a task from a GeoJson feature object
        :param task_id: Unique ID for the task
        :param task_feature: A geoJSON feature object
        :raises InvalidGeoJson, InvalidData
        """
        if type(task_feature) is not geojson.Feature:
            raise InvalidGeoJson("Task: Invalid GeoJson should be a feature")

        task_geometry = task_feature.geometry

        if type(task_geometry) is not geojson.MultiPolygon:
            raise InvalidGeoJson("Task: Geometry must be a MultiPolygon")

        is_valid_geojson = geojson.is_valid(task_geometry)
        if is_valid_geojson["valid"] == "no":
            raise InvalidGeoJson(
                f"Task: Invalid MultiPolygon - {is_valid_geojson['message']}"
            )

        task = cls()
        try:
            task.x = task_feature.properties["x"]
            task.y = task_feature.properties["y"]
            task.zoom = task_feature.properties["zoom"]
            task.is_square = task_feature.properties["isSquare"]
        except KeyError as e:
            raise InvalidData(f"Task: Expected property not found: {str(e)}")

        # Optional free-form properties are persisted as a JSON string.
        if "extra_properties" in task_feature.properties:
            task.extra_properties = json.dumps(
                task_feature.properties["extra_properties"]
            )

        task.id = task_id
        task_geojson = geojson.dumps(task_geometry)
        task.geometry = ST_SetSRID(ST_GeomFromGeoJSON(task_geojson), 4326)

        return task

    @staticmethod
    def get(task_id: int, project_id: int):
        """
        Gets specified task
        :param task_id: task ID in scope
        :param project_id: project ID in scope
        :return: Task if found otherwise None
        """
        # LIKELY PROBLEM AREA
        return Task.query.filter_by(id=task_id, project_id=project_id).one_or_none()

    @staticmethod
    def get_tasks(project_id: int, task_ids: List[int]):
        """ Get all tasks that match supplied list """
        return Task.query.filter(
            Task.project_id == project_id, Task.id.in_(task_ids)
        ).all()

    @staticmethod
    def get_all_tasks(project_id: int):
        """ Get all tasks for a given project """
        return Task.query.filter(Task.project_id == project_id).all()

    @staticmethod
    def auto_unlock_delta():
        """Return the configured auto-unlock interval as a timedelta."""
        return parse_duration(current_app.config["TASK_AUTOUNLOCK_AFTER"])

    @staticmethod
    def auto_unlock_tasks(project_id: int):
        """Unlock all tasks locked for longer than the auto-unlock delta"""
        expiry_delta = Task.auto_unlock_delta()
        # Lock duration rendered as an ISO time string (delta added to datetime.min).
        lock_duration = (datetime.datetime.min + expiry_delta).time().isoformat()
        expiry_date = datetime.datetime.utcnow() - expiry_delta
        # NOTE(review): statuses [1, 3] presumably correspond to the two
        # LOCKED_FOR_* TaskStatus values — confirm against the enum.
        old_tasks = (
            db.session.query(Task.id)
            .filter(Task.id == TaskHistory.task_id)
            .filter(Task.project_id == TaskHistory.project_id)
            .filter(Task.task_status.in_([1, 3]))
            .filter(
                TaskHistory.action.in_(["LOCKED_FOR_VALIDATION", "LOCKED_FOR_MAPPING"])
            )
            .filter(TaskHistory.action_text.is_(None))
            .filter(Task.project_id == project_id)
            .filter(TaskHistory.action_date <= str(expiry_date))
        )

        if old_tasks.count() == 0:
            # no tasks older than the delta found, return without further processing
            return

        for old_task in old_tasks:
            task = Task.get(old_task[0], project_id)
            task.auto_unlock_expired_tasks(expiry_date, lock_duration)

    def auto_unlock_expired_tasks(self, expiry_date, lock_duration):
        """Unlock all tasks locked before expiry date. Clears task lock if needed"""
        TaskHistory.update_expired_and_locked_actions(
            self.project_id, self.id, expiry_date, lock_duration
        )

        last_action = TaskHistory.get_last_locked_or_auto_unlocked_action(
            self.project_id, self.id
        )
        # Only clear the lock if the update above actually auto-unlocked this task.
        if last_action.action in [
            "AUTO_UNLOCKED_FOR_MAPPING",
            "AUTO_UNLOCKED_FOR_VALIDATION",
        ]:
            self.clear_lock()

    def is_mappable(self):
        """ Determines if task in scope is in suitable state for mapping """
        if TaskStatus(self.task_status) not in [
            TaskStatus.READY,
            TaskStatus.INVALIDATED,
        ]:
            return False

        return True

    def set_task_history(
        self, action, user_id, comment=None, new_state=None, mapping_issues=None
    ):
        """
        Sets the task history for the action that the user has just performed
        :param task: Task in scope
        :param user_id: ID of user performing the action
        :param action: Action the user has performed
        :param comment: Comment user has added
        :param new_state: New state of the task
        :param mapping_issues: Identified issues leading to invalidation
        """
        history = TaskHistory(self.id, self.project_id, user_id)

        if action in [TaskAction.LOCKED_FOR_MAPPING, TaskAction.LOCKED_FOR_VALIDATION]:
            history.set_task_locked_action(action)
        elif action == TaskAction.COMMENT:
            history.set_comment_action(comment)
        elif action == TaskAction.STATE_CHANGE:
            history.set_state_change_action(new_state)
        elif action in [
            TaskAction.AUTO_UNLOCKED_FOR_MAPPING,
            TaskAction.AUTO_UNLOCKED_FOR_VALIDATION,
        ]:
            history.set_auto_unlock_action(action)

        if mapping_issues is not None:
            history.task_mapping_issues = mapping_issues

        self.task_history.append(history)
        return history

    def lock_task_for_mapping(self, user_id: int):
        """Record the lock in history and mark this task locked for mapping."""
        self.set_task_history(TaskAction.LOCKED_FOR_MAPPING, user_id)
        self.task_status = TaskStatus.LOCKED_FOR_MAPPING.value
        self.locked_by = user_id
        self.update()

    def lock_task_for_validating(self, user_id: int):
        """Record the lock in history and mark this task locked for validation."""
        self.set_task_history(TaskAction.LOCKED_FOR_VALIDATION, user_id)
        self.task_status = TaskStatus.LOCKED_FOR_VALIDATION.value
        self.locked_by = user_id
        self.update()

    def reset_task(self, user_id: int):
        """Return the task to READY, recording an auto-unlock first if it was locked."""
        expiry_delta = Task.auto_unlock_delta()
        lock_duration = (datetime.datetime.min + expiry_delta).time().isoformat()
        if TaskStatus(self.task_status) in [
            TaskStatus.LOCKED_FOR_MAPPING,
            TaskStatus.LOCKED_FOR_VALIDATION,
        ]:
            self.record_auto_unlock(lock_duration)

        self.set_task_history(TaskAction.STATE_CHANGE, user_id, None, TaskStatus.READY)
        self.mapped_by = None
        self.validated_by = None
        self.locked_by = None
        self.task_status = TaskStatus.READY.value
        self.update()

    def clear_task_lock(self):
        """
        Unlocks task in scope in the database.  Clears the lock as though it never happened.
        No history of the unlock is recorded.
        :return:
        """
        # clear the lock action for the task in the task history
        last_action = TaskHistory.get_last_locked_action(self.project_id, self.id)
        last_action.delete()

        # Set locked_by to null and status to last status on task
        self.clear_lock()

    def record_auto_unlock(self, lock_duration):
        """Replace the current lock with an AUTO_UNLOCKED_* history entry."""
        locked_user = self.locked_by
        last_action = TaskHistory.get_last_locked_action(self.project_id, self.id)
        next_action = (
            TaskAction.AUTO_UNLOCKED_FOR_MAPPING
            if last_action.action == "LOCKED_FOR_MAPPING"
            else TaskAction.AUTO_UNLOCKED_FOR_VALIDATION
        )

        # Remove the lock entry itself, then record the auto-unlock in its place.
        self.clear_task_lock()

        # Add AUTO_UNLOCKED action in the task history
        auto_unlocked = self.set_task_history(action=next_action, user_id=locked_user)
        auto_unlocked.action_text = lock_duration
        self.update()

    def unlock_task(
        self, user_id, new_state=None, comment=None, undo=False, issues=None
    ):
        """ Unlock task and ensure duration task locked is saved in History """
        if comment:
            self.set_task_history(
                action=TaskAction.COMMENT,
                comment=comment,
                user_id=user_id,
                mapping_issues=issues,
            )

        history = self.set_task_history(
            action=TaskAction.STATE_CHANGE,
            new_state=new_state,
            user_id=user_id,
            mapping_issues=issues,
        )

        if (
            new_state in [TaskStatus.MAPPED, TaskStatus.BADIMAGERY]
            and TaskStatus(self.task_status) != TaskStatus.LOCKED_FOR_VALIDATION
        ):
            # Don't set mapped if state being set back to mapped after validation
            self.mapped_by = user_id
        elif new_state == TaskStatus.VALIDATED:
            TaskInvalidationHistory.record_validation(
                self.project_id, self.id, user_id, history
            )
            self.validated_by = user_id
        elif new_state == TaskStatus.INVALIDATED:
            TaskInvalidationHistory.record_invalidation(
                self.project_id, self.id, user_id, history
            )
            self.mapped_by = None
            self.validated_by = None

        if not undo:
            # Using a slightly evil side effect of Actions and Statuses having the same name here :)
            TaskHistory.update_task_locked_with_duration(
                self.id, self.project_id, TaskStatus(self.task_status), user_id
            )

        self.task_status = new_state.value
        self.locked_by = None
        self.update()

    def reset_lock(self, user_id, comment=None):
        """ Removes a current lock from a task, resets to last status and
        updates history with duration of lock """
        if comment:
            self.set_task_history(
                action=TaskAction.COMMENT, comment=comment, user_id=user_id
            )

        # Using a slightly evil side effect of Actions and Statuses having the same name here :)
        TaskHistory.update_task_locked_with_duration(
            self.id, self.project_id, TaskStatus(self.task_status), user_id
        )
        self.clear_lock()

    def clear_lock(self):
        """ Resets to last status and removes current lock from a task """
        self.task_status = TaskHistory.get_last_status(self.project_id, self.id).value
        self.locked_by = None
        self.update()

    @staticmethod
    def get_tasks_as_geojson_feature_collection(
        project_id,
        task_ids_str: str = None,
        order_by: str = None,
        order_by_type: str = "ASC",
        status: int = None,
    ):
        """
        Creates a geoJson.FeatureCollection object for tasks related to the supplied project ID
        :param project_id: Owning project ID
        :order_by: sorting option: available values update_date and building_area_diff
        :status: task status id to filter by
        :return: geojson.FeatureCollection
        """
        # subquery = (
        #     db.session.query(func.max(TaskHistory.action_date))
        #     .filter(
        #         Task.id == TaskHistory.task_id,
        #         Task.project_id == TaskHistory.project_id,
        #     )
        #     .correlate(Task)
        #     .group_by(Task.id)
        #     .label("update_date")
        # )
        query = db.session.query(
            Task.id,
            Task.x,
            Task.y,
            Task.zoom,
            Task.is_square,
            Task.task_status,
            Task.geometry.ST_AsGeoJSON().label("geojson"),
            Task.locked_by,
            # subquery,
        )
        filters = [Task.project_id == project_id]

        if task_ids_str:
            task_ids = map(int, task_ids_str.split(","))
            tasks = Task.get_tasks(project_id, task_ids)
            if not tasks or len(tasks) == 0:
                raise NotFound()
            else:
                tasks_filters = [task.id for task in tasks]
            filters = [Task.project_id == project_id, Task.id.in_(tasks_filters)]
        else:
            tasks = Task.get_all_tasks(project_id)
            if not tasks or len(tasks) == 0:
                raise NotFound()

        # NOTE(review): a falsy status value (e.g. 0) cannot be filtered here;
        # `status is not None` may be the intent — confirm.
        if status:
            filters.append(Task.task_status == status)

        if order_by == "effort_prediction":
            # building_area_diff is stored in JSON; cast via Text to Float for sorting.
            query = query.outerjoin(TaskAnnotation).filter(*filters)
            if order_by_type == "DESC":
                query = query.order_by(
                    desc(
                        cast(
                            cast(TaskAnnotation.properties["building_area_diff"], Text),
                            Float,
                        )
                    )
                )
            else:
                query = query.order_by(
                    cast(
                        cast(TaskAnnotation.properties["building_area_diff"], Text),
                        Float,
                    )
                )
        # elif order_by == "last_updated":
        #     if order_by_type == "DESC":
        #         query = query.filter(*filters).order_by(desc("update_date"))
        #     else:
        #         query = query.filter(*filters).order_by("update_date")
        else:
            query = query.filter(*filters)
        project_tasks = query.all()

        tasks_features = []
        for task in project_tasks:
            task_geometry = geojson.loads(task.geojson)
            task_properties = dict(
                taskId=task.id,
                taskX=task.x,
                taskY=task.y,
                taskZoom=task.zoom,
                taskIsSquare=task.is_square,
                taskStatus=TaskStatus(task.task_status).name,
                lockedBy=task.locked_by,
            )

            feature = geojson.Feature(
                geometry=task_geometry, properties=task_properties
            )
            tasks_features.append(feature)

        return geojson.FeatureCollection(tasks_features)

    @staticmethod
    def get_tasks_as_geojson_feature_collection_no_geom(project_id):
        """
        Creates a geoJson.FeatureCollection object for all tasks related to the supplied project ID without geometry
        :param project_id: Owning project ID
        :return: geojson.FeatureCollection
        """
        project_tasks = (
            db.session.query(
                Task.id, Task.x, Task.y, Task.zoom, Task.is_square, Task.task_status
            )
            .filter(Task.project_id == project_id)
            .all()
        )

        tasks_features = []
        for task in project_tasks:
            task_properties = dict(
                taskId=task.id,
                taskX=task.x,
                taskY=task.y,
                taskZoom=task.zoom,
                taskIsSquare=task.is_square,
                taskStatus=TaskStatus(task.task_status).name,
            )

            feature = geojson.Feature(properties=task_properties)
            tasks_features.append(feature)

        return geojson.FeatureCollection(tasks_features)

    @staticmethod
    def get_mapped_tasks_by_user(project_id: int):
        """ Gets all mapped tasks for supplied project grouped by user"""
        # NOTE(review): task_status == 2 presumably is TaskStatus.MAPPED — confirm.
        results = (
            db.session.query(
                User.username,
                User.mapping_level,
                func.count(distinct(Task.id)),
                func.json_agg(distinct(Task.id)),
                func.max(TaskHistory.action_date),
                User.date_registered,
                User.last_validation_date,
            )
            .filter(Task.project_id == TaskHistory.project_id)
            .filter(Task.id == TaskHistory.task_id)
            .filter(Task.mapped_by == User.id)
            .filter(Task.project_id == project_id)
            .filter(Task.task_status == 2)
            .filter(TaskHistory.action_text == "MAPPED")
            .group_by(
                User.username,
                User.mapping_level,
                User.date_registered,
                User.last_validation_date,
            )
        )

        mapped_tasks_dto = MappedTasks()
        for row in results:
            user_mapped = MappedTasksByUser()
            user_mapped.username = row[0]
            user_mapped.mapping_level = MappingLevel(row[1]).name
            user_mapped.mapped_task_count = row[2]
            user_mapped.tasks_mapped = row[3]
            user_mapped.last_seen = row[4]
            user_mapped.date_registered = row[5]
            user_mapped.last_validation_date = row[6]

            mapped_tasks_dto.mapped_tasks.append(user_mapped)

        return mapped_tasks_dto

    @staticmethod
    def get_max_task_id_for_project(project_id: int):
        """Gets the highest task id currently in use on a project"""
        result = (
            db.session.query(func.max(Task.id))
            .filter(Task.project_id == project_id)
            .group_by(Task.project_id)
        )
        if result.count() == 0:
            raise NotFound()
        for row in result:
            return row[0]

    def as_dto(
        self,
        task_history: List[TaskHistoryDTO] = [],
        # NOTE(review): mutable default argument — benign here since it is only
        # read, but a None default would be safer.
        last_updated: datetime.datetime = None,
    ):
        """Just converts to a TaskDTO"""
        task_dto = TaskDTO()
        task_dto.task_id = self.id
        task_dto.project_id = self.project_id
        task_dto.task_status = TaskStatus(self.task_status).name
        task_dto.lock_holder = self.lock_holder.username if self.lock_holder else None
        task_dto.task_history = task_history
        task_dto.last_updated = last_updated if last_updated else None
        task_dto.auto_unlock_seconds = Task.auto_unlock_delta().total_seconds()
        return task_dto

    def as_dto_with_instructions(self, preferred_locale: str = "en") -> TaskDTO:
        """Get dto with any task instructions"""
        task_history = []
        for action in self.task_history:
            history = TaskHistoryDTO()
            history.history_id = action.id
            history.action = action.action
            history.action_text = action.action_text
            history.action_date = action.action_date
            history.action_by = (
                action.actioned_by.username if action.actioned_by else None
            )
            history.picture_url = (
                action.actioned_by.picture_url if action.actioned_by else None
            )
            if action.task_mapping_issues:
                history.issues = [
                    issue.as_dto() for issue in action.task_mapping_issues
                ]

            task_history.append(history)

        # task_history relationship is ordered by descending id, so the first
        # entry is the most recent action.
        last_updated = None
        if len(task_history) > 0:
            last_updated = task_history[0].action_date

        task_dto = self.as_dto(task_history, last_updated=last_updated)
        per_task_instructions = self.get_per_task_instructions(preferred_locale)

        # If we don't have instructions in preferred locale try again for default locale
        task_dto.per_task_instructions = (
            per_task_instructions
            if per_task_instructions
            else self.get_per_task_instructions(self.projects.default_locale)
        )

        annotations = self.get_per_task_annotations()
        task_dto.task_annotations = annotations if annotations else []

        return task_dto

    def get_per_task_annotations(self):
        """Return DTOs for all annotations attached to this task."""
        result = [ta.get_dto() for ta in self.task_annotations]
        return result

    def get_per_task_instructions(self, search_locale: str) -> str:
        """ Gets any per task instructions attached to the project """
        project_info = self.projects.project_info.all()

        for info in project_info:
            if info.locale == search_locale:
                return self.format_per_task_instructions(info.per_task_instructions)

    def format_per_task_instructions(self, instructions) -> str:
        """ Format instructions by looking for X, Y, Z tokens and replacing them with the task values """
        if not instructions:
            return ""  # No instructions so return empty string

        properties = {}

        if self.x:
            properties["x"] = str(self.x)
        if self.y:
            properties["y"] = str(self.y)
        if self.zoom:
            properties["z"] = str(self.zoom)
        if self.extra_properties:
            properties.update(json.loads(self.extra_properties))

        try:
            instructions = instructions.format(**properties)
        except KeyError:
            # Leave instructions untouched when they reference unknown tokens.
            pass
        return instructions

    def copy_task_history(self) -> list:
        """Detach and duplicate this task's history entries (ids cleared) into
        the session, returning the transient copies."""
        copies = []
        for entry in self.task_history:
            db.session.expunge(entry)
            make_transient(entry)
            entry.id = None
            entry.task_id = None
            db.session.add(entry)
            copies.append(entry)

        return copies

    # NOTE(review): the two methods below take user_id as their first argument
    # with no @staticmethod decorator — they work when called on the class in
    # Python 3 but fail on an instance; confirm intent.
    def get_locked_tasks_for_user(user_id: int):
        """ Gets tasks on project owned by specified user id"""
        tasks = Task.query.filter_by(locked_by=user_id)
        tasks_dto = LockedTasksForUser()
        for task in tasks:
            tasks_dto.locked_tasks.append(task.id)
            tasks_dto.project = task.project_id
            tasks_dto.task_status = TaskStatus(task.task_status).name

        return tasks_dto

    def get_locked_tasks_details_for_user(user_id: int):
        """ Gets tasks on project owned by specified user id"""
        tasks = Task.query.filter_by(locked_by=user_id)
        locked_tasks = [task for task in tasks]

        return locked_tasks
class AdministrativeDivision(Base): __tablename__ = 'administrativedivision' id = Column(Integer, primary_key=True) code = Column(Integer, index=True, unique=True, nullable=False) leveltype_id = Column(Integer, ForeignKey(AdminLevelType.id), nullable=False, index=True) name = Column(Unicode, nullable=False) name_fr = Column(Unicode) name_es = Column(Unicode) parent_code = Column( Integer, ForeignKey('administrativedivision.code', use_alter=True, name='administrativedivision_parent_code_fkey')) geom = deferred(Column(Geometry('MULTIPOLYGON', 4326))) leveltype = relationship(AdminLevelType) parent = relationship('AdministrativeDivision', uselist=False, lazy='joined', join_depth=2, remote_side=code) hazardcategories = relationship( 'HazardCategoryAdministrativeDivisionAssociation', back_populates='administrativedivision') def __json__(self, request): lang = request.locale_name attr = 'name' if lang == 'en' else 'name_' + lang if self.leveltype_id == 1: return { 'code': self.code, 'admin0': getattr(self, attr), 'url': request.route_url('report_overview', division=self) } if self.leveltype_id == 2: return { 'code': self.code, 'admin0': getattr(self.parent, attr), 'admin1': self.name, 'url': request.route_url('report_overview', division=self) } if self.leveltype_id == 3: return { 'code': self.code, 'admin0': getattr(self.parent.parent, attr), 'admin1': self.parent.name, 'admin2': self.name, 'url': request.route_url('report_overview', division=self) } def slug(self): tokens = [self.name] parent = self.parent while parent: tokens.append(parent.name) parent = parent.parent tokens.reverse() return slugify('-'.join(tokens)) def translated_name(self, lang): attr = 'name' if lang == 'en' or self.leveltype.mnemonic != 'COU' \ else 'name_' + lang return getattr(self, attr)
class TStationsOcchab(ModelCruvedAutorization):
    """ORM model for an OccHab station (pr_occhab.t_stations)."""
    __tablename__ = "t_stations"
    __table_args__ = {"schema": "pr_occhab"}

    id_station = DB.Column(DB.Integer, primary_key=True)
    # SINP UUID generated server-side via uuid_generate_v4()
    unique_id_sinp_station = DB.Column(UUID(as_uuid=True), default=select(
        [func.uuid_generate_v4()]))
    id_dataset = DB.Column(DB.Integer,
                           ForeignKey('gn_meta.t_datasets.id_dataset'))
    date_min = DB.Column(DB.DateTime)
    date_max = DB.Column(DB.DateTime)
    observers_txt = DB.Column(DB.Unicode)
    station_name = DB.Column(DB.Unicode)
    is_habitat_complex = DB.Column(DB.Boolean)
    id_nomenclature_exposure = DB.Column(
        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
    altitude_min = DB.Column(DB.Integer)
    altitude_max = DB.Column(DB.Integer)
    depth_min = DB.Column(DB.Integer)
    depth_max = DB.Column(DB.Integer)
    area = DB.Column(DB.Float)
    id_nomenclature_area_surface_calculation = DB.Column(
        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
    id_nomenclature_geographic_object = DB.Column(
        DB.Integer, ForeignKey(TNomenclatures.id_nomenclature))
    comment = DB.Column(DB.Unicode)
    id_digitiser = DB.Column(DB.Integer)
    # NOTE(review): SRID 4626 looks like a typo for 4326 given the column
    # name geom_4326 — confirm against the actual DB schema before fixing.
    geom_4326 = DB.Column(Geometry("GEOMETRY", 4626))

    t_habitats = relationship("THabitatsOcchab", lazy="joined",
                              cascade="all, delete-orphan")
    dataset = relationship("TDatasets", lazy="joined")
    # many-to-many to users through the cor_station_observer table
    observers = DB.relationship(
        User,
        lazy="joined",
        secondary=CorStationObserverOccHab.__table__,
        primaryjoin=(CorStationObserverOccHab.id_station == id_station),
        secondaryjoin=(CorStationObserverOccHab.id_role == User.id_role),
        foreign_keys=[
            CorStationObserverOccHab.id_station,
            CorStationObserverOccHab.id_role,
        ],
    )

    # overright the constructor
    # to inherit of ReleModel, the constructor must define some mandatory attribute
    def __init__(self, *args, **kwargs):
        super(TStationsOcchab, self).__init__(*args, **kwargs)
        # aliases expected by the parent authorization model
        self.observer_rel = getattr(self, 'observers')
        self.dataset_rel = getattr(self, 'dataset')
        self.id_digitiser_col = getattr(self, 'id_digitiser')
        self.id_dataset_col = getattr(self, 'id_dataset')

    def get_geofeature(self):
        """Serialize the station as a GeoJSON feature, embedding its
        observers, habitats (with habref) and dataset."""
        return self.as_geofeature("geom_4326", "id_station", fields=[
            'observers',
            't_habitats',
            't_habitats.habref',
            'dataset',
        ])
def setup_method(self, _):
    """Per-test fixture: wipe auth/tree fixtures, then build two roles
    (one with a restricted extent), a private WMS layer backed by a fresh
    geodata.a_geo_table, and the restriction areas tying them together.
    """
    # Always see the diff
    # https://docs.python.org/2/library/unittest.html#unittest.TestCase.maxDiff
    self.maxDiff = None
    self._tables = []
    functionality.FUNCTIONALITIES_TYPES = None
    # local imports: these models must be imported after app config is set up
    from c2cgeoportal.models import DBSession, User, Role, \
        RestrictionArea, TreeItem, Theme, LayerGroup, Interface, LayerWMS
    from sqlalchemy import Column, Table, types
    from sqlalchemy.ext.declarative import declarative_base
    from geoalchemy2 import Geometry

    # start from a clean slate: remove leftovers from previous tests
    for o in DBSession.query(RestrictionArea).all():
        DBSession.delete(o)
    for o in DBSession.query(Role).all():
        DBSession.delete(o)
    for o in DBSession.query(User).all():
        DBSession.delete(o)
    for o in DBSession.query(TreeItem).all():
        DBSession.delete(o)
    # ogcserver_external is unused here but created for its DB side effect
    ogcserver, ogcserver_external = create_default_ogcserver()

    role1 = Role(name="__test_role1")
    role1.id = 999  # fixed id so tests can reference it deterministically
    user1 = User(username="******", password="******", role=role1)
    user1.email = "*****@*****.**"
    # role2 carries a geographic extent restriction (EPSG:21781)
    role2 = Role(name="__test_role2", extent=WKTElement(
        "POLYGON((1 2, 1 4, 3 4, 3 2, 1 2))", srid=21781
    ))
    user2 = User(username="******", password="******", role=role2)

    main = Interface(name="main")

    engine = DBSession.c2c_rw_bind
    engine.connect()
    # editable geo table the private layer points at
    a_geo_table = Table(
        "a_geo_table", declarative_base(bind=engine).metadata,
        Column("id", types.Integer, primary_key=True),
        Column("geom", Geometry("POINT", srid=21781)),
        schema="geodata"
    )
    self._tables = [a_geo_table]
    a_geo_table.drop(checkfirst=True)  # recreate from scratch each run
    a_geo_table.create()

    private_layer = LayerWMS(name="__test_private_layer", public=False)
    private_layer.layer = "__test_private_layer"
    private_layer.geo_table = "geodata.a_geo_table"
    private_layer.interfaces = [main]
    private_layer.ogc_server = ogcserver

    group = LayerGroup(name="__test_layer_group")
    group.children = [private_layer]
    theme = Theme(name="__test_theme")
    theme.children = [group]
    theme.interfaces = [main]

    # role1: read-only access; role2: read-write access to the same layer
    DBSession.add(RestrictionArea(
        name="__test_ra1",
        description="",
        layers=[private_layer],
        roles=[role1],
    ))
    DBSession.add(RestrictionArea(
        name="__test_ra2",
        description="",
        layers=[private_layer],
        roles=[role2],
        readwrite=True,
    ))
    DBSession.add_all([
        user1, user2, role1, role2, theme, group, private_layer,
    ])
    transaction.commit()
class Project(db.Model):
    """ Describes a HOT Mapping Project """
    __tablename__ = "projects"

    # Columns
    id = db.Column(db.Integer, primary_key=True)
    status = db.Column(db.Integer, default=ProjectStatus.DRAFT.value,
                       nullable=False)
    created = db.Column(db.DateTime, default=timestamp, nullable=False)
    priority = db.Column(db.Integer, default=ProjectPriority.MEDIUM.value)
    default_locale = db.Column(
        db.String(10), default="en"
    )  # The locale that is returned if requested locale not available
    author_id = db.Column(db.BigInteger,
                          db.ForeignKey("users.id", name="fk_users"),
                          nullable=False)
    mapper_level = db.Column(
        db.Integer, default=2, nullable=False,
        index=True)  # Mapper level project is suitable for
    mapping_permission = db.Column(db.Integer,
                                   default=MappingPermission.ANY.value)
    validation_permission = db.Column(
        db.Integer, default=ValidationPermission.ANY.value
    )  # Means only users with validator role can validate
    enforce_random_task_selection = db.Column(
        db.Boolean, default=False
    )  # Force users to edit at random to avoid mapping "easy" tasks
    private = db.Column(db.Boolean,
                        default=False)  # Only allowed users can validate
    featured = db.Column(
        db.Boolean, default=False)  # Only PMs can set a project as featured
    changeset_comment = db.Column(db.String)
    osmcha_filter_id = db.Column(
        db.String)  # Optional custom filter id for filtering on OSMCha
    due_date = db.Column(db.DateTime)
    imagery = db.Column(db.String)
    josm_preset = db.Column(db.String)
    id_presets = db.Column(ARRAY(db.String))
    last_updated = db.Column(db.DateTime, default=timestamp)
    license_id = db.Column(db.Integer,
                           db.ForeignKey("licenses.id", name="fk_licenses"))
    geometry = db.Column(Geometry("MULTIPOLYGON", srid=4326))
    centroid = db.Column(Geometry("POINT", srid=4326))
    country = db.Column(ARRAY(db.String), default=[])
    task_creation_mode = db.Column(db.Integer,
                                   default=TaskCreationMode.GRID.value,
                                   nullable=False)
    organisation_id = db.Column(
        db.Integer,
        db.ForeignKey("organisations.id", name="fk_organisations"),
        index=True,
    )

    # Tags
    mapping_types = db.Column(ARRAY(db.Integer), index=True)

    # Editors
    mapping_editors = db.Column(
        ARRAY(db.Integer),
        default=[
            Editors.ID.value,
            Editors.JOSM.value,
            Editors.POTLATCH_2.value,
            Editors.FIELD_PAPERS.value,
            Editors.CUSTOM.value,
        ],
        index=True,
        nullable=False,
    )
    validation_editors = db.Column(
        ARRAY(db.Integer),
        default=[
            Editors.ID.value,
            Editors.JOSM.value,
            Editors.POTLATCH_2.value,
            Editors.FIELD_PAPERS.value,
            Editors.CUSTOM.value,
        ],
        index=True,
        nullable=False,
    )

    # Stats
    total_tasks = db.Column(db.Integer, nullable=False)
    tasks_mapped = db.Column(db.Integer, default=0, nullable=False)
    tasks_validated = db.Column(db.Integer, default=0, nullable=False)
    tasks_bad_imagery = db.Column(db.Integer, default=0, nullable=False)

    # Mapped Objects
    # NOTE(review): `self.teams` is used by several methods below but is not
    # declared here — presumably a backref from a ProjectTeams model defined
    # elsewhere in the project; verify.
    tasks = db.relationship(Task, backref="projects",
                            cascade="all, delete, delete-orphan",
                            lazy="dynamic")
    project_info = db.relationship(ProjectInfo, lazy="dynamic", cascade="all")
    project_chat = db.relationship(ProjectChat, lazy="dynamic", cascade="all")
    author = db.relationship(User)
    allowed_users = db.relationship(User, secondary=project_allowed_users)
    priority_areas = db.relationship(
        PriorityArea,
        secondary=project_priority_areas,
        cascade="all, delete-orphan",
        single_parent=True,
    )
    custom_editor = db.relationship(CustomEditor, uselist=False)
    favorited = db.relationship(User, secondary=project_favorites,
                                backref="favorites")
    organisation = db.relationship(Organisation, backref="projects")
    campaign = db.relationship(Campaign, secondary=campaign_projects,
                               backref="projects")
    interests = db.relationship(Interest, secondary=project_interests,
                                backref="projects")

    def create_draft_project(self, draft_project_dto: DraftProjectDTO):
        """ Creates a draft project
        :param draft_project_dto: DTO containing draft project details
        :param aoi: Area of Interest for the project (eg boundary of project)
        """
        self.project_info.append(
            ProjectInfo.create_from_name(draft_project_dto.project_name))
        self.status = ProjectStatus.DRAFT.value
        self.author_id = draft_project_dto.user_id
        self.last_updated = timestamp()

    def set_project_aoi(self, draft_project_dto: DraftProjectDTO):
        """ Sets the AOI for the supplied project """
        aoi_geojson = geojson.loads(
            json.dumps(draft_project_dto.area_of_interest))
        # merge/dissolve input polygons into a single multipolygon AOI
        aoi_geometry = GridService.merge_to_multi_polygon(aoi_geojson,
                                                          dissolve=True)
        valid_geojson = geojson.dumps(aoi_geometry)
        self.geometry = ST_SetSRID(ST_GeomFromGeoJSON(valid_geojson), 4326)
        self.centroid = ST_Centroid(self.geometry)

    def set_default_changeset_comment(self):
        """ Sets the default changeset comment"""
        default_comment = current_app.config["DEFAULT_CHANGESET_COMMENT"]
        # prepend the configured "<comment>-<id>" prefix, keeping any
        # existing custom comment after it
        self.changeset_comment = (
            f"{default_comment}-{self.id} {self.changeset_comment}"
            if self.changeset_comment is not None else
            f"{default_comment}-{self.id}")
        self.save()

    def set_country_info(self):
        """ Sets the default country based on centroid"""
        lat, lng = (db.session.query(
            cast(ST_Y(Project.centroid), sqlalchemy.String),
            cast(ST_X(Project.centroid), sqlalchemy.String),
        ).filter(Project.id == self.id).one())
        url = "https://nominatim.openstreetmap.org/reverse?format=jsonv2&lat={0}&lon={1}".format(
            lat, lng)
        country_info = requests.get(url)
        country_info_json = country_info.content.decode("utf8").replace(
            "'", '"')
        # Load the JSON to a Python list & dump it back out as formatted JSON
        data = json.loads(country_info_json)
        if data["address"].get("country") is not None:
            self.country = [data["address"]["country"]]
        else:
            # fall back to county when Nominatim returns no country field
            self.country = [data["address"]["county"]]
        self.save()

    def create(self):
        """ Creates and saves the current model to the DB """
        db.session.add(self)
        db.session.commit()

    def save(self):
        """ Save changes to db"""
        db.session.commit()

    @staticmethod
    def clone(project_id: int, author_id: int):
        """ Clone project """
        orig = Project.query.get(project_id)
        if orig is None:
            raise NotFound()
        # Transform into dictionary.
        orig_metadata = orig.__dict__.copy()
        # Remove unneeded data.
        items_to_remove = ["_sa_instance_state", "id", "allowed_users"]
        [orig_metadata.pop(i, None) for i in items_to_remove]
        # Remove clone from session so we can reinsert it as a new object
        # (stats and geometry are reset; the clone starts as a fresh draft)
        orig_metadata.update({
            "total_tasks": 0,
            "tasks_mapped": 0,
            "tasks_validated": 0,
            "tasks_bad_imagery": 0,
            "last_updated": timestamp(),
            "created": timestamp(),
            "author_id": author_id,
            "status": ProjectStatus.DRAFT.value,
            "geometry": None,
            "centroid": None,
        })
        new_proj = Project(**orig_metadata)
        db.session.add(new_proj)
        proj_info = []
        for info in orig.project_info.all():
            info_data = info.__dict__.copy()
            info_data.pop("_sa_instance_state")
            info_data.update({
                "project_id": new_proj.id,
                "project_id_str": str(new_proj.id)
            })
            proj_info.append(ProjectInfo(**info_data))
        new_proj.project_info = proj_info
        # Replace changeset comment.
        default_comment = current_app.config["DEFAULT_CHANGESET_COMMENT"]
        if default_comment is not None:
            orig_changeset = f"{default_comment}-{orig.id}"  # Preserve space
            new_proj.changeset_comment = orig.changeset_comment.replace(
                orig_changeset, "")
        # Copy array relationships.
        for field in ["interests", "campaign", "teams"]:
            value = getattr(orig, field)
            setattr(new_proj, field, value)
        new_proj.custom_editor = orig.custom_editor
        db.session.commit()
        return new_proj

    @staticmethod
    def get(project_id: int):
        """ Gets specified project
        :param project_id: project ID in scope
        :return: Project if found otherwise None
        """
        return Project.query.options(
            orm.noload("tasks"), orm.noload("messages"),
            orm.noload("project_chat")).get(project_id)

    def update(self, project_dto: ProjectDTO):
        """ Updates project from DTO """
        self.status = ProjectStatus[project_dto.project_status].value
        self.priority = ProjectPriority[project_dto.project_priority].value
        self.default_locale = project_dto.default_locale
        self.enforce_random_task_selection = project_dto.enforce_random_task_selection
        self.private = project_dto.private
        self.mapper_level = MappingLevel[
            project_dto.mapper_level.upper()].value
        self.changeset_comment = project_dto.changeset_comment
        self.due_date = project_dto.due_date
        self.imagery = project_dto.imagery
        self.josm_preset = project_dto.josm_preset
        self.id_presets = project_dto.id_presets
        self.last_updated = timestamp()
        self.license_id = project_dto.license_id
        if project_dto.osmcha_filter_id:
            # Support simple extraction of OSMCha filter id from OSMCha URL
            match = re.search(r"aoi=([\w-]+)", project_dto.osmcha_filter_id)
            self.osmcha_filter_id = (match.group(1)
                                     if match else project_dto.osmcha_filter_id)
        else:
            self.osmcha_filter_id = None
        if project_dto.organisation:
            org = Organisation.get(project_dto.organisation)
            if org is None:
                raise NotFound("Organisation does not exist")
            self.organisation = org
        # Cast MappingType strings to int array
        type_array = []
        for mapping_type in project_dto.mapping_types:
            type_array.append(MappingTypes[mapping_type].value)
        self.mapping_types = type_array
        # Cast Editor strings to int array
        mapping_editors_array = []
        for mapping_editor in project_dto.mapping_editors:
            mapping_editors_array.append(Editors[mapping_editor].value)
        self.mapping_editors = mapping_editors_array
        validation_editors_array = []
        for validation_editor in project_dto.validation_editors:
            validation_editors_array.append(Editors[validation_editor].value)
        self.validation_editors = validation_editors_array
        self.country = project_dto.country_tag
        # Add list of allowed users, meaning the project can only be mapped by users in this list
        if hasattr(project_dto, "allowed_users"):
            self.allowed_users = [
            ]  # Clear existing relationships then re-insert
            for user in project_dto.allowed_users:
                self.allowed_users.append(user)
        # Update teams and projects relationship.
        self.teams = []
        if hasattr(project_dto, "project_teams") and project_dto.project_teams:
            for team_dto in project_dto.project_teams:
                team = Team.get(team_dto.team_id)
                if team is None:
                    raise NotFound("Team not found")
                role = TeamRoles[team_dto.role].value
                # association object attaches itself to self.teams on init
                ProjectTeams(project=self, team=team, role=role)
        # Set Project Info for all returned locales
        for dto in project_dto.project_info_locales:
            project_info = self.project_info.filter_by(
                locale=dto.locale).one_or_none()
            if project_info is None:
                new_info = ProjectInfo.create_from_dto(
                    dto)  # Can't find info so must be new locale
                self.project_info.append(new_info)
            else:
                project_info.update_from_dto(dto)
        self.priority_areas = [
        ]  # Always clear Priority Area prior to updating
        if project_dto.priority_areas:
            for priority_area in project_dto.priority_areas:
                pa = PriorityArea.from_dict(priority_area)
                self.priority_areas.append(pa)
        if project_dto.custom_editor:
            if not self.custom_editor:
                new_editor = CustomEditor.create_from_dto(
                    self.id, project_dto.custom_editor)
                self.custom_editor = new_editor
            else:
                self.custom_editor.update_editor(project_dto.custom_editor)
        else:
            if self.custom_editor:
                self.custom_editor.delete()
        self.campaign = [
            Campaign.query.get(c.id) for c in project_dto.campaigns
        ]
        if project_dto.mapping_permission:
            self.mapping_permission = MappingPermission[
                project_dto.mapping_permission.upper()].value
        if project_dto.validation_permission:
            self.validation_permission = ValidationPermission[
                project_dto.validation_permission.upper()].value
        # Update Interests.
        self.interests = []
        if project_dto.interests:
            self.interests = [
                Interest.query.get(i.id) for i in project_dto.interests
            ]
        db.session.commit()

    def delete(self):
        """ Deletes the current model from the DB """
        db.session.delete(self)
        db.session.commit()

    @staticmethod
    def exists(project_id):
        """Return True if a project with *project_id* exists."""
        query = Project.query.filter(Project.id == project_id).exists()
        return db.session.query(literal(True)).filter(query).scalar()

    def is_favorited(self, user_id: int) -> bool:
        """Return True if the given user has favorited this project."""
        user = User.query.get(user_id)
        if user not in self.favorited:
            return False
        return True

    def favorite(self, user_id: int):
        """Add the given user to this project's favorites."""
        user = User.query.get(user_id)
        self.favorited.append(user)
        db.session.commit()

    def unfavorite(self, user_id: int):
        """Remove the given user from favorites; raises if not favorited."""
        user = User.query.get(user_id)
        if user not in self.favorited:
            raise ValueError("Project not been favorited by user")
        self.favorited.remove(user)
        db.session.commit()

    def set_as_featured(self):
        """Mark project as featured; raises if already featured."""
        if self.featured is True:
            raise ValueError("Project is already featured")
        self.featured = True
        db.session.commit()

    def unset_as_featured(self):
        """Clear the featured flag; raises if not featured."""
        if self.featured is False:
            raise ValueError("Project is not featured")
        self.featured = False
        db.session.commit()

    def can_be_deleted(self) -> bool:
        """ Projects can be deleted if they have no mapped work """
        task_count = self.tasks.filter(
            Task.task_status != TaskStatus.READY.value).count()
        if task_count == 0:
            return True
        else:
            return False

    @staticmethod
    def get_projects_for_admin(admin_id: int, preferred_locale: str,
                               search_dto: ProjectSearchDTO) -> PMDashboardDTO:
        """ Get projects for admin """
        query = Project.query.filter(Project.author_id == admin_id)
        # Do Filtering Here
        if search_dto.order_by:
            if search_dto.order_by_type == "DESC":
                query = query.order_by(desc(search_dto.order_by))
            else:
                query = query.order_by(search_dto.order_by)
        admins_projects = query.all()
        if admins_projects is None:
            raise NotFound("No projects found for admin")
        admin_projects_dto = PMDashboardDTO()
        for project in admins_projects:
            pm_project = project.get_project_summary(preferred_locale)
            project_status = ProjectStatus(project.status)
            # bucket each project by its lifecycle state
            if project_status == ProjectStatus.DRAFT:
                admin_projects_dto.draft_projects.append(pm_project)
            elif project_status == ProjectStatus.PUBLISHED:
                admin_projects_dto.active_projects.append(pm_project)
            elif project_status == ProjectStatus.ARCHIVED:
                admin_projects_dto.archived_projects.append(pm_project)
            else:
                current_app.logger.error(
                    f"Unexpected state project {project.id}")
        return admin_projects_dto

    def get_project_user_stats(self, user_id: int) -> ProjectUserStatsDTO:
        """Compute project specific stats for a given user"""
        stats_dto = ProjectUserStatsDTO()
        stats_dto.time_spent_mapping = 0
        stats_dto.time_spent_validating = 0
        stats_dto.total_time_spent = 0
        # sum of mapping-lock durations (action_text holds "HH:MM:SS")
        total_mapping_time = (db.session.query(
            func.sum(
                cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"),
                     Time))).filter(
                         or_(
                             TaskHistory.action == "LOCKED_FOR_MAPPING",
                             TaskHistory.action == "AUTO_UNLOCKED_FOR_MAPPING",
                         )).filter(TaskHistory.user_id == user_id).filter(
                             TaskHistory.project_id == self.id))
        for time in total_mapping_time:
            total_mapping_time = time[0]
            if total_mapping_time:
                stats_dto.time_spent_mapping = total_mapping_time.total_seconds(
                )
                stats_dto.total_time_spent += stats_dto.time_spent_mapping
        # validation time: dedupe per-minute locks, keep max duration each
        query = (TaskHistory.query.with_entities(
            func.date_trunc("minute", TaskHistory.action_date).label("trn"),
            func.max(TaskHistory.action_text).label("tm"),
        ).filter(TaskHistory.user_id == user_id).filter(
            TaskHistory.project_id == self.id).filter(
                TaskHistory.action == "LOCKED_FOR_VALIDATION").group_by(
                    "trn").subquery())
        total_validation_time = db.session.query(
            func.sum(cast(func.to_timestamp(query.c.tm, "HH24:MI:SS"),
                          Time))).all()
        for time in total_validation_time:
            total_validation_time = time[0]
            if total_validation_time:
                stats_dto.time_spent_validating = total_validation_time.total_seconds(
                )
                stats_dto.total_time_spent += stats_dto.time_spent_validating
        return stats_dto

    def get_project_stats(self) -> ProjectStatsDTO:
        """ Create Project Stats model for postgis project object"""
        project_stats = ProjectStatsDTO()
        project_stats.project_id = self.id
        # AOI area in square km (geography cast => meters^2 / 1e6)
        project_area_sql = "select ST_Area(geometry, true)/1000000 as area from public.projects where id = :id"
        project_area_result = db.engine.execute(text(project_area_sql),
                                                id=self.id)
        project_stats.area = project_area_result.fetchone()["area"]
        project_stats.total_mappers = (db.session.query(User).filter(
            User.projects_mapped.any(self.id)).count())
        project_stats.total_tasks = self.total_tasks
        project_stats.total_comments = (db.session.query(ProjectChat).filter(
            ProjectChat.project_id == self.id).count())
        project_stats.percent_mapped = Project.calculate_tasks_percent(
            "mapped",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        project_stats.percent_validated = Project.calculate_tasks_percent(
            "validated",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        project_stats.percent_bad_imagery = Project.calculate_tasks_percent(
            "bad_imagery",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        centroid_geojson = db.session.scalar(self.centroid.ST_AsGeoJSON())
        project_stats.aoi_centroid = geojson.loads(centroid_geojson)
        project_stats.total_time_spent = 0
        project_stats.total_mapping_time = 0
        project_stats.total_validation_time = 0
        project_stats.average_mapping_time = 0
        project_stats.average_validation_time = 0
        # aggregate mapping lock durations across the whole project
        total_mapping_time, total_mapping_tasks = (db.session.query(
            func.sum(
                cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"),
                     Time)),
            func.count(TaskHistory.action),
        ).filter(
            or_(
                TaskHistory.action == "LOCKED_FOR_MAPPING",
                TaskHistory.action == "AUTO_UNLOCKED_FOR_MAPPING",
            )).filter(TaskHistory.project_id == self.id).one())
        if total_mapping_tasks > 0:
            total_mapping_time = total_mapping_time.total_seconds()
            project_stats.total_mapping_time = total_mapping_time
            project_stats.average_mapping_time = (total_mapping_time /
                                                  total_mapping_tasks)
            project_stats.total_time_spent += total_mapping_time
        total_validation_time, total_validation_tasks = (db.session.query(
            func.sum(
                cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"),
                     Time)),
            func.count(TaskHistory.action),
        ).filter(
            or_(
                TaskHistory.action == "LOCKED_FOR_VALIDATION",
                TaskHistory.action == "AUTO_UNLOCKED_FOR_VALIDATION",
            )).filter(TaskHistory.project_id == self.id).one())
        if total_validation_tasks > 0:
            total_validation_time = total_validation_time.total_seconds()
            project_stats.total_validation_time = total_validation_time
            project_stats.average_validation_time = (total_validation_time /
                                                     total_validation_tasks)
            project_stats.total_time_spent += total_validation_time
        # fall back to per-zoom averages when a direct average is unavailable
        actions = []
        if project_stats.average_mapping_time <= 0:
            actions.append(TaskStatus.LOCKED_FOR_MAPPING.name)
        if project_stats.average_validation_time <= 0:
            actions.append(TaskStatus.LOCKED_FOR_VALIDATION.name)
        zoom_levels = []
        # Check that averages are non-zero.
        if len(actions) != 0:
            zoom_levels = (Task.query.with_entities(
                Task.zoom.distinct()).filter(Task.project_id == self.id).all())
            zoom_levels = [z[0] for z in zoom_levels]
        # Validate project has arbitrary tasks.
        is_square = True
        if None in zoom_levels:
            is_square = False
        sq = (TaskHistory.query.with_entities(
            Task.zoom,
            TaskHistory.action,
            (cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"),
                  Time)).label("ts"),
        ).filter(Task.is_square == is_square).filter(
            TaskHistory.project_id == Task.project_id).filter(
                TaskHistory.task_id == Task.id).filter(
                    TaskHistory.action.in_(actions)))
        if is_square is True:
            sq = sq.filter(Task.zoom.in_(zoom_levels))
        sq = sq.subquery()
        # cap sample size; ignore zero-length durations
        nz = (db.session.query(sq.c.zoom, sq.c.action, sq.c.ts).filter(
            sq.c.ts > datetime.time(0)).limit(10000).subquery())
        if project_stats.average_mapping_time <= 0:
            mapped_avg = (db.session.query(nz.c.zoom, (func.avg(
                nz.c.ts)).label("avg")).filter(
                    nz.c.action ==
                    TaskStatus.LOCKED_FOR_MAPPING.name).group_by(
                        nz.c.zoom).all())
            # NOTE(review): divides by len(mapped_avg) — ZeroDivisionError if
            # no rows matched; presumably guarded by data invariants upstream.
            mapping_time = sum([t.avg.total_seconds()
                                for t in mapped_avg]) / len(mapped_avg)
            project_stats.average_mapping_time = mapping_time
        if project_stats.average_validation_time <= 0:
            val_avg = (db.session.query(nz.c.zoom, (func.avg(
                nz.c.ts)).label("avg")).filter(
                    nz.c.action ==
                    TaskStatus.LOCKED_FOR_VALIDATION.name).group_by(
                        nz.c.zoom).all())
            validation_time = sum([t.avg.total_seconds()
                                   for t in val_avg]) / len(val_avg)
            project_stats.average_validation_time = validation_time
        time_to_finish_mapping = (
            self.total_tasks -
            (self.tasks_mapped + self.tasks_bad_imagery +
             self.tasks_validated)) * project_stats.average_mapping_time
        project_stats.time_to_finish_mapping = time_to_finish_mapping
        project_stats.time_to_finish_validating = (
            self.total_tasks - (self.tasks_validated + self.tasks_bad_imagery)
        ) * project_stats.average_validation_time + time_to_finish_mapping
        return project_stats

    def get_project_summary(self, preferred_locale) -> ProjectSummary:
        """ Create Project Summary model for postgis project object"""
        summary = ProjectSummary()
        summary.project_id = self.id
        priority = self.priority
        if priority == 0:
            summary.priority = "URGENT"
        elif priority == 1:
            summary.priority = "HIGH"
        elif priority == 2:
            summary.priority = "MEDIUM"
        else:
            summary.priority = "LOW"
        summary.author = User.get_by_id(self.author_id).username
        summary.default_locale = self.default_locale
        summary.country_tag = self.country
        summary.changeset_comment = self.changeset_comment
        summary.due_date = self.due_date
        summary.created = self.created
        summary.last_updated = self.last_updated
        summary.osmcha_filter_id = self.osmcha_filter_id
        summary.mapper_level = MappingLevel(self.mapper_level).name
        summary.mapping_permission = MappingPermission(
            self.mapping_permission).name
        summary.validation_permission = ValidationPermission(
            self.validation_permission).name
        summary.random_task_selection_enforced = self.enforce_random_task_selection
        summary.private = self.private
        summary.license_id = self.license_id
        summary.status = ProjectStatus(self.status).name
        summary.id_presets = self.id_presets
        summary.imagery = self.imagery
        if self.organisation_id:
            summary.organisation = self.organisation_id
            summary.organisation_name = self.organisation.name
            summary.organisation_logo = self.organisation.logo
        if self.campaign:
            summary.campaigns = [i.as_dto() for i in self.campaign]
        # Cast MappingType values to related string array
        mapping_types_array = []
        if self.mapping_types:
            for mapping_type in self.mapping_types:
                mapping_types_array.append(MappingTypes(mapping_type).name)
            summary.mapping_types = mapping_types_array
        if self.mapping_editors:
            mapping_editors = []
            for mapping_editor in self.mapping_editors:
                mapping_editors.append(Editors(mapping_editor).name)
            summary.mapping_editors = mapping_editors
        if self.validation_editors:
            validation_editors = []
            for validation_editor in self.validation_editors:
                validation_editors.append(Editors(validation_editor).name)
            summary.validation_editors = validation_editors
        if self.custom_editor:
            summary.custom_editor = self.custom_editor.as_dto()
        # If project is private, fetch list of allowed users
        if self.private:
            allowed_users = []
            for user in self.allowed_users:
                allowed_users.append(user.username)
            summary.allowed_users = allowed_users
        centroid_geojson = db.session.scalar(self.centroid.ST_AsGeoJSON())
        summary.aoi_centroid = geojson.loads(centroid_geojson)
        summary.percent_mapped = Project.calculate_tasks_percent(
            "mapped",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        summary.percent_validated = Project.calculate_tasks_percent(
            "validated",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        summary.percent_bad_imagery = Project.calculate_tasks_percent(
            "bad_imagery",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        summary.project_teams = [
            ProjectTeamDTO(
                dict(
                    team_id=t.team.id,
                    team_name=t.team.name,
                    role=TeamRoles(t.role).name,
                )) for t in self.teams
        ]
        project_info = ProjectInfo.get_dto_for_locale(self.id,
                                                      preferred_locale,
                                                      self.default_locale)
        summary.project_info = project_info
        return summary

    def get_project_title(self, preferred_locale):
        """Return the project name localized to *preferred_locale*."""
        project_info = ProjectInfo.get_dto_for_locale(self.id,
                                                      preferred_locale,
                                                      self.default_locale)
        return project_info.name

    @staticmethod
    def get_project_total_contributions(project_id: int) -> int:
        """Count distinct users with non-comment history on the project."""
        project_contributors_count = (TaskHistory.query.with_entities(
            TaskHistory.user_id).filter(
                TaskHistory.project_id == project_id,
                TaskHistory.action != "COMMENT").distinct(
                    TaskHistory.user_id).count())
        return project_contributors_count

    def get_aoi_geometry_as_geojson(self):
        """ Helper which returns the AOI geometry as a geojson object """
        aoi_geojson = db.engine.execute(self.geometry.ST_AsGeoJSON()).scalar()
        return geojson.loads(aoi_geojson)

    def get_project_teams(self):
        """ Helper to return teams with members so we can handle permissions """
        project_teams = []
        for t in self.teams:
            project_teams.append({
                "name": t.team.name,
                "role": t.role,
                "members": [m.member.username for m in t.team.members],
            })
        return project_teams

    @staticmethod
    @cached(active_mappers_cache)
    def get_active_mappers(project_id) -> int:
        """ Get count of Locked tasks as a proxy for users who are currently active on the project """
        return (Task.query.filter(
            Task.task_status.in_((
                TaskStatus.LOCKED_FOR_MAPPING.value,
                TaskStatus.LOCKED_FOR_VALIDATION.value,
            ))).filter(Task.project_id == project_id).distinct(
                Task.locked_by).count())

    def _get_project_and_base_dto(self):
        """ Populates a project DTO with properties common to all roles """
        base_dto = ProjectDTO()
        base_dto.project_id = self.id
        base_dto.project_status = ProjectStatus(self.status).name
        base_dto.default_locale = self.default_locale
        base_dto.project_priority = ProjectPriority(self.priority).name
        base_dto.area_of_interest = self.get_aoi_geometry_as_geojson()
        base_dto.aoi_bbox = shape(base_dto.area_of_interest).bounds
        base_dto.mapping_permission = MappingPermission(
            self.mapping_permission).name
        base_dto.validation_permission = ValidationPermission(
            self.validation_permission).name
        base_dto.enforce_random_task_selection = self.enforce_random_task_selection
        base_dto.private = self.private
        base_dto.mapper_level = MappingLevel(self.mapper_level).name
        base_dto.changeset_comment = self.changeset_comment
        base_dto.osmcha_filter_id = self.osmcha_filter_id
        base_dto.due_date = self.due_date
        base_dto.imagery = self.imagery
        base_dto.josm_preset = self.josm_preset
        base_dto.id_presets = self.id_presets
        base_dto.country_tag = self.country
        base_dto.organisation_id = self.organisation_id
        base_dto.license_id = self.license_id
        base_dto.created = self.created
        base_dto.last_updated = self.last_updated
        base_dto.author = User.get_by_id(self.author_id).username
        base_dto.active_mappers = Project.get_active_mappers(self.id)
        base_dto.task_creation_mode = TaskCreationMode(
            self.task_creation_mode).name
        base_dto.percent_mapped = Project.calculate_tasks_percent(
            "mapped",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        base_dto.percent_validated = Project.calculate_tasks_percent(
            "validated",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        base_dto.percent_bad_imagery = Project.calculate_tasks_percent(
            "bad_imagery",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        base_dto.project_teams = [
            ProjectTeamDTO(
                dict(
                    team_id=t.team.id,
                    team_name=t.team.name,
                    role=TeamRoles(t.role).name,
                )) for t in self.teams
        ]
        if self.custom_editor:
            base_dto.custom_editor = self.custom_editor.as_dto()
        if self.private:
            # If project is private it should have a list of allowed users
            allowed_usernames = []
            for user in self.allowed_users:
                allowed_usernames.append(user.username)
            base_dto.allowed_usernames = allowed_usernames
        if self.mapping_types:
            mapping_types = []
            for mapping_type in self.mapping_types:
                mapping_types.append(MappingTypes(mapping_type).name)
            base_dto.mapping_types = mapping_types
        if self.campaign:
            base_dto.campaigns = [i.as_dto() for i in self.campaign]
        if self.mapping_editors:
            mapping_editors = []
            for mapping_editor in self.mapping_editors:
                mapping_editors.append(Editors(mapping_editor).name)
            base_dto.mapping_editors = mapping_editors
        if self.validation_editors:
            validation_editors = []
            for validation_editor in self.validation_editors:
                validation_editors.append(Editors(validation_editor).name)
            base_dto.validation_editors = validation_editors
        if self.priority_areas:
            geojson_areas = []
            for priority_area in self.priority_areas:
                geojson_areas.append(priority_area.get_as_geojson())
            base_dto.priority_areas = geojson_areas
        base_dto.interests = [
            InterestDTO(dict(id=i.id, name=i.name)) for i in self.interests
        ]
        return self, base_dto

    def as_dto_for_mapping(self,
                           authenticated_user_id: int = None,
                           locale: str = "en",
                           abbrev: bool = True) -> Optional[ProjectDTO]:
        """ Creates a Project DTO suitable for transmitting to mapper users """
        project, project_dto = self._get_project_and_base_dto()
        if abbrev is False:
            # full task geometries only when explicitly requested
            project_dto.tasks = Task.get_tasks_as_geojson_feature_collection(
                self.id, None)
        else:
            project_dto.tasks = Task.get_tasks_as_geojson_feature_collection_no_geom(
                self.id)
        project_dto.project_info = ProjectInfo.get_dto_for_locale(
            self.id, locale, project.default_locale)
        if project.organisation_id:
            project_dto.organisation = project.organisation.id
            project_dto.organisation_name = project.organisation.name
            project_dto.organisation_logo = project.organisation.logo
        project_dto.project_info_locales = ProjectInfo.get_dto_for_all_locales(
            self.id)
        return project_dto

    def tasks_as_geojson(self,
                         task_ids_str: str,
                         order_by=None,
                         order_by_type="ASC",
                         status=None):
        """ Creates a geojson of all areas """
        project_tasks = Task.get_tasks_as_geojson_feature_collection(
            self.id, task_ids_str, order_by, order_by_type, status)
        return project_tasks

    @staticmethod
    def get_all_countries():
        """Return a DTO with the distinct, sorted country tags of all projects."""
        query = (db.session.query(
            func.unnest(Project.country).label("country")).distinct().order_by(
                "country"))
        tags_dto = TagsDTO()
        tags_dto.tags = [r[0] for r in query]
        return tags_dto

    @staticmethod
    def calculate_tasks_percent(target, total_tasks, tasks_mapped,
                                tasks_validated, tasks_bad_imagery):
        """ Calculates percentages of contributions """
        # NOTE(review): divides by (total_tasks - tasks_bad_imagery) —
        # ZeroDivisionError if all tasks are bad imagery; verify callers.
        if target == "mapped":
            return int((tasks_mapped + tasks_validated) /
                       (total_tasks - tasks_bad_imagery) * 100)
        elif target == "validated":
            return int(tasks_validated / (total_tasks - tasks_bad_imagery) *
                       100)
        elif target == "bad_imagery":
            return int((tasks_bad_imagery / total_tasks) * 100)

    def as_dto_for_admin(self, project_id):
        """ Creates a Project DTO suitable for transmitting to project admins """
        project, project_dto = self._get_project_and_base_dto()
        if project is None:
            return None
        project_dto.project_info_locales = ProjectInfo.get_dto_for_all_locales(
            project_id)
        return project_dto

    def create_or_update_interests(self, interests_ids):
        """Replace this project's interests with those given by id."""
        self.interests = []
        objs = [Interest.get_by_id(i) for i in interests_ids]
        self.interests.extend(objs)
        db.session.commit()

    @staticmethod
    def get_project_campaigns(project_id: int):
        """Return lightweight DTOs for all campaigns linked to the project."""
        query = (Campaign.query.join(campaign_projects).filter(
            campaign_projects.c.project_id == project_id).all())
        campaign_list = []
        for campaign in query:
            campaign_dto = CampaignDTO()
            campaign_dto.id = campaign.id
            campaign_dto.name = campaign.name
            campaign_list.append(campaign_dto)
        return campaign_list
class SurveysCore(Base):
    """Raw ODK on-board survey response (one row per completed survey).

    Maps the ``odk.survey`` table. Origin/destination points are stored with
    SRID 2913. Boarding/alighting stops reference ``tm.stops``.
    """

    __tablename__ = 'survey'
    __table_args__ = {"schema": "odk"}

    # --- survey/session metadata ---
    uri = Column(String, primary_key=True)  # ODK-assigned unique record id
    user_id = Column(String)
    deviceid = Column(String)
    srv_date = Column(String)
    start_time = Column(DateTime)
    end_time = Column(DateTime)

    # --- trip identification ---
    rte = Column(Integer)  # surveyed route number
    dir = Column(Integer)  # direction of travel
    english = Column(Boolean)
    other_lng = Column(Integer)

    # --- trip origin ---
    orig_geom = Column(Geometry(geometry_type='POINT', srid=2913))
    orig_purpose = Column(Integer)
    orig_purpose_other = Column(String)
    orig_access = Column(Integer)
    orig_access_other = Column(String)
    orig_blocks = Column(Integer)
    orig_parking = Column(String)

    # --- trip destination ---
    dest_geom = Column(Geometry(geometry_type='POINT', srid=2913))
    dest_purpose = Column(Integer)
    dest_purpose_other = Column(String)
    dest_egress = Column(Integer)
    dest_egress_other = Column(String)
    dest_blocks = Column(Integer)
    dest_parking = Column(String)

    # --- boarding / alighting stops (FKs into tm.stops) ---
    board_id = Column(Integer, ForeignKey("tm.stops.gid"), nullable=True)
    alight_id = Column(Integer, ForeignKey("tm.stops.gid"), nullable=True)
    # Both relationships target StopsODK, so each must name its own FK column.
    board = relationship("StopsODK", foreign_keys=board_id)
    alight = relationship("StopsODK", foreign_keys=alight_id)

    # --- transfer routes (up to five) ---
    route1 = Column(String)
    route2 = Column(String)
    route3 = Column(String)
    route4 = Column(String)
    route5 = Column(String)
    # loc_valid = Column(String)

    # --- return trip, fare and churn questions ---
    reverse_trip = Column(Boolean)
    reverse_time = Column(DateTime)
    stcar_fare = Column(Integer)
    stcar_fare_other = Column(String)
    churn = Column(Integer)
    churn_other = Column(String)
    reason = Column(Integer)

    # --- demographics ---
    license = Column(Boolean)
    house_no = Column(Integer)
    wrk_out_house = Column(Integer)
    wrk_veh = Column(Integer)
    race = Column(Integer)
    race_other = Column(String)
    income = Column(Integer)
    addit_lng = Column(Boolean)
    engl_prof = Column(Integer)

    # --- follow-up call details ---
    call_name = Column(String)
    call_number = Column(String)
    call_time = Column(String)
    call_spanish = Column(String)
    call_comment = Column(String)

    def __repr__(self):
        # BUG FIX: the original supplied four values (including a stray
        # self.user_id) to three %r placeholders and referenced the undefined
        # name "elf" — every repr() call raised. Now prints uri, rte, dir.
        return '<Survey: uri:%r, rte:%r, dir:%r>' % \
            (self.uri, self.rte, self.dir)
class Lake(Base):
    """ORM mapping for the ``GDELT`` table: a named record with a POINT geometry."""
    # NOTE(review): class name "Lake" vs table name "GDELT" looks inconsistent —
    # confirm which one reflects the actual data.
    __tablename__ = 'GDELT'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    geom = Column(Geometry('POINT'))  # no SRID specified — TODO confirm expected CRS
class VSyntheseValidation(DB.Model):
    """Maps the ``gn_commons.v_synthese_validation_forwebapp`` view.

    Each row is one synthese observation enriched with nomenclature ids and
    validation status, as consumed by the validation web app.
    """
    __tablename__ = "v_synthese_validation_forwebapp"
    __table_args__ = {"schema": "gn_commons"}

    # --- identifiers ---
    id_synthese = DB.Column(
        DB.Integer,
        ForeignKey("gn_synthese.v_synthese_decode_nomenclatures.id_synthese"),
        primary_key=True,
    )
    unique_id_sinp = DB.Column(UUID(as_uuid=True))
    unique_id_sinp_grp = DB.Column(UUID(as_uuid=True))
    id_source = DB.Column(DB.Integer)
    entity_source_pk_value = DB.Column(DB.Integer)
    id_dataset = DB.Column(DB.Integer)
    # NOTE(review): dataset_name declared as Integer although the name suggests
    # text — confirm against the view definition.
    dataset_name = DB.Column(DB.Integer)
    id_acquisition_framework = DB.Column(DB.Integer)

    # --- counts and taxonomy ---
    count_min = DB.Column(DB.Integer)
    count_max = DB.Column(DB.Integer)
    cd_nom = DB.Column(DB.Integer)
    cd_ref = DB.Column(DB.Unicode)
    nom_cite = DB.Column(DB.Unicode)
    nom_valide = DB.Column(DB.Unicode)
    nom_vern = DB.Column(DB.Unicode)
    lb_nom = DB.Column(DB.Unicode)
    meta_v_taxref = DB.Column(DB.Unicode)

    # --- proofs and altitude ---
    sample_number_proof = DB.Column(DB.Unicode)
    digital_proof = DB.Column(DB.Unicode)
    non_digital_proof = DB.Column(DB.Unicode)
    altitude_min = DB.Column(DB.Unicode)
    altitude_max = DB.Column(DB.Unicode)

    # --- geometry and dates ---
    the_geom_4326 = DB.Column(Geometry("GEOMETRY", 4326))
    date_min = DB.Column(DB.DateTime)
    date_max = DB.Column(DB.DateTime)

    # --- people and comments ---
    validator = DB.Column(DB.Unicode)
    observers = DB.Column(DB.Unicode)
    determiner = DB.Column(DB.Unicode)
    id_digitiser = DB.Column(DB.Integer)
    comment_context = DB.Column(DB.Unicode)
    comment_description = DB.Column(DB.Unicode)

    # --- metadata timestamps / last action ---
    meta_validation_date = DB.Column(DB.DateTime)
    meta_create_date = DB.Column(DB.DateTime)
    meta_update_date = DB.Column(DB.DateTime)
    last_action = DB.Column(DB.Unicode)

    # --- nomenclature foreign ids (raw ids, decoded elsewhere) ---
    id_nomenclature_geo_object_nature = DB.Column(DB.Integer)
    id_nomenclature_info_geo_type = DB.Column(DB.Integer)
    id_nomenclature_grp_typ = DB.Column(DB.Integer)
    id_nomenclature_obs_technique = DB.Column(DB.Integer)
    id_nomenclature_bio_status = DB.Column(DB.Integer)
    id_nomenclature_bio_condition = DB.Column(DB.Integer)
    id_nomenclature_naturalness = DB.Column(DB.Integer)
    id_nomenclature_exist_proof = DB.Column(DB.Integer)
    id_nomenclature_diffusion_level = DB.Column(DB.Integer)
    id_nomenclature_life_stage = DB.Column(DB.Integer)
    id_nomenclature_sex = DB.Column(DB.Integer)
    id_nomenclature_obj_count = DB.Column(DB.Integer)
    id_nomenclature_type_count = DB.Column(DB.Integer)
    id_nomenclature_sensitivity = DB.Column(DB.Integer)
    id_nomenclature_observation_status = DB.Column(DB.Integer)
    id_nomenclature_blurring = DB.Column(DB.Integer)
    id_nomenclature_source_status = DB.Column(DB.Integer)
    id_nomenclature_valid_status = DB.Column(DB.Integer)

    # --- validation status ---
    mnemonique = DB.Column(DB.Unicode)
    cd_nomenclature_validation_status = DB.Column(DB.Unicode)
    label_default = DB.Column(DB.Unicode)
    validation_auto = DB.Column(DB.Boolean)
    validation_date = DB.Column(DB.DateTime)
    geojson = DB.Column(DB.Unicode)

    # Computed at query time: True when at least one media row is attached to
    # this observation's SINP uuid.
    has_medias = column_property(
        exists([TMedias.id_media]).\
        where(TMedias.uuid_attached_row==unique_id_sinp)
    )

    def get_geofeature(self, recursif=False, fields=()):
        """Return this row as a geofeature built from the_geom_4326/id_synthese."""
        return self.as_geofeature("the_geom_4326", "id_synthese", recursif,
                                  fields=fields)
def _create_layer(self, public=False, none_area=False, attr_list=False,
                  exclude_properties=False, metadatas=None, geom_type=False):
    """ This function is central for this test class. It creates a layer with
    two features, and associates a restriction area to it.

    :param public: register the layer as public (skips the restriction area)
    :param none_area: create the restriction area without a geometry
    :param attr_list: insert a third feature whose name is a comma list
    :param exclude_properties: set layer.exclude_properties to "name"
    :param metadatas: optional metadatas to attach to the layer
    :param geom_type: use a typed POINT geometry column instead of a generic one
    :returns: the id of the created layer (also used as the table index)
    """
    import transaction
    from sqlalchemy import Column, Table, types, ForeignKey
    from sqlalchemy.ext.declarative import declarative_base
    from geoalchemy2 import Geometry, WKTElement
    from c2cgeoportal.models import DBSession, LayerV1, RestrictionArea

    if self._tables is None:
        self._tables = []

    # Each call gets a fresh table index so tests do not collide.
    self.__class__._table_index += 1
    id = self.__class__._table_index

    engine = DBSession.c2c_rw_bind
    connection = engine.connect()

    if not self.metadata:
        self.metadata = declarative_base(bind=engine).metadata

    tablename = "table_{0:d}".format(id)

    # Child table referenced by the main table via a foreign key.
    table1 = Table("{0!s}_child".format(tablename), self.metadata,
                   Column("id", types.Integer, primary_key=True),
                   Column("name", types.Unicode),
                   schema="public")
    if geom_type:
        table1.append_column(Column("geom", Geometry("POINT", srid=21781)))
    else:
        table1.append_column(Column("geom", Geometry(srid=21781)))
    self._tables.append(table1)

    # Main geo table carrying the editable feature rows.
    table2 = Table(
        tablename, self.metadata,
        Column("id", types.Integer, primary_key=True),
        Column("child_id", types.Integer,
               ForeignKey("public.{0!s}_child.id".format(tablename))),
        Column("name", types.Unicode),
        Column("last_update_user", types.Unicode),
        Column("last_update_date", types.DateTime),
        schema="public")
    if geom_type:
        table2.append_column(Column("geom", Geometry("POINT", srid=21781)))
    else:
        table2.append_column(Column("geom", Geometry(srid=21781)))
    self._tables.append(table2)

    # Drop leftovers from a previous run before re-creating the tables.
    table1.drop(checkfirst=True)
    table2.drop(checkfirst=True)
    table1.create()
    table2.create()

    # Two child rows, then one feature per child (non-ASCII names on purpose).
    ins = table1.insert().values(name="c1é")
    c1_id = connection.execute(ins).inserted_primary_key[0]
    ins = table1.insert().values(name="c2é")
    c2_id = connection.execute(ins).inserted_primary_key[0]
    ins = table2.insert().values(child_id=c1_id, name="foo",
                                 geom=WKTElement("POINT(5 45)", 21781))
    connection.execute(ins)
    ins = table2.insert().values(child_id=c2_id, name="bar",
                                 geom=WKTElement("POINT(6 46)", 21781))
    connection.execute(ins)
    if attr_list:
        # Extra feature whose name is a comma-separated attribute list.
        ins = table2.insert().values(child_id=c2_id, name="aaa,bbb,foo",
                                     geom=WKTElement("POINT(6 46)", 21781))
        connection.execute(ins)

    layer = LayerV1()
    layer.id = id
    layer.name = str(id)
    layer.geo_table = tablename
    layer.public = public
    layer.interface = [self.main]
    if exclude_properties:
        layer.exclude_properties = "name"
    if metadatas:
        layer.metadatas = metadatas
    DBSession.add(layer)

    if not public:
        # Non-public layers get a read/write restriction area for the test role.
        ra = RestrictionArea()
        ra.name = "__test_ra"
        ra.layers = [layer]
        ra.roles = [self.role]
        ra.readwrite = True
        if not none_area:
            poly = "POLYGON((4 44, 4 46, 6 46, 6 44, 4 44))"
            ra.area = WKTElement(poly, srid=21781)
        DBSession.add(ra)

    transaction.commit()
    self.layer_ids.append(id)
    return id
class SpatialScale(Base):
    """
    The SpatialScale is used to commonly describe the spatial scale at which
    the data described is valid. metacatalog uses the scale triplet
    (spacing, extent, support), but renames ``'spacing'`` to ``'resolution'``.

    Attributes
    ----------
    id : int
        Unique id of the record. If not specified, the database will assign it.
    resolution : int
        Spatial resolution in meter. The resolution usually describes a grid
        cell size, which only applies to gridded datasets. Use the
        :attr:`resolution_str` property for a string representation
    extent : geoalchemy2.Geometry
        The spatial extent of the dataset is given as a ``'POLYGON'``. While
        metacatalog is capable of storing any kind of valid POLYGON as extent,
        it is best practice to allow only Bounding Boxes on upload.
    support : float
        The support gives the spatial validity for a single observation. It
        specifies the spatial extent at which an observed value is valid. It
        is given as a fraction of resolution. For gridded datasets, it is
        common to set support to 1, as the observations are validated to
        represent the whole grid cell. In case ground truthing data is
        available, the actual footprint fraction of observations can be given
        here. Defaults to ``support=1.0``.

    """
    __tablename__ = 'spatial_scales'

    # columns
    id = Column(Integer, primary_key=True)
    resolution = Column(Integer, nullable=False)
    extent = Column(Geometry(geometry_type='POLYGON', srid=4326),
                    nullable=False)
    # support is a fraction of resolution; must be non-negative (DB check).
    support = Column(Numeric, CheckConstraint('support >= 0'),
                     nullable=False, default=1.0)

    # relationships
    sources = relationship("DataSource", back_populates='spatial_scale')

    @property
    def extent_shape(self):
        """Extent as a shapely geometry (decoded from the DB value)."""
        return to_shape(self.extent)

    @extent_shape.setter
    def extent_shape(self, shape):
        self.extent = from_shape(shape)

    @property
    def resolution_str(self):
        """Human-readable resolution, in km above 1000 m, else in m."""
        if self.resolution / 1000 > 1:
            return '%d km' % (int(self.resolution / 1000))
        return '%.1f m' % self.resolution

    @property
    def support_str(self):
        """Human-readable support (support * resolution), in km or m."""
        # BUG FIX: the original referenced the non-existent attribute
        # ``self.resultion`` (typo for ``resolution``) in both branches,
        # raising AttributeError whenever this property was read.
        if (self.support * self.resolution) / 1000 > 1:
            return '%d km' % (int((self.support * self.resolution) / 1000))
        return '%.1f m' % (self.support * self.resolution)

    def to_dict(self, deep=False) -> dict:
        """To dict

        Return the model as a python dictionary.

        Parameters
        ----------
        deep : bool
            If True, all related objects will be included as dictionary.
            Defaults to False

        Returns
        -------
        obj : dict
            The Model as dict

        """
        # base dictionary
        d = dict(
            id=self.id,
            resolution=self.resolution,
            resolution_str=self.resolution_str,
            extent=self.extent_shape.wkt,
            support=self.support,
            support_str=self.support_str
        )

        if deep:
            d['datasources'] = [s.to_dict(deep=False) for s in self.sources]

        return d
def setup_method(self, _):
    """Build the theme/layer/role/full-text-search fixtures used by this test class."""
    # Always see the diff
    # https://docs.python.org/2/library/unittest.html#unittest.TestCase.maxDiff
    self.maxDiff = None  # pylint: disable=invalid-name
    self._tables = []

    from geoalchemy2 import Geometry
    from sqlalchemy import Column, Table, func, types
    from sqlalchemy.ext.declarative import declarative_base

    from c2cgeoportal_commons.models import DBSession
    from c2cgeoportal_commons.models.main import (
        OGCSERVER_AUTH_GEOSERVER,
        OGCSERVER_TYPE_GEOSERVER,
        FullTextSearch,
        Functionality,
        Interface,
        LayerGroup,
        LayerWMS,
        OGCServer,
        RestrictionArea,
        Role,
        Theme,
    )
    from c2cgeoportal_commons.models.static import User

    setup_db()

    # Two roles/users: role2 additionally carries a restriction extent.
    role1 = Role(name="__test_role1")
    user1 = User(username="******", password="******",
                 settings_role=role1, roles=[role1])
    user1.email = "*****@*****.**"
    role2 = Role(name="__test_role2",
                 extent=WKTElement("POLYGON((1 2, 1 4, 3 4, 3 2, 1 2))",
                                   srid=21781))
    user2 = User(username="******", password="******",
                 settings_role=role2, roles=[role2])

    main = Interface(name="desktop")
    mobile = Interface(name="mobile")

    # A real geo table is (re)created so editable-layer tests have a target.
    engine = DBSession.c2c_rw_bind
    engine.connect()
    a_geo_table = Table(
        "a_geo_table",
        declarative_base(bind=engine).metadata,
        Column("id", types.Integer, primary_key=True),
        Column("geom", Geometry("POINT", srid=21781)),
        schema="geodata",
    )
    self._tables = [a_geo_table]
    a_geo_table.drop(checkfirst=True)
    a_geo_table.create()

    ogcserver = create_default_ogcserver()

    # Private editable layer bound to the geo table above.
    private_layer_edit = LayerWMS(name="__test_private_layer_edit",
                                  public=False)
    private_layer_edit.layer = "__test_private_layer"
    private_layer_edit.geo_table = "a_schema.a_geo_table"
    private_layer_edit.interfaces = [main, mobile]
    private_layer_edit.ogc_server = ogcserver

    public_layer2 = LayerWMS(name="__test_public_layer",
                             layer="__test_public_layer_bis", public=True)
    public_layer2.interfaces = [main, mobile]
    public_layer2.ogc_server = ogcserver

    private_layer = LayerWMS(name="__test_private_layer",
                             layer="__test_private_layer_bis", public=False)
    private_layer.interfaces = [main, mobile]
    private_layer.ogc_server = ogcserver

    # Layer visible only on an interface that is not in the mapfile.
    interface_not_in_mapfile = Interface(name="interface_not_in_mapfile")
    public_layer_not_in_mapfile = LayerWMS(
        name="__test_public_layer_not_in_mapfile",
        layer="__test_public_layer_not_in_mapfile", public=True)
    public_layer_not_in_mapfile.interfaces = [interface_not_in_mapfile]
    public_layer_not_in_mapfile.ogc_server = ogcserver

    # Layer served by an OGC server whose URL points to a bogus mapfile.
    interface_notmapfile = Interface(name="interface_notmapfile")
    ogcserver_notmapfile = OGCServer(name="__test_ogc_server_notmapfile")
    ogcserver_notmapfile.url = mapserv_url + "?map=not_a_mapfile"
    public_layer_not_mapfile = LayerWMS(
        name="__test_public_layer_notmapfile",
        layer="__test_public_layer_notmapfile", public=True)
    public_layer_not_mapfile.interfaces = [interface_notmapfile]
    public_layer_not_mapfile.ogc_server = ogcserver_notmapfile

    # Layer served by a GeoServer-flavoured OGC server.
    interface_geoserver = Interface(name="interface_geoserver")
    ogcserver_geoserver = OGCServer(name="__test_ogc_server_geoserver")
    ogcserver_geoserver.url = mapserv_url
    ogcserver_geoserver.type = OGCSERVER_TYPE_GEOSERVER
    ogcserver_geoserver.auth = OGCSERVER_AUTH_GEOSERVER
    public_layer_geoserver = LayerWMS(
        name="__test_public_layer_geoserver",
        layer="__test_public_layer_geoserver", public=True)
    public_layer_geoserver.interfaces = [interface_geoserver]
    public_layer_geoserver.ogc_server = ogcserver_geoserver

    # Layer whose WMS "layer" attribute is left unset.
    interface_no_layers = Interface(name="interface_no_layers")
    public_layer_no_layers = LayerWMS(name="__test_public_layer_no_layers",
                                      public=True)
    public_layer_no_layers.interfaces = [interface_no_layers]
    public_layer_no_layers.ogc_server = ogcserver

    # One group gathering every layer, under a single theme.
    group = LayerGroup(name="__test_layer_group")
    group.children = [
        private_layer_edit,
        public_layer2,
        public_layer_not_in_mapfile,
        public_layer_not_mapfile,
        public_layer_geoserver,
        public_layer_no_layers,
        private_layer,
    ]
    theme = Theme(name="__test_theme")
    theme.children = [group]
    theme.interfaces = [
        main,
        interface_not_in_mapfile,
        interface_notmapfile,
        interface_geoserver,
        interface_no_layers,
    ]

    # Two functionalities sharing a name but with distinct values.
    functionality1 = Functionality(name="test_name", value="test_value_1")
    functionality2 = Functionality(name="test_name", value="test_value_2")
    theme.functionalities = [functionality1, functionality2]

    # Restriction areas: read-only for role1, read/write for role2.
    poly = "POLYGON((-100 0, -100 20, 100 20, 100 0, -100 0))"
    area = WKTElement(poly, srid=21781)
    RestrictionArea(
        name="__test_ra1",
        description="",
        layers=[private_layer_edit, private_layer],
        roles=[role1],
        area=area,
    )
    area = WKTElement(poly, srid=21781)
    RestrictionArea(
        name="__test_ra2",
        description="",
        layers=[private_layer_edit, private_layer],
        roles=[role2],
        area=area,
        readwrite=True,
    )

    # Three full-text-search entries; entry3 has no layer name on purpose.
    entry1 = FullTextSearch()
    entry1.label = "label1"
    entry1.layer_name = "layer1"
    entry1.ts = func.to_tsvector("french", "soleil travail")
    entry1.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry1.public = True

    entry2 = FullTextSearch()
    entry2.label = "label1"
    entry2.layer_name = "layer1"
    entry2.ts = func.to_tsvector("french", "soleil travail")
    entry2.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry2.public = True

    entry3 = FullTextSearch()
    entry3.label = "label1"
    entry3.layer_name = None
    entry3.ts = func.to_tsvector("french", "soleil travail")
    entry3.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry3.public = True

    DBSession.add_all([user1, user2, theme, entry1, entry2, entry3])
    DBSession.flush()
    # Keep role1's id for assertions after the session is committed away.
    self.role1_id = role1.id
    transaction.commit()
class Shape(db.Model):
    """A uniquely named POLYGON stored via GeoAlchemy2."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(128), unique=True, nullable=False)
    geo = db.Column(Geometry(geometry_type='POLYGON'), nullable=False)  # no SRID set — TODO confirm expected CRS
class Carpool(db.Model, UuidMixin):
    """A scheduled carpool: a driver, a departure point/time, and ride requests."""

    __tablename__ = 'carpools'

    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime(timezone=True),
                           default=datetime.datetime.utcnow)
    from_place = db.Column(db.String(120))
    from_point = db.Column(Geometry('POINT'))
    leave_time = db.Column(db.DateTime(timezone=True))
    return_time = db.Column(db.DateTime(timezone=True))
    max_riders = db.Column(db.Integer)
    notes = db.Column(db.Text)
    vehicle_description = db.Column(db.Text)
    driver_id = db.Column(db.Integer, db.ForeignKey('people.id'))
    destination_id = db.Column(db.Integer, db.ForeignKey('destinations.id'))

    ride_requests = relationship("RideRequest", cascade="all, delete-orphan")
    destination = relationship("Destination")
    driver = relationship("Person")

    def get_ride_requests_query(self, statuses=None):
        """Query over this carpool's ride requests, optionally filtered by status."""
        base_query = RideRequest.query.filter_by(carpool_id=self.id)
        if not statuses:
            return base_query
        return base_query.filter(RideRequest.status.in_(statuses))

    def get_current_user_ride_request(self):
        """The logged-in user's request for this carpool; None when anonymous."""
        if current_user.is_anonymous:
            return None
        return (self.get_ride_requests_query()
                .filter_by(person_id=current_user.id)
                .first())

    @property
    def current_user_is_driver(self):
        """True when the logged-in user is this carpool's driver."""
        return current_user.id == self.driver_id

    def get_riders(self, statuses):
        """People whose ride request for this carpool has one of ``statuses``."""
        matching_requests = self.get_ride_requests_query(statuses).all()
        if not matching_requests:
            return []
        rider_ids = [req.person_id for req in matching_requests]
        return Person.query.filter(Person.id.in_(rider_ids)).all()

    @property
    def riders(self):
        """Riders whose request has been approved."""
        return self.get_riders(['approved'])

    @property
    def riders_and_potential_riders(self):
        """Approved riders plus those still awaiting a decision."""
        return self.get_riders(['approved', 'requested'])

    @property
    def seats_available(self):
        """Seats remaining once approved riders are counted."""
        approved_count = self.get_ride_requests_query(['approved']).count()
        return self.max_riders - approved_count