# Example 1
    Column("z_name", TEXT, unique=True, nullable=False),
    schema="bra",
)
# Alpine massif, like "Maurienne", "Chablais", "Aspe ossau"
# Table "bra.massif": one row per Alpine massif, with its outline geometry.
MassifTable = AbstractSpatialTable(
    "massif",
    metadata,
    # Surrogate primary key, generated client-side via uuid4.
    Column("m_id", UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
    # Human-readable massif name; must be unique and present.
    Column("m_name", TEXT, unique=True, nullable=False),
    Column(
        "m_department",
        UUID(as_uuid=True),
        # Each massif belongs to exactly one department (bra.department).
        ForeignKey("bra.department.d_id"),
        nullable=False,
    ),
    # Massif outline stored as a WGS84 (EPSG:4326) polygon.
    Column("the_geom", Geometry("POLYGON", srid=4326), nullable=False),
    schema="bra",
)

RiskTable = AbstractTable(
    "risk",
    metadata,
    Column("r_id", UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
    Column(
        "r_record_id",
        UUID(as_uuid=True),
        ForeignKey("bra.record.br_id"),
        nullable=False,
    ),
    Column("r_altitude_limit", TEXT),
    Column("r_risk", Integer, CheckConstraint("r_risk>0 AND r_risk<=5")),
# Example 2
class Business(db.Model):
    """ Business model. """

    __tablename__ = 'businesses'

    biz_id = db.Column(db.Integer, autoincrement=True, primary_key=True)
    biz_name = db.Column(db.String(100), nullable=False)
    address = db.Column(db.String(100), nullable=True)
    city = db.Column(db.String(64), nullable=True)
    state = db.Column(db.String(64), nullable=True)
    country = db.Column(db.String(64), nullable=True)
    zipcode = db.Column(db.String(64), nullable=True)
    phone = db.Column(db.String(64), nullable=True)
    email = db.Column(db.String(64), nullable=True)
    valid_email = db.Column(db.Boolean, nullable=False, default=False)
    url = db.Column(db.String(100), nullable=True)
    category = db.Column(db.String(64), nullable=True)
    days_open = db.Column(db.String(64), nullable=True)
    open_time = db.Column(db.Integer, nullable=True)
    close_time = db.Column(db.Integer, nullable=True)
    claimed = db.Column(db.Boolean, nullable=False, default=False)
    biz_pic = db.Column(db.String(100), nullable=True)
    lat = db.Column(db.Float, nullable=True)
    lng = db.Column(db.Float, nullable=True)
    location = db.Column(Geometry(geometry_type='POINT'), nullable=True)
    # Raw string: "\." was an invalid escape sequence in a plain string
    # (DeprecationWarning, SyntaxWarning on 3.12+).  The regex bytes sent to
    # the database are identical.
    __table_args__ = (db.CheckConstraint(
        r"email ~ '^[A-Z0-9a-z._%+-]+@[A-Z0-9a-z.-]+\.[A-Za-z]{2,}$'"), )

    checkins = db.relationship('CheckIn', backref='biz')
    referrals = db.relationship('Referral', backref='biz')
    reviews = db.relationship('Review', backref='biz')
    promos = db.relationship('Promo', backref='biz')

    def __repr__(self):
        """ Displays info. """

        return u'<biz_id={} biz_name={}>'.format(self.biz_id, self.biz_name)

    def is_owned(self, user_id):
        """ Return True if this business has been claimed by the given user. """

        owned = UserBiz.query.filter(UserBiz.biz_id == self.biz_id,
                                     UserBiz.user_id == user_id).first()

        return owned is not None

    def is_claimed(self):
        """
        Return True if the business has been claimed by any user (and is
        therefore no longer available to be claimed).
        """

        return UserBiz.query.filter_by(biz_id=self.biz_id).first() is not None

    def is_checkin(self, user_id):
        """ Return True if the given user has checked into this business
        today. """

        today = datetime.today().date()

        checkin = CheckIn.query.filter(CheckIn.biz_id == self.biz_id,
                                       CheckIn.user_id == user_id,
                                       CheckIn.checkin_date == today).first()

        return checkin is not None

    def has_reviewed(self, user_id):
        """ Return True if this business has been reviewed by the given
        user. """

        review = Review.query.filter(Review.biz_id == self.biz_id,
                                     Review.user_id == user_id).first()

        return review is not None

    def visit_not_reviewed(self, user_id):
        """ Return True if the user has checked in here but never reviewed
        it. """

        # Businesses the user checked into whose ids are absent from the set
        # of businesses the user has reviewed.
        checkins_not_reviewed = (db.session.query(distinct(
            CheckIn.biz_id)).filter(
                CheckIn.user_id == user_id,
                CheckIn.biz_id.notin_(
                    db.session.query(distinct(Review.biz_id)).filter(
                        Review.user_id == user_id))).all())

        return any(item[0] == self.biz_id for item in checkins_not_reviewed)

    def tot_checkins(self):
        """ Total check-ins to the business. """

        return len(self.checkins)

    def tot_user_checkins(self, user_id):
        """ Total check-ins to the business by a specific user. """

        return CheckIn.query.filter(CheckIn.user_id == user_id,
                                    CheckIn.biz_id == self.biz_id).count()

    def tot_reviews(self):
        """ Total reviews for the business. """

        return len(self.reviews)

    def avg_score(self):
        """
        Average rating of the business.

        Returns 0 when there are no reviews; the previous implementation
        raised ZeroDivisionError in that case.
        """

        if not self.reviews:
            return 0

        tot_score = 0
        for review in self.reviews:
            # A revised review supersedes the original rating.
            if review.revise_review:
                tot_score += review.new_rating
            else:
                tot_score += review.rating

        return tot_score / self.tot_reviews()

    def tot_promos_red(self):
        """ Total promotions redeemed by consumers. """

        return sum(1 for promo in self.promos
                   for item in promo.user_promos if item.redeemed is True)

    def tot_refs(self):
        """ Total referrals. """

        return len(self.referrals)

    def tot_refs_red(self):
        """ Total referrals that have been redeemed. """

        return sum(1 for item in self.referrals
                   if item.user_promo.redeemed is True)

    def deg_of_sep(self, user2_id, degrees=6):
        """
        Calculates degrees of separation, if any, between a user and business,
        if claimed. If count is zero, the user2 is user1. Otherwise, a count
        of None means no connection. Stops searching after `degrees` degrees.
        """

        if not self.users:
            # Unclaimed business: treated as zero separation.
            return 0

        visit = []      # BFS frontier, used as a FIFO queue
        seen = set()    # user ids already expanded
        visit.append(self.users[0].user_id)
        sought = user2_id
        anchor = self.users[0].user_id  # last node of the current BFS level
        count = 0

        while visit:
            current = visit.pop(0)
            if current == sought:
                return count
            if current not in seen:
                seen.add(current)
                for friend in friends_lst(current, seen):
                    visit.append(friend)
            if current == anchor:
                # Finished a BFS level: advance the anchor and the depth.
                if visit:
                    anchor = visit[-1]
                count += 1
                if count == (degrees + 1):
                    # Give up past the requested number of degrees.
                    return None

        return count
# Example 3
class Task(db.Model):
    """ Describes an individual mapping Task """

    __tablename__ = "tasks"

    # Table has composite PK on (id and project_id)
    id = db.Column(db.Integer, primary_key=True)
    project_id = db.Column(db.Integer,
                           db.ForeignKey("projects.id"),
                           index=True,
                           primary_key=True)
    x = db.Column(db.Integer)
    y = db.Column(db.Integer)
    zoom = db.Column(db.Integer)
    extra_properties = db.Column(db.Unicode)
    # Tasks need to be split differently if created from an arbitrary grid or were clipped to the edge of the AOI
    is_square = db.Column(db.Boolean, default=True)
    geometry = db.Column(Geometry("MULTIPOLYGON", srid=4326))
    task_status = db.Column(db.Integer, default=TaskStatus.READY.value)
    locked_by = db.Column(db.BigInteger,
                          db.ForeignKey("users.id", name="fk_users_locked"),
                          index=True)
    mapped_by = db.Column(db.BigInteger,
                          db.ForeignKey("users.id", name="fk_users_mapper"),
                          index=True)
    validated_by = db.Column(db.BigInteger,
                             db.ForeignKey("users.id",
                                           name="fk_users_validator"),
                             index=True)

    # Mapped objects
    task_history = db.relationship(TaskHistory, cascade="all")
    task_annotations = db.relationship(TaskAnnotation, cascade="all")
    lock_holder = db.relationship(User, foreign_keys=[locked_by])
    mapper = db.relationship(User, foreign_keys=[mapped_by])

    def create(self):
        """ Creates and saves the current model to the DB """
        db.session.add(self)
        db.session.commit()

    def update(self):
        """ Updates the DB with the current state of the Task """
        db.session.commit()

    def delete(self):
        """ Deletes the current model from the DB """
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def from_geojson_feature(cls, task_id, task_feature):
        """
        Constructs and validates a task from a GeoJson feature object
        :param task_id: Unique ID for the task
        :param task_feature: A geoJSON feature object
        :raises InvalidGeoJson, InvalidData
        """
        if type(task_feature) is not geojson.Feature:
            raise InvalidGeoJson("Task: Invalid GeoJson should be a feature")

        task_geometry = task_feature.geometry

        if type(task_geometry) is not geojson.MultiPolygon:
            raise InvalidGeoJson("Task: Geometry must be a MultiPolygon")

        is_valid_geojson = geojson.is_valid(task_geometry)
        if is_valid_geojson["valid"] == "no":
            raise InvalidGeoJson(
                f"Task: Invalid MultiPolygon - {is_valid_geojson['message']}")

        task = cls()
        try:
            task.x = task_feature.properties["x"]
            task.y = task_feature.properties["y"]
            task.zoom = task_feature.properties["zoom"]
            task.is_square = task_feature.properties["isSquare"]
        except KeyError as e:
            raise InvalidData(f"Task: Expected property not found: {str(e)}")

        if "extra_properties" in task_feature.properties:
            task.extra_properties = json.dumps(
                task_feature.properties["extra_properties"])

        task.id = task_id
        task_geojson = geojson.dumps(task_geometry)
        task.geometry = ST_SetSRID(ST_GeomFromGeoJSON(task_geojson), 4326)

        return task

    @staticmethod
    def get(task_id: int, project_id: int):
        """
        Gets specified task
        :param task_id: task ID in scope
        :param project_id: project ID in scope
        :return: Task if found otherwise None
        """
        return Task.query.filter_by(id=task_id,
                                    project_id=project_id).one_or_none()

    @staticmethod
    def get_tasks(project_id: int, task_ids: List[int]):
        """ Get all tasks that match supplied list """
        return Task.query.filter(Task.project_id == project_id,
                                 Task.id.in_(task_ids)).all()

    @staticmethod
    def get_all_tasks(project_id: int):
        """ Get all tasks for a given project """
        return Task.query.filter(Task.project_id == project_id).all()

    @staticmethod
    def auto_unlock_delta():
        """ Duration after which a locked task is auto-unlocked. """
        return parse_duration(current_app.config["TASK_AUTOUNLOCK_AFTER"])

    @staticmethod
    def auto_unlock_tasks(project_id: int):
        """Unlock all tasks locked for longer than the auto-unlock delta"""
        expiry_delta = Task.auto_unlock_delta()
        lock_duration = (datetime.datetime.min +
                         expiry_delta).time().isoformat()
        expiry_date = datetime.datetime.utcnow() - expiry_delta
        old_locks_query = """SELECT t.id
            FROM tasks t, task_history th
            WHERE t.id = th.task_id
            AND t.project_id = th.project_id
            AND t.task_status IN (1,3)
            AND th.action IN ( 'LOCKED_FOR_VALIDATION','LOCKED_FOR_MAPPING' )
            AND th.action_text IS NULL
            AND t.project_id = :project_id
            AND th.action_date <= :expiry_date
            """

        old_tasks = db.engine.execute(text(old_locks_query),
                                      project_id=project_id,
                                      expiry_date=str(expiry_date))

        if old_tasks.rowcount == 0:
            # no tasks older than the delta found, return without further processing
            return

        for old_task in old_tasks:
            task = Task.get(old_task[0], project_id)
            task.auto_unlock_expired_tasks(expiry_date, lock_duration)

    def auto_unlock_expired_tasks(self, expiry_date, lock_duration):
        """Unlock all tasks locked before expiry date. Clears task lock if needed"""
        TaskHistory.update_expired_and_locked_actions(self.project_id, self.id,
                                                      expiry_date,
                                                      lock_duration)

        last_action = TaskHistory.get_last_locked_or_auto_unlocked_action(
            self.project_id, self.id)
        if last_action.action in [
                "AUTO_UNLOCKED_FOR_MAPPING",
                "AUTO_UNLOCKED_FOR_VALIDATION",
        ]:
            self.clear_lock()

    def is_mappable(self):
        """ Determines if task in scope is in suitable state for mapping """
        return TaskStatus(self.task_status) in [
            TaskStatus.READY,
            TaskStatus.INVALIDATED,
        ]

    def set_task_history(self,
                         action,
                         user_id,
                         comment=None,
                         new_state=None,
                         mapping_issues=None):
        """
        Sets the task history for the action that the user has just performed
        :param user_id: ID of user performing the action
        :param action: Action the user has performed
        :param comment: Comment user has added
        :param new_state: New state of the task
        :param mapping_issues: Identified issues leading to invalidation
        """
        history = TaskHistory(self.id, self.project_id, user_id)

        if action in [
                TaskAction.LOCKED_FOR_MAPPING, TaskAction.LOCKED_FOR_VALIDATION
        ]:
            history.set_task_locked_action(action)
        elif action == TaskAction.COMMENT:
            history.set_comment_action(comment)
        elif action == TaskAction.STATE_CHANGE:
            history.set_state_change_action(new_state)
        elif action in [
                TaskAction.AUTO_UNLOCKED_FOR_MAPPING,
                TaskAction.AUTO_UNLOCKED_FOR_VALIDATION,
        ]:
            history.set_auto_unlock_action(action)

        if mapping_issues is not None:
            history.task_mapping_issues = mapping_issues

        self.task_history.append(history)
        return history

    def lock_task_for_mapping(self, user_id: int):
        """ Lock the task for mapping by the given user. """
        self.set_task_history(TaskAction.LOCKED_FOR_MAPPING, user_id)
        self.task_status = TaskStatus.LOCKED_FOR_MAPPING.value
        self.locked_by = user_id
        self.update()

    def lock_task_for_validating(self, user_id: int):
        """ Lock the task for validation by the given user. """
        self.set_task_history(TaskAction.LOCKED_FOR_VALIDATION, user_id)
        self.task_status = TaskStatus.LOCKED_FOR_VALIDATION.value
        self.locked_by = user_id
        self.update()

    def reset_task(self, user_id: int):
        """ Reset the task to READY, clearing any lock and mapper/validator. """
        if TaskStatus(self.task_status) in [
                TaskStatus.LOCKED_FOR_MAPPING,
                TaskStatus.LOCKED_FOR_VALIDATION,
        ]:
            self.record_auto_unlock()

        self.set_task_history(TaskAction.STATE_CHANGE, user_id, None,
                              TaskStatus.READY)
        self.mapped_by = None
        self.validated_by = None
        self.locked_by = None
        self.task_status = TaskStatus.READY.value
        self.update()

    def clear_task_lock(self):
        """
        Unlocks task in scope in the database.  Clears the lock as though it never happened.
        No history of the unlock is recorded.
        :return:
        """
        # clear the lock action for the task in the task history
        last_action = TaskHistory.get_last_locked_action(
            self.project_id, self.id)
        last_action.delete()

        # Set locked_by to null and status to last status on task
        self.clear_lock()

    def record_auto_unlock(self, lock_duration=None):
        """
        Record an AUTO_UNLOCKED action in the task history and clear the lock.
        :param lock_duration: ISO-formatted lock duration for the history
            record; defaults to None so callers that don't know the duration
            (e.g. reset_task) no longer raise TypeError.
        """
        locked_user = self.locked_by
        last_action = TaskHistory.get_last_locked_action(
            self.project_id, self.id)
        next_action = (TaskAction.AUTO_UNLOCKED_FOR_MAPPING
                       if last_action.action == "LOCKED_FOR_MAPPING" else
                       TaskAction.AUTO_UNLOCKED_FOR_VALIDATION)

        self.clear_task_lock()

        # Add AUTO_UNLOCKED action in the task history
        auto_unlocked = self.set_task_history(action=next_action,
                                              user_id=locked_user)
        auto_unlocked.action_text = lock_duration
        self.update()

    def unlock_task(self,
                    user_id,
                    new_state=None,
                    comment=None,
                    undo=False,
                    issues=None):
        """ Unlock task and ensure duration task locked is saved in History """
        if comment:
            self.set_task_history(
                action=TaskAction.COMMENT,
                comment=comment,
                user_id=user_id,
                mapping_issues=issues,
            )

        history = self.set_task_history(
            action=TaskAction.STATE_CHANGE,
            new_state=new_state,
            user_id=user_id,
            mapping_issues=issues,
        )

        if (new_state in [TaskStatus.MAPPED, TaskStatus.BADIMAGERY]
                and TaskStatus(
                    self.task_status) != TaskStatus.LOCKED_FOR_VALIDATION):
            # Don't set mapped if state being set back to mapped after validation
            self.mapped_by = user_id
        elif new_state == TaskStatus.VALIDATED:
            TaskInvalidationHistory.record_validation(self.project_id, self.id,
                                                      user_id, history)
            self.validated_by = user_id
        elif new_state == TaskStatus.INVALIDATED:
            TaskInvalidationHistory.record_invalidation(
                self.project_id, self.id, user_id, history)
            self.mapped_by = None
            self.validated_by = None

        if not undo:
            # Using a slightly evil side effect of Actions and Statuses having the same name here :)
            TaskHistory.update_task_locked_with_duration(
                self.id, self.project_id, TaskStatus(self.task_status),
                user_id)

        self.task_status = new_state.value
        self.locked_by = None
        self.update()

    def reset_lock(self, user_id, comment=None):
        """ Removes a current lock from a task, resets to last status and updates history with duration of lock """
        if comment:
            self.set_task_history(action=TaskAction.COMMENT,
                                  comment=comment,
                                  user_id=user_id)

        # Using a slightly evil side effect of Actions and Statuses having the same name here :)
        TaskHistory.update_task_locked_with_duration(
            self.id, self.project_id, TaskStatus(self.task_status), user_id)
        self.clear_lock()

    def clear_lock(self):
        """ Resets to last status and removes current lock from a task """
        self.task_status = TaskHistory.get_last_status(self.project_id,
                                                       self.id).value
        self.locked_by = None
        self.update()

    @staticmethod
    def get_tasks_as_geojson_feature_collection(
        project_id,
        task_ids_str: str = None,
        order_by: str = None,
        order_by_type: str = "ASC",
        status: int = None,
    ):
        """
        Creates a geoJson.FeatureCollection object for tasks related to the supplied project ID
        :param project_id: Owning project ID
        :param task_ids_str: Optional comma-separated task ids to restrict to
        :param order_by: sorting option: available values update_date and building_area_diff
        :param order_by_type: ASC or DESC
        :param status: task status id to filter by
        :return: geojson.FeatureCollection
        :raises NotFound: when the project has no (matching) tasks
        """
        query = db.session.query(
            Task.id,
            Task.x,
            Task.y,
            Task.zoom,
            Task.is_square,
            Task.task_status,
            Task.geometry.ST_AsGeoJSON().label("geojson"),
            Task.locked_by,
        )

        filters = [Task.project_id == project_id]

        if task_ids_str:
            task_ids = map(int, task_ids_str.split(","))
            tasks = Task.get_tasks(project_id, task_ids)
            if not tasks:
                raise NotFound()
            tasks_filters = [task.id for task in tasks]
            filters = [
                Task.project_id == project_id,
                Task.id.in_(tasks_filters)
            ]
        else:
            tasks = Task.get_all_tasks(project_id)
            if not tasks:
                raise NotFound()

        # "is not None" rather than truthiness: 0 is a valid status
        # (TaskStatus.READY) and was previously silently ignored.
        if status is not None:
            filters.append(Task.task_status == status)

        if order_by == "effort_prediction":
            query = query.outerjoin(TaskAnnotation).filter(*filters)
            if order_by_type == "DESC":
                query = query.order_by(
                    desc(
                        cast(
                            cast(
                                TaskAnnotation.
                                properties["building_area_diff"], Text),
                            Float,
                        )))
            else:
                query = query.order_by(
                    cast(
                        cast(TaskAnnotation.properties["building_area_diff"],
                             Text),
                        Float,
                    ))
        else:
            query = query.filter(*filters)

        project_tasks = query.all()

        tasks_features = []
        for task in project_tasks:
            task_geometry = geojson.loads(task.geojson)
            task_properties = dict(
                taskId=task.id,
                taskX=task.x,
                taskY=task.y,
                taskZoom=task.zoom,
                taskIsSquare=task.is_square,
                taskStatus=TaskStatus(task.task_status).name,
                lockedBy=task.locked_by,
            )

            feature = geojson.Feature(geometry=task_geometry,
                                      properties=task_properties)
            tasks_features.append(feature)

        return geojson.FeatureCollection(tasks_features)

    @staticmethod
    def get_tasks_as_geojson_feature_collection_no_geom(project_id):
        """
        Creates a geoJson.FeatureCollection object for all tasks related to the supplied project ID without geometry
        :param project_id: Owning project ID
        :return: geojson.FeatureCollection
        """
        project_tasks = (db.session.query(
            Task.id, Task.x, Task.y, Task.zoom, Task.is_square,
            Task.task_status).filter(Task.project_id == project_id).all())

        tasks_features = []
        for task in project_tasks:
            task_properties = dict(
                taskId=task.id,
                taskX=task.x,
                taskY=task.y,
                taskZoom=task.zoom,
                taskIsSquare=task.is_square,
                taskStatus=TaskStatus(task.task_status).name,
            )

            feature = geojson.Feature(properties=task_properties)
            tasks_features.append(feature)

        return geojson.FeatureCollection(tasks_features)

    @staticmethod
    def get_mapped_tasks_by_user(project_id: int):
        """ Gets all mapped tasks for supplied project grouped by user"""

        # Raw SQL is easier to understand than SQL alchemy here :)
        sql = """select u.username, u.mapping_level, count(distinct(t.id)), json_agg(distinct(t.id)),
                            max(th.action_date) last_seen, u.date_registered, u.last_validation_date
                      from tasks t,
                           task_history th,
                           users u
                     where t.project_id = th.project_id
                       and t.id = th.task_id
                       and t.mapped_by = u.id
                       and t.project_id = :project_id
                       and t.task_status = 2
                       and th.action_text = 'MAPPED'
                     group by u.username, u.mapping_level, u.date_registered, u.last_validation_date"""

        results = db.engine.execute(text(sql), project_id=project_id)

        mapped_tasks_dto = MappedTasks()
        for row in results:
            user_mapped = MappedTasksByUser()
            user_mapped.username = row[0]
            user_mapped.mapping_level = MappingLevel(row[1]).name
            user_mapped.mapped_task_count = row[2]
            user_mapped.tasks_mapped = row[3]
            user_mapped.last_seen = row[4]
            user_mapped.date_registered = row[5]
            user_mapped.last_validation_date = row[6]

            mapped_tasks_dto.mapped_tasks.append(user_mapped)

        return mapped_tasks_dto

    @staticmethod
    def get_max_task_id_for_project(project_id: int):
        """Gets the highest task id currently in use on a project"""
        sql = """select max(id) from tasks where project_id = :project_id GROUP BY project_id"""
        result = db.engine.execute(text(sql), project_id=project_id)
        if result.rowcount == 0:
            raise NotFound()
        for row in result:
            return row[0]

    def as_dto(
        self,
        task_history: List[TaskHistoryDTO] = None,
        last_updated: datetime.datetime = None,
    ):
        """
        Just converts to a TaskDTO.
        :param task_history: history entries to embed (default: empty list;
            the previous mutable default argument `[]` was an anti-pattern)
        :param last_updated: optional timestamp to embed
        """
        task_dto = TaskDTO()
        task_dto.task_id = self.id
        task_dto.project_id = self.project_id
        task_dto.task_status = TaskStatus(self.task_status).name
        task_dto.lock_holder = self.lock_holder.username if self.lock_holder else None
        task_dto.task_history = task_history if task_history is not None else []
        if last_updated:
            task_dto.last_updated = last_updated
        task_dto.auto_unlock_seconds = Task.auto_unlock_delta().total_seconds()
        return task_dto

    def as_dto_with_instructions(self,
                                 preferred_locale: str = "en") -> TaskDTO:
        """Get dto with any task instructions"""
        task_history = []
        for action in reversed(self.task_history):
            history = TaskHistoryDTO()
            history.history_id = action.id
            history.action = action.action
            history.action_text = action.action_text
            history.action_date = action.action_date
            history.action_by = (action.actioned_by.username
                                 if action.actioned_by else None)
            history.picture_url = (action.actioned_by.picture_url
                                   if action.actioned_by else None)
            if action.task_mapping_issues:
                history.issues = [
                    issue.as_dto() for issue in action.task_mapping_issues
                ]

            task_history.append(history)

        task_dto = self.as_dto(task_history)

        per_task_instructions = self.get_per_task_instructions(
            preferred_locale)

        # If we don't have instructions in preferred locale try again for default locale
        task_dto.per_task_instructions = (
            per_task_instructions if per_task_instructions else
            self.get_per_task_instructions(self.projects.default_locale))

        annotations = self.get_per_task_annotations()
        task_dto.task_annotations = annotations if annotations else []

        return task_dto

    def get_per_task_annotations(self):
        """ DTOs for all annotations attached to this task. """
        return [ta.get_dto() for ta in self.task_annotations]

    def get_per_task_instructions(self, search_locale: str) -> str:
        """ Gets any per task instructions attached to the project """
        project_info = self.projects.project_info.all()

        for info in project_info:
            if info.locale == search_locale:
                return self.format_per_task_instructions(
                    info.per_task_instructions)

    def format_per_task_instructions(self, instructions) -> str:
        """ Format instructions by looking for X, Y, Z tokens and replacing them with the task values """
        if not instructions:
            return ""  # No instructions so return empty string

        properties = {}

        if self.x:
            properties["x"] = str(self.x)
        if self.y:
            properties["y"] = str(self.y)
        if self.zoom:
            properties["z"] = str(self.zoom)
        if self.extra_properties:
            properties.update(json.loads(self.extra_properties))

        try:
            instructions = instructions.format(**properties)
        except KeyError:
            # Unknown token in the template: leave instructions unformatted.
            pass
        return instructions

    def copy_task_history(self) -> list:
        """ Detach and re-add this task's history entries as new rows. """
        copies = []
        for entry in self.task_history:
            db.session.expunge(entry)
            make_transient(entry)
            entry.id = None
            entry.task_id = None
            db.session.add(entry)
            copies.append(entry)

        return copies

    @staticmethod
    def get_locked_tasks_for_user(user_id: int):
        """ Gets tasks on project owned by specified user id"""
        tasks = Task.query.filter_by(locked_by=user_id)
        tasks_dto = LockedTasksForUser()
        tasks_dto.locked_tasks = []
        for task in tasks:
            tasks_dto.locked_tasks.append(task.id)
            tasks_dto.project = task.project_id
            tasks_dto.task_status = TaskStatus(task.task_status).name

        return tasks_dto

    @staticmethod
    def get_locked_tasks_details_for_user(user_id: int):
        """ Gets tasks on project owned by specified user id"""
        tasks = Task.query.filter_by(locked_by=user_id)

        return list(tasks)
# Example 4
 def add_geometry_column(cls):
     """Attach a deferred MULTILINESTRING ``geom`` column to *cls* if absent."""
     if hasattr(cls, 'geom'):
         return
     # Import locally so geoalchemy2 is only required when the column is added.
     from geoalchemy2 import Geometry
     cls.geom = deferred(Column(Geometry('MULTILINESTRING')))
# Example 5
class Estado(Base):
    """
    Class to hold a Brazilian state.

    Columns largely mirror a KML/shapefile import (tessellate, extrude,
    visibility, draworder, icon) plus a MULTILINESTRING geometry.
    """
    __tablename__ = 'estados'
    gid = Column(Integer, Sequence('estados_gid_pkey'), primary_key=True, nullable=False)
    name = Column(Unicode(80), nullable=True)
    descriptio = Column(Unicode(80), nullable=True)
    timestamp = Column(Date, nullable=True)
    begin = Column(Date, nullable=True)
    end = Column(Date, nullable=True)
    altitudemo = Column(Unicode(80), nullable=True)
    tessellate = Column(Numeric(10, 0), nullable=True)
    extrude = Column(Numeric(10, 0), nullable=True)
    visibility = Column(Numeric(10, 0), nullable=True)
    draworder = Column(Numeric(10, 0), nullable=True)
    icon = Column(Unicode(80), nullable=True)

    # Add geometry type
    geom = Column(Geometry('MULTILINESTRING'))

    def __init__(self,
                 geom,
                 name=None,
                 descriptio=None,
                 timestamp=None,
                 begin=None,
                 end=None,
                 date=None,
                 altitudemo=None,
                 tessellate=None,
                 extrude=None,
                 visibility=None,
                 draworder=None,
                 icon=None):
        """
        Building method for the Class
        :param geom: MultiLineString Geometry type
        :param name: Place name
        :param descriptio: Description
        :param timestamp:
        :param begin:
        :param end:
        :param date: NOTE(review): stored on the instance but no ``date``
            Column is declared above, so it is never persisted — confirm
            whether a column is missing or the parameter is dead.
        :param altitudemo:
        :param tessellate:
        :param extrude:
        :param visibility:
        :param draworder:
        :param icon:
        :return:
        """
        self.geom = geom
        self.name = name
        self.descriptio = descriptio
        self.timestamp = timestamp
        self.begin = begin
        self.end = end
        self.date = date
        self.altitudemo = altitudemo
        self.tessellate = tessellate
        self.extrude = extrude
        self.visibility = visibility
        self.draworder = draworder
        self.icon = icon

    def __repr__(self):
        """
        Return a string listing the parameters of the instance.
        """
        return "<Estado('%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s')>" % (
            self.geom,
            self.name,
            self.descriptio,
            self.timestamp,
            self.begin,
            self.end,
            self.date,
            self.altitudemo,
            self.tessellate,
            self.extrude,
            self.visibility,
            self.draworder,
            self.icon
        )
Esempio n. 6
0
    def _create_layer(
        self,
        public=False,
        none_area=False,
        attr_list=False,
        exclude_properties=False,
        metadatas=None,
        geom_type=False,
    ):
        """
        This function is central for this test class.

        It creates a layer with two features, and associates a restriction area to it.

        :param public: make the layer public (skips RestrictionArea creation)
        :param none_area: create the restriction area without a polygon
        :param attr_list: insert an extra feature with a comma-separated name
        :param exclude_properties: set layer.exclude_properties to "name"
        :param metadatas: optional metadata list assigned to the layer
        :param geom_type: use a typed POINT geometry column instead of generic
        :return: the id of the created layer (also used as the table suffix)
        """
        import transaction
        from geoalchemy2 import Geometry, WKTElement
        from sqlalchemy import CheckConstraint, Column, ForeignKey, Table, types
        from sqlalchemy.ext.declarative import declarative_base

        from c2cgeoportal_commons.models import DBSession
        from c2cgeoportal_commons.models.main import LayerWMS, OGCServer, RestrictionArea

        if self._tables is None:
            self._tables = []

        # Each call gets a unique index so test tables don't collide.
        self.__class__._table_index += 1
        id = self.__class__._table_index

        engine = DBSession.c2c_rw_bind
        connection = engine.connect()

        if not self.metadata:
            self.metadata = declarative_base(bind=engine).metadata

        tablename = f"table_{id:d}"

        # Child table referenced by the main table via a foreign key.
        table1 = Table(
            f"{tablename!s}_child",
            self.metadata,
            Column("id", types.Integer, primary_key=True),
            Column("name", types.Unicode),
            schema="public",
        )
        if geom_type:
            table1.append_column(Column("geom", Geometry("POINT", srid=21781)))
        else:
            table1.append_column(Column("geom", Geometry(srid=21781)))
        self._tables.append(table1)

        table2 = Table(
            tablename,
            self.metadata,
            Column("id", types.Integer, primary_key=True),
            Column("child_id", types.Integer,
                   ForeignKey(f"public.{tablename!s}_child.id")),
            Column("name", types.Unicode),
            Column(
                "email",
                types.Unicode,
                CheckConstraint(
                    """email ~* '^[A-Za-z0-9._%%-]
                                      +@[A-Za-z0-9.-]+[.][A-Za-z]+$'""",
                    name="proper_email",
                ),
            ),
            Column("last_update_user", types.Unicode),
            Column("last_update_date", types.DateTime),
            schema="public",
        )
        if geom_type:
            table2.append_column(Column("geom", Geometry("POINT", srid=21781)))
        else:
            table2.append_column(Column("geom", Geometry(srid=21781)))
        self._tables.append(table2)

        # Drop any leftovers from a previous run, then recreate fresh tables.
        table1.drop(checkfirst=True)
        table2.drop(checkfirst=True)
        table1.create()
        table2.create()

        ins = table1.insert().values(name="c1é")
        c1_id = connection.execute(ins).inserted_primary_key[0]
        ins = table1.insert().values(name="c2é")
        c2_id = connection.execute(ins).inserted_primary_key[0]

        ins = table2.insert().values(child_id=c1_id,
                                     name="foo",
                                     geom=WKTElement("POINT(5 45)", 21781))
        connection.execute(ins)
        ins = table2.insert().values(child_id=c2_id,
                                     name="bar",
                                     geom=WKTElement("POINT(6 46)", 21781))
        connection.execute(ins)
        if attr_list:
            ins = table2.insert().values(child_id=c2_id,
                                         name="aaa,bbb,foo",
                                         geom=WKTElement("POINT(6 46)", 21781))
            connection.execute(ins)

        ogc_server = DBSession.query(OGCServer).filter(
            OGCServer.name == "__test_ogc_server").one()
        layer = LayerWMS()
        layer.id = id
        layer.name = str(id)
        layer.ogc_server = ogc_server
        layer.geo_table = tablename
        layer.public = public
        layer.interface = [self.main]

        if exclude_properties:
            layer.exclude_properties = "name"

        if metadatas:
            layer.metadatas = metadatas

        DBSession.add(layer)

        if not public:
            # Non-public layers get a read/write restriction area tied to
            # the test role, optionally bounded by a polygon.
            ra = RestrictionArea()
            ra.name = "__test_ra"
            ra.layers = [layer]
            ra.roles = [self.role]
            ra.readwrite = True
            if not none_area:
                poly = "POLYGON((4 44, 4 46, 6 46, 6 44, 4 44))"
                ra.area = WKTElement(poly, srid=21781)
            DBSession.add(ra)

        transaction.commit()

        self.layer_ids.append(id)
        return id
Esempio n. 7
0
class Project(db.Model):
    """ Describes a HOT Mapping Project """
    __tablename__ = 'projects'

    # Columns
    id = db.Column(db.Integer, primary_key=True)
    status = db.Column(db.Integer,
                       default=ProjectStatus.DRAFT.value,
                       nullable=False)
    created = db.Column(db.DateTime, default=timestamp, nullable=False)
    priority = db.Column(db.Integer, default=ProjectPriority.MEDIUM.value)
    default_locale = db.Column(
        db.String(10), default='en'
    )  # The locale that is returned if requested locale not available
    author_id = db.Column(db.BigInteger,
                          db.ForeignKey('users.id', name='fk_users'),
                          nullable=False)
    mapper_level = db.Column(
        db.Integer, default=1, nullable=False,
        index=True)  # Mapper level project is suitable for
    enforce_mapper_level = db.Column(db.Boolean, default=False)
    enforce_validator_role = db.Column(
        db.Boolean,
        default=False)  # Means only users with validator role can validate
    private = db.Column(db.Boolean,
                        default=False)  # Only allowed users can validate
    entities_to_map = db.Column(db.String)
    changeset_comment = db.Column(db.String)
    due_date = db.Column(db.DateTime)
    imagery = db.Column(db.String)
    josm_preset = db.Column(db.String)
    last_updated = db.Column(db.DateTime, default=timestamp)
    license_id = db.Column(db.Integer,
                           db.ForeignKey('licenses.id', name='fk_licenses'))
    geometry = db.Column(Geometry('MULTIPOLYGON', srid=4326))
    centroid = db.Column(Geometry('POINT', srid=4326))
    task_creation_mode = db.Column(db.Integer,
                                   default=TaskCreationMode.GRID.value,
                                   nullable=False)

    # Tags
    mapping_types = db.Column(ARRAY(db.Integer), index=True)
    organisation_tag = db.Column(db.String, index=True)
    campaign_tag = db.Column(db.String, index=True)

    # Stats
    total_tasks = db.Column(db.Integer, nullable=False)
    tasks_mapped = db.Column(db.Integer, default=0, nullable=False)
    tasks_validated = db.Column(db.Integer, default=0, nullable=False)
    tasks_bad_imagery = db.Column(db.Integer, default=0, nullable=False)

    # Mapped Objects
    tasks = db.relationship(Task,
                            backref='projects',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')
    project_info = db.relationship(ProjectInfo, lazy='dynamic', cascade='all')
    author = db.relationship(User)
    allowed_users = db.relationship(User, secondary=project_allowed_users)
    priority_areas = db.relationship(PriorityArea,
                                     secondary=project_priority_areas,
                                     cascade="all, delete-orphan",
                                     single_parent=True)

    def create_draft_project(self, draft_project_dto: DraftProjectDTO):
        """
        Creates a draft project
        :param draft_project_dto: DTO containing draft project details
        :param aoi: Area of Interest for the project (eg boundary of project)
        """
        self.project_info.append(
            ProjectInfo.create_from_name(draft_project_dto.project_name))
        self.status = ProjectStatus.DRAFT.value
        self.author_id = draft_project_dto.user_id
        self.last_updated = timestamp()

    def set_project_aoi(self, draft_project_dto: DraftProjectDTO):
        """ Sets the AOI for the supplied project """
        aoi_geojson = geojson.loads(
            json.dumps(draft_project_dto.area_of_interest))

        # Merge the incoming features into a single dissolved MultiPolygon
        # before storing it as the project geometry.
        aoi_geometry = GridService.merge_to_multi_polygon(aoi_geojson,
                                                          dissolve=True)

        valid_geojson = geojson.dumps(aoi_geometry)
        self.geometry = ST_SetSRID(ST_GeomFromGeoJSON(valid_geojson), 4326)
        self.centroid = ST_Centroid(self.geometry)

    def set_default_changeset_comment(self):
        """ Sets the default changeset comment"""
        default_comment = current_app.config['DEFAULT_CHANGESET_COMMENT']
        self.changeset_comment = f'{default_comment}-{self.id}'
        self.save()

    def create(self):
        """ Creates and saves the current model to the DB """
        db.session.add(self)
        db.session.commit()

    def save(self):
        """ Save changes to db"""
        db.session.commit()

    @staticmethod
    def clone(project_id: int, author_id: int):
        """ Clone project """

        cloned_project = Project.get(project_id)

        # Remove clone from session so we can reinsert it as a new object
        db.session.expunge(cloned_project)
        make_transient(cloned_project)

        # Re-initialise counters and meta-data
        cloned_project.total_tasks = 0
        cloned_project.tasks_mapped = 0
        cloned_project.tasks_validated = 0
        cloned_project.tasks_bad_imagery = 0
        cloned_project.last_updated = timestamp()
        cloned_project.created = timestamp()
        cloned_project.author_id = author_id
        cloned_project.status = ProjectStatus.DRAFT.value
        cloned_project.id = None  # Reset ID so we get a new ID when inserted
        cloned_project.geometry = None
        cloned_project.centroid = None

        db.session.add(cloned_project)
        db.session.commit()

        # Now add the project info, we have to do it in a two stage commit because we need to know the new project id
        original_project = Project.get(project_id)

        for info in original_project.project_info:
            db.session.expunge(info)
            make_transient(
                info
            )  # Must remove the object from the session or it will be updated rather than inserted
            info.id = None
            info.project_id_str = str(cloned_project.id)
            cloned_project.project_info.append(info)

        # Now add allowed users now we know new project id, if there are any
        for user in original_project.allowed_users:
            cloned_project.allowed_users.append(user)

        db.session.add(cloned_project)
        db.session.commit()

        return cloned_project

    @staticmethod
    def get(project_id: int):
        """
        Gets specified project
        :param project_id: project ID in scope
        :return: Project if found otherwise None
        """
        return Project.query.get(project_id)

    def update(self, project_dto: ProjectDTO):
        """ Updates project from DTO and commits the changes """
        self.status = ProjectStatus[project_dto.project_status].value
        self.priority = ProjectPriority[project_dto.project_priority].value
        self.default_locale = project_dto.default_locale
        self.enforce_mapper_level = project_dto.enforce_mapper_level
        self.enforce_validator_role = project_dto.enforce_validator_role
        self.private = project_dto.private
        self.mapper_level = MappingLevel[
            project_dto.mapper_level.upper()].value
        self.entities_to_map = project_dto.entities_to_map
        self.changeset_comment = project_dto.changeset_comment
        self.due_date = project_dto.due_date
        self.imagery = project_dto.imagery
        self.josm_preset = project_dto.josm_preset
        self.last_updated = timestamp()
        self.license_id = project_dto.license_id

        if project_dto.organisation_tag:
            org_tag = Tags.upsert_organistion_tag(project_dto.organisation_tag)
            self.organisation_tag = org_tag
        else:
            self.organisation_tag = None  # Set to none, for cases where a tag could have been removed

        if project_dto.campaign_tag:
            camp_tag = Tags.upsert_campaign_tag(project_dto.campaign_tag)
            self.campaign_tag = camp_tag
        else:
            self.campaign_tag = None  # Set to none, for cases where a tag could have been removed

        # Cast MappingType strings to int array
        type_array = []
        for mapping_type in project_dto.mapping_types:
            type_array.append(MappingTypes[mapping_type].value)
        self.mapping_types = type_array

        # Add list of allowed users, meaning the project can only be mapped by users in this list
        if hasattr(project_dto, 'allowed_users'):
            self.allowed_users = [
            ]  # Clear existing relationships then re-insert
            for user in project_dto.allowed_users:
                self.allowed_users.append(user)

        # Set Project Info for all returned locales
        for dto in project_dto.project_info_locales:

            project_info = self.project_info.filter_by(
                locale=dto.locale).one_or_none()

            if project_info is None:
                new_info = ProjectInfo.create_from_dto(
                    dto)  # Can't find info so must be new locale
                self.project_info.append(new_info)
            else:
                project_info.update_from_dto(dto)

        self.priority_areas = [
        ]  # Always clear Priority Area prior to updating
        if project_dto.priority_areas:
            for priority_area in project_dto.priority_areas:
                pa = PriorityArea.from_dict(priority_area)
                self.priority_areas.append(pa)

        db.session.commit()

    def delete(self):
        """ Deletes the current model from the DB """
        db.session.delete(self)
        db.session.commit()

    def can_be_deleted(self) -> bool:
        """ Projects can be deleted if they have no mapped work """
        task_count = self.tasks.filter(
            Task.task_status != TaskStatus.READY.value).count()
        if task_count == 0:
            return True
        else:
            return False

    def get_locked_tasks_for_user(self, user_id: int):
        """ Gets ids of this project's tasks locked by the specified user """
        tasks = self.tasks.filter_by(locked_by=user_id)

        locked_tasks = []
        for task in tasks:
            locked_tasks.append(task.id)

        return locked_tasks

    def get_locked_tasks_details_for_user(self, user_id: int):
        """ Gets this project's Task objects locked by the specified user """
        tasks = self.tasks.filter_by(locked_by=user_id)

        locked_tasks = []
        for task in tasks:
            locked_tasks.append(task)

        return locked_tasks

    @staticmethod
    def get_projects_for_admin(admin_id: int,
                               preferred_locale: str) -> PMDashboardDTO:
        """ Get projects for admin """
        admins_projects = Project.query.filter_by(author_id=admin_id).all()

        # NOTE(review): Query.all() returns a list, never None, so this check
        # can never fire — an admin with no projects gets an empty DTO rather
        # than NotFound. Confirm intent; `if not admins_projects:` would raise.
        if admins_projects is None:
            raise NotFound('No projects found for admin')

        admin_projects_dto = PMDashboardDTO()
        for project in admins_projects:
            pm_project = project.get_project_summary(preferred_locale)
            project_status = ProjectStatus(project.status)

            # Bucket the summary by project status.
            if project_status == ProjectStatus.DRAFT:
                admin_projects_dto.draft_projects.append(pm_project)
            elif project_status == ProjectStatus.PUBLISHED:
                admin_projects_dto.active_projects.append(pm_project)
            elif project_status == ProjectStatus.ARCHIVED:
                admin_projects_dto.archived_projects.append(pm_project)
            else:
                current_app.logger.error(
                    f'Unexpected state project {project.id}')

        return admin_projects_dto

    def get_project_summary(self, preferred_locale) -> ProjectSummary:
        """ Create Project Summary model for postgis project object"""
        summary = ProjectSummary()
        summary.project_id = self.id
        summary.campaign_tag = self.campaign_tag
        summary.created = self.created
        summary.last_updated = self.last_updated
        summary.mapper_level = MappingLevel(self.mapper_level).name
        summary.organisation_tag = self.organisation_tag
        summary.status = ProjectStatus(self.status).name

        centroid_geojson = db.session.scalar(self.centroid.ST_AsGeoJSON())
        summary.aoi_centroid = geojson.loads(centroid_geojson)

        # NOTE(review): raises ZeroDivisionError when total_tasks == 0 —
        # confirm a project always has tasks by the time a summary is built.
        summary.percent_mapped = int(
            ((self.tasks_mapped + self.tasks_bad_imagery) / self.total_tasks) *
            100)
        summary.percent_validated = int(
            (self.tasks_validated / self.total_tasks) * 100)

        project_info = ProjectInfo.get_dto_for_locale(self.id,
                                                      preferred_locale,
                                                      self.default_locale)
        summary.name = project_info.name
        summary.short_description = project_info.short_description

        return summary

    def get_project_title(self, preferred_locale):
        """ Gets the project name in the preferred locale (falls back to the
        project's default locale) """
        project_info = ProjectInfo.get_dto_for_locale(self.id,
                                                      preferred_locale,
                                                      self.default_locale)
        return project_info.name

    def get_aoi_geometry_as_geojson(self):
        """ Helper which returns the AOI geometry as a geojson object """
        aoi_geojson = db.engine.execute(self.geometry.ST_AsGeoJSON()).scalar()
        return geojson.loads(aoi_geojson)

    @staticmethod
    @cached(active_mappers_cache)
    def get_active_mappers(project_id) -> int:
        """ Get count of Locked tasks as a proxy for users who are currently active on the project """

        return Task.query \
            .filter(Task.task_status.in_((TaskStatus.LOCKED_FOR_MAPPING.value,
                    TaskStatus.LOCKED_FOR_VALIDATION.value))) \
            .filter(Task.project_id == project_id) \
            .distinct(Task.locked_by) \
            .count()

    def _get_project_and_base_dto(self):
        """ Populates a project DTO with properties common to all roles.

        :return: tuple of (self, partially-populated ProjectDTO); callers
            finish populating the DTO for their specific role.
        """
        base_dto = ProjectDTO()
        base_dto.project_id = self.id
        base_dto.project_status = ProjectStatus(self.status).name
        base_dto.default_locale = self.default_locale
        base_dto.project_priority = ProjectPriority(self.priority).name
        base_dto.area_of_interest = self.get_aoi_geometry_as_geojson()
        base_dto.enforce_mapper_level = self.enforce_mapper_level
        base_dto.enforce_validator_role = self.enforce_validator_role
        base_dto.private = self.private
        base_dto.mapper_level = MappingLevel(self.mapper_level).name
        base_dto.entities_to_map = self.entities_to_map
        base_dto.changeset_comment = self.changeset_comment
        base_dto.due_date = self.due_date
        base_dto.imagery = self.imagery
        base_dto.josm_preset = self.josm_preset
        base_dto.campaign_tag = self.campaign_tag
        base_dto.organisation_tag = self.organisation_tag
        base_dto.license_id = self.license_id
        base_dto.created = self.created
        base_dto.last_updated = self.last_updated
        base_dto.author = User().get_by_id(self.author_id).username
        base_dto.active_mappers = Project.get_active_mappers(self.id)
        base_dto.task_creation_mode = TaskCreationMode(
            self.task_creation_mode).name

        if self.private:
            # If project is private it should have a list of allowed users
            allowed_usernames = []
            for user in self.allowed_users:
                allowed_usernames.append(user.username)
            base_dto.allowed_usernames = allowed_usernames

        if self.mapping_types:
            mapping_types = []
            for mapping_type in self.mapping_types:
                mapping_types.append(MappingTypes(mapping_type).name)

            base_dto.mapping_types = mapping_types

        if self.priority_areas:
            geojson_areas = []
            for priority_area in self.priority_areas:
                geojson_areas.append(priority_area.get_as_geojson())

            base_dto.priority_areas = geojson_areas

        return self, base_dto

    def as_dto_for_mapping(self, locale: str) -> Optional[ProjectDTO]:
        """ Creates a Project DTO suitable for transmitting to mapper users """
        project, project_dto = self._get_project_and_base_dto()

        project_dto.tasks = Task.get_tasks_as_geojson_feature_collection(
            self.id)
        project_dto.project_info = ProjectInfo.get_dto_for_locale(
            self.id, locale, project.default_locale)

        return project_dto

    def all_tasks_as_geojson(self):
        """ Creates a geojson of all areas """
        project_tasks = Task.get_tasks_as_geojson_feature_collection(self.id)

        return project_tasks

    def as_dto_for_admin(self, project_id):
        """ Creates a Project DTO suitable for transmitting to project admins """
        project, project_dto = self._get_project_and_base_dto()

        if project is None:
            return None

        project_dto.project_info_locales = ProjectInfo.get_dto_for_all_locales(
            project_id)

        return project_dto
class Point(Base):
    """A point feature stored twice: once as a managed geometry column and
    once via the project's RawGeometry type (both EPSG:4326 POINTs)."""
    __tablename__ = "point"
    id = Column(Integer, primary_key=True)
    geom = Column(Geometry(srid=4326, geometry_type="POINT"))
    raw_geom = Column(RawGeometry(srid=4326, geometry_type="POINT"))
Esempio n. 9
0
def data_route():
    """Return monthly climate stats for up to ``nb`` cities inside a bounding box.

    Query-string parameters: ``w``/``e``/``n``/``s`` bounding box edges,
    optional ``m`` month filter, optional ``nb`` city cap (clamped to the
    configured maximum). Responds with a JSON object keyed by city id.
    """
    west = request.args.get('w')
    east = request.args.get('e')
    north = request.args.get('n')
    south = request.args.get('s')
    month = request.args.get('m')
    nb = int(request.args.get('nb', config['max_nb_cities_at_once']))
    if nb > config['max_nb_cities_at_once']:
        nb = config['max_nb_cities_at_once']

    if west is None or east is None or south is None or north is None:
        # NOTE(review): placeholder error response — should become a proper
        # 4xx (e.g. flask.abort(400)); left as-is to preserve behavior.
        return 'TODO 404'

    # Closed ring for the bounding box: (W S) -> (W N) -> (E N) -> (E S) -> (W S).
    rectangle = 'POLYGON(({0} {1}, {0} {2}, {3} {2}, {3} {1}, {0} {1}))' \
        .format(west, south, north, east)
    with session_scope() as session:
        # choose N cities
        sq = session.query(City) \
            .filter(func.ST_Covers(
                cast(rectangle, Geometry()),
                func.ST_SetSRID(cast(City.location, Geometry()), 0))) \
            .order_by(City.priority_index) \
            .limit(nb).subquery('city')

        # get their data
        query = session.query(
            sq.c.id,
            sq.c.name,
            func.ST_Y(cast(sq.c.location, Geometry())),
            func.ST_X(cast(sq.c.location, Geometry())),
            sq.c.population, MonthlyStat.month,
            MonthlyStat.value, Stat.code, sq.c.country, sq.c.source) \
            .join(MonthlyStat) \
            .join(Stat) \
            .filter(Stat.code.in_(['avgHigh', 'avgLow', 'precipitation',
                                   'precipitationDays', 'monthlySunHours',
                                   'rain', 'rainDays']))

        if month is not None:
            query = query.filter(MonthlyStat.month == month)

        def default():
            # Per-city skeleton: stats nested as month_stats[stat_code][month].
            return {'month_stats': defaultdict(dict)}
        cities = defaultdict(default)
        # print(query)
        # format what is returned from the query
        for row in query:
            id = row[0]
            cities[id]['name'] = row[1]
            cities[id]['coords'] = (row[2], row[3])  # (lat, lon) via ST_Y/ST_X
            cities[id]['pop'] = row[4]
            cities[id]['month_stats'][row[7]][row[5]] = row[6]
            cities[id]['country'] = row[8]
            cities[id]['source'] = row[9]

        # changing rain to precipitation
        # TODO something similar with snow?
        for _,c in cities.items():
            for old, new in [('rain', 'precipitation'),
                             ('rainDays', 'precipitationDays')]:
                if old in c['month_stats'] and new not in c['month_stats']:
                    c['month_stats'][new] = c['month_stats'].pop(old)

        # from pprint import pprint
        # pprint(cities)
    return json.dumps(cities)
Esempio n. 10
0
class RestrictionArea(Base):
    """A named polygon that restricts which roles may access which layers,
    optionally granting read/write access within the area."""
    __tablename__ = "restrictionarea"
    __table_args__ = {"schema": _schema}
    __colanderalchemy_config__ = {
        "title": _("Restriction area"),
        "plural": _("Restriction areas")
    }
    __c2cgeoform_config__ = {"duplicate": True}
    id = Column(Integer,
                primary_key=True,
                info={"colanderalchemy": {
                    "widget": HiddenWidget()
                }})
    # Restriction polygon; edited on a 3857 map but stored in the app SRID.
    area = Column(
        Geometry("POLYGON", srid=_srid),
        info={
            "colanderalchemy": {
                "typ": ColanderGeometry("POLYGON", srid=_srid, map_srid=3857)
            }
        },
    )

    name = Column(Unicode, info={"colanderalchemy": {"title": _("Name")}})
    description = Column(Unicode,
                         info={"colanderalchemy": {
                             "title": _("Description")
                         }})
    readwrite = Column(Boolean,
                       default=False,
                       info={"colanderalchemy": {
                           "title": _("Read/write")
                       }})

    # relationship with Role and Layer
    roles = relationship(
        "Role",
        secondary=role_ra,
        info={"colanderalchemy": {
            "title": _("Roles"),
            "exclude": True
        }},
        cascade="save-update,merge,refresh-expire",
        backref=backref("restrictionareas",
                        info={
                            "colanderalchemy": {
                                "exclude": True,
                                "title": _("Restriction areas")
                            }
                        }),
    )
    layers = relationship(
        "Layer",
        secondary=layer_ra,
        info={"colanderalchemy": {
            "title": _("Layers"),
            "exclude": True
        }},
        cascade="save-update,merge,refresh-expire",
        backref=backref("restrictionareas",
                        info={
                            "colanderalchemy": {
                                "title": _("Restriction areas"),
                                "exclude": True
                            }
                        }),
    )

    def __init__(
        self,
        name: str = "",
        description: str = "",
        layers: List[Layer] = None,
        roles: List[Role] = None,
        area: Geometry = None,
        readwrite: bool = False,
    ) -> None:
        """Create a restriction area.

        ``layers``/``roles`` default to None (not []) to avoid the shared
        mutable-default pitfall; they are replaced by fresh lists below.
        """
        if layers is None:
            layers = []
        if roles is None:
            roles = []
        self.name = name
        self.description = description
        self.layers = layers
        self.roles = roles
        self.area = area
        self.readwrite = readwrite

    def __str__(self) -> str:  # pragma: no cover
        return self.name or ""
Esempio n. 11
0
class Role(Base):
    """A user role, grouping functionalities and an optional map extent."""
    __tablename__ = "role"
    __table_args__ = {"schema": _schema}
    __colanderalchemy_config__ = {"title": _("Role"), "plural": _("Roles")}
    __c2cgeoform_config__ = {"duplicate": True}

    id = Column(Integer,
                primary_key=True,
                info={"colanderalchemy": {
                    "widget": HiddenWidget()
                }})
    name = Column(Unicode,
                  unique=True,
                  nullable=False,
                  info={"colanderalchemy": {
                      "title": _("Name")
                  }})
    description = Column(Unicode,
                         info={"colanderalchemy": {
                             "title": _("Description")
                         }})
    # Optional polygon extent; edited on a 3857 map, stored in the app SRID.
    extent = Column(
        Geometry("POLYGON", srid=_srid),
        info={
            "colanderalchemy": {
                "typ": ColanderGeometry("POLYGON", srid=_srid, map_srid=3857)
            }
        },
    )

    # functionality
    functionalities = relationship(
        "Functionality",
        secondary=role_functionality,
        cascade="save-update,merge,refresh-expire",
        info={
            "colanderalchemy": {
                "exclude": True,
                "title": _("Functionalities")
            }
        },
    )

    def __init__(
        self,
        name: str = "",
        description: str = "",
        functionalities: List[Functionality] = None,
        extent: Geometry = None,
    ) -> None:
        """Create a role; ``functionalities`` defaults to None (not []) to
        avoid the shared mutable-default pitfall."""
        if functionalities is None:
            functionalities = []
        self.name = name
        self.functionalities = functionalities
        self.extent = extent
        self.description = description

    def __str__(self) -> str:
        return self.name or ""  # pragma: no cover

    @property
    def bounds(self):
        # Returns (minx, miny, maxx, maxy) of the extent, or None when no
        # extent is set. (The previous `-> None` annotation was incorrect.)
        if self.extent is None:
            return None
        return to_shape(self.extent).bounds
Esempio n. 12
0
class Photo(db.Model):
    """
    A class representing the photo of a report.

    Attributes:
        photo_id: Identification of the photo that the RESTful API
            server generates.
        report_id: Identification of the report this photo is attached to.
        location: Geographic location where the photo has been taken.
        accuracy: Accuracy in meters of the geographic location of the heritage.
        bearing: Angle of the direction that the camera pointed to when
            this photo has been taken, measured in degrees clockwise from
            the north line to the camera's pointing direction.
        creation_time: Date and time when the photo has been registered to
            the server platform.
        update_time: Date and time of the most recent modification of one of
            the attributes of this photo.
    """

    __tablename__ = "photo"

    # Identification of the photo that the RESTful API server generates.
    photo_id = db.Column(UUID(as_uuid=True),
                         default=uuid.uuid4,
                         primary_key=True,
                         nullable=False)
    # Identification of the report this photo is attached to.
    report_id = db.Column(UUID(as_uuid=True),
                          db.ForeignKey('report.report_id'),
                          nullable=False)
    # Geographic location where the photo has been taken (3D point)
    location = db.Column(Geometry(geometry_type='POINTZ', dimension=3),
                         nullable=True)
    # Accuracy in meters of the geographic location of the heritage
    accuracy = db.Column(db.Float(), nullable=True)
    # Angle of the direction that the camera pointed to when
    # this photo has been taken.
    bearing = db.Column(db.Float(), nullable=True)
    # Creation time.
    # NOTE(review): datetime.utcnow is timezone-naive while the column is
    # declared timezone-aware -- confirm this mix is intended.
    creation_time = db.Column(db.DateTime(timezone=True),
                              default=datetime.utcnow,
                              nullable=False)
    # Update time (same naive-UTC caveat as creation_time).
    update_time = db.Column(db.DateTime(timezone=True),
                            default=datetime.utcnow,
                            nullable=False)

    def __init__(self, report_id, *args, **kwargs):
        """Construct a Photo attached to the given report."""
        super(Photo, self).__init__(report_id=report_id, *args, **kwargs)

    @classmethod
    def authenticate_photo(cls, photo):
        """Photo authentication: check if the provided photo is registered or not

        Args:
            photo (Photo Object): The provided photo which is to be authenticated

        Raises:
            ExistedPhotoInAnotherReportException: In case the provided photo
                was uploaded in another report

        Returns:
            None: In case the provided photo was uploaded in the same report
            Photo object: In case the provided photo have not yet uploaded
        """
        image = Photo.query.filter_by(location=photo.location).first()
        if image:
            # Fix: filter_by() returns a Query object, which is always
            # truthy -- .first() is required for the check below to work.
            report = Report.query.filter_by(report_id=image.report_id).first()
            # NOTE(review): raising when the referenced report does not
            # exist looks odd for "photo exists in another report" --
            # confirm whether the intended condition is
            # `image.report_id != photo.report_id`.
            if not report:
                raise ExistedPhotoInAnotherReportException(image.report_id)

            # Do not add to database if the photo was uploaded
            return None

        # In case photo have not yet uploaded, add photo object to database
        db.session.add(photo)
        db.session.commit()
        # Update `update_time` of the report.
        # Fix: .first() was missing here too, so update_time was set on a
        # Query object and never persisted.
        report = Report.query.filter_by(report_id=photo.report_id).first()
        if report is not None:
            report.update_time = datetime.utcnow()
            db.session.commit()
        return photo
Esempio n. 13
0
def postgres(schema, tables, data_directory, psql_path, plpython, **params):
    """Initialize PostgreSQL and bulk-load the test datasets.

    Creates the database objects, enables the required extensions, then
    loads one CSV per table: the ``geo`` table goes through pandas so the
    columns get explicit PostGIS types, every other table is streamed
    through ``psql``'s COPY.
    """
    psql = local[psql_path]
    csv_root = Path(data_directory)
    logger.info('Initializing PostgreSQL...')
    engine = init_database(
        'postgresql', params, schema, isolation_level='AUTOCOMMIT'
    )

    engine.execute("CREATE SEQUENCE IF NOT EXISTS test_sequence;")

    with_geo = 'geo' in tables
    if with_geo:
        engine.execute("CREATE EXTENSION IF NOT EXISTS POSTGIS")

    if plpython:
        engine.execute("CREATE EXTENSION IF NOT EXISTS PLPYTHONU")

    copy_template = (
        "COPY {} FROM STDIN WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')"
    )
    dbname = params['database']
    for table in tables:
        csv_file = csv_root / f'{table}.csv'

        if table == 'geo':
            # Geometry columns need explicit PostGIS types, so the geo
            # sample is loaded through pandas instead of a raw COPY.
            if not with_geo:
                continue
            from geoalchemy2 import Geometry, WKTElement

            srid = 0
            frame = pd.read_csv(csv_file)
            geom_cols = frame.columns[1:]
            frame[geom_cols] = frame[geom_cols].applymap(
                lambda wkt: WKTElement(wkt, srid=srid)
            )
            frame.to_sql(
                'geo',
                engine,
                index=False,
                dtype={
                    "geo_point": Geometry("POINT", srid=srid),
                    "geo_linestring": Geometry("LINESTRING", srid=srid),
                    "geo_polygon": Geometry("POLYGON", srid=srid),
                    "geo_multipolygon": Geometry("MULTIPOLYGON", srid=srid),
                },
            )
            continue

        copy_cmd = psql[
            '--host', params['host'],
            '--port', params['port'],
            '--username', params['user'],
            '--dbname', dbname,
            '--command', copy_template.format(table),
        ]
        with local.env(PGPASSWORD=params['password']):
            with csv_file.open('r') as stream:
                copy_cmd(stdin=stream)

    engine.execute('VACUUM FULL ANALYZE')
    def _create_layer(self, exclude_properties=False, metadatas=None, geom_type=False):
        """ This function is central for this test class. It creates
        a layer with two features, and associates a restriction area
        to it.

        :param exclude_properties: if True, the "name" property is excluded
            from the created layer.
        :param metadatas: optional metadata objects attached to the layer.
        :param geom_type: if True the geometry column is a typed POINT
            column, otherwise a generic (untyped) geometry column.
        :return: the id of the created layer (also used as the table-name
            suffix).
        """
        import transaction
        from sqlalchemy import Column, Table, types, ForeignKey
        from sqlalchemy.ext.declarative import declarative_base
        from geoalchemy2 import Geometry
        from c2cgeoportal.models import DBSession, RestrictionArea, LayerWMS

        # Each call gets a fresh, unique table/layer id.
        self.__class__._table_index += 1
        id = self.__class__._table_index

        engine = DBSession.c2c_rw_bind
        connection = engine.connect()

        if not self.metadata:
            self.metadata = declarative_base(bind=engine).metadata

        tablename = "geo_table_{0:d}".format(id)
        schemaname = "geodata"

        # Child table referenced by the main table's foreign key.
        table1 = Table(
            "{0!s}_child".format(tablename), self.metadata,
            Column("id", types.Integer, primary_key=True),
            Column("name", types.Unicode),
            schema=schemaname
        )

        self._tables.append(table1)

        # Main table exercising a wide range of column types.
        table2 = Table(
            tablename, self.metadata,
            Column("id", types.Integer, primary_key=True),
            Column("child_id", types.Integer,
                   ForeignKey("{0!s}.{1!s}_child.id".format(schemaname, tablename))),
            Column("name", types.Unicode, nullable=False),
            Column("deleted", types.Boolean),
            Column("last_update_user", types.Unicode),
            Column("last_update_date", types.DateTime),
            Column("date", types.Date),
            Column("start_time", types.Time),
            # Column("interval", Interval()),
            Column("short_name1", types.String, nullable=True),
            Column("short_name2", types.String(50), nullable=True),
            Column("short_number", types.Integer, nullable=True),
            Column("double_number", types.Float(precision=4)),
            Column("large_binary", types.LargeBinary(length=60), nullable=True),
            Column("value", types.Enum("one", "two", "three", name="an_enum_value")),
            Column("numeric", types.Numeric(precision=5, scale=2), nullable=True),
            Column("numeric2", types.Numeric(), nullable=True),
            schema=schemaname
        )
        # Typed vs. generic geometry column, depending on the flag.
        if geom_type:
            table2.append_column(
                Column("geom", Geometry("POINT", srid=21781))
            )
        else:
            table2.append_column(
                Column("geom", Geometry(srid=21781))
            )

        self._tables.append(table2)

        # Recreate both tables from scratch (drop child-referencing table
        # first to satisfy the foreign-key dependency).
        table2.drop(checkfirst=True)
        table1.drop(checkfirst=True)
        table1.create()
        table2.create()

        # Insert the two child features (the returned primary keys are
        # intentionally unused).
        ins = table1.insert().values(name="c1é")
        connection.execute(ins).inserted_primary_key[0]
        ins = table1.insert().values(name="c2é")
        connection.execute(ins).inserted_primary_key[0]

        # Public WMS layer pointing at the freshly created main table.
        layer = LayerWMS(name="test_WMS_1", public=True)
        layer.layer = "test_wms"
        layer.id = id
        layer.geo_table = "{0!s}.{1!s}".format(schemaname, tablename)
        layer.interfaces = [self.main]
        layer.ogc_server = self.ogc_server

        if exclude_properties:
            layer.exclude_properties = "name"

        if metadatas:
            layer.metadatas = metadatas

        DBSession.add(self.layer_group_1)
        self.layer_group_1.children = self.layer_group_1.children + [layer]

        DBSession.add(self.layer_group_1)

        # Read/write restriction area granting self.role access to the layer.
        ra = RestrictionArea()
        ra.name = "__test_ra"
        ra.layers = [layer]
        ra.roles = [self.role]
        ra.readwrite = True
        DBSession.add(ra)

        transaction.commit()

        self.layer_ids.append(id)
        return id
Esempio n. 15
0
class Entry(Base):
    r"""Entry

    The Entry is the main entity in metacatalog. An object instance models a
    set of metadata needed to store and manage a datasource. The Entry is not
    the actual data.
    The Entry is designed to store all necessary information to be exportable
    in ISO19115 in the scope of metacatalog. That means, Properties which are
    always the same across metacatalog, or can be derived from the actual
    implementation, are not part of an Entry.

    Attributes
    ----------
    id : int
        Unique id of the record. If not specified, the database will assign it.
    uuid : str
        .. versionadded:: 0.1.9

        Version 4 UUID string to identify the Entry across installations.
        This field is read-only and will be assigned on creation. It is primarily
        used to export Entry into ISO19115 metadata.
    title : str
        A full title (512) to describe the datasource as well as possible.
        The truncated title (first 25 signs) is usually used to print an
        Entry object to the console.
    abstract : str
        Full abstract of the datasource. The abstract should include all
        necessary information that is needed to fully understand the data.
    external_id : str
        Any kind of OID that was used to identify the data in the first place.
        Usually a unique ID field of other data-storage solutions. The
        external_id is only stored for reference reasons.
    location : str, tuple
        The location as a POINT Geometry in unprojected WGS84 (EPSG: 4326).
        The location is primarily used to show all Entry objects on a map, or
        perform geo-searches. If the data-source needs to store more complex
        Geometries, you can use the ``geom`` argument.
        The location can be passed as WKT or a tuple of (x, y) coordinates.
        Note that it will be returned and stored as WKB. The output value will
        be reworked in a future release
    geom : str
        .. deprecated:: 0.1.11
            The geom attribute will be removed with version 0.2
        .. warning::
            The geom attribute is completely untested so far and might be
            reworked or removed in a future release
            It takes a WKT of any kind of OGC-conform Geometry. The return value
            will be the same Geometry as WKB.
    creation : datetime.datetime
        Following the ISO19115 the *creation* date is referring to the creation
        date of the **data resource** described by the Entry, not the Entry
        itself. If creation date is not set, it is assumed, that yet no data
        resource is connected to the Entry.
    end : datetime.datetime
        The last date the data source described by this Entry has data for.
        The end date is **not** ISO19115-compliant and will be reworked.
    version : int
        The version of this Entry. Usually metacatalog will handle the version
        itself and there is not need to set the version manually.
    latest_version_id : int
        Foreign key to `Entry.id`. This key is self-referencing the another
        Entry. This has to be set if the current Entry is not the latest one.
        If latest_version_id is None, the Entry is the most recent one and
        database operations that find multiple entries will in a future release
        filter to 'version duplicates'.
    is_partial : bool
        .. versionadded:: 0.1.10
        If an Entry is partial, it is not self-contained and **has** to be part
        of a :class:`EntryGroup <metacatalog.models.EntryGroup>` of type
        composite.
        .. note::
            To make it possible to add partial Entries via the models submodule,
            The Entry class itself will  **not** check integrity. This has to
            be done on adding partial Entry records, or by checking the database
    comment : str
        Arbitrary free-text comment to the Entry
    citation : str
        .. versionadded:: 0.1.13
        Citation information for this Entry. Note, that metacatalog does not
        assign DOIs and thus a citation is only useful if the associated
        data has a DOI and the bibliographic information applies to the Entry
        as well.
        .. note::
            Metacatalog does not manage bibliography. Thus it is highly
            recommended to use third party software for management and only
            export the reference to the resource in a common citation style.
    license : metacatalog.models.License
        Data License associated to the data and the metadata. You can pass
        the `License <metacatalog.models.License>`_ itself, or use the
        license_id attribute.
    license_id : int
        Foreign key to the data license.
    author : metacatalog.models.Person
        :class:`Person <metacatalog.models.Person>` that acts as first author
        for the given entry. Only one first author is possible, co-authors can
        be requested from either the contributors list or the
        :py:attr:`authors` property. `author` is a property and setting a
        new author using this property is not supported.
    authors : list
        List of :class:`Person <metacatalog.models.Person>`. The first element
        is the first author, see :py:attr:`~author`. The others are
        :class:`Person <metacatalog.models.Person>`s associated with the
        :class:`Role <metacatalog.models.PersonRole>` of ``'coAuthor'``.
        The list of authors is sorted by the `order` attribute.
        `authors` is a property and setting a new list of authors using this
        property is not supported.

    Note
    ----
    One Entry object instance is always described by exactly one variable.
    If a datasource is a composite of many datasources, there are two
    strategies. Either a new table can be implemented and an abstract
    :class:`Variable <metacatalog.models.Variable>` be added. This is done with
    Eddy-Covariance data. Secondly, Each variable of the datasource can be
    represented by its own Entry, which get then grouped by an
    :class:`EntryGroup` of :class:`EntryGroupType` ``'composite'``.

    See Also
    --------
    `EntryGroup`
    `EntryGroupType`
    """
    __tablename__ = 'entries'

    # columns
    id = Column(Integer, primary_key=True, autoincrement=True)
    # read-only v4 UUID identifying the Entry across installations
    uuid = Column(String(36), nullable=False, default=lambda: str(uuid4()))
    title = Column(String(512), nullable=False)
    abstract = Column(String)
    external_id = Column(String)
    # POINT location in unprojected WGS84 (EPSG:4326); stored as WKB
    location = Column(Geometry(geometry_type='POINT', srid=4326),
                      nullable=False)
    # deprecated since 0.1.11 (see class docstring); untyped geometry
    geom = Column(Geometry)
    version = Column(Integer, default=1, nullable=False)
    # self-referencing key; set when this Entry is not the latest version
    latest_version_id = Column(Integer,
                               ForeignKey('entries.id'),
                               nullable=True)
    is_partial = Column(Boolean, default=False, nullable=False)
    comment = Column(String, nullable=True)
    citation = Column(String(2048), nullable=True)

    # foreign keys to license, variable (mandatory) and datasource
    license_id = Column(Integer, ForeignKey('licenses.id'))
    variable_id = Column(Integer, ForeignKey('variables.id'), nullable=False)
    datasource_id = Column(Integer, ForeignKey('datasources.id'))

    embargo = Column(Boolean, default=False, nullable=False)
    embargo_end = Column(DateTime, default=get_embargo_end)

    # NOTE(review): naive UTC timestamps (dt.utcnow) -- confirm the columns
    # are meant to be timezone-naive.
    publication = Column(DateTime, default=dt.utcnow)
    lastUpdate = Column(DateTime, default=dt.utcnow, onupdate=dt.utcnow)

    # relationships
    contributors = relationship("PersonAssociation",
                                back_populates='entry',
                                cascade='all, delete, delete-orphan')
    keywords = relationship("KeywordAssociation",
                            back_populates='entry',
                            cascade='all, delete, delete-orphan')
    license = relationship("License", back_populates='entries')
    variable = relationship("Variable", back_populates='entries')
    datasource = relationship("DataSource",
                              back_populates='entries',
                              cascade='all, delete, delete-orphan',
                              single_parent=True)
    # self-referential versioning: entries whose latest_version_id points here
    other_versions = relationship("Entry",
                                  backref=backref('latest_version',
                                                  remote_side=[id]))
    associated_groups = relationship("EntryGroup",
                                     secondary="nm_entrygroups",
                                     back_populates='entries')
    details = relationship("Detail", back_populates='entry')

    # extensions -- populated at runtime by activated metacatalog extensions
    io_extension = None
    io_interface = None

    def to_dict(self, deep=False, stringify=False) -> dict:
        """To dict

        Return the model as a python dictionary.

        Parameters
        ----------
        deep : bool
            If True, all related objects will be included as
            dictionary. Defaults to False
        stringify : bool
            If True, all values will be turned into a string,
            to make the object serializable.

        Returns
        -------
        obj : dict
            The Model as dict

        """
        # base dictionary -- always-present attributes and relations
        d = dict(id=self.id,
                 uuid=self.uuid,
                 title=self.title,
                 author=self.author.to_dict(deep=False),
                 authors=[a.to_dict(deep=False) for a in self.authors],
                 locationShape=self.location_shape.wkt,
                 location=self.location_shape.wkt,
                 variable=self.variable.to_dict(deep=False),
                 embargo=self.embargo,
                 embargo_end=self.embargo_end,
                 version=self.version,
                 isPartial=self.is_partial,
                 publication=self.publication,
                 lastUpdate=self.lastUpdate,
                 keywords=self.plain_keywords_dict())

        # optional relations
        if self.license is not None:
            d['license'] = self.license.to_dict(deep=False)

        # NOTE(review): `details` is a relationship list, so this check is
        # effectively always true -- possibly `if self.details:` was meant.
        if self.details is not None:
            d['details'] = self.details_dict(full=True)

        if self.datasource is not None:
            d['datasource'] = self.datasource.to_dict(deep=False)

        # set optional attributes (only when present and non-None)
        for attr in ('abstract', 'external_id', 'comment', 'citation'):
            if hasattr(self, attr) and getattr(self, attr) is not None:
                d[attr] = getattr(self, attr)

        # lazy loading -- only walk related groups when deep=True
        if deep:
            projects = self.projects
            if len(projects) > 0:
                d['projects'] = [p.to_dict(deep=False) for p in projects]
            comp = self.composite_entries
            if len(comp) > 0:
                d['composite_entries'] = [e.to_dict(deep=False) for e in comp]

        if stringify:
            # convert all values to strings for JSON-safe serialization
            return serialize(d, stringify=True)
        return d

    @classmethod
    def is_valid(cls, entry):
        """Return True if *entry* is an Entry instance with a database id."""
        if not isinstance(entry, Entry):
            return False
        return entry.id is not None

    @property
    def checksum(self):
        """
        .. versionadded:: 0.3.8

        MD5 checksum over the deep, stringified dict representation of this
        Entry. Any change to the linked metadata changes the checksum, so
        applications built on metacatalog can use it to verify integrity.
        """
        representation = self.to_dict(deep=True, stringify=True)
        payload = json.dumps(representation).encode()
        return hashlib.md5(payload).hexdigest()

    @property
    def is_latest_version(self):
        """Return True if this Entry is its own latest version.

        An Entry is the latest version when its ``latest_version_id``
        either references itself or is not set at all.
        """
        # Fix: the expression was previously evaluated but never returned,
        # so the property always yielded None.
        return self.latest_version_id == self.id or self.latest_version_id is None

    @property
    def latest_version(self):
        """Return the Entry with the highest version number among the
        related versions.

        NOTE(review): if ``other_versions`` is empty, ``max(versions)``
        raises ValueError; and the ``len(versions) == 1`` early-exit
        assumes the single related entry refers to self. Both assumptions
        depend on the self-referential relationship contents -- confirm.
        """
        versions = [e.version for e in self.other_versions]

        # no other versions, then self is the only
        if len(versions) == 1:
            return self

        # if more versions exist, find the highest number
        latest_index = versions.index(max(versions))
        return self.other_versions[latest_index]

    @property
    def author(self):
        """The first author: the contributor holding the 'author' role.

        Raises IndexError if no contributor has the 'author' role.
        """
        first_authors = [
            assoc.person for assoc in self.contributors
            if assoc.role.name == 'author'
        ]
        return first_authors[0]

    @author.setter
    def author(self, new_author):
        # delegate to the explicit setter method
        self.set_new_author(new_author)

    def set_new_author(self, new_author, commit=False):
        """
        Replace the first author of this Entry.

        Parameters
        ----------
        new_author : metacatalog.models.Person
            The Person that becomes the new first author. Must be passed
            as a model instance; IDs or query parameters are not yet
            supported.
        commit : boolean
            If True, the Entry is added to its session and the transaction
            is committed.
            .. note::
                This will also affect other uncommited edits to the Entry.

        """
        if not isinstance(new_author, models.Person):
            raise AttributeError(
                'The new author has to be of type metatacatalog.models.Person')

        # locate the contributor association carrying the 'author' role
        author_positions = [
            position for position, assoc in enumerate(self.contributors)
            if assoc.role.name == 'author'
        ]
        self.contributors[author_positions[0]].person = new_author

        if commit:
            session = object_session(self)
            try:
                session.add(self)
                session.commit()
            except Exception as exc:
                session.rollback()
                raise exc

    @property
    def authors(self):
        """All authors: the first author followed by the coAuthors sorted
        by their ``order`` attribute."""
        co_author_assocs = [
            assoc for assoc in self.contributors
            if assoc.role.name == 'coAuthor'
        ]

        # sort coAuthors by their order attribute (argsort keeps the
        # original index mapping)
        ordering = np.argsort([assoc.order for assoc in co_author_assocs])

        return [self.author] + [co_author_assocs[i].person for i in ordering]

    @property
    def projects(self):
        """All associated EntryGroups whose type is 'project'."""
        matches = []
        for group in self.associated_groups:
            if group.type.name.lower() == 'project':
                matches.append(group)
        return matches

    @property
    def composite_entries(self):
        """All associated EntryGroups whose type is 'composite'."""
        return [
            g for g in self.associated_groups
            if g.type.name.lower() == 'composite'
        ]

    @property
    def location_shape(self):
        # shapely geometry decoded from the WKB `location` column
        return to_shape(self.location)

    @location_shape.setter
    def location_shape(self, shape):
        # accepts a shapely geometry and stores it back as WKB
        self.location = from_shape(shape)

    def plain_keywords_list(self):
        """Metadata keyword list.

        Return the full path of every controlled keyword associated with
        this metadata instance. Use :meth:`keywords_dict` when associated
        values or aliases are needed as well.
        """
        return [association.keyword.path() for association in self.keywords]

    def plain_keywords_dict(self):
        """Return each associated keyword as a plain dictionary."""
        result = []
        for association in self.keywords:
            result.append(association.keyword.as_dict())
        return result

    def keywords_dict(self):
        """Return path, alias and associated value for every keyword."""
        return [
            {
                'path': association.keyword.full_path,
                'alias': association.alias,
                'value': association.associated_value,
            }
            for association in self.keywords
        ]

    def details_dict(self, full=True):
        """
        Return the associated details as a dictionary keyed by stem.

        Parameters
        ----------
        full : bool
            If True (default), each entry is the detail's complete dict
            (including description, ids and stemmed key). If False, only
            a plain stem:value mapping is returned.

        """
        if full:
            return {detail.stem: detail.to_dict() for detail in self.details}
        return {detail.stem: detail.value for detail in self.details}

    def details_table(self, fmt='html'):
        """
        Render the associated details as a table.

        Parameters
        ----------
        fmt : string
            Output format, one of:

            * `html` for an HTML table
            * `latex` for a LaTeX table
            * `markdown` (or `md`) for a Markdown table

        """
        # flatten the details; nested dict values are expanded into
        # dotted keys (key.subkey)
        flattened = dict()
        for key, detail in self.details_dict(full=True).items():
            value = detail['value']
            if isinstance(value, dict):
                for sub_key, sub_value in value.items():
                    flattened[f'{key}.{sub_key}'] = dict(
                        value=sub_value,
                        id=detail['id'],
                        key=detail['key'],
                        stem=detail['stem'],
                    )
            else:
                flattened[key] = detail

        # transposed dataframe: one row per detail key
        table = pd.DataFrame(flattened).T

        fmt_name = fmt.lower()
        if fmt_name == 'html':
            return table.to_html()
        if fmt_name == 'latex':
            return table.to_latex()
        if fmt_name in ('markdown', 'md'):
            return table.to_markdown()
        raise ValueError("fmt has to be in ['html', 'latex', 'markdown']")

    def add_details(self, details=None, commit=False, **kwargs):
        """
        Attach arbitrary key-value details to this Entry.

        Parameters
        ----------
        details : list
            .. versionadded:: 0.1.8
            List of dicts of structure:
            .. code-block::
                [{
                    'key': '',
                    'value': '',
                    'description': ''
                }]
            where the ``description`` is optional and can be omitted.
            If no descriptions are passed at all, plain ``key=value``
            pairs can be passed via `**kwargs` instead.
        commit : bool
            If True, the Entry is added to the current session and the
            transaction is commited. Can have side-effects.
            Defaults to False.

        """
        stemmer = nltk.PorterStemmer()

        # records to be turned into Detail models
        records = []

        # keyword arguments: key=value pairs without descriptions
        for key, value in kwargs.items():
            records.append({
                'entry_id': self.id,
                'key': str(key),
                'stem': stemmer.stem(key),
                'value': value
            })

        # explicit detail dicts, optionally carrying a description
        for detail in (details or []):
            record = {
                'entry_id': self.id,
                'key': detail['key'],
                'stem': stemmer.stem(detail['key']),
                'value': detail['value']
            }
            if 'description' in detail.keys():
                record['description'] = detail['description']
            records.append(record)

        # build the models
        for record in records:
            self.details.append(models.Detail(**record))

        if commit:
            session = object_session(self)
            try:
                session.add(self)
                session.commit()
            except Exception as exc:
                session.rollback()
                raise exc

    def export(self, path=None, fmt='JSON', **kwargs):
        r"""
        Export this Entry through a metacatalog export extension.

        Parameters
        ----------
        path : str
            If set, the export will be written into a file at the given
            location.
        fmt : str
            Export format. Each export extension should at least support
            json and XML export.
        **kwargs
            Any other argument given will be passed down to the actual
            export function.

        Notes
        -----
        Extension resolution order: an extension registered as
        ``export-<fmt>`` first, then one registered as ``export``, and
        finally the built-in
        :class:`ExportExtension <metacatalog.ext.export.ExportExtension>`.
        On the chosen extension, a method named like ``fmt`` is preferred;
        if absent, its generic ``export`` method is used and ``fmt`` is
        passed along. This allows format-specific extensions.
        Refer to the notes about :any:`custom extensions <metacatalog.ext.base>`
        to learn more about writing your own export extension.

        """
        from metacatalog import ext

        # resolve the extension: format-specific, then generic, then the
        # bundled default implementation
        try:
            Export = ext.extension(f'export-{fmt.lower()}')
        except AttributeError:
            try:
                Export = ext.extension('export')
            except AttributeError:
                from metacatalog.ext.export import ExportExtension as Export

        # pick the export callable: a fmt-named method wins over 'export'
        method_name = fmt.lower()
        if hasattr(Export, method_name):
            exp_function = getattr(Export, method_name)
        elif hasattr(Export, 'export'):
            exp_function = getattr(Export, 'export')
        else:
            raise AttributeError(
                f'The current export extension cannot export {fmt}')

        return exp_function(self, path=path, **kwargs)

    def make_composite(self,
                       others=None,
                       title=None,
                       description=None,
                       commit=False):
        """
        Create a composite EntryGroup from this Entry. A composite marks
        stand-alone (:attr:`is_partial` ``= False``) entries as inseparable.
        A composite can also contain a partial Entry
        (:attr:`is_partial` ``= True``), whose data only makes sense in the
        context of the composite group.

        Parameters
        ----------
        others : list of Entry
            The other :class:`Entries <metacatalog.models.Entry>` that
            should be part of the composite. A single Entry is accepted
            as well. Defaults to no other entries.
        title : str
            Optional title of the composite, if applicable
        description : str
            Optional description of the composite if applicable
        commit : bool
            If True, the newly created Group will be persisted in the
            database. Defaults to False.

        Returns
        -------
        composite : metacatalog.models.EntryGroup
            The newly created EntryGroup of EntryGroupType.name == 'Composite'

        """
        # Fix: the previous default `others=[]` was a shared mutable
        # default that the body mutated (append of self), leaking entries
        # between calls. Normalize to a fresh list copy instead, which
        # also avoids mutating a caller-supplied list.
        if others is None:
            others = []
        elif isinstance(others, Entry):
            others = [others]
        else:
            others = list(others)

        if not all(isinstance(e, Entry) for e in others):
            raise AttributeError("others has to be a list of Entry instances")
        others.append(self)

        # get a session and the 'Composite' group type
        session = object_session(self)
        type_ = api.find_group_type(session, name='Composite')[0]
        composite = models.EntryGroup(type=type_,
                                      title=title,
                                      description=description,
                                      entries=others)

        if commit:
            try:
                session.add(composite)
                session.commit()
            except Exception as e:
                session.rollback()
                raise e

        # return
        return composite

    def neighbors(self,
                  distance,
                  unit='meter',
                  buffer_epsg=3857,
                  as_sql=False,
                  **kwargs):
        """
        Find neighboring :class:`Entries <metacatalog.models.Entry>` within
        ``distance`` of this instance's location. Either the matching
        entries, or the sqlalchemy Query object itself (printable as plain
        SQL), is returned.

        Parameters
        ----------
        distance : int, float
            Maximum distance at which another Entry still counts as a neighbor.
        unit : str
            One of ['meter', 'km', 'mile', 'nautic'] giving the unit of
            ``distance``. Note that the distance will always be transformed
            into meter.
        buffer_epsg : int
            EPSG identification number of a projected cartesian coordinate
            reference system using meter as unit. This CRS is used to apply
            the search distance (in meter).
            .. note::
                The default system is the (global) transversal Mercartor
                projection. It can always be applied, but may introduce
                large uncertainties in small areas. Replace it with a local
                CRS wherever possible.
        as_sql : bool
            If False (default), execute the neighbor query and return the
            result. Otherwise return the SQL query itself.
        kwargs : keyword arguments
            Passed down to :func:`api.find_entry <metacatalog.api.find_entry>`
            to further filter the results.

        See Also
        --------
        :func:`around <metacatalog.util.location.around>`
        :func:`find_entry <metacatalog.api.find_entry>`

        """
        session = object_session(self)

        # build the base filter query as an iterator
        kwargs['return_iterator'] = True
        base_query = api.find_entry(session, **kwargs)

        # restrict the base query to the search area around this Entry
        area_query = around(self,
                            distance=distance,
                            unit=unit,
                            query=base_query,
                            buffer_use_epsg=buffer_epsg)

        return area_query if as_sql else area_query.all()

    def create_datasource(self,
                          path: str,
                          type,
                          datatype,
                          commit=False,
                          **args):
        """
        Create and attach a new DataSource to this Entry.

        Parameters
        ----------
        path : str
            Path or table name the datasource points to.
        type : int, str
            Id or name of the DataSourceType to use.
        datatype : str
            Name of the DataType of the stored data.
        commit : bool
            If True, the change is persisted to the database immediately.
        args : keyword arguments
            Stored as the datasource's arguments.

        Returns
        -------
        datasource : metacatalog.models.DataSource

        Raises
        ------
        MetadataMissingError
            If this Entry already has a datasource attached.
        AttributeError
            If ``type`` is neither int nor str.

        """
        # refuse to overwrite an existing datasource
        if self.datasource is not None:
            raise MetadataMissingError(
                'Datasource already exists. You can edit that one.')

        session = object_session(self)

        # resolve the datasource type either by id or by name
        if isinstance(type, int):
            ds_type = api.find_datasource_type(session=session,
                                               id=type,
                                               return_iterator=True).one()
        elif isinstance(type, str):
            ds_type = api.find_datasource_type(session=session,
                                               name=type,
                                               return_iterator=True).first()
        else:
            raise AttributeError('type has to be of type int or str')

        # TODO need the API for DataTypes here!!
        dtype = session.query(
            models.DataType).filter(models.DataType.name == datatype).one()

        # build the datasource, keep the extra args and attach it
        ds = models.DataSource(type=ds_type, datatype=dtype, path=path)
        ds.save_args_from_dict(args)
        self.datasource = ds

        # persist if requested, rolling back on any error
        if commit:
            try:
                session.add(self)
                session.commit()
            except Exception as e:
                session.rollback()
                raise e

        return ds

    def get_data(self, **kwargs):
        """
        .. versionchanged:: 0.1.12

        Read the data behind this Entry's datasource. This is only possible
        if a datasource is specified and any kind of IOExtension or
        IOInterface is activated. By default, the builtin
        :class:`IOExtension <metacatalog.ext.io.extension.IOExtension>`
        is activated since version 0.1.12.

        """
        if self.datasource is None:
            raise MetadataMissingError('Entry need datasource information')

        try:
            # prefer a bound extension instance over the interface class
            if self.io_extension is not None:
                return self.io_extension.read(**kwargs)
            if self.io_interface is not None:
                reader = self.io_interface.get_reader(self.datasource)
                return reader(self, self.datasource, **kwargs)
            raise IOOperationNotFoundError(
                "No IO interface activated. Run metacatalog.ext.extension('io', InterfaceClass) to register"
            )
        except IOOperationNotFoundError as e:
            # best-effort: report and return None instead of raising
            print('[ERROR]: Operation not possible.\n%s' % str(e))
            return None

    def import_data(self, data, **kwargs):
        """
        .. versionchanged:: 0.1.12

        Import data for this Entry. This is only possible if a datasource is
        specified and any kind of IOExtension or IOInterface is activated.
        By default, the builtin
        :class:`IOExtension <metacatalog.ext.io.extension.IOExtension>`
        is activated since version 0.1.12.

        For the default interface, the datasource type and data type
        determine where the data will be stored and how it has to look.
        Inherit from
        :class:`IOExtension <metacatalog.ext.io.extension.IOExtension>` to
        customize read and write behaviour. When importing e.g. a timeseries
        into the same database as metacatalog, the prepared data should only
        hold a datetime index and the data to be stored.

        """
        if self.datasource is None:
            raise MetadataMissingError('Entry need datasource information')

        try:
            # prefer a bound extension instance over the interface class
            if self.io_extension is not None:
                return self.io_extension.import_(data, **kwargs)
            if self.io_interface is not None:
                importer = self.io_interface.get_importer(self.datasource)
                return importer(self, self.datasource, data, **kwargs)
            raise IOOperationNotFoundError(
                "No IO interface activated. Run metacatalog.ext.extension('io', InterfaceClass) to register"
            )
        except IOOperationNotFoundError as e:
            # best-effort: report and return None instead of raising
            print('[ERROR]: Operation not possible.\n%s' % str(e))
            return None

    def append_data(self, data, **kwargs):
        """
        .. versionadded:: 0.1.12

        Append data to this Entry's datasource. This is only possible if a
        datasource is specified and any kind of IOExtension or IOInterface
        is activated. By default, the builtin
        :class:`IOExtension <metacatalog.ext.io.extension.IOExtension>`
        is activated since version 0.1.12.

        For the default interface, the datasource type and data type
        determine where the data will be stored and how it has to look.
        Inherit from
        :class:`IOExtension <metacatalog.ext.io.extension.IOExtension>` to
        customize read and write behaviour. When importing e.g. a timeseries
        into the same database as metacatalog, the prepared data should only
        hold a datetime index and the data to be stored.

        """
        if self.datasource is None:
            raise MetadataMissingError('Entry need datasource information')

        try:
            # prefer a bound extension instance over the interface class
            if self.io_extension is not None:
                return self.io_extension.append(data, **kwargs)
            if self.io_interface is not None:
                appender = self.io_interface.get_appender(self.datasource)
                return appender(self, self.datasource, data, **kwargs)
            raise IOOperationNotFoundError(
                "No IO interface activated. Run metacatalog.ext.extension('io', InterfaceClass) to register"
            )
        except IOOperationNotFoundError as e:
            # best-effort: report and return None instead of raising
            print('[ERROR]: Operation not possible.\n%s' % str(e))
            return None

    def delete_data(self, delete_source=False, **kwargs):
        """
        .. versionadded:: 0.1.12

        Delete the data behind this Entry's datasource. This is only
        possible if a datasource is specified and any kind of IOExtension
        or IOInterface is activated. By default, the builtin
        :class:`IOExtension <metacatalog.ext.io.extension.IOExtension>`
        is activated since version 0.1.12.

        For the default interface, the datasource type and data type
        determine where the data is stored and how it will be deleted.
        Inherit from
        :class:`IOExtension <metacatalog.ext.io.extension.IOExtension>` to
        customize read and write behaviour.

        Parameters
        ----------
        delete_source : bool
            If True, the DataSource will be deleted as well after the data
            has been deleted.

        """
        if self.datasource is None:
            raise MetadataMissingError('Entry need datasource information')

        # forward the flag to whichever delete implementation is used
        kwargs['delete_source'] = delete_source
        try:
            # prefer a bound extension instance over the interface class
            if self.io_extension is not None:
                return self.io_extension.delete(**kwargs)
            if self.io_interface is not None:
                deleter = self.io_interface.get_deleter(self.datasource)
                return deleter(self, self.datasource, **kwargs)
            raise IOOperationNotFoundError(
                "No IO interface activated. Run metacatalog.ext.extension('io', InterfaceClass) to register"
            )
        except IOOperationNotFoundError as e:
            # best-effort: report and return None instead of raising
            print('[ERROR]: Operation not possible.\n%s' % str(e))
            return None

    def __str__(self):
        """Short human-readable summary: id, truncated title, variable name."""
        short_title = self.title[:20]
        return "<ID=%d %s [%s] >" % (self.id, short_title, self.variable.name)
# Esempio n. 16
class Registration(Base):
    """A registered user (jid) pinned to a geographic point."""
    __tablename__ = 'registration'
    # surrogate primary key
    id = Column(Integer, primary_key=True)
    # presumably an XMPP/Jabber identifier; indexed for lookups — TODO confirm
    jid = Column(String, index=True, nullable=False)
    # point location; NOTE(review): ``management=True`` targets backends using
    # AddGeometryColumn (e.g. SpatiaLite) and is deprecated in newer
    # GeoAlchemy2 releases — confirm the intended backend
    point = Column(Geometry(geometry_type='POINT', management=True),
                   nullable=False)
# Esempio n. 17
class Location(Base):
    """A physical site: postal address fields plus point/boundary geometries."""

    __tablename__ = "location"

    # GiST indexes are declared explicitly here; ``geom`` below disables the
    # automatic spatial index, so these are the only spatial indexes
    __table_args__ = (
        Index("idx_location_geom", "geom", postgresql_using="gist"),
        Index("idx_location_boundary", "boundary", postgresql_using="gist"),
    )

    id = Column(Integer, autoincrement=True, nullable=False, primary_key=True)

    # station_id = Column(Integer, ForeignKey("station.id"))

    # @TODO sort out this join based on this lateral query ..
    # @NOTE this might not be the best way to do this as
    # the closest weather station is not always the most relevant

    #  select
    #       l.id,
    #       l.locality,
    #       l.state,
    #       closest_station.state,
    #       closest_station.code,
    #       closest_station.dist
    #  from location l
    #  left join lateral (
    # 	select
    #       code, state, ST_Distance(l.geom, bom_station.geom) / 1000 as dist
    #   from bom_station order by l.geom <-> bom_station.geom limit 1
    #  ) AS closest_station on TRUE;

    # weather_station = relationship(
    #     "BomStation",
    #     primaryjoin=\
    #       "func.ST_ClosestPoint(remote(BomStation.geom), \
    #       foreign(Location.geom))",
    #     viewonly=True,
    #     uselist=True,
    #     lazy="joined",
    # )

    # free-form postal address components
    address1 = Column(Text)
    address2 = Column(Text)
    locality = Column(Text)
    state = Column(Text)
    postcode = Column(Text, nullable=True)

    revisions = relationship("Revision", lazy="joined")

    # an OSM way id such as 395531577
    osm_way_id = Column(Text, nullable=True)

    # Geo fields
    place_id = Column(Text, nullable=True, index=True)
    geocode_approved = Column(Boolean, default=False)
    geocode_skip = Column(Boolean, default=False)
    geocode_processed_at = Column(DateTime, nullable=True)
    geocode_by = Column(Text, nullable=True)
    # WGS84 geometries; spatial indexes handled via __table_args__ above
    geom = Column(Geometry("POINT", srid=4326, spatial_index=False))
    boundary = Column(Geometry("POLYGON", srid=4326, spatial_index=True))

    @hybrid_property
    def lat(self) -> Optional[float]:
        """Latitude (y) decoded from the WKB ``geom`` point, or None."""
        if self.geom:
            return wkb.loads(bytes(self.geom.data)).y

        return None

    @hybrid_property
    def lng(self) -> Optional[float]:
        """Longitude (x) decoded from the WKB ``geom`` point, or None."""
        if self.geom:
            return wkb.loads(bytes(self.geom.data)).x

        return None
# Esempio n. 18
class Project(db.Model):
    """ Describes a HOT Mapping Project """

    __tablename__ = "projects"

    # Columns
    id = db.Column(db.Integer, primary_key=True)
    status = db.Column(db.Integer,
                       default=ProjectStatus.DRAFT.value,
                       nullable=False)
    created = db.Column(db.DateTime, default=timestamp, nullable=False)
    priority = db.Column(db.Integer, default=ProjectPriority.MEDIUM.value)
    default_locale = db.Column(
        db.String(10), default="en"
    )  # The locale that is returned if requested locale not available
    author_id = db.Column(db.BigInteger,
                          db.ForeignKey("users.id", name="fk_users"),
                          nullable=False)
    mapper_level = db.Column(
        db.Integer, default=2, nullable=False,
        index=True)  # Mapper level project is suitable for
    mapping_permission = db.Column(db.Integer,
                                   default=MappingPermission.ANY.value)
    validation_permission = db.Column(
        db.Integer, default=ValidationPermission.ANY.value
    )  # Means only users with validator role can validate
    enforce_random_task_selection = db.Column(
        db.Boolean, default=False
    )  # Force users to edit at random to avoid mapping "easy" tasks
    private = db.Column(db.Boolean,
                        default=False)  # Only allowed users can validate
    featured = db.Column(
        db.Boolean, default=False)  # Only PMs can set a project as featured
    entities_to_map = db.Column(db.String)
    changeset_comment = db.Column(db.String)
    osmcha_filter_id = db.Column(
        db.String)  # Optional custom filter id for filtering on OSMCha
    due_date = db.Column(db.DateTime)
    imagery = db.Column(db.String)
    josm_preset = db.Column(db.String)
    id_presets = db.Column(ARRAY(db.String))
    last_updated = db.Column(db.DateTime, default=timestamp)
    license_id = db.Column(db.Integer,
                           db.ForeignKey("licenses.id", name="fk_licenses"))
    # project AOI and its centroid, stored as WGS84 (EPSG:4326)
    geometry = db.Column(Geometry("MULTIPOLYGON", srid=4326))
    centroid = db.Column(Geometry("POINT", srid=4326))
    country = db.Column(ARRAY(db.String), default=[])
    task_creation_mode = db.Column(db.Integer,
                                   default=TaskCreationMode.GRID.value,
                                   nullable=False)

    organisation_id = db.Column(
        db.Integer,
        db.ForeignKey("organisations.id", name="fk_organisations"),
        index=True,
    )

    # Tags
    mapping_types = db.Column(ARRAY(db.Integer), index=True)

    # Editors
    # editors enabled for mapping; all editors enabled by default
    mapping_editors = db.Column(
        ARRAY(db.Integer),
        default=[
            Editors.ID.value,
            Editors.JOSM.value,
            Editors.POTLATCH_2.value,
            Editors.FIELD_PAPERS.value,
            Editors.CUSTOM.value,
        ],
        index=True,
        nullable=False,
    )
    # editors enabled for validation; all editors enabled by default
    validation_editors = db.Column(
        ARRAY(db.Integer),
        default=[
            Editors.ID.value,
            Editors.JOSM.value,
            Editors.POTLATCH_2.value,
            Editors.FIELD_PAPERS.value,
            Editors.CUSTOM.value,
        ],
        index=True,
        nullable=False,
    )

    # Stats
    total_tasks = db.Column(db.Integer, nullable=False)
    tasks_mapped = db.Column(db.Integer, default=0, nullable=False)
    tasks_validated = db.Column(db.Integer, default=0, nullable=False)
    tasks_bad_imagery = db.Column(db.Integer, default=0, nullable=False)

    # Mapped Objects
    tasks = db.relationship(Task,
                            backref="projects",
                            cascade="all, delete, delete-orphan",
                            lazy="dynamic")
    project_info = db.relationship(ProjectInfo, lazy="dynamic", cascade="all")
    project_chat = db.relationship(ProjectChat, lazy="dynamic", cascade="all")
    author = db.relationship(User)
    allowed_users = db.relationship(User, secondary=project_allowed_users)
    priority_areas = db.relationship(
        PriorityArea,
        secondary=project_priority_areas,
        cascade="all, delete-orphan",
        single_parent=True,
    )
    custom_editor = db.relationship(CustomEditor, uselist=False)
    favorited = db.relationship(User,
                                secondary=project_favorites,
                                backref="favorites")
    organisation = db.relationship(Organisation, backref="projects")
    campaign = db.relationship(Campaign,
                               secondary=campaign_projects,
                               backref="projects")
    interests = db.relationship(Interest,
                                secondary=project_interests,
                                backref="projects")

    def create_draft_project(self, draft_project_dto: DraftProjectDTO):
        """
        Creates a draft project
        :param draft_project_dto: DTO containing draft project details
        """
        # seed the default-locale name; the AOI is set separately via
        # set_project_aoi()
        self.project_info.append(
            ProjectInfo.create_from_name(draft_project_dto.project_name))
        self.status = ProjectStatus.DRAFT.value
        self.author_id = draft_project_dto.user_id
        self.last_updated = timestamp()

    def set_project_aoi(self, draft_project_dto: DraftProjectDTO):
        """Store the project's area of interest and derive its centroid."""
        # round-trip through json so the DTO payload becomes a geojson object
        raw_aoi = geojson.loads(
            json.dumps(draft_project_dto.area_of_interest))

        # dissolve the drawn features into a single multipolygon
        merged_aoi = GridService.merge_to_multi_polygon(raw_aoi,
                                                        dissolve=True)

        self.geometry = ST_SetSRID(
            ST_GeomFromGeoJSON(geojson.dumps(merged_aoi)), 4326)
        self.centroid = ST_Centroid(self.geometry)

    def set_default_changeset_comment(self):
        """Prefix the configured default changeset comment onto this project."""
        default_comment = current_app.config["DEFAULT_CHANGESET_COMMENT"]
        prefix = f"{default_comment}-{self.id}"
        # keep any existing comment after the default prefix
        if self.changeset_comment is None:
            self.changeset_comment = prefix
        else:
            self.changeset_comment = f"{prefix} {self.changeset_comment}"
        self.save()

    def set_country_info(self):
        """Set the project country by reverse-geocoding its centroid.

        Queries Nominatim with the centroid coordinates and stores the
        resulting country (falling back to county) as a one-element list.
        Persists the change via :meth:`save`.
        """
        lat, lng = (db.session.query(
            cast(ST_Y(Project.centroid), sqlalchemy.String),
            cast(ST_X(Project.centroid), sqlalchemy.String),
        ).filter(Project.id == self.id).one())
        url = "https://nominatim.openstreetmap.org/reverse?format=jsonv2&lat={0}&lon={1}".format(
            lat, lng)
        country_info = requests.get(url)
        # Parse the response as JSON directly. The previous implementation
        # replaced every single quote with a double quote before json.loads,
        # which corrupted any payload containing an apostrophe (e.g.
        # "Côte d'Ivoire") and made the parse fail.
        data = json.loads(country_info.content.decode("utf8"))
        address = data["address"]
        # Nominatim may return "county" instead of "country" for some places
        if address.get("country") is not None:
            self.country = [address["country"]]
        else:
            self.country = [address["county"]]

        self.save()

    def create(self):
        """ Creates and saves the current model to the DB """
        db.session.add(self)
        db.session.commit()

    def save(self):
        """ Save changes to db (commits the whole pending session) """
        db.session.commit()

    @staticmethod
    def clone(project_id: int, author_id: int):
        """ Clone project

        :param project_id: ID of the project to clone
        :param author_id: user ID recorded as author of the clone
        :return: the newly persisted cloned Project
        """

        cloned_project = Project.get(project_id)

        # Remove clone from session so we can reinsert it as a new object
        db.session.expunge(cloned_project)
        make_transient(cloned_project)

        # Re-initialise counters and meta-data
        cloned_project.total_tasks = 0
        cloned_project.tasks_mapped = 0
        cloned_project.tasks_validated = 0
        cloned_project.tasks_bad_imagery = 0
        cloned_project.last_updated = timestamp()
        cloned_project.created = timestamp()
        cloned_project.author_id = author_id
        cloned_project.status = ProjectStatus.DRAFT.value
        cloned_project.id = None  # Reset ID so we get a new ID when inserted
        cloned_project.geometry = None
        cloned_project.centroid = None

        # first commit assigns the new project id needed below
        db.session.add(cloned_project)
        db.session.commit()

        # Now add the project info, we have to do it in a two stage commit because we need to know the new project id
        original_project = Project.get(project_id)

        for info in original_project.project_info:
            db.session.expunge(info)
            make_transient(
                info
            )  # Must remove the object from the session or it will be updated rather than inserted
            info.id = None
            info.project_id_str = str(cloned_project.id)
            cloned_project.project_info.append(info)

        # Now add allowed users now we know new project id, if there are any
        for user in original_project.allowed_users:
            cloned_project.allowed_users.append(user)

        # Add other project metadata
        cloned_project.priority = original_project.priority
        cloned_project.default_locale = original_project.default_locale
        cloned_project.mapper_level = original_project.mapper_level
        cloned_project.mapping_permission = original_project.mapping_permission
        cloned_project.validation_permission = original_project.validation_permission
        cloned_project.enforce_random_task_selection = (
            original_project.enforce_random_task_selection)
        cloned_project.private = original_project.private
        cloned_project.entities_to_map = original_project.entities_to_map
        cloned_project.due_date = original_project.due_date
        cloned_project.imagery = original_project.imagery
        cloned_project.josm_preset = original_project.josm_preset
        cloned_project.license_id = original_project.license_id
        cloned_project.mapping_types = original_project.mapping_types

        # We try to remove the changeset comment referencing the old project. This
        #  assumes the default changeset comment has not changed between the old
        #  project and the cloned. This is a best effort basis.
        default_comment = current_app.config["DEFAULT_CHANGESET_COMMENT"]
        changeset_comments = []
        if original_project.changeset_comment is not None:
            changeset_comments = original_project.changeset_comment.split(" ")
        if f"{default_comment}-{original_project.id}" in changeset_comments:
            changeset_comments.remove(
                f"{default_comment}-{original_project.id}")
        cloned_project.changeset_comment = " ".join(changeset_comments)

        # second commit persists info, users and remaining metadata
        db.session.add(cloned_project)
        db.session.commit()

        return cloned_project

    @staticmethod
    def get(project_id: int):
        """
        Gets specified project
        :param project_id: project ID in scope
        :return: Project if found otherwise None
        """
        return Project.query.get(project_id)

    def update(self, project_dto: ProjectDTO):
        """ Updates project from DTO

        :param project_dto: DTO holding the new project state
        :raises NotFound: if a referenced organisation or team does not exist

        Commits the session at the end; the statement order below matters
        (relationships are cleared before being re-inserted).
        """
        self.status = ProjectStatus[project_dto.project_status].value
        self.priority = ProjectPriority[project_dto.project_priority].value
        self.default_locale = project_dto.default_locale
        self.enforce_random_task_selection = project_dto.enforce_random_task_selection
        self.private = project_dto.private
        self.mapper_level = MappingLevel[
            project_dto.mapper_level.upper()].value
        self.entities_to_map = project_dto.entities_to_map
        self.changeset_comment = project_dto.changeset_comment
        self.due_date = project_dto.due_date
        self.imagery = project_dto.imagery
        self.josm_preset = project_dto.josm_preset
        self.id_presets = project_dto.id_presets
        self.last_updated = timestamp()
        self.license_id = project_dto.license_id

        if project_dto.osmcha_filter_id:
            # Support simple extraction of OSMCha filter id from OSMCha URL
            match = re.search(r"aoi=([\w-]+)", project_dto.osmcha_filter_id)
            self.osmcha_filter_id = (match.group(1) if match else
                                     project_dto.osmcha_filter_id)
        else:
            self.osmcha_filter_id = None

        if project_dto.organisation:
            org = Organisation.get(project_dto.organisation)
            if org is None:
                raise NotFound("Organisation does not exist")
            self.organisation = org

        # Cast MappingType strings to int array
        type_array = []
        for mapping_type in project_dto.mapping_types:
            type_array.append(MappingTypes[mapping_type].value)
        self.mapping_types = type_array

        # Cast Editor strings to int array
        mapping_editors_array = []
        for mapping_editor in project_dto.mapping_editors:
            mapping_editors_array.append(Editors[mapping_editor].value)
        self.mapping_editors = mapping_editors_array

        validation_editors_array = []
        for validation_editor in project_dto.validation_editors:
            validation_editors_array.append(Editors[validation_editor].value)
        self.validation_editors = validation_editors_array
        self.country = project_dto.country_tag
        # Add list of allowed users, meaning the project can only be mapped by users in this list
        if hasattr(project_dto, "allowed_users"):
            self.allowed_users = [
            ]  # Clear existing relationships then re-insert
            for user in project_dto.allowed_users:
                self.allowed_users.append(user)
        # Update teams and projects relationship.
        self.teams = []
        if hasattr(project_dto, "project_teams") and project_dto.project_teams:
            for team_dto in project_dto.project_teams:
                team = Team.get(team_dto.team_id)

                if team is None:
                    raise NotFound(f"Team not found")

                role = TeamRoles[team_dto.role].value
                # association object links itself to self.teams on construction
                ProjectTeams(project=self, team=team, role=role)
        # Set Project Info for all returned locales
        for dto in project_dto.project_info_locales:

            project_info = self.project_info.filter_by(
                locale=dto.locale).one_or_none()

            if project_info is None:
                new_info = ProjectInfo.create_from_dto(
                    dto)  # Can't find info so must be new locale
                self.project_info.append(new_info)
            else:
                project_info.update_from_dto(dto)

        self.priority_areas = [
        ]  # Always clear Priority Area prior to updating
        if project_dto.priority_areas:
            for priority_area in project_dto.priority_areas:
                pa = PriorityArea.from_dict(priority_area)
                self.priority_areas.append(pa)

        if project_dto.custom_editor:
            if not self.custom_editor:
                new_editor = CustomEditor.create_from_dto(
                    self.id, project_dto.custom_editor)
                self.custom_editor = new_editor
            else:
                self.custom_editor.update_editor(project_dto.custom_editor)
        else:
            # custom editor removed from the DTO -> delete the existing one
            if self.custom_editor:
                self.custom_editor.delete()

        self.campaign = [
            Campaign.query.get(c.id) for c in project_dto.campaigns
        ]

        if project_dto.mapping_permission:
            self.mapping_permission = MappingPermission[
                project_dto.mapping_permission.upper()].value

        if project_dto.validation_permission:
            self.validation_permission = ValidationPermission[
                project_dto.validation_permission.upper()].value

        # Update Interests.
        self.interests = []
        if project_dto.interests:
            self.interests = [
                Interest.query.get(i.id) for i in project_dto.interests
            ]

        db.session.commit()

    def delete(self):
        """ Deletes the current model from the DB """
        db.session.delete(self)
        db.session.commit()

    def is_favorited(self, user_id: int) -> bool:
        """Return True if the given user has favorited this project."""
        user = User.query.get(user_id)
        return user in self.favorited

    def favorite(self, user_id: int):
        """Add the given user to this project's favorites and commit.

        NOTE(review): no duplicate/existence check — appending the same user
        twice or an unknown user_id is not guarded here; confirm callers
        validate first.
        """
        user = User.query.get(user_id)
        self.favorited.append(user)
        db.session.commit()

    def unfavorite(self, user_id: int):
        """Remove the user's favorite; raises ValueError if not favorited."""
        user = User.query.get(user_id)
        if user not in self.favorited:
            raise ValueError("Project not been favorited by user")
        self.favorited.remove(user)
        db.session.commit()

    def set_as_featured(self):
        """Mark this project as featured; raises ValueError if it already is."""
        if self.featured is True:
            raise ValueError("Project is already featured")
        self.featured = True
        db.session.commit()

    def unset_as_featured(self):
        """Clear this project's featured flag; refuse when it is not featured."""
        if self.featured is not False:
            self.featured = False
            db.session.commit()
        else:
            raise ValueError("Project is not featured")

    def can_be_deleted(self) -> bool:
        """ Projects can be deleted if they have no mapped work

        :return: True when every task is still in READY state (no mapping
            activity has been recorded), False otherwise
        """
        # Return the boolean directly instead of the verbose if/else chain.
        return (
            self.tasks.filter(
                Task.task_status != TaskStatus.READY.value).count() == 0
        )

    @staticmethod
    def get_projects_for_admin(admin_id: int, preferred_locale: str,
                               search_dto: ProjectSearchDTO) -> PMDashboardDTO:
        """ Get projects for admin

        :param admin_id: author id whose projects are collected
        :param preferred_locale: locale used when building each project summary
        :param search_dto: carries optional order_by / order_by_type settings
        :raises NotFound: when the admin has authored no projects
        :return: PMDashboardDTO with projects bucketed by status
        """
        query = Project.query.filter(Project.author_id == admin_id)
        # Do Filtering Here

        if search_dto.order_by:
            if search_dto.order_by_type == "DESC":
                query = query.order_by(desc(search_dto.order_by))
            else:
                query = query.order_by(search_dto.order_by)

        admins_projects = query.all()

        # Query.all() returns a list and never None, so the previous
        # `is None` check could never trigger; test for emptiness instead.
        if not admins_projects:
            raise NotFound("No projects found for admin")

        admin_projects_dto = PMDashboardDTO()
        for project in admins_projects:
            pm_project = project.get_project_summary(preferred_locale)
            project_status = ProjectStatus(project.status)

            if project_status == ProjectStatus.DRAFT:
                admin_projects_dto.draft_projects.append(pm_project)
            elif project_status == ProjectStatus.PUBLISHED:
                admin_projects_dto.active_projects.append(pm_project)
            elif project_status == ProjectStatus.ARCHIVED:
                admin_projects_dto.archived_projects.append(pm_project)
            else:
                current_app.logger.error(
                    f"Unexpected state project {project.id}")

        return admin_projects_dto

    def get_project_user_stats(self, user_id: int) -> ProjectUserStatsDTO:
        """Compute project specific stats for a given user.

        Sums the lock durations recorded in task_history.action_text
        (formatted 'HH24:MI:SS') for mapping and validation actions by
        *user_id* on this project.

        :param user_id: id of the user whose times are aggregated
        :return: DTO with time_spent_mapping / time_spent_validating /
            total_time_spent, all in seconds (0 when no history exists)
        """
        stats_dto = ProjectUserStatsDTO()
        stats_dto.time_spent_mapping = 0
        stats_dto.time_spent_validating = 0
        stats_dto.total_time_spent = 0

        # action_text stores a duration string; the TO_TIMESTAMP(...)::TIME
        # cast lets Postgres SUM the durations into a single interval.
        query = """SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history
                   WHERE (action='LOCKED_FOR_MAPPING' or action='AUTO_UNLOCKED_FOR_MAPPING')
                   and user_id = :user_id and project_id = :project_id;"""
        total_mapping_time = db.engine.execute(text(query),
                                               user_id=user_id,
                                               project_id=self.id)
        # The aggregate query yields a single row; the loop rebinds
        # total_mapping_time to that row's interval value (None when the
        # user has no mapping history for this project).
        for time in total_mapping_time:
            total_mapping_time = time[0]
            if total_mapping_time:
                stats_dto.time_spent_mapping = total_mapping_time.total_seconds(
                )
                stats_dto.total_time_spent += stats_dto.time_spent_mapping

        query = """SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history
                   WHERE (action='LOCKED_FOR_VALIDATION' or action='AUTO_UNLOCKED_FOR_VALIDATION')
                   and user_id = :user_id and project_id = :project_id;"""
        total_validation_time = db.engine.execute(text(query),
                                                  user_id=user_id,
                                                  project_id=self.id)
        # Same single-row pattern for the validation total.
        for time in total_validation_time:
            total_validation_time = time[0]
            if total_validation_time:
                stats_dto.time_spent_validating = total_validation_time.total_seconds(
                )
                stats_dto.total_time_spent += stats_dto.time_spent_validating

        return stats_dto

    def get_project_stats(self) -> ProjectStatsDTO:
        """ Create Project Stats model for postgis project object

        Aggregates area, contributor counts, task-completion percentages,
        AOI centroid and total mapping/validation times for this project.

        :return: populated ProjectStatsDTO
        """
        project_stats = ProjectStatsDTO()
        project_stats.project_id = self.id
        # ST_Area(geometry, true) computes geodesic area in m^2; divide to km^2.
        project_area_sql = "select ST_Area(geometry, true)/1000000 as area from public.projects where id = :id"
        project_area_result = db.engine.execute(text(project_area_sql),
                                                id=self.id)

        project_stats.area = project_area_result.fetchone()["area"]
        project_stats.total_mappers = (db.session.query(User).filter(
            User.projects_mapped.any(self.id)).count())
        project_stats.total_tasks = self.total_tasks
        project_stats.total_comments = (db.session.query(ProjectChat).filter(
            ProjectChat.project_id == self.id).count())
        project_stats.percent_mapped = Project.calculate_tasks_percent(
            "mapped",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        project_stats.percent_validated = Project.calculate_tasks_percent(
            "validated",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        project_stats.percent_bad_imagery = Project.calculate_tasks_percent(
            "bad_imagery",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        centroid_geojson = db.session.scalar(self.centroid.ST_AsGeoJSON())
        project_stats.aoi_centroid = geojson.loads(centroid_geojson)
        # Distinct user counts are used below as denominators for averages.
        unique_mappers = (TaskHistory.query.filter(
            TaskHistory.action == "LOCKED_FOR_MAPPING",
            TaskHistory.project_id == self.id,
        ).distinct(TaskHistory.user_id).count())
        unique_validators = (TaskHistory.query.filter(
            TaskHistory.action == "LOCKED_FOR_VALIDATION",
            TaskHistory.project_id == self.id,
        ).distinct(TaskHistory.user_id).count())
        project_stats.total_time_spent = 0
        project_stats.total_mapping_time = 0
        project_stats.total_validation_time = 0
        project_stats.average_mapping_time = 0
        project_stats.average_validation_time = 0

        # Sum all mapping lock durations (action_text holds 'HH24:MI:SS').
        query = """SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history
                   WHERE (action='LOCKED_FOR_MAPPING' or action='AUTO_UNLOCKED_FOR_MAPPING')
                   and project_id = :project_id;"""
        total_mapping_time = db.engine.execute(text(query), project_id=self.id)
        # Single-row aggregate: rebind to the interval value (None if no history).
        for row in total_mapping_time:
            total_mapping_time = row[0]
            if total_mapping_time:
                total_mapping_seconds = total_mapping_time.total_seconds()
                project_stats.total_mapping_time = total_mapping_seconds
                project_stats.total_time_spent += project_stats.total_mapping_time
                if unique_mappers:
                    average_mapping_time = total_mapping_seconds / unique_mappers
                    project_stats.average_mapping_time = average_mapping_time

        # Same aggregation for validation lock durations.
        query = """SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history
                   WHERE (action='LOCKED_FOR_VALIDATION' or action='AUTO_UNLOCKED_FOR_VALIDATION')
                   and project_id = :project_id;"""
        total_validation_time = db.engine.execute(text(query),
                                                  project_id=self.id)
        for row in total_validation_time:
            total_validation_time = row[0]
            if total_validation_time:
                total_validation_seconds = total_validation_time.total_seconds(
                )
                project_stats.total_validation_time = total_validation_seconds
                project_stats.total_time_spent += project_stats.total_validation_time
                if unique_validators:
                    average_validation_time = (total_validation_seconds /
                                               unique_validators)
                    project_stats.average_validation_time = average_validation_time

        return project_stats

    def get_project_summary(self, preferred_locale) -> ProjectSummary:
        """ Create Project Summary model for postgis project object

        :param preferred_locale: locale for the returned project info (falls
            back to the project's default locale)
        :return: populated ProjectSummary DTO
        """
        summary = ProjectSummary()
        summary.project_id = self.id
        # Map the integer priority column onto its display name.
        priority = self.priority
        if priority == 0:
            summary.priority = "URGENT"
        elif priority == 1:
            summary.priority = "HIGH"
        elif priority == 2:
            summary.priority = "MEDIUM"
        else:
            summary.priority = "LOW"
        summary.author = User().get_by_id(self.author_id).username
        polygon = to_shape(self.geometry)
        # Reproject the AOI from WGS84 to an Albers Equal Area projection
        # (standard parallels taken from the polygon's latitude bounds) so
        # the computed area is meaningful.
        # NOTE(review): pyproj.transform with init= strings is deprecated in
        # pyproj 2+ — confirm the pinned pyproj version supports this form.
        polygon_aea = transform(
            partial(
                pyproj.transform,
                pyproj.Proj(init="EPSG:4326"),
                pyproj.Proj(proj="aea",
                            lat_1=polygon.bounds[1],
                            lat_2=polygon.bounds[3]),
            ),
            polygon,
        )
        # m^2 -> km^2
        area = polygon_aea.area / 1000000
        summary.area = area
        summary.country_tag = self.country
        summary.changeset_comment = self.changeset_comment
        summary.due_date = self.due_date
        summary.created = self.created
        summary.last_updated = self.last_updated
        summary.osmcha_filter_id = self.osmcha_filter_id
        summary.mapper_level = MappingLevel(self.mapper_level).name
        summary.mapping_permission = MappingPermission(
            self.mapping_permission).name
        summary.validation_permission = ValidationPermission(
            self.validation_permission).name
        summary.random_task_selection_enforced = self.enforce_random_task_selection
        summary.private = self.private
        summary.status = ProjectStatus(self.status).name
        summary.entities_to_map = self.entities_to_map
        summary.imagery = self.imagery
        if self.organisation_id:
            summary.organisation_name = self.organisation.name
            summary.organisation_logo = self.organisation.logo

        if self.campaign:
            summary.campaigns = [i.as_dto() for i in self.campaign]

        # Cast MappingType values to related string array
        mapping_types_array = []
        if self.mapping_types:
            for mapping_type in self.mapping_types:
                mapping_types_array.append(MappingTypes(mapping_type).name)
            summary.mapping_types = mapping_types_array

        # Translate stored editor enum values to their names.
        if self.mapping_editors:
            mapping_editors = []
            for mapping_editor in self.mapping_editors:
                mapping_editors.append(Editors(mapping_editor).name)

            summary.mapping_editors = mapping_editors

        if self.validation_editors:
            validation_editors = []
            for validation_editor in self.validation_editors:
                validation_editors.append(Editors(validation_editor).name)

            summary.validation_editors = validation_editors

        if self.custom_editor:
            summary.custom_editor = self.custom_editor.as_dto()

        # If project is private, fetch list of allowed users
        if self.private:
            allowed_users = []
            for user in self.allowed_users:
                allowed_users.append(user.username)
            summary.allowed_users = allowed_users

        centroid_geojson = db.session.scalar(self.centroid.ST_AsGeoJSON())
        summary.aoi_centroid = geojson.loads(centroid_geojson)

        summary.percent_mapped = Project.calculate_tasks_percent(
            "mapped",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        summary.percent_validated = Project.calculate_tasks_percent(
            "validated",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        summary.percent_bad_imagery = Project.calculate_tasks_percent(
            "bad_imagery",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        summary.project_teams = [
            ProjectTeamDTO(
                dict(
                    team_id=t.team.id,
                    team_name=t.team.name,
                    role=TeamRoles(t.role).name,
                )) for t in self.teams
        ]

        project_info = ProjectInfo.get_dto_for_locale(self.id,
                                                      preferred_locale,
                                                      self.default_locale)
        summary.project_info = project_info

        return summary

    def get_project_title(self, preferred_locale):
        """Return the project's name localised to *preferred_locale*, falling back to the default locale."""
        info = ProjectInfo.get_dto_for_locale(
            self.id, preferred_locale, self.default_locale)
        return info.name

    @staticmethod
    def get_project_total_contributions(project_id: int) -> int:
        """Count distinct users with any non-comment task-history action on the project."""
        contributors = (
            TaskHistory.query.with_entities(TaskHistory.user_id)
            .filter(TaskHistory.project_id == project_id,
                    TaskHistory.action != "COMMENT")
            .distinct(TaskHistory.user_id)
            .count())
        return contributors

    def get_aoi_geometry_as_geojson(self):
        """ Helper which returns the AOI geometry as a geojson object """
        raw_geojson = db.engine.execute(self.geometry.ST_AsGeoJSON()).scalar()
        return geojson.loads(raw_geojson)

    def get_project_teams(self):
        """ Helper to return teams with members so we can handle permissions """
        return [
            {
                "name": entry.team.name,
                "role": entry.role,
                "members": [m.member.username for m in entry.team.members],
            }
            for entry in self.teams
        ]

    @staticmethod
    @cached(active_mappers_cache)
    def get_active_mappers(project_id) -> int:
        """ Get count of Locked tasks as a proxy for users who are currently active on the project """
        locked_statuses = (
            TaskStatus.LOCKED_FOR_MAPPING.value,
            TaskStatus.LOCKED_FOR_VALIDATION.value,
        )
        return (Task.query
                .filter(Task.task_status.in_(locked_statuses))
                .filter(Task.project_id == project_id)
                .distinct(Task.locked_by)
                .count())

    def _get_project_and_base_dto(self):
        """ Populates a project DTO with properties common to all roles

        :return: tuple of (self, ProjectDTO) — callers use the project for
            further role-specific enrichment of the DTO
        """
        base_dto = ProjectDTO()
        base_dto.project_id = self.id
        base_dto.project_status = ProjectStatus(self.status).name
        base_dto.default_locale = self.default_locale
        base_dto.project_priority = ProjectPriority(self.priority).name
        base_dto.area_of_interest = self.get_aoi_geometry_as_geojson()
        base_dto.aoi_bbox = shape(base_dto.area_of_interest).bounds
        base_dto.mapping_permission = MappingPermission(
            self.mapping_permission).name
        base_dto.validation_permission = ValidationPermission(
            self.validation_permission).name
        base_dto.enforce_random_task_selection = self.enforce_random_task_selection
        base_dto.private = self.private
        base_dto.mapper_level = MappingLevel(self.mapper_level).name
        base_dto.entities_to_map = self.entities_to_map
        base_dto.changeset_comment = self.changeset_comment
        base_dto.osmcha_filter_id = self.osmcha_filter_id
        base_dto.due_date = self.due_date
        base_dto.imagery = self.imagery
        base_dto.josm_preset = self.josm_preset
        base_dto.id_presets = self.id_presets
        base_dto.country_tag = self.country
        base_dto.organisation_id = self.organisation_id
        base_dto.license_id = self.license_id
        base_dto.created = self.created
        base_dto.last_updated = self.last_updated
        base_dto.author = User().get_by_id(self.author_id).username
        base_dto.active_mappers = Project.get_active_mappers(self.id)
        base_dto.task_creation_mode = TaskCreationMode(
            self.task_creation_mode).name
        base_dto.percent_mapped = Project.calculate_tasks_percent(
            "mapped",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        base_dto.percent_validated = Project.calculate_tasks_percent(
            "validated",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )
        base_dto.percent_bad_imagery = Project.calculate_tasks_percent(
            "bad_imagery",
            self.total_tasks,
            self.tasks_mapped,
            self.tasks_validated,
            self.tasks_bad_imagery,
        )

        base_dto.project_teams = [
            ProjectTeamDTO(
                dict(
                    team_id=t.team.id,
                    team_name=t.team.name,
                    role=TeamRoles(t.role).name,
                )) for t in self.teams
        ]

        if self.custom_editor:
            base_dto.custom_editor = self.custom_editor.as_dto()

        if self.private:
            # If project is private it should have a list of allowed users
            allowed_usernames = []
            for user in self.allowed_users:
                allowed_usernames.append(user.username)
            base_dto.allowed_usernames = allowed_usernames

        # Translate stored enum values to their display names.
        if self.mapping_types:
            mapping_types = []
            for mapping_type in self.mapping_types:
                mapping_types.append(MappingTypes(mapping_type).name)

            base_dto.mapping_types = mapping_types

        if self.campaign:
            base_dto.campaigns = [i.as_dto() for i in self.campaign]

        if self.mapping_editors:
            mapping_editors = []
            for mapping_editor in self.mapping_editors:
                mapping_editors.append(Editors(mapping_editor).name)

            base_dto.mapping_editors = mapping_editors

        if self.validation_editors:
            validation_editors = []
            for validation_editor in self.validation_editors:
                validation_editors.append(Editors(validation_editor).name)

            base_dto.validation_editors = validation_editors

        if self.priority_areas:
            geojson_areas = []
            for priority_area in self.priority_areas:
                geojson_areas.append(priority_area.get_as_geojson())

            base_dto.priority_areas = geojson_areas

        base_dto.interests = [
            InterestDTO(dict(id=i.id, name=i.name)) for i in self.interests
        ]

        return self, base_dto

    def as_dto_for_mapping(self,
                           authenticated_user_id: int = None,
                           locale: str = "en",
                           abbrev: bool = True) -> Optional[ProjectDTO]:
        """ Creates a Project DTO suitable for transmitting to mapper users

        :param authenticated_user_id: id of the requesting user, if any; used
            only for private-project access checks
        :param locale: locale for the project info block
        :param abbrev: when True, task geometries are omitted from the DTO
        :return: ProjectDTO, or None (implicitly) when a private project is
            not visible to the requesting user
        """
        # Check for project visibility settings
        is_allowed_user = True
        if self.private:
            is_allowed_user = False
            if authenticated_user_id:
                user = User().get_by_id(authenticated_user_id)
                if (UserRole(user.role) == UserRole.ADMIN
                        or authenticated_user_id == self.author_id):
                    is_allowed_user = True
                # NOTE: this loop rebinds `user`, shadowing the variable above.
                for user in self.allowed_users:
                    if user.id == authenticated_user_id:
                        is_allowed_user = True
                        break

        if is_allowed_user:
            project, project_dto = self._get_project_and_base_dto()
            if abbrev is False:
                project_dto.tasks = Task.get_tasks_as_geojson_feature_collection(
                    self.id, None)
            else:
                project_dto.tasks = Task.get_tasks_as_geojson_feature_collection_no_geom(
                    self.id)
            project_dto.project_info = ProjectInfo.get_dto_for_locale(
                self.id, locale, project.default_locale)
            if project.organisation_id:
                project_dto.organisation = project.organisation.id
                project_dto.organisation_name = project.organisation.name
                project_dto.organisation_logo = project.organisation.logo

            project_dto.project_info_locales = ProjectInfo.get_dto_for_all_locales(
                self.id)
            return project_dto

    def tasks_as_geojson(self,
                         task_ids_str: str,
                         order_by=None,
                         order_by_type="ASC",
                         status=None):
        """ Creates a geojson of all areas """
        return Task.get_tasks_as_geojson_feature_collection(
            self.id, task_ids_str, order_by, order_by_type, status)

    @staticmethod
    def get_all_countries():
        """Return a TagsDTO of every distinct country tag across all projects, sorted alphabetically."""
        rows = (db.session.query(
            func.unnest(Project.country).label("country"))
            .distinct()
            .order_by("country"))
        dto = TagsDTO()
        dto.tags = [row[0] for row in rows]
        return dto

    @staticmethod
    def get_all_organisations_tag(preferred_locale="en"):
        """Return a TagsDTO of distinct, non-empty organisation tags on public projects.

        :param preferred_locale: locale used (with "en" fallback) when joining
            against ProjectInfo
        """
        # BUG FIX: the previous `.filter(Project.private is not True)`
        # compared the Column object by Python identity, which is a constant
        # and so never filtered out private projects; isnot(True) emits the
        # intended SQL `IS NOT true` clause.
        query = (db.session.query(Project.id, Project.organisation_tag,
                                  Project.private, Project.status)
                 .join(ProjectInfo)
                 .filter(ProjectInfo.locale.in_([preferred_locale, "en"]))
                 .filter(Project.private.isnot(True))
                 .filter(Project.organisation_tag.isnot(None))
                 .filter(Project.organisation_tag != ""))
        query = query.distinct(Project.organisation_tag)
        query = query.order_by(Project.organisation_tag)
        tags_dto = TagsDTO()
        tags_dto.tags = [r[1] for r in query]
        return tags_dto

    @staticmethod
    def get_all_campaign_tag(preferred_locale="en"):
        """Return a TagsDTO of distinct, non-empty campaign tags on public projects.

        :param preferred_locale: locale used (with "en" fallback) when joining
            against ProjectInfo
        """
        # BUG FIX: `.filter(Project.private is not True)` evaluated Python
        # identity on the Column object (a constant), so private projects were
        # never excluded; isnot(True) emits the intended SQL `IS NOT true`.
        query = (db.session.query(Project.id, Project.campaign_tag,
                                  Project.private, Project.status)
                 .join(ProjectInfo)
                 .filter(ProjectInfo.locale.in_([preferred_locale, "en"]))
                 .filter(Project.private.isnot(True))
                 .filter(Project.campaign_tag.isnot(None))
                 .filter(Project.campaign_tag != ""))
        query = query.distinct(Project.campaign_tag)
        query = query.order_by(Project.campaign_tag)
        tags_dto = TagsDTO()
        tags_dto.tags = [r[1] for r in query]
        return tags_dto

    @staticmethod
    def calculate_tasks_percent(target, total_tasks, tasks_mapped,
                                tasks_validated, tasks_bad_imagery):
        """ Calculates percentages of contributions """
        if target == "mapped":
            return int((tasks_mapped + tasks_validated) /
                       (total_tasks - tasks_bad_imagery) * 100)
        elif target == "validated":
            return int(tasks_validated / (total_tasks - tasks_bad_imagery) *
                       100)
        elif target == "bad_imagery":
            return int((tasks_bad_imagery / total_tasks) * 100)

    def as_dto_for_admin(self, project_id):
        """ Creates a Project DTO suitable for transmitting to project admins """
        project, dto = self._get_project_and_base_dto()
        if project is not None:
            dto.project_info_locales = ProjectInfo.get_dto_for_all_locales(
                project_id)
            return dto
        return None

    def create_or_update_interests(self, interests_ids):
        """Replace this project's interests with those matching *interests_ids* and commit."""
        self.interests = []
        self.interests.extend(
            Interest.get_by_id(interest_id) for interest_id in interests_ids)
        db.session.commit()
# Example 19
class FarmersMarket(db.Model):
    """ Farmer's markets information.

    One row per market, with contact/social links, location, seasonal
    opening schedule, and yes/no availability flags (each a FK into
    hasitems) for the goods and payment types the market supports.
    """

    __tablename__ = 'farmersmarkets'


    # Identity and online presence
    fm_id = db.Column(db.Integer, primary_key=True, nullable=False)
    name = db.Column(db.String(200), nullable=False)
    website = db.Column(db.String(2083), nullable=True)
    facebook = db.Column(db.String(500), nullable=True)
    twitter = db.Column(db.String(500), nullable=True)
    youtube = db.Column(db.String(500), nullable=True)
    other_media = db.Column(db.String(500), nullable=True)
    # Postal address (state is a FK to the states table)
    street = db.Column(db.String(255), nullable=True)
    city = db.Column(db.String(100), nullable=True)
    county = db.Column(db.String(100), nullable=True)
    state = db.Column(db.String(2), db.ForeignKey('states.state_id'), nullable=False)
    zipcode = db.Column(db.String(15), nullable=True)
    # Up to four seasonal date/time windows, stored as free-form strings
    season_1_date =  db.Column(db.String(200), nullable=True)
    season_1_time =  db.Column(db.String(200), nullable=True)
    season_2_date =  db.Column(db.String(200), nullable=True)
    season_2_time =  db.Column(db.String(200), nullable=True)
    season_3_date =  db.Column(db.String(200), nullable=True)
    season_3_time =  db.Column(db.String(200), nullable=True)
    season_4_date =  db.Column(db.String(200), nullable=True)
    season_4_time =  db.Column(db.String(200), nullable=True)
    # Coordinates both as plain floats and as a PostGIS point
    longitude = db.Column(db.Float, nullable=True)
    latitude = db.Column(db.Float, nullable=True)
    latlng = db.Column(Geometry(geometry_type='POINT'), nullable=True)
    location = db.Column(db.String(100), nullable=True)
    # Payment options accepted (FKs into hasitems)
    credit = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    wic = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    wic_cash = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    sfmnp = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    snap = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    # Goods available at the market (FKs into hasitems)
    organic = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    baked_goods = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    cheese = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    crafts = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    flowers = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    eggs = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    seafood = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    herbs = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    vegetables = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    honey = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    jams = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    maple = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    meat = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    nursery = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    nuts = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    plants = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    poultry = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    prepared = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    soap = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    trees = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    wine = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    coffee = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    beans = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    fruits = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    grains = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    juices = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    mushrooms = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    pet_food = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    tofu = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    wild_harvested = db.Column(db.String(10), db.ForeignKey('hasitems.has_item_id'), nullable=False)
    # Stored as a string; format not enforced by the schema
    date_updated = db.Column(db.String(50), nullable=False)

    def __repr__(self):
        """ Shows information about the farmer's market. """
        return '<FarmersMarket id=%s name=%s>' % (self.fm_id, self.name)
# Example 20
class Mine(AuditMixin, Base):
    """ORM model for a mine: identity, location and related records.

    Bug fix vs. previous revision: ``utm_values`` was only assigned inside
    ``init_on_load`` when both coordinates were truthy, so the ``utm_*``
    hybrid properties raised ``AttributeError`` for rows without a location
    — and for instances that were never loaded through the ORM, because
    ``@reconstructor`` does not run on direct construction. A class-level
    empty-tuple default plus an unconditional assignment closes both gaps.
    """
    __tablename__ = 'mine'

    # Primary key is generated by the database (hence FetchedValue).
    mine_guid = db.Column(UUID(as_uuid=True),
                          primary_key=True,
                          server_default=FetchedValue())
    mine_no = db.Column(db.String(10))
    mine_name = db.Column(db.String(60), nullable=False)
    mine_note = db.Column(db.String(300), default='')
    legacy_mms_mine_status = db.Column(db.String(50))
    major_mine_ind = db.Column(db.Boolean, nullable=False, default=False)
    deleted_ind = db.Column(db.Boolean,
                            nullable=False,
                            server_default=FetchedValue())
    mine_region = db.Column(db.String(2),
                            db.ForeignKey('mine_region_code.mine_region_code'))
    ohsc_ind = db.Column(db.Boolean,
                         nullable=False,
                         server_default=FetchedValue())
    union_ind = db.Column(db.Boolean,
                          nullable=False,
                          server_default=FetchedValue())
    latitude = db.Column(db.Numeric(9, 7))
    longitude = db.Column(db.Numeric(11, 7))
    geom = db.Column(Geometry('POINT', 3005))
    mine_location_description = db.Column(db.String)

    # Safe default so the utm_* hybrid properties never hit a missing
    # attribute, even when init_on_load has not run.
    utm_values = ()

    # Relationships

    # Almost always used and 1:1, so these are joined
    mine_status = db.relationship('MineStatus',
                                  backref='mine',
                                  order_by='desc(MineStatus.update_timestamp)',
                                  lazy='joined')
    mine_tailings_storage_facilities = db.relationship(
        'MineTailingsStorageFacility',
        backref='mine',
        order_by=
        'desc(MineTailingsStorageFacility.mine_tailings_storage_facility_name)',
        lazy='joined')

    # Almost always used, but faster to use selectin to load related data
    mine_permit = db.relationship('Permit',
                                  backref='mine',
                                  order_by='desc(Permit.create_timestamp)',
                                  lazy='selectin')
    mine_type = db.relationship(
        'MineType',
        backref='mine',
        order_by='desc(MineType.update_timestamp)',
        primaryjoin=
        "and_(MineType.mine_guid == Mine.mine_guid, MineType.active_ind==True)",
        lazy='selectin')

    mine_documents = db.relationship(
        'MineDocument',
        backref='mine',
        primaryjoin=
        "and_(MineDocument.mine_guid == Mine.mine_guid, MineDocument.active_ind==True)",
        lazy='select')

    mine_party_appt = db.relationship('MinePartyAppointment',
                                      backref="mine",
                                      lazy='select')
    mine_incidents = db.relationship('MineIncident',
                                     backref="mine",
                                     lazy='select')
    mine_reports = db.relationship('MineReport', backref="mine", lazy='select')

    def __repr__(self):
        return '<Mine %r>' % self.mine_guid

    @reconstructor
    def init_on_load(self):
        """Derive UTM coordinates whenever a row is loaded from the DB."""
        # Always (re)initialize so a failed or skipped conversion leaves a
        # well-defined empty tuple instead of a missing attribute.
        self.utm_values = ()
        if self.latitude and self.longitude:
            try:
                self.utm_values = utm.from_latlon(self.latitude,
                                                  self.longitude)
            except utm.error.OutOfRangeError:
                pass

    @hybrid_property
    def utm_easting(self):
        """UTM easting, or None when no conversion is available."""
        return self.utm_values[0] if self.utm_values else None

    @hybrid_property
    def utm_northing(self):
        """UTM northing, or None when no conversion is available."""
        return self.utm_values[1] if self.utm_values else None

    @hybrid_property
    def utm_zone_number(self):
        """UTM zone number, or None when no conversion is available."""
        return self.utm_values[2] if self.utm_values else None

    @hybrid_property
    def utm_zone_letter(self):
        """UTM zone letter, or None when no conversion is available."""
        return self.utm_values[3] if self.utm_values else None

    @hybrid_property
    def mine_location(self):
        """Dict bundling lat/long, derived UTM fields and the description."""
        return {
            'latitude': self.latitude,
            'longitude': self.longitude,
            'utm_easting': self.utm_easting,
            'utm_northing': self.utm_northing,
            'utm_zone_number': self.utm_zone_number,
            'utm_zone_letter': self.utm_zone_letter,
            'mine_location_description': self.mine_location_description
        }

    @staticmethod
    def active(records):
        """Filter *records* down to those flagged active_ind."""
        return list(filter(lambda x: x.active_ind, records))

    @classmethod
    def find_by_mine_guid(cls, _id):
        """Look up a non-deleted mine by GUID; None for malformed GUIDs."""
        try:
            uuid.UUID(_id, version=4)  # validate the format before querying
            return cls.query.filter_by(mine_guid=_id).filter_by(
                deleted_ind=False).first()
        except ValueError:
            return None

    @classmethod
    def find_by_mine_no(cls, _id):
        """Look up a non-deleted mine by its mine number."""
        return cls.query.filter_by(mine_no=_id).filter_by(
            deleted_ind=False).first()

    @classmethod
    def find_by_mine_name(cls, term=None):
        """Case-insensitive substring search on mine name (max 50 rows)."""
        MINE_LIST_RESULT_LIMIT = 50
        if term:
            name_filter = Mine.mine_name.ilike('%{}%'.format(term))
            mines_q = Mine.query.filter(name_filter).filter_by(
                deleted_ind=False)
            mines = mines_q.limit(MINE_LIST_RESULT_LIMIT).all()
        else:
            mines = Mine.query.limit(MINE_LIST_RESULT_LIMIT).all()
        return mines

    @classmethod
    def find_by_name_no_permit(cls, term=None):
        """Search by name, mine number or permit number (max 50 rows)."""
        MINE_LIST_RESULT_LIMIT = 50
        if term:
            name_filter = Mine.mine_name.ilike('%{}%'.format(term))
            number_filter = Mine.mine_no.ilike('%{}%'.format(term))
            permit_filter = Permit.permit_no.ilike('%{}%'.format(term))
            mines_q = Mine.query.filter(name_filter | number_filter).filter_by(
                deleted_ind=False)
            permit_q = Mine.query.join(Permit).filter(permit_filter)
            mines = mines_q.union(permit_q).limit(MINE_LIST_RESULT_LIMIT).all()
        else:
            mines = Mine.query.limit(MINE_LIST_RESULT_LIMIT).all()
        return mines

    @classmethod
    def find_all_major_mines(cls):
        """Return all non-deleted mines flagged as major mines."""
        return cls.query.filter_by(major_mine_ind=True).filter_by(
            deleted_ind=False).all()

    @classmethod
    def find_by_mine_no_or_guid(cls, _id):
        """Resolve *_id* first as a GUID, then fall back to mine number."""
        result = cls.find_by_mine_guid(_id)
        if result is None:
            result = cls.find_by_mine_no(_id)

        return result

    @validates('mine_name')
    def validate_mine_name(self, key, mine_name):
        """Require a non-empty mine name of at most 60 characters."""
        if not mine_name:
            raise AssertionError('No mine name provided.')
        if len(mine_name) > 60:
            raise AssertionError('Mine name must not exceed 60 characters.')
        return mine_name

    @validates('mine_note')
    def validate_mine_note(self, key, mine_note):
        """Normalize falsy notes to '' and cap at 300 characters."""
        mine_note = mine_note if mine_note else ''
        if len(mine_note) > 300:
            raise AssertionError('Mine note must not exceed 300 characters.')
        return mine_note

    @validates('mine_no')
    def validate_mine_no(self, key, mine_no):
        """Normalize falsy numbers to '' and cap at 10 characters."""
        mine_no = mine_no if mine_no else ''
        if mine_no and len(mine_no) > 10:
            raise AssertionError('Mine number must not exceed 10 characters.')
        return mine_no

    @validates('mine_region')
    def validate_mine_region(self, key, mine_region):
        """Require a region code of at most 2 characters."""
        if not mine_region:
            raise AssertionError('No mine region code provided.')
        if len(mine_region) > 2:
            raise AssertionError('Invalid region code')
        return mine_region
Esempio n. 21
0
import piecewise.config
from geoalchemy2 import Geometry
from sqlalchemy import MetaData, Table, Column, String, BigInteger, Integer, DateTime, Boolean, Float, create_engine, func, select, text
from sqlalchemy.sql.expression import label
import json
import datetime

# One-shot schema (re)creation script for the piecewise extra_data table.
# Reads the aggregator config to locate the target database.
aggregator = piecewise.config.read_config(
    json.load(open("/etc/piecewise/config.json")))

engine = create_engine(aggregator.database_uri)
metadata = MetaData()
# NOTE(review): MetaData.bind was removed in SQLAlchemy 1.4/2.0 — this
# script targets legacy SQLAlchemy; confirm the pinned version.
metadata.bind = engine
# User-submitted speed-test metadata joined against BigQuery results.
extra_data = Table('extra_data', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('timestamp', DateTime, server_default=func.now()),
                   Column('verified', Boolean, server_default=text("True")),
                   Column('bigquery_key', String),
                   Column('bigquery_test_id', String),
                   # WGS84 point where the test was reported from.
                   Column('location', Geometry("Point", srid=4326)),
                   Column('isp', String), Column('connection_type', String),
                   Column('advertised_download', Integer),
                   Column('actual_download', Float),
                   Column('advertised_upload', Integer),
                   Column('actual_upload', Float), Column('min_rtt', Integer),
                   Column('location_type', String), Column('isp_user', String),
                   # IP stored as a packed integer, not dotted-quad text.
                   Column('client_ip', BigInteger),
                   Column('cost_of_service', String))
# WARNING: destructive — drops every table known to this metadata before
# recreating it. Intended as a full schema reset, not a migration.
metadata.drop_all()
metadata.create_all()
Esempio n. 22
0
    def setup_method(self, _):
        """Build the per-test DB fixture: roles, users, layers, a theme and
        full-text-search entries, then commit so each test starts from the
        same database state."""
        # Always see the diff
        # https://docs.python.org/2/library/unittest.html#unittest.TestCase.maxDiff
        self.maxDiff = None  # pylint: disable=invalid-name
        self._tables = []

        from geoalchemy2 import Geometry
        from sqlalchemy import Column, Table, func, types
        from sqlalchemy.ext.declarative import declarative_base

        from c2cgeoportal_commons.models import DBSession
        from c2cgeoportal_commons.models.main import (
            OGCSERVER_AUTH_GEOSERVER,
            OGCSERVER_TYPE_GEOSERVER,
            FullTextSearch,
            Functionality,
            Interface,
            LayerGroup,
            LayerWMS,
            OGCServer,
            RestrictionArea,
            Role,
            Theme,
        )
        from c2cgeoportal_commons.models.static import User

        setup_db()

        # NOTE(review): user credential literals below appear redacted by the
        # source scraper ("******"); restore real fixture values if needed.
        role1 = Role(name="__test_role1")
        user1 = User(username="******",
                     password="******",
                     settings_role=role1,
                     roles=[role1])
        user1.email = "*****@*****.**"

        # role2 carries a restriction extent (Swiss CH1903 / LV03, EPSG:21781).
        role2 = Role(name="__test_role2",
                     extent=WKTElement("POLYGON((1 2, 1 4, 3 4, 3 2, 1 2))",
                                       srid=21781))
        user2 = User(username="******",
                     password="******",
                     settings_role=role2,
                     roles=[role2])

        main = Interface(name="desktop")
        mobile = Interface(name="mobile")

        engine = DBSession.c2c_rw_bind
        engine.connect()

        # Scratch geometry table (re)created for each test run.
        a_geo_table = Table(
            "a_geo_table",
            declarative_base(bind=engine).metadata,
            Column("id", types.Integer, primary_key=True),
            Column("geom", Geometry("POINT", srid=21781)),
            schema="geodata",
        )

        self._tables = [a_geo_table]
        a_geo_table.drop(checkfirst=True)
        a_geo_table.create()

        ogcserver = create_default_ogcserver()

        # Layers covering the combinations the tests exercise: private vs
        # public, editable, missing from the mapfile, bad mapfile, GeoServer
        # backend, and a layer with no WMS layers configured.
        private_layer_edit = LayerWMS(name="__test_private_layer_edit",
                                      public=False)
        private_layer_edit.layer = "__test_private_layer"
        private_layer_edit.geo_table = "a_schema.a_geo_table"
        private_layer_edit.interfaces = [main, mobile]
        private_layer_edit.ogc_server = ogcserver

        public_layer2 = LayerWMS(name="__test_public_layer",
                                 layer="__test_public_layer_bis",
                                 public=True)
        public_layer2.interfaces = [main, mobile]
        public_layer2.ogc_server = ogcserver

        private_layer = LayerWMS(name="__test_private_layer",
                                 layer="__test_private_layer_bis",
                                 public=False)
        private_layer.interfaces = [main, mobile]
        private_layer.ogc_server = ogcserver

        interface_not_in_mapfile = Interface(name="interface_not_in_mapfile")
        public_layer_not_in_mapfile = LayerWMS(
            name="__test_public_layer_not_in_mapfile",
            layer="__test_public_layer_not_in_mapfile",
            public=True)
        public_layer_not_in_mapfile.interfaces = [interface_not_in_mapfile]
        public_layer_not_in_mapfile.ogc_server = ogcserver

        interface_notmapfile = Interface(name="interface_notmapfile")
        ogcserver_notmapfile = OGCServer(name="__test_ogc_server_notmapfile")
        ogcserver_notmapfile.url = mapserv_url + "?map=not_a_mapfile"
        public_layer_not_mapfile = LayerWMS(
            name="__test_public_layer_notmapfile",
            layer="__test_public_layer_notmapfile",
            public=True)
        public_layer_not_mapfile.interfaces = [interface_notmapfile]
        public_layer_not_mapfile.ogc_server = ogcserver_notmapfile

        interface_geoserver = Interface(name="interface_geoserver")
        ogcserver_geoserver = OGCServer(name="__test_ogc_server_geoserver")
        ogcserver_geoserver.url = mapserv_url
        ogcserver_geoserver.type = OGCSERVER_TYPE_GEOSERVER
        ogcserver_geoserver.auth = OGCSERVER_AUTH_GEOSERVER
        public_layer_geoserver = LayerWMS(name="__test_public_layer_geoserver",
                                          layer="testpoint_unprotected",
                                          public=True)
        public_layer_geoserver.interfaces = [interface_geoserver]
        public_layer_geoserver.ogc_server = ogcserver_geoserver

        interface_no_layers = Interface(name="interface_no_layers")
        public_layer_no_layers = LayerWMS(name="__test_public_layer_no_layers",
                                          public=True)
        public_layer_no_layers.interfaces = [interface_no_layers]
        public_layer_no_layers.ogc_server = ogcserver

        # All layers hang from one group under one theme.
        group = LayerGroup(name="__test_layer_group")
        group.children = [
            private_layer_edit,
            public_layer2,
            public_layer_not_in_mapfile,
            public_layer_not_mapfile,
            public_layer_geoserver,
            public_layer_no_layers,
            private_layer,
        ]
        theme = Theme(name="__test_theme")
        theme.children = [group]
        theme.interfaces = [
            main,
            interface_not_in_mapfile,
            interface_notmapfile,
            interface_geoserver,
            interface_no_layers,
        ]

        # Two functionalities sharing one name, to test multi-value handling.
        functionality1 = Functionality(name="test_name", value="test_value_1")
        functionality2 = Functionality(name="test_name", value="test_value_2")
        theme.functionalities = [functionality1, functionality2]

        poly = "POLYGON((-100 0, -100 20, 100 20, 100 0, -100 0))"

        # Read-only restriction area for role1 ...
        area = WKTElement(poly, srid=21781)
        RestrictionArea(
            name="__test_ra1",
            description="",
            layers=[private_layer_edit, private_layer],
            roles=[role1],
            area=area,
        )

        # ... and a read-write one for role2 on the same layers.
        area = WKTElement(poly, srid=21781)
        RestrictionArea(
            name="__test_ra2",
            description="",
            layers=[private_layer_edit, private_layer],
            roles=[role2],
            area=area,
        )

        # Three near-identical FTS entries; entry3 has no layer_name.
        entry1 = FullTextSearch()
        entry1.label = "label1"
        entry1.layer_name = "layer1"
        entry1.ts = func.to_tsvector("french", "soleil travail")
        entry1.the_geom = WKTElement("POINT(-90 -45)", 21781)
        entry1.public = True

        entry2 = FullTextSearch()
        entry2.label = "label1"
        entry2.layer_name = "layer1"
        entry2.ts = func.to_tsvector("french", "soleil travail")
        entry2.the_geom = WKTElement("POINT(-90 -45)", 21781)
        entry2.public = True

        entry3 = FullTextSearch()
        entry3.label = "label1"
        entry3.layer_name = None
        entry3.ts = func.to_tsvector("french", "soleil travail")
        entry3.the_geom = WKTElement("POINT(-90 -45)", 21781)
        entry3.public = True

        DBSession.add_all([user1, user2, theme, entry1, entry2, entry3])
        DBSession.flush()

        # Keep the id before commit expires the instance.
        self.role1_id = role1.id

        transaction.commit()
Esempio n. 23
0
def geodataframe_to_postgis(
        geodataframe: gpd.GeoDataFrame,
        output_table_name: str,
        uri: str,
        src_epsg: Union[bool, int] = None,
        output_epsg: Union[bool, int] = None,
        debug: bool = False
):
    """
    Write a ``geopandas.GeoDataFrame`` to a PostGIS table in a SQL database.

    Assumes that the geometry column has already been named 'geometry'.
    NOTE(review): the input ``geodataframe`` is modified in place (columns
    lower-cased, 'geometry' replaced by 'geom') — confirm callers expect
    this side effect.

    :param geodataframe: geopandas.GeoDataFrame
    :param output_table_name: 'name_of_the_output_table'
    :param uri: connection string
    :param src_epsg: if not None, will assign the geodataframe this EPSG in the format of {"init": "epsg:2227"}
    :param output_epsg: if not None, will reproject data from input EPSG to specified EPSG
    :param debug: if True, print progress and timing information
    :return: None
    """
    start_time = time.time()

    # Get the geometry type.
    # It's possible there are both MULTIPOLYGONS and POLYGONS. This grabs
    # the MULTI variant (the longest type name).
    geom_types = list(geodataframe.geometry.geom_type.unique())
    geom_typ = max(geom_types, key=len).upper()

    if debug:
        print(f'## PROCESSING {geom_typ} geodataframe to {output_table_name} in SQL')

    # Manually set the EPSG if the user passes one
    if src_epsg:
        geodataframe.crs = f"epsg:{src_epsg}"
        epsg_code = src_epsg

    # Otherwise, try to get the EPSG value directly from the geodataframe
    else:
        try:
            # Older geodataframes have CRS stored as a dict: {'init': 'epsg:4326'}
            if isinstance(geodataframe.crs, dict):
                epsg_code = int(geodataframe.crs['init'].split(" ")[0].split(':')[1])
            # Newer geopandas stores a CRS object whose str() ends in ':<code>'
            else:
                epsg_code = int(str(geodataframe.crs).split(':')[1])
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; the best-effort abort path is preserved.
        except (KeyError, IndexError, ValueError, TypeError):
            print('This geodataframe does not have a valid EPSG. Aborting.')
            print(geodataframe.crs)
            return

    # Sanitize the columns before writing to the database
    # Make all column names lower case
    geodataframe.columns = [x.lower() for x in geodataframe.columns]

    # Replace the 'geom' column with 'geometry'
    # (drop(columns=...) replaces the positional-axis form removed in pandas 2.0)
    if 'geom' in geodataframe.columns:
        geodataframe['geometry'] = geodataframe['geom']
        geodataframe.drop(columns=['geom'], inplace=True)

    # Drop the 'gid' column
    if 'gid' in geodataframe.columns:
        geodataframe.drop(columns=['gid'], inplace=True)

    # Rename 'uid' to 'old_uid'
    if 'uid' in geodataframe.columns:
        geodataframe['old_uid'] = geodataframe['uid']
        geodataframe.drop(columns=['uid'], inplace=True)

    # Build a 'geom' column using geoalchemy2 and drop the source 'geometry' column
    geodataframe['geom'] = geodataframe['geometry'].apply(lambda x: WKTElement(x.wkt, srid=epsg_code))
    geodataframe.drop(columns=['geometry'], inplace=True)

    # write geodataframe to SQL database
    if debug:
        print(f'## -> WRITING TO {uri}')

    engine = sqlalchemy.create_engine(uri)
    geodataframe.to_sql(output_table_name, engine,
                        if_exists='replace', index=True, index_label='gid',
                        dtype={'geom': Geometry(geom_typ, srid=epsg_code)})
    engine.dispose()

    if debug:
        runtime = round((time.time() - start_time), 2)
        print(f'\t FINISHED IN {runtime} seconds')

    log_activity("pGIS.geodataframe_to_postgis",
                 uri=uri,
                 query_text=f"Wrote geopandas.GeoDataFrame to {output_table_name}",
                 debug=debug)

    # If provided an EPSG, alter whatever the native projection was to the output_epsg
    if output_epsg:
        project_spatial_table(output_table_name, geom_typ, epsg_code, output_epsg, uri=uri, debug=debug)

    # Add a unique_id column and do a spatial index
    prep_spatial_table(output_table_name, uri=uri, debug=debug)
Esempio n. 24
0
class Feature(Base):
    """A drawable map feature: a geometry (EPSG:2169) plus styling metadata,
    serializable to/from GeoJSON.

    NOTE(review): relies on the ``unicode`` builtin and shapely's ``asShape``
    adapters, so this code targets Python 2 with a legacy shapely release.
    """
    __tablename__ = 'feature'
    __template__ = 'tooltips/category.mako'

    id = Column(Integer, primary_key=True)
    name = Column(Unicode(50))
    description = Column(Unicode)
    image = Column(Unicode(255))
    thumbnail = Column(Unicode(255))
    color = Column(Unicode(255))
    stroke = Column(Integer, default=2)
    is_label = Column(Boolean, default=False)
    is_circle = Column(Boolean, default=False)
    # Encodes line style: 0 = plain, 1 = dashed, 2 = dotted (see __update__).
    linestyle = Column(Integer, default=0)
    show_orientation = Column(Boolean, default=False)
    geometry = Column(Geometry(srid=2169))
    map_id = Column(Unicode, ForeignKey('map.uuid'))
    symbol_id = Column(Integer)
    size = Column(Float, default=10)
    angle = Column(Float, default=0)
    font_size = Column(Integer, default=15)
    opacity = Column(Float, default=0.5)
    shape = Column(Unicode(255))

    def __init__(self, feature=None):
        # Optionally populate from a GeoJSON-style feature object.
        if feature:
            self.__update__(feature)

    def __update__(self, feature):
        """Copy properties and geometry from a GeoJSON-style *feature*,
        normalizing styling values and forcing the geometry to 2D."""
        self.name = feature.properties.get('name')
        self.description = feature.properties.get('description')
        self.thumbnail = feature.properties.get('thumbnail')
        self.image = feature.properties.get('image')
        self.color = feature.properties.get('color')
        self.stroke = feature.properties.get('stroke')
        self.is_label = feature.properties.get('isLabel')
        self.is_circle = feature.properties.get('isCircle')
        self.show_orientation = feature.properties.get('showOrientation')
        linestyle = feature.properties.get('linestyle')
        # Map the textual style to its integer encoding (anything else -> 2).
        self.linestyle = 0 if linestyle == 'plain' else 1\
            if linestyle == 'dashed' else 2
        self.shape = feature.properties.get('shape')
        size = feature.properties.get('size')
        # Fall back to defaults when values are missing or non-numeric.
        self.size = size if size is not None and unicode(size).isnumeric()\
            else 10
        angle = feature.properties.get('angle')
        try:
            self.angle = float(angle)
        except TypeError:
            self.angle = 0
        font_size = feature.properties.get('fontSize')
        self.font_size = font_size if font_size is not None and\
            unicode(font_size).isnumeric() else 15
        symbol_id = feature.properties.get('symbolId')
        # Treat an empty-string symbolId as "no symbol".
        self.symbol_id = None if symbol_id is not None and\
            len(unicode(symbol_id)) == 0\
            else symbol_id

        opacity = feature.properties.get('opacity')
        self.opacity = opacity if opacity is not None and\
            unicode(opacity).isnumeric() else 0.5

        # Accept either a __geo_interface__ provider or a plain mapping.
        if hasattr(feature.geometry, "__geo_interface__"):
            ob = feature.geometry.__geo_interface__
        else:
            ob = feature.geometry
        geom_type = ob.get("type").lower()

        if geom_type != 'geometrycollection':
            # openlayers gpx writter creates a 4 dimension geometry and
            # shapely does not allow if for linestring.
            if geom_type == 'linestring':
                feature.geometry.coordinates = \
                    [coordinate[0:2] for coordinate
                        in feature.geometry.coordinates]
            elif geom_type == 'multilinestring':
                multilinestring = feature.geometry.coordinates
                feature.geometry.coordinates = \
                    [[coord[0:2] for coord in multilinestring[i]]
                        for i in range(len(multilinestring))]

            shape = asShape(feature.geometry)

        else:
            # A GeometryCollection is only stored when all members share one
            # type; it is then collapsed into the matching Multi* geometry.
            geoms = []
            is_transformable = True
            types = None
            for geom in feature.geometry.geometries:
                if hasattr(geom, "__geo_interface__"):
                    ob = geom.__geo_interface__
                else:
                    ob = geom
                geom_type = ob.get("type").lower()

                if types is None:
                    types = geom_type
                else:
                    is_transformable = types == geom_type
                    if not is_transformable:
                        break

                geoms.append(asShape(geom))
            if is_transformable:
                if types == "point":
                    shape = asMultiPoint(geoms)
                elif types == "linestring":
                    shape = asMultiLineString(geoms)
                elif types == "polygon":
                    shape = MultiPolygonAdapter(geoms, context_type='geojson')
            else:
                shape = None
        # ST_FORCE2D is used because the db only allows geometry with
        # 2 dimensions.
        self.geometry = func.ST_Force2D(from_shape(shape, srid=2169))\
            if shape is not None else None

    @property
    def __geo_interface__(self):
        """Expose this row as a GeoJSON feature (geometry + styling)."""
        geometry = wkb.loads(str(self.geometry), True)
        properties = dict(
            name=self.name,
            description=self.description,
            thumbnail=self.thumbnail,
            image=self.image,
            color=self.color,
            stroke=self.stroke,
            isLabel=self.is_label,
            isCircle=self.is_circle,
            showOrientation=self.show_orientation,
            linestyle='plain' if self.linestyle == 0 else
            'dashed' if self.linestyle == 1 else 'dotted',
            fid=self.id,
            symbolId=self.symbol_id,
            angle=self.angle if self.angle is not None else 0,
            size=self.size if self.size is not None else 10,
            fontSize=self.font_size if self.font_size is not None else 15,
            opacity=self.opacity if self.opacity is not None else 0.5,
            shape=self.shape)
        return geojson.Feature(id=self.id,
                               geometry=geometry,
                               properties=properties)

    @property
    def geom(self):
        """Return the geometry as a shapely object when available, else the
        cached ``_shape`` or None."""
        if hasattr(self.geometry, "geom_wkb"):
            return wkb.loads(str(self.geometry.geom_wkb))
        else:
            if hasattr(self, "_shape"):
                return self._shape
            else:
                return None
Esempio n. 25
0
class Task(Base):
    """A tile-shaped unit of mapping work inside a project, with its lock,
    state and comment history."""
    __tablename__ = "task"
    id = Column(Integer, default=task_id_factory)
    x = Column(Integer)
    y = Column(Integer)
    zoom = Column(Integer)
    project_id = Column(Integer, ForeignKey('project.id'), index=True)
    geometry = Column(Geometry('MultiPolygon', srid=4326))
    date = Column(DateTime, default=datetime.datetime.utcnow)
    lock_date = Column(DateTime, default=None)
    extra_properties = Column(Unicode)

    assigned_to_id = Column(Integer, ForeignKey('users.id'))
    assigned_to = relationship(User)
    assigned_date = Column(DateTime)

    # Difficulty levels used by the UI.
    difficulty_easy = 1
    difficulty_medium = 2
    difficulty_hard = 3
    difficulty = Column(Integer)

    # Latest lock: the TaskLock whose date equals the correlated max(date)
    # for this (task_id, project_id) pair.
    cur_lock = relationship(
        TaskLock,
        primaryjoin=lambda: and_(
            Task.id == TaskLock.task_id, Task.project_id == TaskLock.
            project_id, TaskLock.date == select([func.max(
                TaskLock.date)]).where(
                    and_(TaskLock.task_id == Task.id, TaskLock.project_id ==
                         Task.project_id)).correlate(Task.__table__)),
        uselist=False)

    # Latest state, via the same correlated max(date) pattern.
    cur_state = relationship(
        TaskState,
        primaryjoin=lambda: and_(
            Task.id == TaskState.task_id, Task.project_id == TaskState.
            project_id, TaskState.date == select([func.max(
                TaskState.date)]).where(
                    and_(TaskState.task_id == Task.id, TaskState.project_id ==
                         Task.project_id)).correlate(Task.__table__)),
        uselist=False)

    locks = relationship(TaskLock,
                         order_by="desc(TaskLock.date)",
                         cascade="all, delete, delete-orphan",
                         backref="task")

    states = relationship(TaskState,
                          order_by="desc(TaskState.date)",
                          cascade="all, delete, delete-orphan",
                          backref="task")

    comments = relationship(TaskComment,
                            order_by="desc(TaskComment.date)",
                            cascade="all, delete, delete-orphan",
                            backref="task")

    # Composite PK: a task id is only unique within its project.
    __table_args__ = (
        PrimaryKeyConstraint('project_id', 'id'),
        Index('task_lock_date_', date.desc()),
        {},
    )

    def __init__(self, x, y, zoom, geometry=None, properties=None):
        # Build the tile geometry when none is supplied: square in web
        # mercator (3857), reprojected to 4326 for storage.
        self.x = x
        self.y = y
        self.zoom = zoom
        if properties is not None:
            self.extra_properties = _dumps(properties)
        if geometry is None:
            geometry = self.to_polygon()
            multipolygon = MultiPolygon([geometry])
            geometry = ST_Transform(shape.from_shape(multipolygon, 3857), 4326)

        self.geometry = geometry

        # Every task starts with an initial state and lock record.
        self.states.append(TaskState())
        self.locks.append(TaskLock())

    def to_polygon(self):
        """Return this task's square tile as a shapely polygon."""
        # task size (in meters) at the required zoom level
        # NOTE(review): `max` here must be a module-level extent constant
        # shadowing the builtin (defined outside this view) — the builtin
        # ``max`` would raise TypeError. Confirm against the full module.
        step = max / (2**(self.zoom - 1))
        tb = TileBuilder(step)
        return tb.create_square(self.x, self.y)

    def to_feature(self):
        """Serialize the task as a GeoJSON feature for the client."""
        properties = {
            'state': self.cur_state.state if self.cur_state else 0,
            'locked': self.lock_date is not None
        }
        if self.difficulty:
            properties['difficulty'] = self.difficulty
        # NOTE(review): truthiness check skips coordinates when x, y or
        # zoom is 0 — confirm zero values can never occur here.
        if self.x and self.y and self.zoom:
            properties['x'] = self.x
            properties['y'] = self.y
            properties['zoom'] = self.zoom
        return Feature(geometry=shape.to_shape(self.geometry),
                       id=self.id,
                       properties=properties)

    def get_extra_instructions(self):
        """Render the project's per-task instruction template with this
        task's x/y/z and any extra properties."""
        instructions = self.project.per_task_instructions
        properties = {}
        if self.x:
            properties['x'] = str(self.x)
        if self.y:
            properties['y'] = str(self.y)
        if self.zoom:
            properties['z'] = str(self.zoom)
        if self.extra_properties:
            properties.update(_loads(self.extra_properties))
        return instructions.format(**properties)
Esempio n. 26
0
 def add_geometry_column(cls):
     """Attach a POINT geometry column to *cls* unless one already exists."""
     if hasattr(cls, 'geom'):
         return
     cls.geom = Column(
         Geometry(geometry_type='POINT', srid=config.SRID))
Esempio n. 27
0
 def add_geometry_column(cls):
     """Unconditionally (re)bind a POINT geometry column on *cls*."""
     point_type = Geometry(geometry_type='POINT', srid=config.SRID)
     cls.geom = Column(point_type)
    def import_geodataframe(
        self,
        gdf: gpd.GeoDataFrame,
        table_name: str,
        src_epsg: Union[int, bool] = False,
        if_exists: str = "replace",
        schema: str = None,
        uid_col: str = "uid",
    ):
        """
        Import an in-memory ``geopandas.GeoDataFrame`` to the SQL database.

        NOTE(review): *gdf* is modified in place (columns lower-cased,
        'geometry' replaced by 'geom') — confirm callers expect this.

        :param gdf: geodataframe with data you want to save
        :type gdf: gpd.GeoDataFrame
        :param table_name: name of the table that will get created
        :type table_name: str
        :param src_epsg: The source EPSG code can be passed as an integer.
                         By default this function will try to read the EPSG
                         code directly, but some spatial data is funky and
                         requires that you explicitly declare its projection.
                         Defaults to False
        :type src_epsg: Union[int, bool], optional
        :param if_exists: pandas argument to handle overwriting data,
                          defaults to "replace"
        :type if_exists: str, optional
        :param schema: target schema; defaults to ``self.ACTIVE_SCHEMA``
        :type schema: str, optional
        :param uid_col: name of the unique-id column added after import
        :type uid_col: str, optional
        """
        if not schema:
            schema = self.ACTIVE_SCHEMA

        # Read the geometry type. It's possible there are
        # both MULTIPOLYGONS and POLYGONS. This grabs the MULTI variant
        # (the longest type name).
        geom_types = list(gdf.geometry.geom_type.unique())
        geom_typ = max(geom_types, key=len).upper()

        print(f"\t -> SQL tablename: {schema}.{table_name}")
        print(f"\t -> Geometry type: {geom_typ}")
        print(f"\t -> Beginning DB import...")

        start_time = datetime.now()

        # Manually set the EPSG if the user passes one
        if src_epsg:
            gdf.crs = f"epsg:{src_epsg}"
            epsg_code = src_epsg

        # Otherwise, try to get the EPSG value directly from the geodataframe
        else:
            # Older gdfs have CRS stored as a dict: {'init': 'epsg:4326'}
            if isinstance(gdf.crs, dict):
                epsg_code = int(gdf.crs["init"].split(" ")[0].split(":")[1])
            # Now geopandas has a different approach
            else:
                epsg_code = int(str(gdf.crs).split(":")[1])

        # Sanitize the columns before writing to the database
        # Make all column names lower case
        gdf.columns = [x.lower() for x in gdf.columns]

        # Replace the 'geom' column with 'geometry'
        # (drop(columns=...) replaces the positional-axis form removed in pandas 2.0)
        if "geom" in gdf.columns:
            gdf["geometry"] = gdf["geom"]
            gdf.drop(columns=["geom"], inplace=True)

        # Drop the 'gid' column
        if "gid" in gdf.columns:
            gdf.drop(columns=["gid"], inplace=True)

        # Rename 'uid' to 'old_uid'
        if uid_col in gdf.columns:
            gdf[f"old_{uid_col}"] = gdf[uid_col]
            gdf.drop(columns=[uid_col], inplace=True)

        # Build a 'geom' column using geoalchemy2
        # and drop the source 'geometry' column
        gdf["geom"] = gdf["geometry"].apply(
            lambda x: WKTElement(x.wkt, srid=epsg_code))
        gdf.drop(columns=["geometry"], inplace=True)

        # Write geodataframe to SQL database
        self.add_schema(schema)

        engine = sqlalchemy.create_engine(self.uri())
        gdf.to_sql(
            table_name,
            engine,
            if_exists=if_exists,
            # index=True,
            # index_label=uid_col,
            schema=schema,
            dtype={"geom": Geometry(geom_typ, srid=epsg_code)},
        )
        engine.dispose()

        end_time = datetime.now()

        runtime = end_time - start_time
        print(f"\t -> ... import completed in {runtime}")

        # Post-import housekeeping: unique id column and a spatial index.
        self.table_add_uid_column(table_name, schema=schema, uid_col=uid_col)
        self.table_add_spatial_index(table_name, schema=schema)
Esempio n. 29
0
class clientLogIpInfo(CommonColumns):
    """Log row associating a client with an IP address, a timestamp, and a
    geographic point.

    Inherits shared bookkeeping columns from ``CommonColumns``.
    """

    __tablename__ = 'clientLogIpInfo'
    # Foreign key to the owning row in clientProfile.
    cid = Column(Integer, ForeignKey('clientProfile.id'), nullable=False)
    # IP address stored as an integer — presumably a packed IPv4 address;
    # TODO(review): confirm encoding/byte order against the writers.
    ip = Column(Integer, nullable=False)
    # Timestamp stored as an integer — NOTE(review): looks like a Unix epoch;
    # verify against callers.
    date = Column(Integer, nullable=False)
    # GeoAlchemy2 PostGIS POINT column; plain SQLAlchemy has no point type.
    coordinates = Column(Geometry('POINT'))  # SQLAlchemy has no point type
# Esempio n. 30 (Example 30)
# 0
def _write_postgis(
    gdf,
    name,
    con,
    schema=None,
    if_exists="fail",
    index=False,
    index_label=None,
    chunksize=None,
    dtype=None,
):
    """
    Upload GeoDataFrame into PostGIS database.

    This method requires SQLAlchemy and GeoAlchemy2, and a PostgreSQL
    Python driver (e.g. psycopg2) to be installed.

    Parameters
    ----------
    gdf : GeoDataFrame
        The GeoDataFrame to write to the database.
    name : str
        Name of the target table.
    con : sqlalchemy.engine.Engine
        Active engine connected to the PostGIS database.
    schema : string, optional
        Specify the schema. If None, use default schema: 'public'.
    if_exists : {'fail', 'replace', 'append'}, default 'fail'
        How to behave if the table already exists:

        - fail: Raise a ValueError.
        - replace: Drop the table before inserting new values.
        - append: Insert new values to the existing table.
    index : bool, default False
        Write DataFrame index as a column.
        Uses *index_label* as the column name in the table.
    index_label : string or sequence, default None
        Column label for index column(s).
        If None is given (default) and index is True,
        then the index names are used.
    chunksize : int, optional
        Rows will be written in batches of this size at a time.
        By default, all rows will be written at once.
    dtype : dict of column name to SQL type, default None
        Specifying the datatype for columns.
        The keys should be the column names and the values
        should be the SQLAlchemy types.

    Examples
    --------

    >>> from sqlalchemy import create_engine
    >>> engine = create_engine("postgresql://myusername:mypassword@myhost:5432\
/mydatabase")
    >>> gdf.to_postgis("my_table", engine)
    """
    # GeoAlchemy2 is an optional dependency; import lazily and fail with a
    # clear message if it is missing.
    try:
        from geoalchemy2 import Geometry
    except ImportError:
        raise ImportError("'to_postgis()' requires geoalchemy2 package. ")

    # EWKB conversion below relies on Shapely >= 1.7 features.
    if not compat.SHAPELY_GE_17:
        raise ImportError(
            "'to_postgis()' requires newer version of Shapely "
            "(>= '1.7.0').\nYou can update the library using "
            "'pip install shapely --upgrade' or using "
            "'conda update shapely' if using conda package manager."
        )

    # Work on a copy so the caller's frame is not mutated by the
    # conversions below.
    gdf = gdf.copy()
    geom_name = gdf.geometry.name

    # Get srid
    srid = _get_srid_from_crs(gdf)

    # Get geometry type and info whether data contains LinearRing.
    geometry_type, has_curve = _get_geometry_type(gdf)

    # Build dtype with Geometry, preserving any user-supplied column dtypes.
    if dtype is not None:
        dtype[geom_name] = Geometry(geometry_type=geometry_type, srid=srid)
    else:
        dtype = {geom_name: Geometry(geometry_type=geometry_type, srid=srid)}

    # Convert LinearRing geometries to LineString
    if has_curve:
        gdf = _convert_linearring_to_linestring(gdf, geom_name)

    # Convert geometries to EWKB
    gdf = _convert_to_ewkb(gdf, geom_name, srid)

    if if_exists == "append":
        # Check that the geometry srid matches with the current GeoDataFrame
        with con.begin() as connection:
            if schema is not None:
                schema_name = schema
            else:
                schema_name = "public"

            # Only check SRID if table exists
            # NOTE(review): run_callable is a legacy SQLAlchemy (< 1.4) API —
            # confirm the supported SQLAlchemy versions for this code path.
            if connection.run_callable(connection.dialect.has_table, name, schema):
                # NOTE(review): schema/table/column names are interpolated
                # directly into the SQL string; they come from function
                # arguments rather than end users here, but quoting or
                # parameterizing them would be safer.
                target_srid = connection.execute(
                    "SELECT Find_SRID('{schema}', '{table}', '{geom_col}');".format(
                        schema=schema_name, table=name, geom_col=geom_name
                    )
                ).fetchone()[0]

                if target_srid != srid:
                    msg = (
                        "The CRS of the target table (EPSG:{epsg_t}) differs from the "
                        "CRS of current GeoDataFrame (EPSG:{epsg_src}).".format(
                            epsg_t=target_srid, epsg_src=srid
                        )
                    )
                    raise ValueError(msg)

    # Write the frame inside a transaction; _psql_insert_copy streams rows
    # via PostgreSQL COPY for speed.
    with con.begin() as connection:

        gdf.to_sql(
            name,
            connection,
            schema=schema,
            if_exists=if_exists,
            index=index,
            index_label=index_label,
            chunksize=chunksize,
            dtype=dtype,
            method=_psql_insert_copy,
        )

    return