Example #1
0
class PostgresEngineSpec(PostgresBaseEngineSpec):
    """Superset database engine spec for PostgreSQL."""

    engine = "postgresql"
    engine_aliases = ("postgres",)
    # PostgreSQL truncates identifiers longer than 63 bytes.
    max_column_name_length = 63
    try_remove_schema_from_table_name = False

    # (pattern over the native type name, SQLAlchemy type or factory,
    #  generic data type category).
    # NOTE(review): the "array" pattern defines no capture groups, yet the
    # lambda reads match[2] — that would raise IndexError if it were ever
    # invoked; confirm the intended pattern upstream.
    column_type_mappings = (
        (
            re.compile(r"^double precision", re.IGNORECASE),
            DOUBLE_PRECISION(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^array.*", re.IGNORECASE),
            lambda match: ARRAY(int(match[2])) if match[2] else String(),
            utils.GenericDataType.STRING,
        ),
        (
            re.compile(r"^json.*", re.IGNORECASE),
            JSON(),
            utils.GenericDataType.STRING,
        ),
        (
            re.compile(r"^enum.*", re.IGNORECASE),
            ENUM(),
            utils.GenericDataType.STRING,
        ),
    )

    @classmethod
    def get_allow_cost_estimate(cls, extra: Dict[str, Any]) -> bool:
        """EXPLAIN-based cost estimation is always available on Postgres."""
        return True

    @classmethod
    def estimate_statement_cost(cls, statement: str,
                                cursor: Any) -> Dict[str, Any]:
        """Estimate the cost of *statement* by parsing ``EXPLAIN`` output.

        :param statement: single SQL statement to estimate
        :param cursor: cursor on which the EXPLAIN is executed
        :return: start-up and total planner cost, or ``{}`` when the first
            EXPLAIN line does not contain a ``cost=a..b`` range
        """
        sql = f"EXPLAIN {statement}"
        cursor.execute(sql)

        result = cursor.fetchone()[0]
        match = re.search(r"cost=([\d\.]+)\.\.([\d\.]+)", result)
        if match:
            return {
                "Start-up cost": float(match.group(1)),
                "Total cost": float(match.group(2)),
            }

        return {}

    @classmethod
    def query_cost_formatter(
            cls, raw_cost: List[Dict[str, Any]]) -> List[Dict[str, str]]:
        """Render every estimated cost value as a string for display."""
        return [{k: str(v) for k, v in row.items()} for row in raw_cost]

    @classmethod
    def get_table_names(cls, database: "Database", inspector: PGInspector,
                        schema: Optional[str]) -> List[str]:
        """Need to consider foreign tables for PostgreSQL"""
        tables = inspector.get_table_names(schema)
        tables.extend(inspector.get_foreign_table_names(schema))
        return sorted(tables)

    @classmethod
    def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
        """Convert *dttm* into a SQL literal for the given temporal type.

        :return: a ``TO_DATE``/``TO_TIMESTAMP`` expression, or ``None`` when
            *target_type* is not recognized as temporal
        """
        tt = target_type.upper()
        if tt == utils.TemporalType.DATE:
            return f"TO_DATE('{dttm.date().isoformat()}', 'YYYY-MM-DD')"
        if "TIMESTAMP" in tt or "DATETIME" in tt:
            dttm_formatted = dttm.isoformat(sep=" ", timespec="microseconds")
            return f"""TO_TIMESTAMP('{dttm_formatted}', 'YYYY-MM-DD HH24:MI:SS.US')"""
        return None

    @staticmethod
    def get_extra_params(database: "Database") -> Dict[str, Any]:
        """
        For Postgres, the path to a SSL certificate is placed in `connect_args`.

        :param database: database instance from which to extract extras
        :raises CertificateException: If certificate is not valid/unparseable
        :raises SupersetException: If database extra json payload is unparseable
        """
        try:
            extra = json.loads(database.extra or "{}")
        except json.JSONDecodeError as ex:
            # Chain the decode error so the original JSON context survives.
            raise SupersetException("Unable to parse database extras") from ex

        if database.server_cert:
            engine_params = extra.get("engine_params", {})
            connect_args = engine_params.get("connect_args", {})
            # Respect an explicit sslmode; default to the strictest check.
            connect_args["sslmode"] = connect_args.get("sslmode",
                                                       "verify-full")
            path = utils.create_ssl_cert_file(database.server_cert)
            connect_args["sslrootcert"] = path
            engine_params["connect_args"] = connect_args
            extra["engine_params"] = engine_params
        return extra

    @classmethod
    def get_column_spec(  # type: ignore
        cls,
        native_type: Optional[str],
        source: utils.ColumnTypeSource = utils.ColumnTypeSource.GET_TABLE,
        column_type_mappings: Tuple[Tuple[Pattern[str],
                                          Union[TypeEngine,
                                                Callable[[Match[str]],
                                                         TypeEngine]],
                                          GenericDataType, ],
                                    ..., ] = column_type_mappings,
    ) -> Union[ColumnSpec, None]:
        """Resolve *native_type* via the base mappings first, then ours."""
        column_spec = super().get_column_spec(native_type)
        if column_spec:
            return column_spec

        return super().get_column_spec(
            native_type, column_type_mappings=column_type_mappings)
Example #2
0
        def process_result_value(self, value, dialect):
            """Convert a fetched value back to ``int``; ``None`` passes through."""
            return None if value is None else int(value)

        @property
        def python_type(self):
            """The Python type this column type maps to."""
            return int

    # Integer/text type aliases for this branch of the dialect switch.
    TINY_TYPE = SmallInteger  # -32768 to 32767
    MEDIUM_TYPE = Integer  # -2147483648 to 2147483647
    UNSIGNED_HUGE_TYPE = NumInt(precision=20, scale=0)  # up to 20 digits
    HUGE_TYPE = BigInteger
    PRIMARY_HUGE_TYPE = HUGE_TYPE  # primary keys use the same 64-bit type
    FLOAT_TYPE = DOUBLE_PRECISION(asdecimal=False)  # yields float, not Decimal
    LONG_TEXT = TEXT  # unbounded text
else:

    class TextInt(TypeDecorator):
        """Store integers in a ``Text`` column (fallback dialect branch).

        ``None`` now round-trips unchanged: the previous implementation
        stored the literal string ``'None'`` via ``str(None)`` and raised
        ``TypeError`` on fetch via ``int(None)`` for NULL-able columns,
        unlike the sibling decorator in the other dialect branch.
        """

        impl = Text

        def process_bind_param(self, value, dialect):
            # Keep SQL NULL as NULL; str(None) would persist the text 'None'.
            return None if value is None else str(value)

        def process_result_value(self, value, dialect):
            # Keep SQL NULL as NULL; int(None) raises TypeError.
            return None if value is None else int(value)

    TINY_TYPE = SmallInteger  # -32768 to 32767, mirrors the other branch
    MEDIUM_TYPE = Integer  # -2147483648 to 2147483647
Example #3
0
class BusinessOwner(User):
    """A ``User`` subtype holding a business owner's workspace details."""

    __tablename__ = 'business_owners'
    id = db.Column(db.Integer, db.ForeignKey('users.id'), primary_key=True)
    phone_number_workspace = db.Column(db.String(11), nullable=True)
    # order_items = db.relationship("OrderItem")
    address = db.Column(db.String(100), nullable=True)
    workspace_name = db.Column(db.String(20), nullable=True)
    lat = db.Column(DOUBLE_PRECISION(), nullable=True)
    lng = db.Column(DOUBLE_PRECISION(), nullable=True)
    # PostGIS point derived from (lng, lat); refreshed by update_lng_lat().
    geom = db.Column(Geometry('POINT', srid=Keys.SRID_VALUE), nullable=True)
    flags = db.Column(db.JSON, default={Keys.ACTIVATION_STATUS: True})
    uuid = db.Column(db.String(300),
                     nullable=True,
                     default='75106e9e-a9eb-11e8-a66e-34f39aa7f24b')

    __mapper_args__ = {
        'polymorphic_identity': 'BusinessOwners',
    }

    def __repr__(self):
        return '<BusinessOwners %r>' % self.id

    @staticmethod
    def find(limit=1, **kwargs):
        """Return ``(True, rows)`` matching ``kwargs``, or a db error tuple.

        :param limit: maximum number of rows to fetch
        :param kwargs: equality filters passed to ``filter_by``
        """
        try:
            # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            return True, BusinessOwner.query.filter_by(
                **kwargs).limit(limit).all()
        except Exception:
            return db_error_message(logger)

    @staticmethod
    def update_lng_lat(id, lng, lat, db_connection):
        """Update coordinates and the derived PostGIS geometry for a row."""
        geom = 'SRID=' + str(
            Keys.SRID_VALUE) + ';POINT(' + str(lng) + " " + str(lat) + ')'
        data = {Keys.LAT: lat, Keys.LNG: lng, Keys.GEOM: geom}
        return BusinessOwner.update_by_id(id, db_connection, data)

    @staticmethod
    def update_by_id(id, db_connection, data):
        """Apply ``data`` to the row with primary key ``id`` and commit.

        :return: ``(True, SuccessUpdate)`` on success, otherwise the tuple
            produced by ``db_error_message`` after the session is rolled back.
        """
        try:
            BusinessOwner.query.filter_by(id=id).update(data)
            db_connection.session.commit()
            logger.info('update user. ID: %s' % id)
            params = {"user_id": id}
            success = SuccessUpdate(status=200,
                                    message=MessagesKeys.SUCCESS_UPDATE,
                                    params=params)
            return True, success
        # The previous InvalidRequestError branch and the bare ``except:``
        # did identical work, so they are merged into one narrowed handler;
        # rollback ordering (error message first) is preserved.
        except Exception:
            result = db_error_message(logger)
            db_connection.session.rollback()
            return result
Example #4
0
class PostgresEngineSpec(PostgresBaseEngineSpec, BasicParametersMixin):
    """Superset database engine spec for PostgreSQL."""

    engine = "postgresql"
    engine_aliases = {"postgres"}

    default_driver = "psycopg2"
    sqlalchemy_uri_placeholder = (
        "postgresql://*****:*****@host:port/dbname[?key=value&key=value...]")
    # https://www.postgresql.org/docs/9.1/libpq-ssl.html#LIBQ-SSL-CERTIFICATES
    encryption_parameters = {"sslmode": "require"}

    # PostgreSQL truncates identifiers longer than 63 bytes.
    max_column_name_length = 63
    try_remove_schema_from_table_name = False

    # (pattern over the native type name, SQLAlchemy type or factory,
    #  generic data type category).
    # NOTE(review): the "array" pattern defines no capture groups, yet the
    # lambda reads match[2] — that would raise IndexError if it were ever
    # invoked; confirm the intended pattern upstream.
    column_type_mappings = (
        (
            re.compile(r"^double precision", re.IGNORECASE),
            DOUBLE_PRECISION(),
            GenericDataType.NUMERIC,
        ),
        (
            re.compile(r"^array.*", re.IGNORECASE),
            lambda match: ARRAY(int(match[2])) if match[2] else String(),
            utils.GenericDataType.STRING,
        ),
        (
            re.compile(r"^json.*", re.IGNORECASE),
            JSON(),
            utils.GenericDataType.STRING,
        ),
        (
            re.compile(r"^enum.*", re.IGNORECASE),
            ENUM(),
            utils.GenericDataType.STRING,
        ),
    )

    @classmethod
    def get_allow_cost_estimate(cls, extra: Dict[str, Any]) -> bool:
        """EXPLAIN-based cost estimation is always available on Postgres."""
        return True

    @classmethod
    def estimate_statement_cost(cls, statement: str,
                                cursor: Any) -> Dict[str, Any]:
        """Estimate the cost of *statement* by parsing ``EXPLAIN`` output.

        :param statement: single SQL statement to estimate
        :param cursor: cursor on which the EXPLAIN is executed
        :return: start-up and total planner cost, or ``{}`` when the first
            EXPLAIN line does not contain a ``cost=a..b`` range
        """
        sql = f"EXPLAIN {statement}"
        cursor.execute(sql)

        result = cursor.fetchone()[0]
        match = re.search(r"cost=([\d\.]+)\.\.([\d\.]+)", result)
        if match:
            return {
                "Start-up cost": float(match.group(1)),
                "Total cost": float(match.group(2)),
            }

        return {}

    @classmethod
    def query_cost_formatter(
            cls, raw_cost: List[Dict[str, Any]]) -> List[Dict[str, str]]:
        """Render every estimated cost value as a string for display."""
        return [{k: str(v) for k, v in row.items()} for row in raw_cost]

    @classmethod
    def get_table_names(cls, database: "Database", inspector: PGInspector,
                        schema: Optional[str]) -> List[str]:
        """Need to consider foreign tables for PostgreSQL"""
        tables = inspector.get_table_names(schema)
        tables.extend(inspector.get_foreign_table_names(schema))
        return sorted(tables)

    @classmethod
    def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
        """Convert *dttm* into a SQL literal for the given temporal type.

        :return: a ``TO_DATE``/``TO_TIMESTAMP`` expression, or ``None`` when
            *target_type* is not recognized as temporal
        """
        tt = target_type.upper()
        if tt == utils.TemporalType.DATE:
            return f"TO_DATE('{dttm.date().isoformat()}', 'YYYY-MM-DD')"
        if "TIMESTAMP" in tt or "DATETIME" in tt:
            dttm_formatted = dttm.isoformat(sep=" ", timespec="microseconds")
            return f"""TO_TIMESTAMP('{dttm_formatted}', 'YYYY-MM-DD HH24:MI:SS.US')"""
        return None

    @staticmethod
    def get_extra_params(database: "Database") -> Dict[str, Any]:
        """
        For Postgres, the path to a SSL certificate is placed in `connect_args`.

        :param database: database instance from which to extract extras
        :raises CertificateException: If certificate is not valid/unparseable
        :raises SupersetException: If database extra json payload is unparseable
        """
        try:
            extra = json.loads(database.extra or "{}")
        except json.JSONDecodeError as ex:
            raise SupersetException("Unable to parse database extras") from ex

        if database.server_cert:
            engine_params = extra.get("engine_params", {})
            connect_args = engine_params.get("connect_args", {})
            # Respect an explicit sslmode; default to the strictest check.
            connect_args["sslmode"] = connect_args.get("sslmode",
                                                       "verify-full")
            path = utils.create_ssl_cert_file(database.server_cert)
            connect_args["sslrootcert"] = path
            engine_params["connect_args"] = connect_args
            extra["engine_params"] = engine_params
        return extra

    @classmethod
    def get_column_spec(
        cls,
        native_type: Optional[str],
        source: utils.ColumnTypeSource = utils.ColumnTypeSource.GET_TABLE,
        column_type_mappings: Tuple[Tuple[Pattern[str],
                                          Union[TypeEngine,
                                                Callable[[Match[str]],
                                                         TypeEngine]],
                                          GenericDataType, ],
                                    ..., ] = column_type_mappings,
    ) -> Union[ColumnSpec, None]:
        """Resolve *native_type* via the base mappings first, then ours."""
        column_spec = super().get_column_spec(native_type)
        if column_spec:
            return column_spec

        return super().get_column_spec(
            native_type, column_type_mappings=column_type_mappings)

    @classmethod
    def get_cancel_query_id(cls, cursor: Any, query: Query) -> Optional[str]:
        """
        Get Postgres PID that will be used to cancel all other running
        queries in the same session.

        :param cursor: Cursor instance in which the query will be executed
        :param query: Query instance
        :return: Postgres PID
        """
        cursor.execute("SELECT pg_backend_pid()")
        row = cursor.fetchone()
        return row[0]

    @classmethod
    def cancel_query(cls, cursor: Any, query: Query,
                     cancel_query_id: str) -> bool:
        """
        Cancel query in the underlying database.

        :param cursor: New cursor instance to the db of the query
        :param query: Query instance
        :param cancel_query_id: Postgres PID
        :return: True if query cancelled successfully, False otherwise
        """
        try:
            # Bind the PID as a query parameter instead of interpolating it
            # into the SQL text, eliminating any injection risk through
            # cancel_query_id.
            cursor.execute(
                "SELECT pg_terminate_backend(pid) "
                "FROM pg_stat_activity "
                "WHERE pid=%s",
                (cancel_query_id,),
            )
        except Exception:  # pylint: disable=broad-except
            return False

        return True
from sqlalchemy import *
from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION

# Shared metadata object bound to the migration engine in upgrade()/downgrade().
meta = MetaData()

# Table of geocoding results keyed by the query string — presumably a lookup
# cache of resolved coordinates plus the provider ('source') and raw payload
# ('json'); inferred from column names, confirm against callers.
geocode = Table(
    'geocode',
    meta,
    Column('query', String(255), primary_key=True),
    Column('lat', DOUBLE_PRECISION()),
    Column('lng', DOUBLE_PRECISION()),
    Column('source', String(30)),
    Column('json', String(1024)),
)


def upgrade(migrate_engine):
    """Create the ``geocode`` table on the target engine."""
    geocode.metadata.bind = migrate_engine
    geocode.create()


def downgrade(migrate_engine):
    """Drop the ``geocode`` table from the target engine."""
    geocode.metadata.bind = migrate_engine
    geocode.drop()
Example #6
0
class APSchedulerJob(db.Model):
    """ORM mapping for the ``apscheduler_jobs`` persistence table."""

    __tablename__ = 'apscheduler_jobs'

    # Job identifier (string primary key).
    id = Column(
        Unicode(191),
        primary_key=True,
        autoincrement=False,
        nullable=False,
    )
    # Next fire time stored as a double — presumably a POSIX timestamp;
    # NULL allowed (confirm semantics against APScheduler's jobstore).
    next_run_time = Column(
        DOUBLE_PRECISION(precision=53),
        autoincrement=False,
        nullable=True,
        index=True,
        unique=False,
    )
    # Serialized job state blob.
    job_state = Column(
        BYTEA,
        autoincrement=False,
        nullable=False,
    )
Example #7
0
                           Column('f_table_catalog', String(256)),
                           Column('f_table_schema', String),
                           Column('f_table_name', String),
                           Column('f_geometry_column', String),
                           Column('coord_dimension', Integer),
                           Column('srid', Integer), Column('type', String(30)))

# Reflection of the 'raster_columns' table/view (PostGIS raster metadata,
# judging by the column names — confirm against the target database).
t_raster_columns = Table(
    'raster_columns',
    metadata,
    Column('r_table_catalog', String),
    Column('r_table_schema', String),
    Column('r_table_name', String),
    Column('r_raster_column', String),
    Column('srid', Integer),
    Column('scale_x', Float(53)),
    Column('scale_y', Float(53)),
    Column('blocksize_x', Integer),
    Column('blocksize_y', Integer),
    Column('same_alignment', Boolean),
    Column('regular_blocking', Boolean),
    Column('num_bands', Integer),
    Column('pixel_types', ARRAY(TEXT())),
    Column('nodata_values', ARRAY(DOUBLE_PRECISION(precision=53))),
    Column('out_db', Boolean),
    Column('extent', Geometry),
    Column('spatial_index', Boolean),
)

# Reflection of the 'raster_overviews' table/view: links each overview
# raster column back to its base raster column with a scaling factor.
t_raster_overviews = Table(
    'raster_overviews',
    metadata,
    Column('o_table_catalog', String),
    Column('o_table_schema', String),
    Column('o_table_name', String),
    Column('o_raster_column', String),
    Column('r_table_catalog', String),
    Column('r_table_schema', String),
    Column('r_table_name', String),
    Column('r_raster_column', String),
    Column('overview_factor', Integer),
)