Example #1
 def to_sql_statement_create_or_replace(self):
     """Not implemented, postgres policies do not support replace."""
     yield sql_text(
         f"DROP POLICY IF EXISTS {self.signature} on {self.on_entity};")
     yield sql_text(
         f"CREATE POLICY {self.signature} on {self.on_entity} {self.definition};"
     )
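Postgres has no CREATE OR REPLACE POLICY, so the two yielded statements are intended to run back to back. A minimal consumption sketch, assuming a SQLAlchemy engine and a policy object exposing the generator above (the connection URL is a placeholder):

# Hypothetical sketch: apply the drop/create pair emitted above inside one transaction.
from sqlalchemy import create_engine

engine = create_engine("postgresql://localhost/example")  # assumed URL

def replace_policy(policy) -> None:
    # engine.begin() commits on success and rolls back if either statement fails.
    with engine.begin() as connection:
        for statement in policy.to_sql_statement_create_or_replace():
            connection.execute(statement)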
Example #2
    def to_sql_statement_create_or_replace(self) -> Generator[TextClause, None, None]:
        """Generates a SQL "create or replace view" statement"""
        # Remove possible semicolon from definition because we're adding a "WITH DATA" clause
        definition = self.definition.rstrip().rstrip(";")

        yield sql_text(
            f"""DROP MATERIALIZED VIEW IF EXISTS {self.literal_schema}."{self.signature}"; """
        )
        yield sql_text(
            f"""CREATE MATERIALIZED VIEW {self.literal_schema}."{self.signature}" AS {definition} WITH {"NO" if not self.with_data else ""} DATA"""
        )
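The inline conditional only toggles the word NO, so the clause renders as either WITH NO DATA or WITH  DATA (a doubled space Postgres happily ignores). A standalone rendering check with stand-in values, just to make the branch visible:

# Rendering check for the WITH [NO] DATA clause; definition and with_data are stand-ins.
definition = "SELECT 1 AS x"
for with_data in (True, False):
    clause = f"""CREATE MATERIALIZED VIEW "my_view" AS {definition} WITH {"NO" if not with_data else ""} DATA"""
    print(clause)
# with_data=True  -> ... WITH  DATA
# with_data=False -> ... WITH NO DATA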
Example #3
    def from_database(cls, connection, schema="%") -> List[PGFunction]:
        """Get a list of all functions defined in the db"""
        sql = sql_text(f"""
        select
            n.nspname as function_schema,
            p.proname as function_name,
            pg_get_function_arguments(p.oid) as function_arguments,
            case
                when l.lanname = 'internal' then p.prosrc
                else pg_get_functiondef(p.oid)
            end as create_statement,
            t.typname as return_type,
            l.lanname as function_language
        from
            pg_proc p
            left join pg_namespace n on p.pronamespace = n.oid
            left join pg_language l on p.prolang = l.oid
            left join pg_type t on t.oid = p.prorettype
        where
            n.nspname not in ('pg_catalog', 'information_schema')
            and n.nspname like '{schema}';
        """)
        rows = connection.execute(sql).fetchall()
        db_functions = [PGFunction.from_sql(x[3]) for x in rows]

        for func in db_functions:
            assert func is not None

        return db_functions
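Interpolating schema into the query with an f-string works, but sql_text also supports named bind parameters, which keeps the pattern out of the SQL string. A hedged sketch of the same lookup with a bound parameter (not the library's own code; connection is assumed to be an open SQLAlchemy connection as above):

# Sketch: bind the schema LIKE pattern instead of interpolating it.
from sqlalchemy import text as sql_text

sql = sql_text(
    """
    select pg_get_functiondef(p.oid) as create_statement
    from pg_proc p
        left join pg_namespace n on p.pronamespace = n.oid
    where
        n.nspname not in ('pg_catalog', 'information_schema')
        and n.nspname like :schema
    """
)
rows = connection.execute(sql, {"schema": "%"}).fetchall()  # connection assumed from context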
Example #4
def reflect_all_constraint_comments(engine, schema: str) -> CommentMap:
    """Collect a mapping of constraint comments"""

    sql = sql_text(
        """
    select
            c.relnamespace::regnamespace::text schemaname,
            c.relname tablename,
            t.conname constraintname,
            d.description comment_body
    from pg_class c
            join pg_constraint t
                    on c.oid = t.conrelid
            join pg_description d
                    on t.oid = d.objoid
                    and t.tableoid = d.classoid
    where
            c.relnamespace::regnamespace::text = :schema
    """
    )

    results = engine.execute(sql, schema=schema).fetchall()

    comment_map: CommentMap = {}

    for schema_name, table_name, constraint_name, comment in results:
        comment_map[schema_name] = comment_map.get(schema_name, {})
        comment_map[schema_name][table_name] = comment_map[schema_name].get(table_name, {})
        comment_map[schema_name][table_name][constraint_name] = comment

    return comment_map
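The nested dict.get calls build a three-level map one layer at a time; collections.defaultdict expresses the same thing more directly. A behaviour-equivalent sketch, with the CommentMap alias restated so the snippet stands on its own:

# Equivalent nested-map construction with defaultdict (sketch, not the original helper).
from collections import defaultdict
from typing import Dict

CommentMap = Dict[str, Dict[str, Dict[str, str]]]  # schema -> table -> constraint -> comment

def build_comment_map(results) -> CommentMap:
    nested = defaultdict(lambda: defaultdict(dict))
    for schema_name, table_name, constraint_name, comment in results:
        nested[schema_name][table_name][constraint_name] = comment
    # Convert back to plain dicts so the return value matches CommentMap exactly.
    return {s: {t: dict(cons) for t, cons in tables.items()} for s, tables in nested.items()}

# build_comment_map([("public", "account", "account_pkey", "primary key")])
# -> {"public": {"account": {"account_pkey": "primary key"}}}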
Example #5
    async def notify_processor(self, q, engine):
        async def _write(pld):
            # Append one timestamped line per payload; the context manager closes the file.
            with open("output.log", "a") as f:
                f.write(f"{datetime.datetime.now().isoformat()} {pld}\n")

        def json_serial(obj):
            """JSON serializer for objects not serializable by default json code"""

            if isinstance(obj, (datetime.datetime, datetime.date)):
                return obj.isoformat()
            raise TypeError(f"Type {type(obj)} not serializable")

        while True:
            item = await q.get()
            await _write(json.dumps(item, default=json_serial))
            query = sql_text(
                f"UPDATE {TABLE_NAME} SET logged_at=:curr_date WHERE id=:the_id"
            )
            query = query.bindparams(the_id=item.get("id"),
                                     curr_date=datetime.datetime.now())
            async with engine.acquire() as conn:

                await conn.execute(query)

            q.task_done()
Example #6
    def _iterate(self) -> Iterator[bytes]:
        """
        Execute a query against a postgres DB
        using SQLAlchemy

        See http://docs.sqlalchemy.org/en/latest/_modules/examples/performance/large_resultsets.html
        for SQLAlchemy docs on querying large data sets
        """
        # Execute the query, creating a DB cursor object
        self._db_cursor = self._db \
            .execution_options(stream_results=True) \
            .execute(sql_text(self._query))

        while True:
            # Fetch 1000 records at a time from the DB
            records = self._db_cursor.fetchmany(1000)

            # If there are no more results, we can stop iterating
            if not records:
                yield b""
                break

            # Format results as a CSV
            csv = to_csv(records)
            yield csv.encode('utf8')
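The stream_results=True execution option asks the driver for a server-side cursor, so fetchmany(1000) pulls rows in batches instead of materialising the whole result set. A minimal sketch of the same pattern outside the class, assuming an engine URL and query (both placeholders):

# Sketch: stream a large result set in batches with a server-side cursor.
from sqlalchemy import create_engine
from sqlalchemy import text as sql_text

engine = create_engine("postgresql://localhost/example")  # assumed URL

def iter_rows(query: str, batch_size: int = 1000):
    # Yields rows batch by batch; memory use stays bounded by batch_size.
    with engine.connect() as connection:
        result = connection.execution_options(stream_results=True).execute(sql_text(query))
        while True:
            batch = result.fetchmany(batch_size)
            if not batch:
                break
            yield from batch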
Example #7
 def to_sql_statement_create(self) -> TextClause:
     """Generates a SQL "create view" statement"""
     with_grant_option = " WITH GRANT OPTION" if self.with_grant_option else ""
     maybe_columns_clause = f'( {", ".join(self.columns)} )' if self.columns else ""
     return sql_text(
         f"GRANT {self.grant} {maybe_columns_clause} ON {self.literal_schema}.{coerce_to_quoted(self.table)} TO {coerce_to_quoted(self.role)} {with_grant_option}"
     )
Example #8
def processed_layer_shapes_by_year_id(
        year_id: int, *extra_cols: str) -> geopandas.GeoDataFrame:
    """
    Loads a GIS Processed Layer into a GeoDataFrame

    :param year_id: platform year mapping ID (matched against YearIDMapping.ID)
    :param extra_cols: additional column names to include in the SELECT
    :return: geo dataframe of the pl pps, with shapexml converted to shapely geometry
    """
    col_names = [name for (name, dtype) in pps_cols()]
    extra_cols = extra_cols or []

    cols = set(list(extra_cols) + col_names)
    cols.add('ShapeXml')

    cols_sql = ','.join(cols)

    sql_ = sql_text(f"""
        SELECT {cols_sql}
        FROM GIS..PremierProcessedShapes pps with (NOLOCK)
        join gis..ProcessedLayer pl with (NOLOCK) on pl.UID = pps.ProcessedLayerUID
        join PlatformManager..YearIDMapping ym on pl.HierarchyItemUID = ym.YearUID
        where ym.ID = :YearId
    """)

    df = pandas.read_sql(sql_, con=gis_db, params={'YearId': year_id})

    shape_xml_polygons = [parse_shape_xml(xml) for xml in df['ShapeXml']]
    df.drop('ShapeXml', axis=1, inplace=True)

    crs_ = {'init': 'epsg:4326'}
    geo_df = geopandas.GeoDataFrame(df, crs=crs_, geometry=shape_xml_polygons)
    return geo_df
Example #9
    def to_sql_statement_drop(self) -> TextClause:
        """Generates a SQL "drop function" statement for PGFunction"""
        template = "{function_name}({parameters})"
        result = parse(template, self.signature, case_sensitive=False)
        try:
            function_name = result["function_name"]
            parameters_str = result["parameters"].strip()
        except TypeError:
            # Did not match; indexing the None result raises TypeError ("'NoneType' object is not subscriptable")
            result = parse("{function_name}()",
                           self.signature,
                           case_sensitive=False)
            function_name = result["function_name"]
            parameters_str = ""

        # NOTE: Will fail if a text field has a default and that default contains a comma...
        parameters: List[str] = parameters_str.split(",")
        parameters = [
            x[:len(x.lower().split("default")[0])] for x in parameters
        ]
        parameters = [x.strip() for x in parameters]
        drop_params = ", ".join(parameters)
        return sql_text(
            f'DROP FUNCTION {self.literal_schema}."{function_name}"({drop_params})'
        )
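The fallback branch exists because parse() returns None when the template does not match, and indexing None is what raises the TypeError caught above. A quick illustration of both branches with made-up signatures (the parse library is assumed to be installed):

# Both parse() outcomes the function relies on; the signatures are invented for illustration.
from parse import parse

with_params = parse("{function_name}({parameters})", "to_upper(some_text text)")
print(with_params["function_name"], "|", with_params["parameters"])  # to_upper | some_text text

no_params = parse("{function_name}({parameters})", "noop()")
print(no_params)  # None -> indexing it raises TypeError, so the zero-argument template is tried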
Example #10
    def from_database(cls, sess, schema):
        """Get a list of all triggers defined in the db"""

        sql = sql_text(
            """
        select
            pc.relnamespace::regnamespace::text as table_schema,
            tgname trigger_name,
            pg_get_triggerdef(pgt.oid) definition
        from
            pg_trigger pgt
                inner join pg_class pc
                    on pgt.tgrelid = pc.oid
        where
            not tgisinternal
            and pc.relnamespace::regnamespace::text like :schema
        """
        )
        rows = sess.execute(sql, {"schema": schema}).fetchall()

        db_triggers = [cls.from_sql(x[2]) for x in rows]

        for trig in db_triggers:
            assert trig is not None

        return db_triggers
Example #11
    def to_sql_statement_create(self):
        """ Generates a SQL "create function" statement for PGFunction """

        # We need to parse and replace the schema qualifier on the table for simulate_entity to
        # operate
        _def = self.definition
        _template = "{event}{:s}ON{:s}{on_entity}{:s}{action}"
        match = parse(_template, _def)
        if not match:
            raise SQLParseFailure(
                f'Failed to parse SQL into PGTrigger.definition """{_def}"""')

        event = match["event"]
        action = match["action"]

        # Ensure entity is qualified with schema
        on_entity = match["on_entity"]
        if "." in on_entity:
            _, _, on_entity = on_entity.partition(".")
        on_entity = f"{self.schema}.{on_entity}"

        # Re-render the definition, ensuring the table is qualified with the schema
        def_rendered = _template.replace("{:s}",
                                         " ").format(event=event,
                                                     on_entity=on_entity,
                                                     action=action)

        return sql_text(
            f"CREATE{' CONSTRAINT ' if self.is_constraint else ' '}TRIGGER {self.signature} {def_rendered}"
        )
Example #12
    def generate_coords(self, latitude, longitude, step_size, distance):
        sql = """
            SELECT
                id as "gym_id",
                latitude as "gym_latitude",
                longitude as "gym_longitude",
                (
                6371 * acos (
                  cos ( radians({0}) )
                  * cos( radians( latitude ) )
                  * cos( radians( longitude ) - radians({1}) )
                  + sin ( radians({2}) )
                  * sin( radians( latitude ) )
                ) * 1000
              ) AS "gym_distance"
            FROM gym
            HAVING gym_distance < {3}
            ORDER BY gym_distance

        """.format(latitude, longitude, latitude, distance)

        coords = []

        for gym in self.session.execute(sql_text(sql)):
            lat = gym[1] + random.uniform(-step_size, step_size)
            lng = gym[2] + random.uniform(-step_size, step_size)

            coords.append({'lat': lat, 'lng': lng, 'id': gym[0]})

        return coords
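The nested acos/cos/sin expression is the spherical law of cosines: the central angle between the two points times an Earth radius of 6371 km, multiplied by 1000 to get metres. The same computation in plain Python, handy for sanity-checking the SQL (the coordinates below are arbitrary):

# Pure-Python version of the distance expression used in the query above.
import math

def spherical_distance_m(lat1, lon1, lat2, lon2):
    # Great-circle distance in metres via the spherical law of cosines.
    lat1, lon1, lat2, lon2 = map(math.radians, (lat1, lon1, lat2, lon2))
    central_angle = math.acos(
        math.cos(lat1) * math.cos(lat2) * math.cos(lon2 - lon1)
        + math.sin(lat1) * math.sin(lat2)
    )
    return 6371 * central_angle * 1000

print(round(spherical_distance_m(48.8566, 2.3522, 48.8606, 2.3376)))  # about 1.2 km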
Example #13
    def generate_coords(self, latitude, longitude, step_size, distance):
        sql = """
            SELECT
                id as "pokestop_id",
                latitude as "pokestop_latitude",
                longitude as "pokestop_longitude",
                (
                6371 * acos (
                  cos ( radians({0}) )
                  * cos( radians( latitude ) )
                  * cos( radians( longitude ) - radians({1}) )
                  + sin ( radians({2}) )
                  * sin( radians( latitude ) )
                ) * 1000
              ) AS "pokestop_distance"
            FROM pokestop
            HAVING pokestop_distance < {3}
            ORDER BY pokestop_distance

        """.format(latitude, longitude, latitude, distance)

        coords = []

        for pokestop in self.session.execute(sql_text(sql)):
            lat = pokestop[1] + random_lat_long_delta()
            lng = pokestop[2] + random_lat_long_delta()

            coords.append({'lat': lat, 'lng': lng, 'id': pokestop[0]})

        return coords
Example #15
 def run(self) -> None:
     with self.connection('report') as conn:
         query = sql_text(
             'select * from {object}'.format(object=self.report_name))
         result = conn.execute(query)
         cols = result.keys()
         rows = result.fetchall()
         self._csvout().export(cols, rows)
Example #16
def get_df(db_url, query):
    engine = create_engine(db_url)
    con = engine.connect()
    try:
        return pd.read_sql_query(sql_text(query), con=con)  # use the connection opened above
    except exc.SQLAlchemyError as e:
        print(e)
    finally:
        con.close()
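pandas.read_sql_query accepts any SQLAlchemy connectable, so the helper can be smoke-tested against an in-memory SQLite database. A usage sketch, assuming get_df as defined above is in scope:

# Usage sketch: exercise get_df against an in-memory SQLite database.
df = get_df("sqlite://", "SELECT 1 AS answer, 'hello' AS greeting")
print(df)  # a one-row DataFrame with columns "answer" and "greeting"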
Example #17
def get_pps_crop(year_id: int) -> str:
    sql = """
SELECT top 1 pps.Crop
FROM GIS..PremierProcessedShapes as pps with (NOLOCK)
  join gis..ProcessedLayer as pl with (NOLOCK ) on pl.UID = pps.ProcessedLayerUID
  join PlatformManager..YearIDMapping ym on ym.YearUID = pl.HierarchyItemUID
where ym.id = :year_id;"""

    return gis_db.execute(sql_text(sql), year_id=year_id).scalar()
Example #18
    def to_sql_statement_create(self) -> TextClause:
        """Generates a SQL "create view" statement"""

        # Remove possible semicolon from definition because we're adding a "WITH DATA" clause
        definition = self.definition.rstrip().rstrip(";")

        return sql_text(
            f'CREATE MATERIALIZED VIEW {self.literal_schema}."{self.signature}" AS {definition} WITH {"NO" if not self.with_data else ""} DATA;'
        )
Example #19
    def from_database(cls, connection, schema):
        """Get a list of all policies defined in the db"""
        sql = sql_text(
            f"""
        select
            schemaname,
            tablename,
            policyname,
            permissive,
            roles,
            cmd,
            qual,
            with_check
        from
            pg_policies
        where
            schemaname = '{schema}'
        """
        )
        rows = connection.execute(sql).fetchall()

        def get_definition(permissive, roles, cmd, qual, with_check):
            definition = ""
            if permissive is not None:
                definition += f"as {permissive} "
            if cmd is not None:
                definition += f"for {cmd} "
            if roles is not None:
                definition += f"to {', '.join(roles)} "
            if qual is not None:
                if qual[0] != "(":
                    qual = f"({qual})"
                definition += f"using {qual} "
            if with_check is not None:
                if with_check[0] != "(":
                    with_check = f"({with_check})"
                definition += f"with check {with_check} "
            return definition

        db_policies = []
        for schema, table, policy_name, permissive, roles, cmd, qual, with_check in rows:
            definition = get_definition(permissive, roles, cmd, qual, with_check)

            schema = coerce_to_quoted(schema)
            table = coerce_to_quoted(table)
            policy_name = coerce_to_quoted(policy_name)
            policy = PGPolicy.from_sql(
                f"create policy {policy_name} on {schema}.{table} {definition}"
            )
            db_policies.append(policy)

        for policy in db_policies:
            assert policy is not None

        return db_policies
Example #20
def exist_df(db_url, table):
    engine = create_engine(db_url)
    con = engine.connect()
    try:
        sql1 = "select * from to_regclass('{table}')".format(table=table)
        table_exist = con.execute(sql_text(sql1)).fetchall()
        return table_exist[0][0]
    except exc.SQLAlchemyError as e:
        print(e)
    finally:
        con.close()
Example #21
class Book(Base):
    __tablename__ = "book"

    id = Column(Integer, primary_key=True, comment="@exclude create, update")
    title = Column(Text, nullable=False)
    author_id = Column(Integer, ForeignKey("author.id"), nullable=False)
    created_at = Column(
        DateTime,
        nullable=False,
        default=sql_text("now()"),
        comment="@exclude create, update",
    )
Example #22
    def to_sql_statement_create_or_replace(self) -> TextClause:
        return sql_text(f"""
        do $$
            begin
                {self.to_sql_statement_drop()};

            exception when others then
                {self.to_sql_statement_create()};
                {self.to_sql_statement_drop()};
            end;
        $$ language 'plpgsql'
        """)
Example #23
File: app.py Project: usri99/nebulo
class Author(Base):
    __tablename__ = "author"

    id = Column(Integer, primary_key=True, comment="@exclude create, update")
    name = Column(Text, nullable=False)
    created_at = Column(
        DateTime,
        nullable=False,
        server_default=sql_text("now()"),
        comment="@exclude create, update",
    )

    books = relationship("Book", uselist=True)
Example #24
 def apply_filters(self, q):
     for col, val in self.data.get('filters', {}).items():
         q = q.where(self.get_column(col) == val)
     for col, val in self.data.get('filters_not', {}).items():
         q = q.where(self.get_column(col) != val)
     # not sure this is a great idea:
     if self.data.get('where'):
         q = q.where(sql_text(self.data.get('where')))
     for join in self.data.get('joins', []):
         left = self.get_column(join.get('left'))
         right = self.get_column(join.get('right'))
         q = q.where(left == right)
     return q
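To see what the filter dictionaries do to a query, here is a hedged sketch of the same idea with plain SQLAlchemy Core (1.4 or newer assumed); the users table and the data dict are stand-ins mirroring the structure self.data is expected to have:

# Stand-alone sketch of the filter-building idea with SQLAlchemy Core (table and data invented).
from sqlalchemy import Column, Integer, MetaData, String, Table, select
from sqlalchemy import text as sql_text

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("status", String),
    Column("country", String),
)

data = {"filters": {"status": "active"}, "filters_not": {"country": "US"}, "where": "id > 100"}

query = select(users)
for col, val in data.get("filters", {}).items():
    query = query.where(users.c[col] == val)
for col, val in data.get("filters_not", {}).items():
    query = query.where(users.c[col] != val)
if data.get("where"):
    query = query.where(sql_text(data["where"]))

print(query)  # renders roughly: SELECT ... WHERE status = :status_1 AND country != :country_1 AND id > 100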
Example #25
def reflect_views(engine, schema, declarative_base) -> List[TableProtocol]:
    """Reflect SQLAlchemy ORM Tables from the database"""

    sql = sql_text(
        """
    select
        relname view_name, description view_comment
    from
        pg_views v
        left join pg_description c
            on true
        left join pg_class on c.objoid = pg_class.oid
        left join pg_namespace on pg_class.relnamespace = pg_namespace.oid
    where
        v.viewname = relname
        and nspname = :schema
    """
    )
    rows = engine.execute(sql, schema=schema).fetchall()

    views: List[TableProtocol] = []

    for view_name, view_comment in rows:
        primary_key_constraint = reflect_virtual_primary_key_constraint(view_comment)
        foreign_key_constraints = reflect_virtual_foreign_key_constraints(view_comment)

        # Reflect view as base table
        view_tab = Table(
            view_name,
            declarative_base.metadata,
            schema=schema,
            autoload=True,
            autoload_with=engine,
            *[primary_key_constraint],
            *foreign_key_constraints,
        )

        class_name = rename_table(declarative_base, view_name, view_tab)

        # ORM View Table
        view_orm = type(
            class_name,
            (
                declarative_base,
                ViewMixin,
            ),
            {"__table__": view_tab},
        )
        views.append(view_orm)  # type: ignore

    return views
Example #26
class Note(db.Model):

    __tablename__ = "notes"
    id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=sql_text("uuid_generate_v4()"), nullable=False)
    title = db.Column(db.Text, nullable=False)
    text = db.Column(db.Text, nullable=False)
    date = db.Column(db.Text, nullable=False)
    userId = db.Column(UUID(as_uuid=True), db.ForeignKey("users.id"), nullable=False)
    createdAt = db.Column(db.DateTime, nullable=False)
    updatedAt = db.Column(db.DateTime, nullable=False)

    def __repr__(self):
        return f"noteid:{self.id}\ntitle:{self.title}\ntext:{self.text}\ndate:{self.date}\ncreatedAt:{self.createdAt}\nupdatedAt:{self.updatedAt}\nuserId:{self.userId}"
Example #27
def weather_by_year_id(year_id: int) -> pandas.Series:
    sql = """
SELECT TOP 1 w.*
FROM gis..PCS_Weather w
  join PlatformManager..YearIDMapping ym on ym.YearUID = w.YearUID
WHERE ym.ID = :year_id"""

    exclude_cols = ['FieldID', 'YearUID']
    df = pandas.read_sql(sql_text(sql),
                         con=gis_db,
                         params={'year_id': year_id})
    df.drop(exclude_cols, axis=1, inplace=True)

    return df.iloc[0]
Example #28
    def complete(self) -> bool:
        '''Each script's last query tells whether it is complete.

        It should be a scalar query that returns non-zero for done
        and either zero or an error for not done.
        '''
        last_query = self.last_query()
        params = params_used(self.complete_params(), last_query)
        with self.connection(event=self.task_family + ' complete query: ' + self.script.name) as conn:
            try:
                result = conn.scalar(sql_text(last_query), params)
                return bool(result)
            except DatabaseError as exc:
                conn.log.warning('%(event)s: %(exc)s',
                                 dict(event='complete query error', exc=exc))
                return False
Example #29
 def post(self):
     """
     Add a book to the db
     Expect a JSON payload with the following format
     {
         "title": "The title of the book",
         "author": "The author",
     }
     """
     data = request.get_json()
     query = "INSERT INTO `book` (`title`, `author`) VALUES (:title, :author)"
     try:
         self.db.connection.execute(sql_text(query), data)
         return True
     except Exception:
         return False
Example #30
def get_titles_from_tconst_list(tconst_list: list) -> list:
    """

    :param tconst_list: list of tconst ids, normally generated via get_recommendation_from_tconst
    :return: list of tuples, where first field is the tconst and second field is the title from the title_basics table
    """
    dbhandler = DbHandler()
    dbhandler.connect()
    all_titles = [
        row for row in dbhandler.conn.execute(
            sql_text(f"SELECT tconst, primaryTitle FROM "
                     f"title_basics NATURAL JOIN title_soup"))
    ]
    all_titles_df = pd.DataFrame(data=all_titles,
                                 columns=['tconst', 'primaryTitle'])
    return [(i, all_titles_df.loc[all_titles_df['tconst'] == i]
             ['primaryTitle'].values[0]) for i in tconst_list]
Example #31
 def post(self):
     """
     Add a quote to the db 
     Expect a JSON payload with the following format
     {
         "quote": "The quote",
         "quote_by": "The person who said the quote",
         "added_by": The person who is posting the quote"
     }
     """
     data = request.get_json()
     query = "INSERT INTO `quote` (`quote`, `quote_by`, `added_by`) VALUES (:quote, :quote_by, :added_by)"
     try:
         self.db.connection.execute(sql_text(query), data)
         return True
     except Exception:
         return False
Example #32
 def from_database(cls, sess, schema):
     """Get a list of all extensions defined in the db"""
     sql = sql_text(f"""
     select
         np.nspname schema_name,
         ext.extname extension_name
     from
         pg_extension ext
         join pg_namespace np
             on ext.extnamespace = np.oid
     where
         np.nspname not in ('pg_catalog')
         and np.nspname like :schema;
     """)
     rows = sess.execute(sql, {"schema": schema}).fetchall()
     db_exts = [cls(x[0], x[1]) for x in rows]
     return db_exts
Example #33
 def _database_execute(self, sql):
     return self.session_mysql.execute(sql_text(sql))