Example #1
 def check_eq(self, col1, col2, constraint={}):
     return self._run_query(
         SQL("{0} != {1}").format(Identifier(col1), Identifier(col2)),
         constraint)
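A minimal sketch of how such a composed query renders, assuming an open psycopg2 connection `conn`; the column names are illustrative:

    from psycopg2.sql import SQL, Identifier

    check = SQL("{0} != {1}").format(Identifier("label"), Identifier("new_label"))
    print(check.as_string(conn))  # renders as: "label" != "new_label"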
Example #2
    def write(self, message):
        """Database writer.

        Persists a message to the PostgreSQL database.
        The message value is expected to be a JSON object with the following keys:
            - "url"
            - "check_time"
            - "status"
            - "latency"
            - "regex_match" (optional)

        Args:
            message (confluent_kafka.Message): The message as received from the Kafka topic.

        """
        if message.error() is None:
            self.logger.info("Writing message to database")
            payload = json.loads(message.value())

            with self.db_conn.cursor(cursor_factory=DictCursor) as cursor:
                cursor.execute(
                    SQL(  # See https://stackoverflow.com/a/6722460
                        """
                    WITH insert_row AS (
                        INSERT INTO {} (url)
                        SELECT %(url)s WHERE NOT EXISTS (
                            SELECT * FROM urls WHERE url = %(url)s
                        )
                        RETURNING *
                    )
                    SELECT * FROM insert_row
                    UNION
                    SELECT * FROM urls WHERE url = %(url)s
                    """).format(Identifier("urls")),
                    {"url": payload["url"]},
                )

                url_id = cursor.fetchone()["id"]

                cursor.execute(
                    SQL("INSERT INTO {} VALUES (%(url_id)s, %(ts)s, %(status)s, %(latency)s)"
                        ).format(Identifier("status")),
                    {
                        "url_id": url_id,
                        "ts": payload["check_time"],
                        "status": payload["status"],
                        "latency": payload["latency"],
                    },
                )

                if payload.get("regex_match"):
                    cursor.execute(
                        SQL("INSERT INTO {} VALUES (%(url_id)s, %(ts)s, %(match)s)"
                            ).format(Identifier("regex")),
                        {
                            "url_id": url_id,
                            "ts": payload["check_time"],
                            "match": payload["regex_match"],
                        },
                    )

        else:
            self.logger.warning("Received an error message. Ignoring.")
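Note how this example mixes the two mechanisms: identifiers (table names) are composed with `format` at build time, while values stay as %(name)s placeholders and are bound at execute time. A minimal sketch, assuming an open connection `conn` (table and values are illustrative):

    from psycopg2.sql import SQL, Identifier

    stmt = SQL("INSERT INTO {} VALUES (%(url_id)s, %(ts)s)").format(Identifier("status"))
    with conn.cursor() as cur:
        cur.execute(stmt, {"url_id": 1, "ts": "2021-01-01T00:00:00"})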
Example #3
 def to_sql(self):
     expr_sql, expr_args = self.expression.to_sql()
     return (SQL('({})').format(expr_sql), expr_args)
Example #4
def get_weather_data(startdate,
                     enddate,
                     cat_cols,
                     db_table_name="shop_dates_weather"):
    # start DB instance
    start_instance()

    # get data out of the Postgres database
    cast_dict = {
        "shop_id": "uint8",
        "sdw_elevation": "int16",
        "sdw_distance": "float32",
        "sdw_prcp": "uint16",
        "sdw_tmax": "float32",
        "sdw_tmin": "float32",
        "sdw_only_neg_temp_ind": "uint8",
        "sdw_only_pos_temp_ind": "uint8",
        "sdw_tmin_diff_lag": "float32",
        "sdw_tmax_diff_lag": "float32",
        "sdw_tmin_diff_lead": "float32",
        "sdw_tmax_diff_lead": "float32",
        "sdw_prcp_diff_lag": "int16",
        "sdw_prcp_diff_lead": "int16",
        "sdw_days_w_prcp_last_7d": "uint8",
        "sdw_total_prcp_last_7d": "int16",
        "sdw_tmax_diff_lag7": "float32",
        "sdw_tmax_diff_lag14": "float32",
        "sdw_tmax_diff_lag21": "float32",
    }
    # df = df_from_sql_table(db_table_name, cast_dict, date_list=["sale_date"])

    sql_str = ("SELECT "
               "{0} "
               "FROM {1} "
               "WHERE {2} >= %(start_dt)s AND {2} <= %(end_dt)s;")
    sql = SQL(sql_str).format(
        SQL(", ").join([
            Identifier(col) for col in list(cast_dict.keys()) + ['sale_date']
        ]),
        Identifier(db_table_name),
        Identifier('sale_date'),
    )
    params = {"start_dt": startdate, "end_dt": enddate}
    df = df_from_sql_query(sql,
                           cast_dict,
                           params=params,
                           date_list=['sale_date'])

    # subset to relevant columns
    # df = df.filter(items=list(cast_dict.keys()) + ["sale_date"])

    # rename binary columns to format consistent with other binary cols
    bin_cols = ("sdw_only_neg_temp_ind", "sdw_only_pos_temp_ind")
    last_bin_col_num = max([int(x.split("_")[1]) for x in cat_cols])
    rename_dict = {
        col: f"cat_{i}_{col}"
        for i, col in enumerate(bin_cols, last_bin_col_num + 1)
    }
    df.rename(rename_dict, axis=1, inplace=True)

    return df
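The SQL(", ").join(...) call above is what assembles the quoted column list; a standalone sketch of that piece, assuming a connection `conn` (names are illustrative):

    from psycopg2.sql import SQL, Identifier

    cols = SQL(", ").join([Identifier(c) for c in ["shop_id", "sale_date"]])
    stmt = SQL("SELECT {0} FROM {1}").format(cols, Identifier("shop_dates_weather"))
    print(stmt.as_string(conn))
    # SELECT "shop_id", "sale_date" FROM "shop_dates_weather"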
Example #5
 def get_as_sql(self, expression_sql):
     return SQL('{} AS {}').format(expression_sql,
                                   Identifier(self.alias)), ()
Example #6
 def check_fricke_eigenval(self):
     """
     if present, check that fricke_eigenval is product of atkin_lehner_eigenvals
     """
     # TIME about 3s
     return self._run_query(SQL('fricke_eigenval != prod2(atkin_lehner_eigenvals)'), {'fricke_eigenval':{'$exists':True}})
Example #7
 def check_hecke_ring_index_factorization(self):
     """
     if present, verify that hecke_ring_index_factorization matches hecke_ring_index
     """
     # TIME about 2s
     return self._run_query(SQL('hecke_ring_index != prod_factorization(hecke_ring_index_factorization)'), {'hecke_ring_index_factorization':{'$exists':True}})
Example #8
    def loadTransferTimes(self):
        if self.parent_stops:

            sql_exists = """SELECT EXISTS (
                               SELECT 1
                               FROM   information_schema.tables 
                               WHERE  table_name = %s
                           );
            """
            #sql_exists = SQL(sql_exists).format(Identifier("transfers_"+str(self.region)))

            try:
                cursor = self.conn.getCursor()

                cursor.execute(sql_exists, ('transfers_' + str(self.region), ))
                (exists, ) = cursor.fetchone()
                if not exists:
                    print("Table does not exist!")
                    return

                print("Loading transfer times...")
                sql = """SELECT from_stop_id, to_stop_id, transfer_type, min_transfer_time, from_route_id, to_route_id, from_trip_id
                FROM {};
                """
                sql = SQL(sql).format(
                    Identifier("transfers_" + str(self.region)))
                cursor.execute(sql)

                row = cursor.fetchone()

                while row is not None:
                    (from_stop_id, to_stop_id, transfer_type,
                     min_transfer_time, from_route_id, to_route_id,
                     from_trip_id) = row
                    if not min_transfer_time:
                        min_transfer_time = MultimodalNetwork.MIN_TRANSFER_TIME
                    if from_stop_id != to_stop_id and not from_trip_id:
                        if not from_route_id:
                            if from_stop_id in self.stop_transfers:
                                self.stop_transfers[from_stop_id][
                                    to_stop_id] = min_transfer_time
                            else:
                                self.stop_transfers[from_stop_id] = {
                                    to_stop_id: min_transfer_time
                                }
                        else:
                            if from_route_id in self.route_transfers:
                                if to_route_id in self.route_transfers[
                                        from_route_id]:
                                    if from_stop_id in self.route_transfers[
                                            from_route_id][to_route_id]:
                                        self.route_transfers[from_route_id][
                                            to_route_id][from_stop_id][
                                                to_stop_id] = min_transfer_time
                                    else:
                                        self.route_transfers[from_route_id][
                                            to_route_id][from_stop_id] = {
                                                to_stop_id: min_transfer_time
                                            }
                                else:
                                    self.route_transfers[from_route_id][
                                        to_route_id] = {
                                            from_stop_id: {
                                                to_stop_id: min_transfer_time
                                            }
                                        }
                            else:
                                self.route_transfers[from_route_id] = {
                                    to_route_id: {
                                        from_stop_id: {
                                            to_stop_id: min_transfer_time
                                        }
                                    }
                                }
                    row = cursor.fetchone()

                cursor.close()
            except IOError as e:
                print("I/O error({0}): {1}".format(e.errno, e.strerror))
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)
            except:
                print("Unexpected error:", sys.exc_info()[0])
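Note the contrast inside loadTransferTimes: the table name is passed as a plain %s parameter in the information_schema lookup, because there it is compared as a string value; the commented-out SQL(...).format(Identifier(...)) variant would render a double-quoted identifier where a string literal is needed. A minimal sketch of the two roles, assuming a cursor `cur` (names are illustrative):

    from psycopg2.sql import SQL, Identifier

    table = "transfers_region1"
    # As a value (string literal), e.g. in an information_schema lookup:
    cur.execute("SELECT EXISTS (SELECT 1 FROM information_schema.tables"
                " WHERE table_name = %s)", (table,))
    # As an identifier, e.g. in a FROM clause:
    cur.execute(SQL("SELECT count(*) FROM {}").format(Identifier(table)))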
Example #9
    def loadTrips(self):
        print("Loading trips...")
        sql = """SELECT s.*, t.service_id FROM {0} as s, {1} as t
                WHERE s.trip_id in
                (select trip_id from {1} where route_id=%s) and
                t.trip_id = s.trip_id
                order by trip_id, stop_sequence
                ;
        """
        sql = SQL(sql).format(Identifier("stop_times_" + str(self.region)),
                              Identifier("trips_" + str(self.region)))

        try:
            cursor = self.conn.getCursor()

            for route in self.routes_id:
                #print(route)
                self.edges_timetable = {}
                node_mapping = {}

                cursor.execute(sql, (route, ))

                row = cursor.fetchone()

                previous_trip_id = -1
                previous_node = -1
                previous_departure_time = None
                while row is not None:
                    (trip_id, arrival_time, departure_time, stop_id,
                     stop_sequence, service_id) = row
                    #print(trip_id)

                    if trip_id != previous_trip_id:
                        previous_node = -1

                    if stop_id in node_mapping:
                        node_id = node_mapping[stop_id]

                    else:
                        if stop_id not in self.stops:
                            previous_node = -1
                            row = cursor.fetchone()
                            continue

                        node_id = self.current_node_id
                        node_mapping[stop_id] = node_id

                        stop = self.stops[stop_id]
                        if self.graph.getNode(node_id):
                            print("duplicate node!!", node_id)
                        self.graph.addNode(node_id, stop['lat'], stop['lon'],
                                           MultimodalNetwork.TRANSPORTATION,
                                           stop_id, route)

                        #print(node_id)
                        if self.stops[stop_id]['parent']:
                            parent = self.stops[stop_id]['parent']
                            if parent in self.parent_stops:
                                self.parent_stops[parent].append(node_id)
                            else:
                                self.parent_stops[parent] = [node_id]
                        elif self.stops[stop_id][
                                'type'] == 0 and not self.stops[stop_id][
                                    'parent']:
                            self.parent_stops[stop_id] = [node_id]
                            #print(stop_id, node_id)

                        self.current_node_id += 1
                    row = cursor.fetchone()

                    if previous_node != -1:
                        #create edge from previous_node to node_id
                        self.createTransportationEdge(previous_node, node_id,
                                                      previous_departure_time,
                                                      arrival_time, service_id)

                    previous_node = node_id
                    previous_trip_id = trip_id
                    previous_departure_time = departure_time

                self.addTransportationEdges()

            cursor.close()
        except IOError as e:
            print("I/O error({0}): {1}".format(e.errno, e.strerror))
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
        except:
            print("Unexpected error:", sys.exc_info()[0])
Example #10
import logging
from os.path import abspath
from psycopg2 import connect
from psycopg2.sql import SQL
from psycopg2.extras import DictCursor
from yaml import load, YAMLError

INSERT_QUERIES = {
    'books': SQL("INSERT INTO books (isbn, author, title, publication_year) "
                 "VALUES (%(isbn)s, %(author)s, %(title)s, %(year)s)")
}


class PostgresConnection:
    def __init__(self):
        self._connection = connect(dsn=self._get_connection_string())
        logging.info("Connection established")

    def __enter__(self) -> DictCursor:
        return self._connection.cursor(cursor_factory=DictCursor)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._connection.commit()
        self._connection.close()
        logging.info("Connection closed")

    @staticmethod
    def _get_connection_string():
        creds = None
        with open(abspath("config.yml")) as yml_file:
            try:
Example #11
    cols.append(('class="seriesname"', "Name"))
    if include_institutions:
        cols.append(('class="institutions"', "Institutions"))
    if include_audience:
        cols.append(('class="audience"', "Audience"))
    if include_topics:
        cols.append(("", "Topics"))
    if include_subscribe:
        if current_user.is_anonymous:
            cols.append(("", ""))
        else:
            cols.append(("", "Saved"))
    return "".join("<th %s>%s</th>" % pair for pair in cols)

_selecter = SQL(
    "SELECT {0} FROM (SELECT DISTINCT ON (shortname) {1} FROM {2} ORDER BY shortname, id DESC) tmp{3}"
)
_counter = SQL(
    "SELECT COUNT(*) FROM (SELECT 1 FROM (SELECT DISTINCT ON (shortname) {0} FROM {1} ORDER BY shortname, id DESC) tmp{2}) tmp2"
)
_maxer = SQL(
    "SELECT MAX({0}) FROM (SELECT DISTINCT ON (shortname) {1} FROM {2} ORDER BY shortname, id DESC) tmp{3}"
)


def _construct(organizer_dict, institution_dict, objects=True, more=False):
    def object_construct(rec):
        if not isinstance(rec, dict):
            return rec
        else:
            if more is not False:
Example #12
 def check_sorted(self, column):
     return self._run_query(
         SQL("{0} != sort({0})").format(Identifier(column)))
Example #13
 def check_string_startswith(self, col, head, constraint={}):
     value = head.replace('_', r'\_').replace('%', r'\%') + '%'
     return self._run_query(SQL("NOT ({0} LIKE %s)").format(
         Identifier(col)),
                            constraint=constraint,
                            values=[value])
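The two replace calls escape LIKE's wildcard characters before the trailing % is appended; a tiny illustration with a hypothetical label:

    head = "11.2.a_b"
    value = head.replace('_', r'\_').replace('%', r'\%') + '%'
    # value is 11.2.a\_b% : matches strings starting with the literal head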
Example #14
 def check_non_divisible(self, numerator, denominator, constraint={}):
     numerator = self._make_sql(numerator)
     denominator = self._make_sql(denominator)
     return self._run_query(SQL("MOD({0}, {1}) = 0").format(
         numerator, denominator),
                            constraint=constraint)
Example #15
File: S1_main.py Project: GuiYome/Sen4CAP
def get_s1_products(config, conn, prds_list):
    with conn.cursor() as cursor:
        prds_names_list = []
        if prds_list is None or len(prds_list) == 0:
            query = SQL("""
                with products as (
                    select product.site_id,
                        product.name,
                        case
                            when substr(split_part(product.name, '_', 4), 2, 8) > substr(split_part(product.name, '_', 5), 1, 8) then substr(split_part(product.name, '_', 4), 2, 8)
                            else substr(split_part(product.name, '_', 5), 1, 8)
                        end :: date as date,
                        coalesce(product.orbit_type_id, 1) as orbit_type_id,
                        split_part(product.name, '_', 6) as polarization,
                        product.processor_id,
                        product.product_type_id,
                        substr(product.name, length(product.name) - strpos(reverse(product.name), '_') + 2) as radar_product_type,
                        product.orbit_id,
                        product.full_path
                    from product where product.satellite_id = 3 and product.site_id = {}
                )
                select products.date,
                        products.name,
                        products.orbit_type_id,
                        products.polarization,
                        products.radar_product_type,
                        products.full_path, 
                        products.orbit_id
                from products
                where date between {} and {}
                order by date;
                """)

            site_id_filter = Literal(config.site_id)
            start_date_filter = Literal(config.season_start)
            end_date_filter = Literal(config.season_end)
            query = query.format(site_id_filter, start_date_filter,
                                 end_date_filter)
            # print(query.as_string(conn))
        else:
            for prd in prds_list:
                prds_names_list.append(
                    os.path.splitext(os.path.basename(prd))[0])

            if len(prds_names_list) > 1:
                prdsSubstr = tuple(prds_names_list)
            else:
                prdsSubstr = "('{}')".format(prds_names_list[0])

            query = """
                with products as (
                    select product.site_id,
                        product.name,
                        case
                            when substr(split_part(product.name, '_', 4), 2, 8) > substr(split_part(product.name, '_', 5), 1, 8) then substr(split_part(product.name, '_', 4), 2, 8)
                            else substr(split_part(product.name, '_', 5), 1, 8)
                        end :: date as date,
                        coalesce(product.orbit_type_id, 1) as orbit_type_id,
                        split_part(product.name, '_', 6) as polarization,
                        product.processor_id,
                        product.product_type_id,
                        substr(product.name, length(product.name) - strpos(reverse(product.name), '_') + 2) as radar_product_type,
                        product.orbit_id,
                        product.full_path
                    from product where product.satellite_id = 3 and product.name in {}
                )
                select products.date,
                        products.name,
                        products.orbit_type_id,
                        products.polarization,
                        products.radar_product_type,
                        products.full_path, 
                        products.orbit_id
                from products
                order by date;""".format(prdsSubstr)
            #print(query)

        # execute the query
        cursor.execute(query)

        results = cursor.fetchall()
        conn.commit()

        products = []
        # We do this search both to warn about products that are not present in the database and to keep the same order of products as in the inputs
        if len(prds_names_list) > 0:
            for i in range(len(prds_names_list)):
                prd_name = prds_names_list[i]
                prd = prds_list[i]
                prdAdded = False
                for (dt, name, orbit_type_id, polarization, radar_product_type,
                     full_path, orbit_id) in results:
                    if os.path.splitext(os.path.basename(prd_name))[0] == name:
                        products.append(
                            RadarProduct(dt, name, orbit_type_id, polarization,
                                         radar_product_type,
                                         os.path.normpath(prd), orbit_id))
                        prdAdded = True
                        break
                if prdAdded == False:
                    print("Product {} was not found in the database!!!".format(
                        prd))
        else:
            for (dt, name, orbit_type_id, polarization, radar_product_type,
                 full_path, orbit_id) in results:
                products.append(
                    RadarProduct(dt, name, orbit_type_id, polarization,
                                 radar_product_type, full_path, orbit_id))

        return products
Example #16
GET_FUNDING_SQL = SQL("""
    with gather_award_ids as (
        select  award_id
        from    parent_award
        where   {award_id_column} = {award_id}
        union all
        select  cpa.award_id
        from    parent_award ppa
                inner join parent_award cpa on cpa.parent_award_id = ppa.award_id
        where   ppa.{award_id_column} = {award_id}
    ), gather_awards as (
        select  ca.id award_id,
                ca.generated_unique_award_id,
                ca.piid,
                ca.awarding_agency_id,
                ca.funding_agency_id
        from    gather_award_ids gaids
                inner join awards pa on pa.id = gaids.award_id
                inner join awards ca on
                    ca.parent_award_piid = pa.piid and
                    ca.fpds_parent_agency_id = pa.fpds_agency_id and
                    ca.type not like 'IDV%' and
                    (ca.piid = {piid} or {piid} is null)
    ), gather_financial_accounts_by_awards as (
        select  ga.award_id,
                ga.generated_unique_award_id,
                ga.piid,
                ga.awarding_agency_id,
                ga.funding_agency_id,
                nullif(faba.transaction_obligated_amount, 'NaN') transaction_obligated_amount,
                faba.financial_accounts_by_awards_id,
                faba.submission_id,
                faba.treasury_account_id,
                faba.program_activity_id,
                faba.object_class_id
        from    gather_awards ga
                inner join financial_accounts_by_awards faba on faba.award_id = ga.award_id
    ), agency_id_to_agency_id_for_toptier_mapping as (
        select
            a.id                            agency_id,
            t.agency_id                     agency_id_for_toptier,
            t.toptier_agency_name
        from (
                select
                    a.id                    agency_id,
                    ta.toptier_agency_id,
                    ta.name                 toptier_agency_name,
                    row_number() over(
                        partition by ta.toptier_agency_id
                        order by sa.name is not distinct from ta.name desc, a.update_date asc, a.id desc
                    ) as per_toptier_row_number
                from
                    agency a
                    inner join toptier_agency ta on ta.toptier_agency_id = a.toptier_agency_id
                    left outer join subtier_agency sa on sa.subtier_agency_id = a.subtier_agency_id
            ) t
            inner join agency a on a.toptier_agency_id = t.toptier_agency_id
        where
            t.per_toptier_row_number = 1
    )
    select
        gfaba.award_id,
        gfaba.generated_unique_award_id,
        sa.reporting_fiscal_year,
        sa.reporting_fiscal_quarter,
        gfaba.piid,
        aamap.agency_id_for_toptier         awarding_agency_id,
        aamap.toptier_agency_name           awarding_agency_name,
        famap.agency_id_for_toptier         funding_agency_id,
        famap.toptier_agency_name           funding_agency_name,
        taa.agency_id,
        taa.main_account_code,
        fa.account_title,
        rpa.program_activity_code,
        rpa.program_activity_name,
        oc.object_class,
        oc.object_class_name,
        gfaba.transaction_obligated_amount
    from
        gather_financial_accounts_by_awards gfaba
        left outer join submission_attributes sa on sa.submission_id = gfaba.submission_id
        left outer join treasury_appropriation_account taa on
            taa.treasury_account_identifier = gfaba.treasury_account_id
        left outer join federal_account fa on fa.id = taa.federal_account_id
        left outer join ref_program_activity rpa on rpa.id = gfaba.program_activity_id
        left outer join object_class oc on oc.id = gfaba.object_class_id
        left outer join agency_id_to_agency_id_for_toptier_mapping aamap on aamap.agency_id = gfaba.awarding_agency_id
        left outer join agency_id_to_agency_id_for_toptier_mapping famap on famap.agency_id = gfaba.funding_agency_id
    {order_by}
    limit {limit} offset {offset}
""")
Example #17
def get_recent_private_conversations(
        user_profile: UserProfile) -> Dict[int, Dict[str, Any]]:
    """This function uses some carefully optimized SQL queries, designed
    to use the UserMessage index on private_messages.  It is
    significantly complicated by the fact that for 1:1 private
    messages, we store the message against a recipient_id of whichever
    user was the recipient, and thus for 1:1 private messages sent
    directly to us, we need to look up the other user from the
    sender_id on those messages.  You'll see that pattern repeated
    both here and also in zerver/lib/events.py.

    Ideally, we would write these queries using Django, but even
    without the UNION ALL, that seems to not be possible, because the
    equivalent Django syntax (for the first part of this query):

        message_data = UserMessage.objects.select_related("message__recipient_id").filter(
            user_profile=user_profile,
        ).extra(
            where=[UserMessage.where_private()]
        ).order_by("-message_id")[:1000].values(
            "message__recipient_id").annotate(last_message_id=Max("message_id"))

    does not properly nest the GROUP BY (from .annotate) with the slicing.

    We return a dictionary structure for convenient modification
    below; this structure is converted into its final form by
    post_process.

    """
    RECENT_CONVERSATIONS_LIMIT = 1000

    recipient_map = {}
    my_recipient_id = user_profile.recipient_id

    query = SQL('''
    SELECT
        subquery.recipient_id, MAX(subquery.message_id)
    FROM (
        (SELECT
            um.message_id AS message_id,
            m.recipient_id AS recipient_id
        FROM
            zerver_usermessage um
        JOIN
            zerver_message m
        ON
            um.message_id = m.id
        WHERE
            um.user_profile_id=%(user_profile_id)s AND
            um.flags & 2048 <> 0 AND
            m.recipient_id <> %(my_recipient_id)s
        ORDER BY message_id DESC
        LIMIT %(conversation_limit)s)
        UNION ALL
        (SELECT
            m.id AS message_id,
            sender_profile.recipient_id AS recipient_id
        FROM
            zerver_message m
        JOIN
            zerver_userprofile sender_profile
        ON
            m.sender_id = sender_profile.id
        WHERE
            m.recipient_id=%(my_recipient_id)s
        ORDER BY message_id DESC
        LIMIT %(conversation_limit)s)
    ) AS subquery
    GROUP BY subquery.recipient_id
    ''')

    with connection.cursor() as cursor:
        cursor.execute(
            query, {
                "user_profile_id": user_profile.id,
                "conversation_limit": RECENT_CONVERSATIONS_LIMIT,
                "my_recipient_id": my_recipient_id,
            })
        rows = cursor.fetchall()

    # The resulting rows will be (recipient_id, max_message_id)
    # objects for all parties we've had recent (group?) private
    # message conversations with, including PMs with yourself (those
    # will generate an empty list of user_ids).
    for recipient_id, max_message_id in rows:
        recipient_map[recipient_id] = dict(
            max_message_id=max_message_id,
            user_ids=[],
        )

    # Now we need to map all the recipient_id objects to lists of user IDs
    for (recipient_id, user_profile_id) in Subscription.objects.filter(
            recipient_id__in=recipient_map.keys()).exclude(
                user_profile_id=user_profile.id).values_list(
                    "recipient_id", "user_profile_id"):
        recipient_map[recipient_id]['user_ids'].append(user_profile_id)

    # Sort to prevent test flakes and client bugs.
    for rec in recipient_map.values():
        rec['user_ids'].sort()

    return recipient_map
Example #18
 def to_sql(self):
     return SQL('{}.{}').format(
         Identifier(self.parent_relation.alias),
         Identifier(self.column_name),
     ), ()
Example #19
 def check_field_disc_factorization(self):
     """
     if present, check that field_disc_factorization matches field_disc
     """
     # TIME about 3s
     return self._run_query(SQL('field_disc != prod_factorization(field_disc_factorization)'), {'field_disc':{'$exists':True}})
Example #20
File: mf.py Project: roed314/beantheory2
 def check_sub_dim_positive(self):
     """
     sub_dim is positive
     """
     return self._run_query(SQL("{0} <= 0").format(Identifier('sub_dim')))
Example #21
 def check_field_poly(self):
     """
     if field_poly is set, check that it is monic and of degree dim
     """
     return self._run_query(SQL('array_length(field_poly, 1) != dim + 1 OR field_poly[dim + 1] != 1'), {'field_poly': {'$exists':True}})
Example #22
def delete_user(user):
    current_app._db.execute_sql(
        SQL("REASSIGN OWNED BY {user} TO postgres;DROP OWNED BY {user};DROP USER {user};"
            ).format(user=Identifier(user)))
    if current_app.config['TESTING']:
        current_app._redis.lrem('user_list', 0, user)
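format substitutes the same Identifier into every {user} slot (role names cannot be passed as %s parameters, hence the composition); a minimal rendering sketch, assuming a connection `conn`:

    from psycopg2.sql import SQL, Identifier

    stmt = SQL("DROP OWNED BY {user}; DROP USER {user};").format(user=Identifier("alice"))
    print(stmt.as_string(conn))  # DROP OWNED BY "alice"; DROP USER "alice";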
Example #23
    def query(self,
              startindex=0,
              limit=10,
              resulttype='results',
              bbox=[],
              datetime=None,
              properties=[],
              sortby=[]):
        """
        Query Postgis for all the content.
        e.g.: http://localhost:5000/collections/hotosm_bdi_waterways/items?
        limit=1&resulttype=results

        :param startindex: starting record to return (default 0)
        :param limit: number of records to return (default 10)
        :param resulttype: return results or hit limit (default results)
        :param bbox: bounding box [minx,miny,maxx,maxy]
        :param datetime: temporal (datestamp or extent)
        :param properties: list of tuples (name, value)
        :param sortby: list of dicts (property, order)

        :returns: GeoJSON FeaturesCollection
        """
        LOGGER.debug('Querying PostGIS')

        if resulttype == 'hits':

            with DatabaseConnection(self.conn_dic, self.table,
                                    context="hits") as db:
                cursor = db.conn.cursor(cursor_factory=RealDictCursor)

                where_clause = self.__get_where_clauses(properties=properties,
                                                        bbox=bbox)
                sql_query = SQL("SELECT COUNT(*) as hits from {} {}").\
                    format(Identifier(self.table), where_clause)
                try:
                    cursor.execute(sql_query)
                except Exception as err:
                    LOGGER.error('Error executing sql_query: {}: {}'.format(
                        sql_query.as_string(cursor), err))
                    raise ProviderQueryError()

                hits = cursor.fetchone()["hits"]

            return self.__response_feature_hits(hits)

        end_index = startindex + limit

        with DatabaseConnection(self.conn_dic, self.table) as db:
            cursor = db.conn.cursor(cursor_factory=RealDictCursor)

            where_clause = self.__get_where_clauses(properties=properties,
                                                    bbox=bbox)

            sql_query = SQL("DECLARE \"geo_cursor\" CURSOR FOR \
             SELECT DISTINCT {},ST_AsGeoJSON({}) FROM {}{}"                                                           ).\
                format(db.columns,
                       Identifier(self.geom),
                       Identifier(self.table),
                       where_clause)

            LOGGER.debug('SQL Query: {}'.format(sql_query.as_string(cursor)))
            LOGGER.debug('Start Index: {}'.format(startindex))
            LOGGER.debug('End Index: {}'.format(end_index))
            try:
                cursor.execute(sql_query)
                for index in [startindex, limit]:
                    cursor.execute(
                        "fetch forward {} from geo_cursor".format(index))
            except Exception as err:
                LOGGER.error('Error executing sql_query: {}'.format(
                    sql_query.as_string(cursor)))
                LOGGER.error(err)
                raise ProviderQueryError()

            row_data = cursor.fetchall()

            feature_collection = {'type': 'FeatureCollection', 'features': []}

            for rd in row_data:
                feature_collection['features'].append(
                    self.__response_feature(rd))

            return feature_collection
Example #24
def search_distinct(
    table,
    selecter,
    counter,
    iterator,
    query={},
    projection=1,
    limit=None,
    offset=0,
    sort=None,
    info=None,
    include_deleted=False,
    more=False,
    prequery={"display": True},
):
    """
    Replacement for db.*.search to account for versioning, return Web* objects.

    Doesn't support split_ors, raw or extra tables.  Always computes count.

    INPUT:

    - ``table`` -- a search table, such as db.seminars or db.talks
    - ``counter`` -- an SQL object counting distinct entries
    - ``selecter`` -- an SQL objecting selecting distinct entries
    - ``iterator`` -- an iterator taking the same arguments as ``_search_iterator``
    """
    if offset < 0:
        raise ValueError("Offset cannot be negative")
    query = dict(query)
    if not include_deleted:
        query["deleted"] = {"$or": [False, {"$exists": False}]}
    all_cols = SQL(", ").join(
        map(IdentifierWrapper, ["id"] + table.search_cols))
    search_cols, extra_cols = table._parse_projection(projection)
    tbl = IdentifierWrapper(table.search_table)
    nres = count_distinct(table, counter, query)
    if limit is None:
        qstr, values = table._build_query(query, sort=sort)
    else:
        qstr, values = table._build_query(query, limit, offset, sort)
    if prequery:
        # We filter the records before finding the most recent (normal queries filter after finding the most recent)
        # This is mainly used for setting display=False or display=True
        # We take advantage of the fact that the WHERE clause occurs just after the table name in all of our query constructions
        pqstr, pqvalues = table._parse_dict(prequery)
        if pqstr is not None:
            tbl = tbl + SQL(" WHERE {0}").format(pqstr)
            values = pqvalues + values
    if more is not False:  # might empty dictionary
        more, moreval = table._parse_dict(more)
        if more is None:
            more = Placeholder()
            moreval = [True]

        cols = SQL(", ").join(
            list(map(IdentifierWrapper, search_cols + extra_cols)) + [more])
        extra_cols = extra_cols + ("more", )
        values = moreval + values
    else:
        cols = SQL(", ").join(map(IdentifierWrapper, search_cols + extra_cols))
    fselecter = selecter.format(cols, all_cols, tbl, qstr)
    cur = table._execute(
        fselecter,
        values,
        buffered=(limit is None),
        slow_note=(
            table.search_table,
            "analyze",
            query,
            repr(projection),
            limit,
            offset,
        ),
    )
    results = iterator(cur, search_cols, extra_cols, projection)
    if limit is None:
        if info is not None:
            # caller is requesting count data
            info["number"] = nres
        return results
    if info is not None:
        if offset >= nres > 0:
            # We're passing in an info dictionary, so this is a front end query,
            # and the user has requested a start location larger than the number
            # of results.  We adjust the results to be the last page instead.
            offset -= (1 + (offset - nres) // limit) * limit
            if offset < 0:
                offset = 0
            return search_distinct(
                table,
                selecter,
                counter,
                iterator,
                query,
                projection,
                limit,
                offset,
                sort,
                info,
            )
        info["query"] = dict(query)
        info["number"] = nres
        info["count"] = limit
        info["start"] = offset
        info["exact_count"] = True
    return list(results)
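Roughly how selecter.format(cols, all_cols, tbl, qstr) fills the four slots of the _selecter template from Example #11 (values are illustrative; IdentifierWrapper is the lmfdb helper used above):

    fselecter = _selecter.format(
        SQL(", ").join(map(IdentifierWrapper, ["id", "shortname"])),          # {0}: projected columns
        SQL(", ").join(map(IdentifierWrapper, ["id", "shortname", "name"])),  # {1}: DISTINCT ON columns
        IdentifierWrapper("seminars"),        # {2}: table (plus any prequery WHERE)
        SQL(" WHERE deleted IS NOT true"),    # {3}: qstr from _build_query
    )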
Example #25
GET_FUNDING_SQL = SQL("""
    with cte as (
        select    award_id
        from      parent_award
        where     {award_id_column} = {award_id}
        union all
        select    cpa.award_id
        from      parent_award ppa
                  inner join parent_award cpa on cpa.parent_award_id = ppa.award_id
        where     ppa.{award_id_column} = {award_id}
    )
    select
        ca.id award_id,
        ca.generated_unique_award_id,
        sa.reporting_fiscal_year,
        sa.reporting_fiscal_quarter,
        ca.piid,
        ca.funding_agency_id,
        taa.reporting_agency_id,
        taa.reporting_agency_name,
        taa.agency_id,
        taa.main_account_code,
        taa.account_title,
        rpa.program_activity_code,
        rpa.program_activity_name,
        oc.object_class,
        oc.object_class_name,
        nullif(faba.transaction_obligated_amount, 'NaN') transaction_obligated_amount
    from
        cte
        inner join awards pa on
            pa.id = cte.award_id
        inner join awards ca on
            ca.parent_award_piid = pa.piid and
            ca.fpds_parent_agency_id = pa.fpds_agency_id and
            ca.type not like 'IDV\_%'
        inner join financial_accounts_by_awards faba on
            faba.award_id = ca.id
        left outer join submission_attributes sa on
            sa.submission_id = faba.submission_id
        left outer join treasury_appropriation_account taa on
            taa.treasury_account_identifier = faba.treasury_account_id
        left outer join ref_program_activity rpa on
            rpa.id = faba.program_activity_id
        left outer join object_class oc on
            oc.id = faba.object_class_id
    where
        (ca.piid = {piid} or {piid} is null)
    {order_by}
    limit {limit} offset {offset}
""")
Example #26
 def isogeny_degrees(self):
     cur = db._execute(
         SQL("SELECT UNIQ(SORT(ARRAY_AGG(elements ORDER BY elements))) FROM ec_curves, UNNEST(isodeg) as elements"
             ))
     return cur.fetchone()[0]
Example #27
 def fin():
     db_connection.close()
     with template_db.cursor() as db:
         db.execute(
             SQL("drop database {db_name};").format(
                 db_name=Identifier(db_name)))
Example #28
def setConfigValue(conn, site_id, key, value):
    with conn.cursor() as cursor:
        id = -1
        if not site_id:
            query = SQL(
                """ select id from config where key = {} and site_id is null"""
            ).format(Literal(key))
        else:
            query = SQL(
                """ select id from config where key = {} and site_id = {}"""
            ).format(Literal(key), Literal(site_id))
        print(query.as_string(conn))
        cursor.execute(query)
        for row in cursor:
            id = row[0]
        conn.commit()

        if id == -1:
            if not site_id:
                query = SQL(
                    """ insert into config (key, value) values ({}, {}) """
                ).format(Literal(key), Literal(value))
            else:
                query = SQL(
                    """ insert into config (key, site_id, value) values ({}, {}, {}) """
                ).format(Literal(key), Literal(site_id), Literal(value))
            print(query.as_string(conn))
            cursor.execute(query)
            conn.commit()
        else:
            if not site_id:
                query = SQL(
                    """ update config set value = {} where key = {} and site_id is null """
                ).format(Literal(value), Literal(key))
            else:
                query = SQL(
                    """ update config set value = {} where key = {} and site_id = {} """
                ).format(Literal(value), Literal(key), Literal(site_id))
            print(query.as_string(conn))
            cursor.execute(query)
            conn.commit()

        if not site_id:
            query = SQL(
                """ select value from config where key = {} and site_id is null"""
            ).format(Literal(key))
        else:
            query = SQL(
                """ select value from config where key = {} and site_id = {}"""
            ).format(Literal(key), Literal(site_id))
        print(query.as_string(conn))
        cursor.execute(query)
        read_value = ''
        for row in cursor:
            read_value = row[0]
        conn.commit()

        print("========")
        if str(value) == str(read_value):
            print("Key {} succesfuly updated for site id {} with value {}".
                  format(key, site_id, value))
        else:
            print(
                "Error updating key {} for site id {} with value {}. The read value was: {}"
                .format(key, site_id, value, read_value))
        print("========")
            status=status,
            reply_topic=reply_topic,
            user=user,
            strategy=strategy,
            headers=headers,
        )
        logger.info(f"Publishing '{message!s}'...")
        await self.enqueue(message.topic, message.strategy, message.avro_bytes)
        return message.identifier

    async def enqueue(self, topic: str, strategy: BrokerMessageStrategy,
                      raw: bytes) -> int:
        """Send a sequence of bytes to the given topic.

        :param topic: Topic to which the bytes will be sent.
        :param strategy: The publishing strategy.
        :param raw: Byte sequence to be sent.
        :return: The identifier of the message in the queue.
        """
        params = (topic, raw, strategy)
        raw = await self.submit_query_and_fetchone(_INSERT_ENTRY_QUERY, params)
        await self.submit_query(_NOTIFY_QUERY)
        return raw[0]


_INSERT_ENTRY_QUERY = SQL(
    "INSERT INTO producer_queue (topic, data, strategy) VALUES (%s, %s, %s) RETURNING id"
)

_NOTIFY_QUERY = SQL("NOTIFY producer_queue")
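_NOTIFY_QUERY only makes sense with a consumer LISTENing on the same channel; that side is not shown in this example, but a minimal plain-psycopg2 listener might look like this (DSN and loop are illustrative):

    import select
    import psycopg2

    conn = psycopg2.connect("dbname=broker")  # illustrative DSN
    conn.set_session(autocommit=True)
    with conn.cursor() as cur:
        cur.execute("LISTEN producer_queue;")
    while True:
        # Block up to 5s waiting for activity on the connection's socket.
        if select.select([conn], [], [], 5.0)[0]:
            conn.poll()
            while conn.notifies:
                note = conn.notifies.pop(0)
                print("notified on channel:", note.channel)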
Example #30
def do_aggregate_to_summary_table(stat: CountStat,
                                  end_time: datetime,
                                  realm: Optional[Realm] = None) -> None:
    cursor = connection.cursor()

    # Aggregate into RealmCount
    output_table = stat.data_collector.output_table
    if realm is not None:
        realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(
            realm.id))
    else:
        realm_clause = SQL("")

    if output_table in (UserCount, StreamCount):
        realmcount_query = SQL("""
            INSERT INTO analytics_realmcount
                (realm_id, value, property, subgroup, end_time)
            SELECT
                zerver_realm.id, COALESCE(sum({output_table}.value), 0), %(property)s,
                {output_table}.subgroup, %(end_time)s
            FROM zerver_realm
            JOIN {output_table}
            ON
                zerver_realm.id = {output_table}.realm_id
            WHERE
                {output_table}.property = %(property)s AND
                {output_table}.end_time = %(end_time)s
                {realm_clause}
            GROUP BY zerver_realm.id, {output_table}.subgroup
        """).format(
            output_table=Identifier(output_table._meta.db_table),
            realm_clause=realm_clause,
        )
        start = time.time()
        cursor.execute(
            realmcount_query,
            {
                "property": stat.property,
                "end_time": end_time,
            },
        )
        end = time.time()
        logger.info(
            "%s RealmCount aggregation (%dms/%sr)",
            stat.property,
            (end - start) * 1000,
            cursor.rowcount,
        )

    if realm is None:
        # Aggregate into InstallationCount.  Only run if we just
        # processed counts for all realms.
        #
        # TODO: Add support for updating installation data after
        # changing an individual realm's values.
        installationcount_query = SQL("""
            INSERT INTO analytics_installationcount
                (value, property, subgroup, end_time)
            SELECT
                sum(value), %(property)s, analytics_realmcount.subgroup, %(end_time)s
            FROM analytics_realmcount
            WHERE
                property = %(property)s AND
                end_time = %(end_time)s
            GROUP BY analytics_realmcount.subgroup
        """)
        start = time.time()
        cursor.execute(
            installationcount_query,
            {
                "property": stat.property,
                "end_time": end_time,
            },
        )
        end = time.time()
        logger.info(
            "%s InstallationCount aggregation (%dms/%sr)",
            stat.property,
            (end - start) * 1000,
            cursor.rowcount,
        )

    cursor.close()
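The realm_clause trick above generalizes: an empty SQL("") composes away cleanly, so one template serves both the filtered and unfiltered cases. A minimal sketch (table and id are illustrative):

    from psycopg2.sql import SQL, Literal

    def realm_filter(realm_id=None):
        clause = (SQL("AND zerver_realm.id = {}").format(Literal(realm_id))
                  if realm_id is not None else SQL(""))
        return SQL("SELECT id FROM zerver_realm WHERE TRUE {realm_clause}").format(
            realm_clause=clause)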