def transaction_search(context, data_dict, timeout):
    fields = [
        field.get('id') for field in db._get_fields(context, data_dict)
    ]
    try:
        select_columns = u', '.join(
            [u'"{0}"'.format(field_id) for field_id in fields])
        sql_string = u'SELECT {0} FROM "{1}"'.format(select_columns,
                                                     data_dict['resource_id'])

        return format_to_array_of_dict(
            context['connection'].execute(sql_string), fields)
    except DBAPIError as e:
        if e.orig.pgcode == db._PG_ERR_CODE['query_canceled']:
            raise db.ValidationError({'query': ['Search took too long']})
        raise db.ValidationError({
            'query': ['Invalid query'],
            'info': {
                'statement': [e.statement],
                'params': [e.params],
                'orig': [str(e.orig)]
            }
        })
    finally:
        # Ensure the connection is always released
        context['connection'].close()
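# `format_to_array_of_dict` is not shown in this snippet; below is a minimal
# sketch of what it presumably does, assuming each SQLAlchemy result row is
# zipped with the requested field ids (this implementation is an assumption):
def format_to_array_of_dict(query_result, fields):
    # Materialise every row as a dict keyed by field id
    return [dict(zip(fields, row)) for row in query_result.fetchall()]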
def query_extent(data_dict, connection=None):
    """ Return the spatial query extent of a datastore search

    @param data_dict: Dictionary defining the search
    @returns a dictionary defining:
        {
            total_count: The total number of rows in the query,
            geom_count: The number of rows that have a geom,
            bounds: ((lat min, long min), (lat max, long max)) for the
                    query's rows
        }
    """
    r = toolkit.get_action('datastore_search')({}, data_dict)
    if 'total' not in r or r['total'] == 0:
        return {
            'total_count': 0,
            'geom_count': 0,
            'bounds': None
        }
    result = {
        'total_count': r['total'],
        'bounds': None
    }
    field_types = {f['id']: f['type'] for f in r['fields']}
    field_types['_id'] = 'int'
    # Call plugin to obtain correct where statement
    (ts_query, where_clause, values) = invoke_search_plugins(data_dict, field_types)
    # Prepare and run our query
    query = """
        SELECT COUNT(r) AS count,
               ST_YMIN(ST_EXTENT(r)) AS ymin,
               ST_XMIN(ST_EXTENT(r)) AS xmin,
               ST_YMAX(ST_EXTENT(r)) AS ymax,
               ST_XMAX(ST_EXTENT(r)) AS xmax
        FROM   (
          SELECT "{geom_field}" AS r
          FROM   "{resource_id}" {ts_query}
          {where_clause}
        ) _tilemap_sub
    """.format(
        geom_field=config['postgis.field'],
        resource_id=data_dict['resource_id'],
        where_clause=where_clause,
        ts_query=ts_query
    )
    if not is_single_statement(query):
        raise datastore_db.ValidationError({
            'query': ['Query is not a single statement.']
        })
    with get_connection(connection) as c:
        query_result = c.execute(query, values)
        r = query_result.fetchone()

    result['geom_count'] = r['count']
    if result['geom_count'] > 0:
        result['bounds'] = ((r['ymin'], r['xmin']), (r['ymax'], r['xmax']))
    return result
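# Example usage of query_extent (hypothetical resource id): fetch the bounding
# box of every row that has a geometry:
#
#     extent = query_extent({'resource_id': 'my-resource-id'})
#     if extent['bounds'] is not None:
#         (lat_min, long_min), (lat_max, long_max) = extent['bounds']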
def transaction_upsert(context, data_dict, timeout, trans):
    try:
        db.upsert_data(context, data_dict)
        return db._unrename_json_field(data_dict)
    except IntegrityError as e:
        if e.orig.pgcode == db._PG_ERR_CODE['unique_violation']:
            raise db.ValidationError({
                'constraints': [
                    'Cannot insert records or create index because'
                    ' of uniqueness constraint'
                ],
                'info': {
                    'orig': str(e.orig),
                    'pgcode': e.orig.pgcode
                }
            })
        raise
    except DataError as e:
        trans.rollback()
        raise db.ValidationError({
            'data': str(e),
            'info': {
                'orig': [str(e.orig)]
            }
        })
    except DBAPIError as e:
        trans.rollback()
        if e.orig.pgcode == db._PG_ERR_CODE['query_canceled']:
            raise db.ValidationError({'query': ['Query took too long']})
        raise
    except Exception:
        trans.rollback()
        raise
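# A minimal sketch of how the transaction_* helpers are presumably driven:
# the caller opens the transaction, passes it in so failed statements can be
# rolled back, and commits on success (names here are assumptions):
#
#     trans = context['connection'].begin()
#     result = transaction_upsert(context, data_dict, timeout, trans)
#     trans.commit()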
def run_stats_query(select, resource_id, ts_query, where_clause, group_by,
                    values):
    query = 'SELECT {select} FROM "{resource_id}" {ts_query} {where_clause} {group_by}'.format(
        select=select,
        resource_id=resource_id,
        where_clause=where_clause,
        ts_query=ts_query,
        group_by=group_by)

    if not is_single_statement(query):
        raise datastore_db.ValidationError(
            {'query': ['Query is not a single statement.']})

    # The interfaces.IDatastore plugins return SQL that can be executed
    # directly, so use an SQLAlchemy connection rather than going through
    # the API and converting back to pure SQL
    engine = _get_engine()
    with engine.begin() as connection:
        try:
            query_result = connection.execute(query, values)
            return query_result.fetchall()
        except (DatabaseError, DBAPIError):
            # Database errors are deliberately swallowed; the function then
            # returns None and callers treat that as "no results"
            pass
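# Example usage of run_stats_query (hypothetical values), counting records
# per collection code:
#
#     rows = run_stats_query(
#         select='"collectionCode", COUNT(*) AS count',
#         resource_id='my-resource-id',
#         ts_query='',
#         where_clause='WHERE "collectionCode" IS NOT NULL',
#         group_by='GROUP BY "collectionCode"',
#         values={}
#     )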


def transaction_audit(context, data_dict, old_records, new_records,
                      update_time, primary_keys, delete_absent):