Пример #1
0
def create():
    """Create a key pair + email record from the JSON request body.

    Validates field lengths, hashes the password, stores the hex-decoded
    key material, and returns ``{'id': key_id}``.  Returns an error dict
    on validation failure; raises HTTPError(409) on a duplicate email.
    """
    # Reject oversized inputs before touching the database.
    for k in ['email', 'password']:
        if len(request.json.get(k, '')) > 128:
            return dict(error=k + " too long")

    for k in ['public_key', 'private_key']:
        if len(request.json.get(k, '')) > 4096:
            return dict(error=k + " too long")

    # Acquire the connection OUTSIDE the try block, so the finally clause
    # never references an unbound name if getconn() itself fails (the
    # original raised a confusing NameError in that case).
    conn = db.getconn()
    c = conn.cursor()
    try:
        pass_hash = sha256(request.json['password']).hexdigest()
        c.execute(
            "INSERT INTO keys(pass_hash, public, private) VALUES (%s, %s, %s) RETURNING id",
            (pass_hash, Binary(request.json['public_key'].decode('hex')),
             Binary(request.json['private_key'].decode('hex'))))

        key_id = c.fetchone()[0]
        c.execute("INSERT INTO emails(email, key) VALUES (%s, %s)", (
            request.json['email'],
            key_id,
        ))
        conn.commit()  # persist both inserts before releasing the conn
        return dict(id=key_id)

    except IntegrityError:
        # Duplicate email (unique constraint) -> HTTP 409 Conflict.
        conn.rollback()
        raise HTTPError(409)

    finally:
        c.close()
        db.putconn(conn)
Пример #2
0
    def generate_insert_stmt(self, data):
        """Build a parameterised INSERT statement for ``data``.

        ``data`` maps column names to 1-tuples holding the value; each
        value is coerced into something psycopg2 can adapt (lists for
        sets, raw markup for rich text, Binary blobs for files/images).
        Returns ``(sql, params)``.
        """
        # Join-based construction replaces the manual comma bookkeeping,
        # which produced a broken statement ('%s' with zero columns) for
        # empty input because of '%s, ' * (i - 1).
        columns = list(data)
        sqlstmt = u'INSERT INTO public."{}"({}) VALUES ({});'.format(
            self.postgres_table_name,
            u', '.join(u'{}'.format(key) for key in columns),
            u', '.join(['%s'] * len(columns)))

        sqldata = ()
        for key, value in six.iteritems(data):
            val = value[0]
            if value[0] is None:
                val = None
            elif isinstance(value[0], set):
                val = list(value[0])
            elif isinstance(value[0], RichTextValue):
                val = value[0].output
            elif isinstance(value[0], (NamedBlobImage, NamedBlobFile)):
                # Read the uncommitted temp blob; close the handle
                # promptly instead of leaking an open file object.
                tmpblob = value[0]._blob._p_blob_uncommitted  # temp file
                with open(tmpblob, 'rb') as blobfile:
                    val = Binary(blobfile.read())
            sqldata = sqldata + (val,)
        return sqlstmt, sqldata
Пример #3
0
def insert_cross_validation_results_into_table(
        forecast_id: int, horizon_days: int, initial_days: int,
        period_days: int, cross_validation_df, metrics_df):
    """Persist one cross-validation run (plus its metrics) for a forecast."""
    placeholders = ', '.join(['%s'] * 6)
    query = (
        'INSERT INTO forecasts_cross_validation'
        '(forecast_id, horizon_days, initial_days, period_days, cross_validation_df, metrics_df) '
        'VALUES ({})'.format(placeholders))
    params = (forecast_id, horizon_days, initial_days, period_days,
              Binary(cross_validation_df), Binary(metrics_df))
    with mara_db.postgresql.postgres_cursor_context('mara') as cursor:
        cursor.execute(query, params)
Пример #4
0
 def get_db_prep_value(self, value, connection, prepared=False):
     """Adapt ``value`` to a psycopg2 Binary for storage in a bytea column.

     Text is stored UTF-8 encoded; bytes are wrapped as-is; ``None`` and
     already-adapted values pass through unchanged.
     """
     if not prepared:
         value = self.get_prep_value(value)
     if value is None or isinstance(value, psycopg_binary_class):
         return value
     if isinstance(value, six.text_type):
         return Binary(value.encode('utf-8'))
     if isinstance(value, six.binary_type):
         return Binary(value)
     raise ValueError("only str and bytes permited")
Пример #5
0
 def get_db_prep_value(self, value, connection, prepared=False):
     """Adapt ``value`` for a bytea column (Python 2: str/unicode aware).

     NOTE(review): ``psycopg_bynary_class`` looks like a typo of
     "binary", but presumably matches the module-level name -- confirm
     before renaming.
     """
     value = value if prepared else self.get_prep_value(value)
     if isinstance(value, unicode):
         # Unicode text is stored UTF-8 encoded.
         value = Binary(value.encode('utf-8'))
     elif isinstance(value, str):
         value = Binary(value)
     elif isinstance(value, (psycopg_bynary_class, types.NoneType)):
         # Already adapted (or NULL) -- pass through unchanged.
         value = value
     else:
         raise ValueError("only str, unicode and bytea permited")
     return value
Пример #6
0
    def _process_latex(latex, image, template_id, verbose: bool = False):
        """Helper function to process latex, image and template_id flags"""
        data = {}
        # Nothing changed at all -> nothing to update.
        if latex is None and image is None and template_id is None:
            return data

        template_only = template_id is not None and latex is None and image is None
        latex_changed = latex is not None and image is None  # regardless of template_id state
        # always need template_id when image is manually changed
        image_with_template = image is not None and template_id is not None

        if latex_changed or template_only:
            # Recompile the pattern image from the (possibly new) template.
            the_template = template(version=template_id, verbose=verbose)
            compiled = Binary(
                compile_pattern(pattern=latex,
                                aTemplate=the_template.data,
                                verbose=verbose))
            data.update(image=compiled, template_id=the_template.id)
            if latex_changed:
                data.update(latex=latex)
        elif image_with_template:
            # Note if template_id is not specified
            data.update(image=image, template_id=template_id)
            if latex is not None:
                data.update(latex=latex)
        else:
            warn(
                'User Input Images must also specify show_template_manager used',
                ImageWithoutTemplateIDError)
        return data
Пример #7
0
    def blobEncode(self, blob):
        """Wrap ``blob`` in a driver-level Binary adapter.

        Prefers psycopg2's ``Binary``; falls back to the legacy psycopg
        module when psycopg2 is not installed.
        """
        try:
            from psycopg2 import Binary
        except ImportError:
            from psycopg import Binary
        return Binary(blob)
Пример #8
0
 def __init__(self, geom):
     "Initializes on the geometry."
     # Keep the WKB in string form so the adaptor pickles easily, and
     # remember the geometry's SRID alongside it.
     ewkb = str(geom.ewkb)
     self.ewkb = ewkb
     self.srid = geom.srid
     self._adapter = Binary(ewkb)
Пример #9
0
 def _make_document_data_invalid(self, ident=2, filename='index.cnxml'):
     """Hacks a chunk out of the file given as ``filename``
     at module with the given ``ident``.
     This to ensure a transform failure.
     """
     select_sql = (
         "SELECT file from files "
         "  WHERE fileid = "
         "    (SELECT fileid FROM module_files "
         "       WHERE module_ident = %s "
         "         AND filename = %s);")
     self.db_cursor.execute(select_sql, (ident, filename))
     index_cnxml = self.db_cursor.fetchone()[0][:]
     if sys.version_info > (3, ):
         index_cnxml = index_cnxml.tobytes()
     # Corrupt the document by dropping bytes 600-699.
     damaged = index_cnxml[:600] + index_cnxml[700:]
     update_sql = (
         "UPDATE files SET file = %s "
         "  WHERE fileid = "
         "    (SELECT fileid FROM module_files "
         "       WHERE module_ident = %s "
         "         AND filename = %s);")
     self.db_cursor.execute(update_sql, (Binary(damaged), ident, filename))
     return ident
Пример #10
0
def pg_sanitize_value(value, pg_datatype, max_length):
    '''Attempt to sanitize ``value`` so it casts cleanly to the desired
       postgres datatype ``pg_datatype``.

       In case of failures to parse the value it gets returned as-is,
       letting postgresql attempt the cast itself.
    '''
    if value is None:
        return value
    if pg_datatype in ('date', 'timestamptz', 'timestamp'):
        try:
            return value.isoformat()
        except AttributeError:
            # Not a date/datetime object -- try parsing it from a string.
            try:
                return date_parse(value).isoformat()
            except Exception:
                # Narrowed from a bare except: still let postgresql try
                # its best at parsing, but no longer swallow
                # SystemExit/KeyboardInterrupt.
                pass
    elif pg_datatype in ('char', 'text', 'varchar'):
        # truncate texts when there is a character limit in the db. Cast to
        # string to make it work for values sent as int/float/...
        if max_length is not None:
            return str(value)[:max_length]
    elif pg_datatype in ('bytea', 'geometry'):
        return Binary(value)
    elif pg_datatype == 'json':
        # serialize to json to use value with postgresql json type
        return Json(value)
    return value
Пример #11
0
 def _make_document_data_invalid(self, cursor, ident, filename):
     """Hacks a chunk out of the file given as ``filename``
     at module with the given ``ident``.
     This to ensure a transform failure.
     """
     cursor.execute(
         "SELECT file from files "
         "  WHERE fileid = "
         "    (SELECT fileid FROM module_files "
         "       WHERE module_ident = %s "
         "         AND filename = %s);", (ident, filename))
     # Copy the fetched buffer; renamed so the local no longer shadows
     # the builtin ``file``.
     file_data = cursor.fetchone()[0][:]
     # Make a mess of things by dropping bytes 600-699...
     content = file_data[:600] + file_data[700:]
     payload = (
         Binary(content),
         ident,
         filename,
     )
     cursor.execute(
         "UPDATE files SET file = %s "
         "  WHERE fileid = "
         "    (SELECT fileid FROM module_files "
         "       WHERE module_ident = %s "
         "         AND filename = %s);", payload)
     return ident
def upload_image(image: InMemoryUploadedFile, city: str) -> str:
    """Uploads an image for the specified city and returns the respective lookup hash.

    Parameters
    ----------
    image: InMemoryUploadedFile
        Image to insert.
    city: str
        City the image belongs to.

    Returns
    -------
    source_hash: str
        Image lookup hash.
    """
    insert_query = (
        "INSERT INTO load_layer.sight_images(sight_image, sight_city, "
        "sight_image_height, sight_image_width, sight_image_data_source) "
        "VALUES (%s, %s, %s, %s, %s)")

    pil_image = Image.open(image)
    raw_bytes = pil_image.tobytes()
    # Hash the raw pixel data to guarantee unique user input to the DWH.
    source_hash = md5(raw_bytes).hexdigest()
    width, height = pil_image.size

    exec_dml_query(insert_query,
                   (Binary(raw_bytes), city, height, width, source_hash))

    return source_hash
Пример #13
0
 def create(self, image):
     """Insert ``image`` (name + binary payload) into the images table."""
     logger.debug('Saving image %s to database, len=%s', image.name,
                  len(image.payload))
     sql = ("INSERT INTO images (name, payload) "
            "VALUES (%s, %s)")
     with self.get_cursor() as cur:
         cur.execute(sql, (image.name, Binary(image.payload)))
Пример #14
0
    def push(self, db_cursor):
        """Given a database cursor, check if the record exists before
        attempting to insert its pre- and post- dependencies, as well
        as itself.
        """
        if self.exists(db_cursor):
            return

        # Dependencies that must exist before this record is inserted.
        for record in self.predepends:
            record.push(db_cursor)

        # zip(*items()) splits into parallel key/value tuples; the
        # intermediate list comprehension copy was redundant.
        record_keys, record_values = zip(*self.record.items())
        formats = {
            'table_name': self.table_name,
            'columns': ', '.join(record_keys),
            'values': ', '.join(['%s'] * len(record_values)),
        }
        # Reassign FilePointer to psycopg2.Binary values.
        record_values = [to_sql_value(v) for v in record_values]
        for i, value in enumerate(record_values):
            if isinstance(value, FilePointer):
                with open(value.full_filepath, 'rb') as fb:
                    record_values[i] = Binary(fb.read())
        db_cursor.execute(
            "INSERT INTO {table_name} ({columns}) "
            "  VALUES ({values});".format(**formats), record_values)
        # Dependencies that require this record to exist first.
        for record in self.postdepends:
            record.push(db_cursor)
        return
Пример #15
0
    def checkinResource(self, object, message='', user=None):
        """
        Checkin a new version of an object to the repository

        object  : the new object
        message : a string describing the changes
        user    : the name of the user creating the new version

        NOTE: Python 2 code (old-style ``raise CommitError, msg`` syntax).
        """

        objectId = object.objectId
        vf = self.getVersionFolder(objectId)

        # Initialize history if it doesn't exist yet
        if not vf.objectIds():
            version = "1.1"
            addLatestReference(vf, 'latest', '', version)
        else:
            # Sanity check: if latest version isn't the base of these changes, it's a problem
            # if not self.isLatestVersion(object):
            version = object.getVersion()
            if (version != vf.latest.getVersion()):
                raise CommitError, "Version mismatch: version %s checked out, but latest is %s" % (version, vf.latest.getVersion())
            # Bump e.g. "1.1" -> "1.2" for the new revision.
            version = incrementMinor(version)

        # Clone the object as a new revision of this collection
        #self._log("Cloning %s" % obj, zLOG.INFO)
        zLOG.LOG("VersionFolder", zLOG.INFO, "Cloning %s (%s)" % (object, self.REQUEST['PATH_INFO']))
        vf.manage_clone(object, version)
        clone = getattr(vf, version)

        # Explicity set repository/versioning metadata
        # FIXME: This should be taken care of by the workflow tool
        try:
            clone.setVersion(version)
        except AttributeError:
            # Object has no setter -- fall back to plain attributes.
            clone.version = version
        try:
            clone.setRevised(DateTime())
        except AttributeError:
            clone.revised = DateTime()
        clone.submitter = user
        clone.submitlog = message
        # The state must be public so the object uses the correct method for viewing (ewwww!)
        clone.state = 'public'

        # Reset the 'latest' reference
        vf.latest.edit(clone.Title(), version)
        self.catalog.catalog_object(vf.latest)

        # Push metadata into DB
        self.portal_moduledb.insertModuleVersion(clone)

        # Generate collxml and stuff it into the DB as well
        xml = clone.restrictedTraverse('source_create')()
        # We know this will be a new file, so just insert it.
        res = self.portal_moduledb.sqlInsertFile(file = Binary(xml), media_type='text/xml')
        fid = res[0].fileid
        # This step depends on the InsertModuleVersion call, above
        self.portal_moduledb.sqlInsertModuleFile(moduleid=clone.objectId, version=clone.version, fileid=fid, filename='collection.xml',mimetype='text/xml')
Пример #16
0
    def add(self, item_or_items):
        """Adds any item or set of items to storage.

        Only single items are supported (lists raise NotImplementedError).
        A ``Resource`` is stored deduplicated by content hash; a
        ``Document``/``Binder`` is serialized to a dict and inserted
        together with its ACL and licensor-acceptance rows.
        """
        if isinstance(item_or_items, list):
            raise NotImplementedError()
        item = item_or_items
        # Dispatch on the lowercased class name of the item.
        type_name = item.__class__.__name__.lower()
        cursor = self.conn.cursor()
        if type_name == 'resource':
            # Deduplicate: only insert when no resource with this hash exists.
            exists = self.get(type_=Resource, hash=item._hash)
            if not exists:
                with item.open() as f:
                    data = Binary(f.read())
                checked_execute(cursor, SQL['add-resource'], {
                    'hash': item._hash,
                    'mediatype': item.media_type,
                    'data': data
                })
        elif type_name in ['document', 'binder']:
            args = item.to_dict()
            args['license'] = json.dumps(args['license'])
            args['original_license'] = json.dumps(args['original_license'])
            args['media_type'] = MEDIATYPES[type_name]
            # 'summary' is dropped; a binder's 'tree' becomes the content.
            if 'summary' in args:
                args.pop('summary')
            if 'tree' in args:
                args['content'] = json.dumps(args.pop('tree'))
            if 'cnx-archive-uri' not in args:
                args['cnx-archive-uri'] = None
            # BBB 18-Nov-2014 licensors - deprecated property 'licensors'
            #     needs changed in webview and archive before removing here.
            if 'licensors' in args:
                args['copyright_holders'] = args.pop('licensors')
            # /BBB

            for field in JSON_FIELDS:
                args[field] = psycopg2.extras.Json(args[field])
            checked_execute(cursor, SQL['add-document'], args)

            # One ACL row per (user, distinct permission) pair.
            for user_id, permissions in item.acls.items():
                for permission in set(permissions):
                    checked_execute(
                        cursor, SQL['add-document-acl'], {
                            'uuid': item.id,
                            'user_id': user_id,
                            'permission': permission,
                        })
            for licensor in item.licensor_acceptance:
                # licensor format: {'uid': <str>, 'has_accepted': <bool|None>}
                params = {
                    'uuid': item.id,
                    'user_id': licensor['id'],
                    'has_accepted': licensor['has_accepted'],
                }
                checked_execute(cursor,
                                SQL['add-document-licensor-acceptance'],
                                params)
        else:
            raise NotImplementedError(type_name)
        return item
Пример #17
0
def update_keys():
    """Replace the authenticated user's public/private key material.

    The keys arrive hex-encoded in the JSON request body.  Raises
    HTTPError(403) when no user is authenticated.
    """
    current_user = get_current_user(request)
    if not current_user:
        raise HTTPError(403)

    data = json.loads(request.body)
    public = Binary(data['public_key'].decode('hex'))
    private = Binary(data['private_key'].decode('hex'))
    conn = db.getconn()
    c = conn.cursor()
    try:
        c.execute("UPDATE keys SET public = %s, private = %s WHERE id=%s", (
            public,
            private,
            current_user,
        ))
        conn.commit()  # make the update durable before releasing the conn
    finally:
        # Always close the cursor and return the pooled connection, even
        # when execute fails (the original leaked both on exception).
        c.close()
        db.putconn(conn)
    return
Пример #18
0
def db_coordinates(django_db_setup, django_db_blocker):
    """Calculate what PostgreSQL would produce.

    Despite efforts to sync the PROJ.4 definitions, minor differences between platforms remain.
    So the values are calculated beforehand, so the expected data is included in the tests.
    """
    with django_db_blocker.unblock():
        with connection.cursor() as cursor:
            # SQL fragments that decode the EWKB parameters bound below.
            point1 = "ST_GeomFromEWKB(%(point1)s)"
            point2 = "ST_GeomFromEWKB(%(point2)s)"
            # Decimal precision for the textual (EWKT/GeoJSON/GML) outputs.
            pr = conf.GISSERVER_DB_PRECISION

            # Let the database compute every representation the tests
            # compare against: WGS84 geometry, EWKT, GeoJSON and GML
            # (point1 also re-projected back to RD / EPSG:28992).
            cursor.execute(
                "SELECT"
                f" ST_Transform({point1}, 4326) as point1_wgs84,"
                f" ST_AsEWKT(ST_Transform({point1}, 4326), {pr}) as point1_ewkt,"
                f" ST_AsGeoJson(ST_Transform({point1}, 4326), {pr}) as point1_geojson,"
                f" ST_AsGML(3, ST_Transform({point1}, 4326), {pr}, 1) as point1_xml_wgs84,"
                f" ST_AsGML(3, ST_Transform(ST_Transform({point1}, 4326), 28992), {pr}, 1) as point1_xml_rd,"  # noqa: E501
                f" ST_Transform({point2}, 4326) as point2_wgs84,"
                f" ST_AsEWKT(ST_Transform({point2}, 4326), {pr}) as point2_ewkt,"
                f" ST_AsGeoJson(ST_Transform({point2}, 4326), {pr}) as point2_geojson,"
                f" ST_AsGML(3, ST_Transform({point2}, 4326), {pr}, 1) as point2_xml_wgs84",
                {
                    "point1": Binary(CoordinateInputs.point1_rd.ewkb),
                    "point2": Binary(CoordinateInputs.point2_rd.ewkb),
                },
            )

            # Zip the column names with the single result row into a dict.
            columns = (x.name for x in cursor.description)
            result = cursor.fetchone()
            result = dict(zip(columns, result))

            return CoordinateInputs(
                point1_wgs84=_get_point(result["point1_wgs84"]),
                point1_ewkt=result["point1_ewkt"],
                point1_geojson=_get_geojson(result["point1_geojson"]),
                point1_xml_wgs84=_get_gml(result["point1_xml_wgs84"]),
                point1_xml_rd=_get_gml(result["point1_xml_rd"]),
                point2_wgs84=_get_point(result["point2_wgs84"]),
                point2_ewkt=result["point2_ewkt"],
                point2_geojson=_get_geojson(result["point2_geojson"]),
                point2_xml_wgs84=_get_gml(result["point2_xml_wgs84"]),
            )
Пример #19
0
def populate_db(cfg):
    """Seed the mail_encrypter database with 100000 dummy key/email rows."""
    conn = psycopg2.connect(host=cfg['db_host'],
                            database="mail_encrypter",
                            user="******",
                            password=cfg['db_pass'])
    cursor = conn.cursor()

    for idx in range(100000):
        key_id = idx + 1
        # Random key material; the password hash is just the row number.
        cursor.execute(
            "INSERT INTO keys(pass_hash, public, private) VALUES (%s, %s, %s)",
            (sha256(str(key_id)).hexdigest(),
             Binary(os.urandom(300)),
             Binary(os.urandom(1300))))
        cursor.execute("INSERT INTO emails(email, key) VALUES (%s, %s)", (
            str(key_id) + "@gmail.com",
            key_id,
        ))

    conn.commit()
    cursor.close()
    conn.close()
Пример #20
0
def update_graph(function_name, interval, dt, item_id):
    """Regenerate the graph image for ``item_id``; store the rendered
    bytes on success or the error text on failure, stamping ``ts``."""
    with get_db_cursor(commit=True) as cur:
        points = get_points_data(function_name, interval, dt)
        result = get_image_data(points)
        if not isinstance(result, bytes):
            # get_image_data returned an error description, not an image.
            cur.execute(
                "UPDATE graphs SET error = %s, ts = now() WHERE id = %s",
                (result, item_id))
        else:
            cur.execute(
                "UPDATE graphs SET graph = %s, ts = now() WHERE id = %s",
                (Binary(result), item_id))
 def _getFileID(self,fileob):
     """Return the fileid for a file, stored in the DB

     Deduplicates by content: md5 narrows the candidate rows, sha1
     confirms an exact match (guarding against md5 collisions).  A new
     row is inserted only when no identical content exists.
     NOTE: Python 2 era code (``md5.new``).
     """
     # let's make sure we've got a utf-8 string
     fdata = _utf8(fileob.data)
     m = md5.new(fdata).hexdigest()
     sha = sha1(fdata).hexdigest()
     res = self.sqlGetFileByMd5(md5=m)
     for r in res:
         if sha1(r.file).hexdigest() == sha:
             return r.fileid
     # Fell through, must be new bytes
     res = self.sqlInsertFile(file = Binary(fdata), media_type=fileob.content_type)
     return res[0].fileid
Пример #22
0
def get_fileid(cursor, fpath):
    """Return the fileid row for the file at ``fpath``, inserting the
    content first when no file with the same sha1 digest exists yet.
    """
    # Read as bytes: hashlib requires a bytes payload (text mode breaks
    # on Python 3 and on non-text content).
    with open(fpath, 'rb') as fp:
        bits = fp.read()
    sha1 = hashlib.new('sha1', bits).hexdigest()
    cursor.execute('select fileid from files where sha1=%s', [sha1])
    res = cursor.fetchall()
    if res:
        return res[0]
    cursor.execute('insert into files (file) values (%s) returning fileid',
                   [Binary(bits)])
    return cursor.fetchall()[0]
Пример #23
0
def npint2pgbyte(a):
    """
    Converts an integer-valued numpy array (values 0-255) into a
    postgres-escaped byte string.
    :param a: numpy array whose values fit in an unsigned char
    :return: binary string suitable for embedding in SQL
    """
    values = a.tolist()

    # 'B' = unsigned char; tobytes() replaces tostring(), which was
    # removed in Python 3.9.
    raw = array.array('B', values).tobytes()

    # str(Binary(...)) yields "'...'::bytea"; strip the leading quote
    # and the trailing "'::bytea" suffix, then un-double the quotes.
    binstring = str(Binary(raw))[1:-8]
    binstring = binstring.replace("''", "\'")

    return binstring
Пример #24
0
    def __init__(self, obj, geography=False):
        """
        Initialize on the spatial object.
        """
        is_geom = isinstance(obj, (GEOSGeometry, PostGISAdapter))
        self.is_geometry = is_geom

        # Store the EWKB (bytes form pickles easily) for geometries;
        # rasters are serialized through to_pgraster instead.
        if is_geom:
            ewkb = bytes(obj.ewkb)
            self.ewkb = ewkb
            self._adapter = Binary(ewkb)
        else:
            self.ewkb = to_pgraster(obj)

        self.srid = obj.srid
        self.geography = geography
Пример #25
0
def get_fileid(cursor, fpath):
    """Return the (fileid, media_type) row for the file at ``fpath``,
    inserting the content (with a detected mime type) when it is not
    yet stored.
    """
    # Read as bytes: hashlib requires a bytes payload, and the content
    # may not be valid text at all.
    with open(fpath, 'rb') as fp:
        bits = fp.read()
    sha1 = hashlib.new('sha1', bits).hexdigest()
    cursor.execute('select fileid, media_type from files where sha1=%s',
                   [sha1])
    res = cursor.fetchall()
    if res:
        return res[0]
    # Detect the mime type via file(1); decode so a text value (not
    # bytes, as check_output returns on Python 3) is stored.
    mimeType = subprocess.check_output(
        ['file', '--mime-type', '-Lb', fpath]).strip().decode()
    cursor.execute(
        'insert into files (file, media_type) values (%s, %s) returning fileid, media_type',
        (Binary(bits), mimeType))
    return cursor.fetchall()[0]
Пример #26
0
def npfloat2pgbyte(a):
    """
    Converts a 128-float descriptor array to unsigned chars
    (postgres-escaped bytes).
    :param a: numpy float array (quantized values must fit 0-255)
    :return: binary string suitable for embedding in SQL
    """
    # Quantize: scale by 512 and round to the nearest integer.
    quantized = np.floor(a * 512.0 + 0.5)
    values = quantized.astype(dtype=int).tolist()

    # 'B' = unsigned char; tobytes() replaces tostring(), which was
    # removed in Python 3.9.
    raw = array.array('B', values).tobytes()

    # str(Binary(...)) yields "'...'::bytea"; strip the leading quote
    # and the trailing "'::bytea" suffix, then un-double the quotes.
    binstring = str(Binary(raw))[1:-8]
    binstring = binstring.replace("''", "\'")

    return binstring
def up(cursor):
    """Insert all the recipes using the package version"""
    for recipe in recipes:
        # Reuse an existing files row when identical content is stored.
        digest = hashlib.new('sha1', recipe['file']).hexdigest()
        cursor.execute('SELECT fileid FROM files WHERE sha1 = %s', (digest, ))
        if cursor.rowcount == 0:
            cursor.execute(
                """INSERT INTO files (file) VALUES (%s)"""
                """ RETURNING fileid""", (Binary(recipe['file']), ))
        # fetchone() reads from whichever statement ran last (the SELECT
        # hit, or the INSERT ... RETURNING).
        fileid = cursor.fetchone()[0]

        cursor.execute(
            """INSERT INTO"""
            """ print_style_recipes"""
            """ (print_style, title, fileid, tag, commit_id)"""
            """ VALUES(%s, %s, %s, %s, %s)""",
            (recipe['id'], recipe['title'], fileid, recipe_tag, recipe_hash))
Пример #28
0
class binary(_column):
    """ORM column type for binary (bytea) data -- Python 2 era code.

    ``_symbol_f`` adapts a value to a psycopg2 Binary (falsy values
    become SQL NULL); ``_symbol_get`` converts a fetched buffer back to
    str.  NOTE: uses the Python 2 ``long`` builtin in get_memory().
    """
    _type = 'binary'
    _symbol_c = '%s'
    _symbol_f = lambda symb: symb and Binary(symb) or None
    _symbol_set = (_symbol_c, _symbol_f)
    _symbol_get = lambda self, x: x and str(x)

    # Binary columns are excluded from classic reads and prefetching
    # (payloads can be large).
    _classic_read = False
    _prefetch = False

    def __init__(self, string='unknown', filters=None, **args):
        # ``filters`` optionally restricts selectable file types.
        _column.__init__(self, string=string, **args)
        self.filters = filters

    def get_memory(self,
                   cr,
                   obj,
                   ids,
                   name,
                   user=None,
                   context=None,
                   values=None):
        """Read the binary field for ``ids`` from the in-memory ``values``.

        When ``bin_size`` is set in the context, return the
        human-readable size instead of the payload.
        """
        if not context:
            context = {}
        if not values:
            values = []
        res = {}
        for i in ids:
            val = None
            # Linear scan for the matching record in ``values``.
            for v in values:
                if v['id'] == i:
                    val = v[name]
                    break
            if context.get('bin_size', False) and val:
                res[i] = tools.human_size(long(val))
            else:
                res[i] = val
        return res

    get = get_memory
Пример #29
0
class binary(_column):
    """ORM column type for binary (bytea) data -- Python 2 era code.

    Variant that also honours a per-field ``bin_size_<name>`` context
    key.  ``_symbol_f`` adapts a value to a psycopg2 Binary (falsy
    values become SQL NULL); ``_symbol_get`` converts a fetched buffer
    back to str.  NOTE: uses the Python 2 ``long`` builtin.
    """
    _type = 'binary'
    _symbol_c = '%s'
    _symbol_f = lambda symb: symb and Binary(symb) or None
    _symbol_set = (_symbol_c, _symbol_f)
    _symbol_get = lambda self, x: x and str(x)

    # Binary columns are excluded from classic reads and prefetching
    # (payloads can be large).
    _classic_read = False
    _prefetch = False

    def __init__(self, string='unknown', filters=None, **args):
        # ``filters`` optionally restricts selectable file types.
        _column.__init__(self, string=string, **args)
        self.filters = filters

    def get_memory(self, cr, obj, ids, name, user=None, context=None, values=None):
        """Read the binary field for ``ids`` from the in-memory ``values``."""
        if not context:
            context = {}
        if not values:
            values = []
        res = {}
        for i in ids:
            val = None
            # Linear scan for the matching record in ``values``.
            for v in values:
                if v['id'] == i:
                    val = v[name]
                    break

            # If client is requesting only the size of the field, we return it instead
            # of the content. Presumably a separate request will be done to read the actual
            # content if it's needed at some point.
            # TODO: after 6.0 we should consider returning a dict with size and content instead of
            #       having an implicit convention for the value
            if val and context.get('bin_size_%s' % name, context.get('bin_size')):
                res[i] = tools.human_size(long(val))
            else:
                res[i] = val
        return res

    get = get_memory
Пример #30
0
def reverse_geocode(point):
    """
    Looks up the nearest block to the point.

    Returns a ``(block, distance)`` tuple; raises ReverseGeocodeError
    when no block lies within ``min_distance`` degrees of ``point``.
    """
    # In degrees for now because transforming to a projected space is
    # too slow for this purpose. TODO: store projected versions of the
    # locations alongside the canonical lng/lat versions.
    min_distance = 0.007
    # We use min_distance to cut down on the searchable space, because
    # the distance query we do next that actually compares distances
    # between geometries does not use the spatial index. TODO: convert
    # this to GeoDjango syntax. Should be possible but there are some
    # subtleties / performance issues with the DB API.
    # NOTE(review): the point's WKB is interpolated into the SQL text via
    # psycopg2 Binary's escaped repr rather than passed as a bound
    # parameter -- legacy style; confirm the escaping before reusing.
    cursor = connection.cursor()
    cursor.execute(
        """
        SELECT %(field_list)s, ST_Distance(ST_GeomFromWKB(E%(pt_wkb)s, 4326), %(geom_fieldname)s) AS "dist"
        FROM %(tablename)s
        WHERE id IN
            (SELECT id
             FROM %(tablename)s
             WHERE ST_DWithin(%(geom_fieldname)s, ST_GeomFromWKB(E%(pt_wkb)s, 4326), %(min_distance)s))
        ORDER BY "dist"
        LIMIT 1;
    """ % {
            'field_list': ', '.join([f.column for f in Block._meta.fields]),
            'pt_wkb': Binary(point.wkb),
            'geom_fieldname': 'location',
            'tablename': Block._meta.db_table,
            'min_distance': min_distance
        })
    num_fields = len(Block._meta.fields)
    # The query yields at most one row: the nearest block's columns plus
    # the distance in the trailing column.
    try:
        block, distance = [(Block(*row[:num_fields]), row[-1])
                           for row in cursor.fetchall()][0]
    except IndexError:
        raise ReverseGeocodeError()
    return block, distance