class PostGISAdapter:
    """psycopg2 adapter that quotes GEOS geometries and rasters for PostGIS."""

    def __init__(self, obj, geography=False):
        """Initialize on the spatial object (geometry or raster)."""
        self.is_geometry = isinstance(obj, (GEOSGeometry, PostGISAdapter))
        # Keep the EWKB as bytes so the adapter pickles easily; rasters are
        # serialized to their PostGIS WKB string instead.
        if self.is_geometry:
            self.ewkb = bytes(obj.ewkb)
            self._adapter = Binary(self.ewkb)
        else:
            self.ewkb = to_pgraster(obj)
        self.srid = obj.srid
        self.geography = geography

    def __conform__(self, proto):
        """Return self for psycopg2's ISQLQuote adaptation protocol."""
        if proto == ISQLQuote:
            return self
        raise Exception(
            "Error implementing psycopg2 protocol. Is psycopg2 installed?")

    def __eq__(self, other):
        return isinstance(other, PostGISAdapter) and self.ewkb == other.ewkb

    def __hash__(self):
        return hash(self.ewkb)

    def __str__(self):
        return self.getquoted().decode()

    @classmethod
    def _fix_polygon(cls, poly):
        # Hook point for backends that must fix ring orientation; no-op here.
        return poly

    def prepare(self, conn):
        """
        This method allows escaping the binary in the style required by the
        server's `standard_conforming_string` setting.
        """
        if self.is_geometry:
            self._adapter.prepare(conn)

    def getquoted(self):
        """
        Return a properly quoted string for use in PostgreSQL/PostGIS.
        """
        if not self.is_geometry:
            # For rasters, add explicit type cast to WKB string.
            return b"'%s'::raster" % self.ewkb.encode()
        # Psycopg will figure out whether to use E'\\000' or '\000'.
        func = b"ST_GeogFromWKB" if self.geography else b"ST_GeomFromEWKB"
        return b"%s(%s)" % (func, self._adapter.getquoted())
def generate_insert_stmt(self, data):
    """Build a parameterized INSERT statement plus its value tuple.

    ``data`` maps column names to 1-tuples holding the raw value. Values are
    converted to types psycopg2 can adapt: sets become lists, rich text
    becomes its rendered output, and named blobs are read into ``Binary``.

    Returns:
        tuple: ``(sqlstmt, sqldata)`` ready for ``cursor.execute``.
    """
    keys = list(data.keys())
    # NOTE(review): column names are interpolated unescaped; they must come
    # from trusted schema definitions, never user input.
    sqlstmt = u'INSERT INTO public."{}"({}) VALUES ({});'.format(
        self.postgres_table_name,
        u', '.join(u'{}'.format(key) for key in keys),
        u', '.join([u'%s'] * len(keys)),
    )
    sqldata = []
    for key in keys:
        val = data[key][0]
        if val is None:
            pass
        elif isinstance(val, set):
            val = list(val)
        elif isinstance(val, RichTextValue):
            val = val.output
        elif isinstance(val, (NamedBlobImage, NamedBlobFile)):
            # Read the uncommitted blob's temp file; close the handle
            # promptly (the original leaked every opened file).
            tmpblob = val._blob._p_blob_uncommitted  # temp file
            with open(tmpblob, 'rb') as blobfh:
                val = Binary(blobfh.read())
        sqldata.append(val)
    return sqlstmt, tuple(sqldata)
def create():
    """Create a key pair + email registration from the JSON request body.

    Returns an ``{'id': ...}`` dict on success, an ``{'error': ...}`` dict on
    oversized input, and raises HTTP 409 when the email already exists.
    """
    # Reject oversized fields before touching the database.
    for k in ['email', 'password']:
        if len(request.json.get(k, '')) > 128:
            return dict(error=k + " too long")
    for k in ['public_key', 'private_key']:
        if len(request.json.get(k, '')) > 4096:
            return dict(error=k + " too long")
    conn = None
    c = None
    try:
        conn = db.getconn()
        c = conn.cursor()
        # NOTE(review): unsalted SHA-256 is a weak password hash — consider
        # bcrypt/scrypt/argon2.
        pass_hash = sha256(request.json['password']).hexdigest()
        c.execute(
            "INSERT INTO keys(pass_hash, public, private) VALUES (%s, %s, %s) RETURNING id",
            (pass_hash, Binary(request.json['public_key'].decode('hex')),
             Binary(request.json['private_key'].decode('hex'))))
        key_id = c.fetchone()[0]
        c.execute("INSERT INTO emails(email, key) VALUES (%s, %s)", (
            request.json['email'],
            key_id,
        ))
        return dict(id=key_id)
    except IntegrityError:
        raise HTTPError(409)
    finally:
        # Guard both cleanups: the original raised NameError from the
        # finally block when getconn()/cursor() itself failed.
        if c is not None:
            c.close()
        if conn is not None:
            db.putconn(conn)
class PostGISAdapter(object):
    """psycopg2 adapter that quotes GEOS geometries for PostGIS."""

    def __init__(self, geom):
        "Initializes on the geometry."
        # Getting the WKB (in string form, to allow easy pickling of
        # the adaptor) and the SRID from the geometry.
        self.ewkb = str(geom.ewkb)
        self.srid = geom.srid
        self._adapter = Binary(self.ewkb)

    def __conform__(self, proto):
        # Does the given protocol conform to what Psycopg2 expects?
        if proto == ISQLQuote:
            return self
        else:
            raise Exception(
                'Error implementing psycopg2 protocol. Is psycopg2 installed?')

    def __eq__(self, other):
        # Guard against non-adapter operands: the original raised
        # AttributeError when compared with arbitrary objects.
        if not isinstance(other, PostGISAdapter):
            return False
        return (self.ewkb == other.ewkb) and (self.srid == other.srid)

    def __hash__(self):
        # __eq__ without __hash__ makes instances unhashable on Python 3;
        # hash on the same fields __eq__ compares.
        return hash((self.ewkb, self.srid))

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        # Pass the connection to the adapter: this allows escaping the binary
        # in the style required by the server's standard_conforming_string setting.
        self._adapter.prepare(conn)

    def getquoted(self):
        "Returns a properly quoted string for use in PostgreSQL/PostGIS."
        # psycopg will figure out whether to use E'\\000' or '\000'
        return 'ST_GeomFromEWKB(%s)' % self._adapter.getquoted()

    def prepare_database_save(self, unused):
        return self
def __init__(self, geom):
    """Capture the geometry's EWKB (string form, pickle-friendly) and SRID."""
    wkb = str(geom.ewkb)
    self.srid = geom.srid
    self.ewkb = wkb
    self._adapter = Binary(wkb)
class PostGISAdapter(object):
    """psycopg2 adapter that quotes GEOS geometries for PostGIS."""

    def __init__(self, geom):
        "Initializes on the geometry."
        # Getting the WKB (in string form, to allow easy pickling of
        # the adaptor) and the SRID from the geometry.
        self.ewkb = bytes(geom.ewkb)
        self.srid = geom.srid
        self._adapter = Binary(self.ewkb)

    def __conform__(self, proto):
        # Does the given protocol conform to what Psycopg2 expects?
        if proto == ISQLQuote:
            return self
        else:
            raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?')

    def __eq__(self, other):
        if not isinstance(other, PostGISAdapter):
            return False
        return (self.ewkb == other.ewkb) and (self.srid == other.srid)

    def __hash__(self):
        # Added: defining __eq__ without __hash__ makes instances unhashable
        # on Python 3; hash on the same fields __eq__ compares.
        return hash((self.ewkb, self.srid))

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        """
        This method allows escaping the binary in the style required by the
        server's `standard_conforming_string` setting.
        """
        self._adapter.prepare(conn)

    def getquoted(self):
        "Returns a properly quoted string for use in PostgreSQL/PostGIS."
        # psycopg will figure out whether to use E'\\000' or '\000'
        return str('ST_GeomFromEWKB(%s)' % self._adapter.getquoted().decode())
class PostGISAdapter(object):
    """Adapts GEOS geometries and GDAL rasters for psycopg2/PostGIS."""

    def __init__(self, obj, geography=False):
        """Initialize on the spatial object (geometry or raster)."""
        self.is_geometry = isinstance(obj, (Geometry, PostGISAdapter))
        # Geometries keep raw EWKB bytes (pickle-friendly) plus a Binary
        # adapter; rasters are serialized to their PostGIS WKB string.
        if self.is_geometry:
            self.ewkb = bytes(obj.ewkb)
            self._adapter = Binary(self.ewkb)
        else:
            self.ewkb = to_pgraster(obj)
        self.srid = obj.srid
        self.geography = geography

    def __conform__(self, proto):
        """Conform to psycopg2's ISQLQuote adaptation protocol."""
        if proto != ISQLQuote:
            raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?')
        return self

    def __eq__(self, other):
        return (isinstance(other, PostGISAdapter)
                and self.ewkb == other.ewkb
                and self.srid == other.srid)

    def __hash__(self):
        return hash((self.ewkb, self.srid))

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        """
        This method allows escaping the binary in the style required by the
        server's `standard_conforming_string` setting.
        """
        if self.is_geometry:
            self._adapter.prepare(conn)

    def getquoted(self):
        """
        Return a properly quoted string for use in PostgreSQL/PostGIS.
        """
        if not self.is_geometry:
            # For rasters, add explicit type cast to WKB string.
            return "'%s'::raster" % self.ewkb
        # Psycopg will figure out whether to use E'\\000' or '\000'.
        func = 'ST_GeogFromWKB' if self.geography else 'ST_GeomFromEWKB'
        return '%s(%s)' % (func, self._adapter.getquoted().decode())
def insert_cross_validation_results_into_table(
        forecast_id: int, horizon_days: int, initial_days: int,
        period_days: int, cross_validation_df, metrics_df):
    """Persist one cross-validation run into forecasts_cross_validation.

    The two dataframes are stored as binary blobs via ``Binary``.
    """
    query = (
        "INSERT INTO forecasts_cross_validation"
        "(forecast_id, horizon_days, initial_days, period_days, cross_validation_df, metrics_df) "
        "VALUES (%s, %s, %s, %s, %s, %s)"
    )
    params = (forecast_id, horizon_days, initial_days, period_days,
              Binary(cross_validation_df), Binary(metrics_df))
    with mara_db.postgresql.postgres_cursor_context('mara') as cursor:
        cursor.execute(query, params)
def get_db_prep_value(self, value, connection, prepared=False):
    """Adapt ``value`` for storage in a bytea column.

    Unicode text is UTF-8 encoded and wrapped in ``Binary``; byte strings
    are wrapped directly; already-adapted values and ``None`` pass through.

    Raises:
        ValueError: for any other type.
    """
    # NOTE(review): Python 2-only names (``unicode``, ``types.NoneType``).
    # ``psycopg_bynary_class`` looks like a misspelling of the
    # ``psycopg_binary_class`` used by sibling implementations — confirm
    # against the module-level import.
    value = value if prepared else self.get_prep_value(value)
    if isinstance(value, unicode):
        value = Binary(value.encode('utf-8'))
    elif isinstance(value, str):
        value = Binary(value)
    elif isinstance(value, (psycopg_bynary_class, types.NoneType)):
        value = value
    else:
        raise ValueError("only str, unicode and bytea permited")
    return value
def get_db_prep_value(self, value, connection, prepared=False):
    """Adapt ``value`` for storage in a bytea column.

    Unicode text is UTF-8 encoded and wrapped in ``Binary``; byte strings
    are wrapped directly; ``Binary`` instances pass through unchanged.

    Raises:
        ValueError: for any other type (including ``None``).
    """
    # NOTE(review): Python 2-only (``unicode``). Also, if ``Binary`` is
    # psycopg2's Binary *factory function* rather than a class,
    # ``isinstance(value, Binary)`` raises TypeError — confirm what
    # ``Binary`` is bound to at module level.
    value = value if prepared else self.get_prep_value(value)
    if isinstance(value, unicode):
        value = Binary(value.encode("utf-8"))
    elif isinstance(value, str):
        value = Binary(value)
    elif isinstance(value, Binary):
        value = value
    else:
        raise ValueError("only str, unicode and bytea permited")
    return value
def get_db_prep_value(self, value, connection, prepared=False):
    """Adapt ``value`` for storage in a bytea column.

    Text is UTF-8 encoded and wrapped in ``Binary``; bytes are wrapped
    directly; already-adapted values and ``None`` pass through.

    Raises:
        ValueError: for any other type.
    """
    if not prepared:
        value = self.get_prep_value(value)
    if isinstance(value, six.text_type):
        return Binary(value.encode('utf-8'))
    if isinstance(value, six.binary_type):
        return Binary(value)
    if value is None or isinstance(value, psycopg_binary_class):
        return value
    raise ValueError("only str and bytes permited")
def get_db_prep_value(self, value, connection, prepared=False):
    """Adapt ``value`` for storage in a bytea column.

    Unicode text is UTF-8 encoded and wrapped in ``Binary``; byte strings
    are wrapped directly; already-adapted values and ``None`` pass through.

    Raises:
        ValueError: for any other type.
    """
    # NOTE(review): Python 2-only names (``unicode``, ``types.NoneType``);
    # ``types.NoneType`` does not exist on Python 3.
    value = value if prepared else self.get_prep_value(value)
    if isinstance(value, unicode):
        value = Binary(value.encode('utf-8'))
    elif isinstance(value, str):
        value = Binary(value)
    elif isinstance(value, (psycopg_binary_class, types.NoneType)):
        value = value
    else:
        raise ValueError("Only str, unicode and bytea permited")
    return value
class PatchedAdapter(PostGISAdapter):
    """PostGISAdapter variant that re-wraps the EWKB in a fresh Binary adapter."""

    def __init__(self, *args, **kwargs):
        super(PatchedAdapter, self).__init__(*args, **kwargs)
        self._adapter = Binary(self.ewkb)

    def prepare(self, conn):
        # Hand the connection to the Binary adapter so it can pick the
        # escape style required by standard_conforming_strings.
        self._adapter.prepare(conn)

    def getquoted(self):
        """Return a properly quoted ST_GeomFromEWKB(...) expression."""
        # psycopg chooses between E'\\000' and '\000' escaping itself.
        quoted = self._adapter.getquoted()
        return 'ST_GeomFromEWKB(%s)' % quoted
def _make_document_data_invalid(self, ident=2, filename='index.cnxml'):
    """Hacks a chunk out of the file given as ``filename`` at module
    with the given ``ident``, ensuring a transform failure.
    """
    self.db_cursor.execute(
        "SELECT file from files "
        " WHERE fileid = "
        " (SELECT fileid FROM module_files "
        " WHERE module_ident = %s "
        " AND filename = %s);", (ident, filename))
    index_cnxml = self.db_cursor.fetchone()[0][:]
    if sys.version_info > (3, ):
        # psycopg returns a memoryview on Python 3; slicing needs bytes.
        index_cnxml = index_cnxml.tobytes()
    # Splice out a chunk of the middle to guarantee malformed content.
    mangled = index_cnxml[:600] + index_cnxml[700:]
    self.db_cursor.execute(
        "UPDATE files SET file = %s "
        " WHERE fileid = "
        " (SELECT fileid FROM module_files "
        " WHERE module_ident = %s "
        " AND filename = %s);", (Binary(mangled), ident, filename))
    return ident
def create(self, image):
    """Insert ``image`` (name + binary payload) into the images table."""
    logger.debug('Saving image %s to database, len=%s', image.name,
                 len(image.payload))
    sql = ("INSERT INTO images (name, payload) "
           "VALUES (%s, %s)")
    with self.get_cursor() as cur:
        cur.execute(sql, (image.name, Binary(image.payload)))
def _process_latex(latex, image, template_id, verbose: bool = False):
    """Helper function to process latex, image and template_id flags.

    Builds and returns a dict of changed fields (``image``, ``template_id``
    and optionally ``latex``); returns an empty dict when all three inputs
    are ``None``.
    """
    data = {}
    if latex is not None or image is not None or template_id is not None:
        only_template_changed = template_id is not None and latex is None and image is None
        only_latex_changed = latex is not None and image is None  # regardless of template_id state
        # always need template_id when image is manually changed
        acceptable_image_change = image is not None and template_id is not None
        recompile = only_latex_changed or only_template_changed
        if recompile:
            # Re-render the image from the latex source and template.
            # NOTE(review): when only the template changed, ``latex`` is None
            # here and is still passed to compile_pattern — confirm that is
            # intended.
            the_template = template(version=template_id, verbose=verbose)
            a_template = the_template.data
            template_id = the_template.id
            image = Binary(
                compile_pattern(pattern=latex, aTemplate=a_template,
                                verbose=verbose))
            data.update(image=image, template_id=template_id)
            if only_latex_changed:
                data.update(latex=latex)
        elif acceptable_image_change:
            # Note if template_id is not specified
            data.update(image=image, template_id=template_id)
            if latex is not None:
                data.update(latex=latex)
        else:
            # Manual image without a template_id is rejected with a warning.
            warn(
                'User Input Images must also specify show_template_manager used',
                ImageWithoutTemplateIDError)
    return data
def upload_image(image: InMemoryUploadedFile, city: str) -> str:
    """Uploads an image for the specified city and returns the respective
    lookup hash.

    Parameters
    ----------
    image: InMemoryUploadedFile
        Image to insert.
    city: str
        City the image belongs to.

    Returns
    -------
    source_hash: str
        Image lookup hash.
    """
    empty_dml_query = (
        "INSERT INTO load_layer.sight_images(sight_image, sight_city, "
        "sight_image_height, sight_image_width, sight_image_data_source) "
        "VALUES (%s, %s, %s, %s, %s)")
    # Close the PIL handle deterministically (the original leaked it).
    with Image.open(image) as img:
        img_bytes = img.tobytes()
        width = img.size[0]
        height = img.size[1]
    # hash image to guarantee unique user input to DWH
    source_hash = md5(img_bytes).hexdigest()
    query_filling_params = (Binary(img_bytes), city, height, width,
                            source_hash)
    exec_dml_query(empty_dml_query, query_filling_params)
    return source_hash
def blobEncode(self, blob):
    """Wrap ``blob`` in the DB driver's Binary adapter (psycopg2 or psycopg)."""
    try:
        from psycopg2 import Binary
    except ImportError:
        # Fall back to the older psycopg driver.
        from psycopg import Binary
    return Binary(blob)
def __init__(self, obj, geography=False):
    """Initialize on the spatial object (geometry or raster)."""
    self.is_geometry = isinstance(obj, (GEOSGeometry, PostGISAdapter))
    if self.is_geometry:
        # Geometries: keep raw EWKB bytes (pickle-friendly) plus the adapter.
        self.ewkb = bytes(obj.ewkb)
        self._adapter = Binary(self.ewkb)
    else:
        # Rasters: serialize to the PostGIS raster WKB representation.
        self.ewkb = to_pgraster(obj)
    self.srid = obj.srid
    self.geography = geography
def __init__(self, geom):
    """Capture the geometry's EWKB bytes (pickle-friendly) and SRID."""
    wkb = bytes(geom.ewkb)
    self.srid = geom.srid
    self.ewkb = wkb
    self._adapter = Binary(wkb)
def push(self, db_cursor):
    """Given a database cursor, insert this record unless it already exists,
    pushing pre-dependencies first and post-dependencies afterwards.
    """
    if self.exists(db_cursor):
        return
    for dependency in self.predepends:
        dependency.push(db_cursor)
    pairs = [(k, v) for k, v in self.record.items()]
    record_keys, record_values = zip(*pairs)
    formats = {
        'table_name': self.table_name,
        'columns': ', '.join(record_keys),
        'values': ', '.join(['%s'] * len(record_values)),
    }
    # Reassign FilePointer to psycopg2.Binary values.
    record_values = [to_sql_value(v) for v in record_values]
    for position, value in enumerate(record_values):
        if isinstance(value, FilePointer):
            with open(value.full_filepath, 'rb') as fb:
                record_values[position] = Binary(fb.read())
    db_cursor.execute(
        "INSERT INTO {table_name} ({columns}) "
        " VALUES ({values});".format(**formats), record_values)
    for dependency in self.postdepends:
        dependency.push(db_cursor)
    return
def _make_document_data_invalid(self, cursor, ident, filename):
    """Hacks a chunk out of the file given as ``filename`` at module
    with the given ``ident``, ensuring a transform failure.
    """
    cursor.execute(
        "SELECT file from files "
        " WHERE fileid = "
        " (SELECT fileid FROM module_files "
        " WHERE module_ident = %s "
        " AND filename = %s);", (ident, filename))
    blob = cursor.fetchone()[0][:]
    # Splice out a chunk of the middle to guarantee malformed content.
    mangled = blob[:600] + blob[700:]
    cursor.execute(
        "UPDATE files SET file = %s "
        " WHERE fileid = "
        " (SELECT fileid FROM module_files "
        " WHERE module_ident = %s "
        " AND filename = %s);", (Binary(mangled), ident, filename))
    return ident
def pg_sanitize_value(value, pg_datatype, max_length):
    """Best-effort coercion of ``value`` for a cast to ``pg_datatype``.

    Values that cannot be converted are returned unchanged so that
    PostgreSQL itself can attempt the parse.
    """
    if value is None:
        return value
    if pg_datatype in ('date', 'timestamptz', 'timestamp'):
        try:
            return value.isoformat()
        except AttributeError:
            try:
                return date_parse(value).isoformat()
            except Exception:
                # Narrowed from a bare ``except:`` (which also swallowed
                # KeyboardInterrupt/SystemExit). Still deliberately broad:
                # let postgresql try its best at parsing :(
                pass
    elif pg_datatype in ('char', 'text', 'varchar'):
        # Truncate texts when there is a character limit in the db. Cast to
        # string to make it work for values sent as int/float/...
        if max_length is not None:
            return str(value)[:max_length]
    elif pg_datatype in ('bytea', 'geometry'):
        return Binary(value)
    elif pg_datatype == 'json':
        # serialize to json to use value with postgresql json type
        return Json(value)
    return value
def add(self, item_or_items):
    """Adds any item or set of items to storage."""
    # Lists are accepted by the signature but not yet supported.
    if isinstance(item_or_items, list):
        raise NotImplementedError()
    item = item_or_items
    # Dispatch on the lowercase class name: 'resource', 'document', 'binder'.
    type_name = item.__class__.__name__.lower()
    cursor = self.conn.cursor()
    if type_name == 'resource':
        # Resources are content-addressed by hash; skip when already stored.
        exists = self.get(type_=Resource, hash=item._hash)
        if not exists:
            with item.open() as f:
                data = Binary(f.read())
            checked_execute(cursor, SQL['add-resource'], {
                'hash': item._hash,
                'mediatype': item.media_type,
                'data': data
            })
    elif type_name in ['document', 'binder']:
        # Normalize the item dict into the shape the SQL statement expects.
        args = item.to_dict()
        args['license'] = json.dumps(args['license'])
        args['original_license'] = json.dumps(args['original_license'])
        args['media_type'] = MEDIATYPES[type_name]
        if 'summary' in args:
            args.pop('summary')
        if 'tree' in args:
            args['content'] = json.dumps(args.pop('tree'))
        if 'cnx-archive-uri' not in args:
            args['cnx-archive-uri'] = None
        # BBB 18-Nov-2014 licensors - deprecated property 'licensors'
        # needs changed in webview and archive before removing here.
        if 'licensors' in args:
            args['copyright_holders'] = args.pop('licensors')
        # /BBB
        for field in JSON_FIELDS:
            args[field] = psycopg2.extras.Json(args[field])
        checked_execute(cursor, SQL['add-document'], args)
        # One ACL row per (user, permission) pair; set() removes duplicates.
        for user_id, permissions in item.acls.items():
            for permission in set(permissions):
                checked_execute(
                    cursor, SQL['add-document-acl'], {
                        'uuid': item.id,
                        'user_id': user_id,
                        'permission': permission,
                    })
        for licensor in item.licensor_acceptance:
            # licensor format: {'uid': <str>, 'has_accepted': <bool|None>}
            params = {
                'uuid': item.id,
                'user_id': licensor['id'],
                'has_accepted': licensor['has_accepted'],
            }
            checked_execute(cursor, SQL['add-document-licensor-acceptance'],
                            params)
    else:
        raise NotImplementedError(type_name)
    return item
def checkinResource(self, object, message='', user=None):
    """
    Checkin a new version of an object to the repository
    object : the new object
    message : a string describing the changes
    user : the name of the user creating the new version
    """
    # NOTE(review): Python 2-only module (``raise X, msg`` syntax below).
    objectId = object.objectId
    vf = self.getVersionFolder(objectId)
    # Initialize history if it doesn't exist yet
    if not vf.objectIds():
        version = "1.1"
        addLatestReference(vf, 'latest', '', version)
    else:
        # Sanity check: if latest version isn't the base of these changes, it's a problem
        # if not self.isLatestVersion(object):
        version = object.getVersion()
        if (version != vf.latest.getVersion()):
            raise CommitError, "Version mismatch: version %s checked out, but latest is %s" % (version, vf.latest.getVersion())
        version = incrementMinor(version)
    # Clone the object as a new revision of this collection
    #self._log("Cloning %s" % obj, zLOG.INFO)
    zLOG.LOG("VersionFolder", zLOG.INFO,
             "Cloning %s (%s)" % (object, self.REQUEST['PATH_INFO']))
    vf.manage_clone(object, version)
    clone = getattr(vf, version)
    # Explicity set repository/versioning metadata
    # FIXME: This should be taken care of by the workflow tool
    try:
        clone.setVersion(version)
    except AttributeError:
        clone.version = version
    try:
        clone.setRevised(DateTime())
    except AttributeError:
        clone.revised = DateTime()
    clone.submitter = user
    clone.submitlog = message
    # The state must be public so the object uses the correct method for viewing (ewwww!)
    clone.state = 'public'
    # Reset the 'latest' reference
    vf.latest.edit(clone.Title(), version)
    self.catalog.catalog_object(vf.latest)
    # Push metadata into DB
    self.portal_moduledb.insertModuleVersion(clone)
    # Generate collxml and stuff it into the DB as well
    xml = clone.restrictedTraverse('source_create')()
    # We know this will be a new file, so just insert it.
    res = self.portal_moduledb.sqlInsertFile(file = Binary(xml), media_type='text/xml')
    fid = res[0].fileid
    # This step depends on the InsertModuleVersion call, above
    self.portal_moduledb.sqlInsertModuleFile(moduleid=clone.objectId,
                                             version=clone.version, fileid=fid,
                                             filename='collection.xml', mimetype='text/xml')
def update_keys():
    """Replace the authenticated user's stored public/private key pair.

    Raises HTTP 403 when the request is not authenticated.
    """
    current_user = get_current_user(request)
    if not current_user:
        raise HTTPError(403)
    data = json.loads(request.body)
    public = Binary(data['public_key'].decode('hex'))
    private = Binary(data['private_key'].decode('hex'))
    conn = db.getconn()
    try:
        c = conn.cursor()
        try:
            c.execute("UPDATE keys SET public = %s, private = %s WHERE id=%s", (
                public,
                private,
                current_user,
            ))
        finally:
            # The original leaked the cursor/connection when execute raised.
            c.close()
    finally:
        db.putconn(conn)
    return
def db_coordinates(django_db_setup, django_db_blocker):
    """Calculate what PostgreSQL would produce.

    Despite efforts to sync the PROJ.4 definitions, minor differences
    between platforms remain. So the values are calculated beforehand,
    so the expected data is included in the tests.
    """
    with django_db_blocker.unblock():
        with connection.cursor() as cursor:
            # SQL fragments that decode the EWKB bind parameters passed below.
            point1 = "ST_GeomFromEWKB(%(point1)s)"
            point2 = "ST_GeomFromEWKB(%(point2)s)"
            # Coordinate output precision, from project settings.
            pr = conf.GISSERVER_DB_PRECISION
            # Transform both reference points to WGS84 and render them in
            # every representation the tests compare against (EWKT, GeoJSON,
            # GML for WGS84 and RD).
            cursor.execute(
                "SELECT"
                f" ST_Transform({point1}, 4326) as point1_wgs84,"
                f" ST_AsEWKT(ST_Transform({point1}, 4326), {pr}) as point1_ewkt,"
                f" ST_AsGeoJson(ST_Transform({point1}, 4326), {pr}) as point1_geojson,"
                f" ST_AsGML(3, ST_Transform({point1}, 4326), {pr}, 1) as point1_xml_wgs84,"
                f" ST_AsGML(3, ST_Transform(ST_Transform({point1}, 4326), 28992), {pr}, 1) as point1_xml_rd,"  # noqa: E501
                f" ST_Transform({point2}, 4326) as point2_wgs84,"
                f" ST_AsEWKT(ST_Transform({point2}, 4326), {pr}) as point2_ewkt,"
                f" ST_AsGeoJson(ST_Transform({point2}, 4326), {pr}) as point2_geojson,"
                f" ST_AsGML(3, ST_Transform({point2}, 4326), {pr}, 1) as point2_xml_wgs84",
                {
                    "point1": Binary(CoordinateInputs.point1_rd.ewkb),
                    "point2": Binary(CoordinateInputs.point2_rd.ewkb),
                },
            )
            # Zip column names with the single result row into a dict.
            columns = (x.name for x in cursor.description)
            result = cursor.fetchone()
            result = dict(zip(columns, result))
            return CoordinateInputs(
                point1_wgs84=_get_point(result["point1_wgs84"]),
                point1_ewkt=result["point1_ewkt"],
                point1_geojson=_get_geojson(result["point1_geojson"]),
                point1_xml_wgs84=_get_gml(result["point1_xml_wgs84"]),
                point1_xml_rd=_get_gml(result["point1_xml_rd"]),
                point2_wgs84=_get_point(result["point2_wgs84"]),
                point2_ewkt=result["point2_ewkt"],
                point2_geojson=_get_geojson(result["point2_geojson"]),
                point2_xml_wgs84=_get_gml(result["point2_xml_wgs84"]),
            )
class PostGISAdapter(object):
    """Adapts GEOS geometries for psycopg2, optionally as geography values."""

    def __init__(self, geom, geography=False):
        "Initializes on the geometry."
        # EWKB kept as bytes so the adapter pickles easily.
        self.ewkb = bytes(geom.ewkb)
        self.srid = geom.srid
        self.geography = geography
        self._adapter = Binary(self.ewkb)

    def __conform__(self, proto):
        """Conform to psycopg2's ISQLQuote adaptation protocol."""
        if proto != ISQLQuote:
            raise Exception(
                'Error implementing psycopg2 protocol. Is psycopg2 installed?')
        return self

    def __eq__(self, other):
        return (isinstance(other, PostGISAdapter)
                and self.ewkb == other.ewkb
                and self.srid == other.srid)

    def __hash__(self):
        return hash((self.ewkb, self.srid))

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        """
        This method allows escaping the binary in the style required by the
        server's `standard_conforming_string` setting.
        """
        self._adapter.prepare(conn)

    def getquoted(self):
        "Returns a properly quoted string for use in PostgreSQL/PostGIS."
        # psycopg will figure out whether to use E'\\000' or '\000'
        function = 'ST_GeogFromWKB' if self.geography else 'ST_GeomFromEWKB'
        return str('%s(%s)' % (function, self._adapter.getquoted().decode()))
def populate_db(cfg):
    """Seed the mail_encrypter database with 100000 synthetic key/email rows."""
    conn = psycopg2.connect(host=cfg['db_host'], database="mail_encrypter",
                            user="******", password=cfg['db_pass'])
    c = conn.cursor()
    for i in range(100000):
        row_id = i + 1
        # Deterministic pass hash per row; random bytes stand in for keys.
        c.execute(
            "INSERT INTO keys(pass_hash, public, private) VALUES (%s, %s, %s)",
            (sha256(str(row_id)).hexdigest(), Binary(os.urandom(300)),
             Binary(os.urandom(1300))))
        c.execute("INSERT INTO emails(email, key) VALUES (%s, %s)", (
            str(row_id) + "@gmail.com",
            row_id,
        ))
    conn.commit()
    c.close()
    conn.close()
def update_graph(function_name, interval, dt, item_id):
    """Render and store the graph for one item, or record the error instead."""
    with get_db_cursor(commit=True) as cur:
        points = get_points_data(function_name, interval, dt)
        graph = get_image_data(points)
        if not isinstance(graph, bytes):
            # A non-bytes result is the renderer's error message.
            cur.execute(
                "UPDATE graphs SET error = %s, ts = now() WHERE id = %s",
                (graph, item_id))
        else:
            cur.execute(
                "UPDATE graphs SET graph = %s, ts = now() WHERE id = %s",
                (Binary(graph), item_id))
def get_fileid(cursor, fpath):
    """Return the ``fileid`` row for the file at ``fpath``, inserting it if new.

    Files are deduplicated by the SHA-1 of their contents.
    """
    # Open in binary mode: hashlib requires bytes on Python 3, and text mode
    # would also corrupt binary file contents.
    with open(fpath, 'rb') as fp:
        bits = fp.read()
    sha1 = hashlib.new('sha1', bits).hexdigest()
    cursor.execute('select fileid from files where sha1=%s', [sha1])
    res = cursor.fetchall()
    if res:
        return res[0]
    cursor.execute('insert into files (file) values (%s) returning fileid',
                   [Binary(bits)])
    return cursor.fetchall()[0]
def _getFileID(self,fileob): """Return the fileid for a file, stored in the DB""" # let's make sure we've got a utf-8 string fdata = _utf8(fileob.data) m = md5.new(fdata).hexdigest() sha = sha1(fdata).hexdigest() res = self.sqlGetFileByMd5(md5=m) for r in res: if sha1(r.file).hexdigest() == sha: return r.fileid # Fell through, must be new bytes res = self.sqlInsertFile(file = Binary(fdata), media_type=fileob.content_type) return res[0].fileid
class PostGISAdapter(object):
    """psycopg2 adapter that quotes GEOS geometries for PostGIS."""

    def __init__(self, geom):
        "Initializes on the geometry."
        # Getting the WKB (in string form, to allow easy pickling of
        # the adaptor) and the SRID from the geometry.
        self.ewkb = str(geom.ewkb)
        self.srid = geom.srid
        self._adapter = Binary(self.ewkb)

    def __conform__(self, proto):
        # Does the given protocol conform to what Psycopg2 expects?
        if proto == ISQLQuote:
            return self
        else:
            m = 'Error implementing psycopg2 protocol. Is psycopg2 installed?'
            raise Exception(m)

    def __eq__(self, other):
        # Guard against non-adapter operands: the original raised
        # AttributeError when compared with arbitrary objects.
        if not isinstance(other, PostGISAdapter):
            return False
        return (self.ewkb == other.ewkb) and (self.srid == other.srid)

    def __hash__(self):
        # __eq__ without __hash__ makes instances unhashable on Python 3;
        # hash on the same fields __eq__ compares.
        return hash((self.ewkb, self.srid))

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        # Pass the connection to the adapter: this allows escaping the binary
        # in the style required by the server's
        # standard_conforming_string setting
        self._adapter.prepare(conn)

    def getquoted(self):
        "Returns a properly quoted string for use in PostgreSQL/PostGIS."
        # psycopg will figure out whether to use E'\\000' or '\000'
        return 'ST_GeomFromEWKB(%s)' % self._adapter.getquoted()

    def prepare_database_save(self, unused):
        return self
def npint2pgbyte(a):
    """
    Converts a 128float array to uchar (escaped bytes)

    :param nparray: 128float
    :return: binary string
    """
    # NOTE(review): assumes every element of ``a`` fits in 0..255 —
    # array('B', ...) raises OverflowError otherwise; confirm upstream.
    l = a.tolist()
    # NOTE(review): ``tostring`` was removed in Python 3.9 (``tobytes`` is
    # the replacement) — this code appears Python 2 era; confirm runtime.
    b = array.array('B', l).tostring()
    # The [1:-8] slice strips the leading quote and trailing "'::bytea" from
    # the adapted literal — presumably matches this psycopg version's repr;
    # verify against the installed driver.
    binstring = str(Binary(b))[1:-8]
    binstring = binstring.replace("''", "\'")
    return binstring
def __init__(self, obj, geography=False):
    """Initialize on the spatial object (geometry or raster)."""
    self.is_geometry = isinstance(obj, (Geometry, PostGISAdapter))
    if self.is_geometry:
        # Geometries: keep raw EWKB bytes (pickle-friendly) plus the adapter.
        self.ewkb = bytes(obj.ewkb)
        self._adapter = Binary(self.ewkb)
    else:
        # Rasters: serialize to the PostGIS raster WKB representation.
        self.ewkb = to_pgraster(obj)
    self.srid = obj.srid
    self.geography = geography
def get_fileid(cursor, fpath):
    """Return (fileid, media_type) for ``fpath``, inserting the file if new.

    Files are deduplicated by the SHA-1 of their contents; the mime type of
    new files is detected with the ``file`` utility.
    """
    # Open in binary mode: hashlib requires bytes on Python 3, and text mode
    # would also corrupt binary file contents.
    with open(fpath, 'rb') as fp:
        bits = fp.read()
    sha1 = hashlib.new('sha1', bits).hexdigest()
    cursor.execute('select fileid, media_type from files where sha1=%s',
                   [sha1])
    res = cursor.fetchall()
    if res:
        return res[0]
    mimeType = subprocess.check_output(
        ['file', '--mime-type', '-Lb', fpath]).strip()
    # NOTE(review): check_output returns bytes on Python 3; decode here if
    # the media_type column expects text.
    cursor.execute(
        'insert into files (file, media_type) values (%s, %s) returning fileid, media_type',
        (Binary(bits), mimeType))
    return cursor.fetchall()[0]
def npfloat2pgbyte(a):
    """
    Converts a 128float array to uchar (escaped bytes)

    :param nparray: 128float
    :return: binary string
    """
    # Scale and round to the nearest integer.
    a = np.floor(a * 512.0 + 0.5)
    l = a.astype(dtype=int).tolist()
    # NOTE(review): values >= ~0.499 scale past 255 and make array('B', ...)
    # raise OverflowError — confirm the expected input range upstream.
    # NOTE(review): ``tostring`` was removed in Python 3.9 (``tobytes`` is
    # the replacement) — this code appears Python 2 era; confirm runtime.
    b = array.array('B', l).tostring()
    # The [1:-8] slice strips the leading quote and trailing "'::bytea" from
    # the adapted literal — presumably matches this psycopg version's repr;
    # verify against the installed driver.
    binstring = str(Binary(b))[1:-8]
    binstring = binstring.replace("''", "\'")
    return binstring
def __init__(self, *args, **kwargs):
    """Initialize via the parent adapter, then re-wrap the EWKB in Binary."""
    super(PatchedAdapter, self).__init__(*args, **kwargs)
    self._adapter = Binary(self.ewkb)