def DB_update_feed(db, feed, suck, last_ts):
    #print "db> Updating feed %d and feed info" % feed['id']
    c = db.cursor()
    etag = title = description = language = copyright = pub_date = ttl = None
    if 'etag' in suck:
        etag = suck.etag
    if 'title' in suck.feed:
        title = try_slice(suck.feed.title, 1000)
    if 'description' in suck.feed:
        description = try_slice(suck.feed.description, 10000)
    if 'language' in suck.feed:
        language = (suck.feed.language or '').split('-')[0]
        if language not in iso_map.iso3to2:
            language = iso_map.iso2to3.get(language, None)
    if 'copyright' in suck.feed:
        copyright = try_slice(suck.feed.copyright, 100000)
    if 'updated_parsed' in suck.feed:
        pub_date = suck.feed.updated_parsed
    if 'ttl' in suck.feed:
        ttl = suck.feed.ttl
    c.execute(
        "UPDATE feed SET last_etag=%s, failures=0, pruning_ts_last=%s WHERE id=%s",
        (psycopg2.Binary(etag) if etag else None, last_ts, feed['id']))
    # !bn: this really doesn't need to be flushed to the database on every feed
    # update .. replace it with a stored procedure? -- well, it only updates
    # every 7 days; it does get selected every time though..
    c.execute(
        "UPDATE feed_ps SET title=%s, description=%s, language=%s, copyright=%s, pub_date=%s, published_ttl=%s, updated=now() WHERE feedid = %s AND updated < (now() - (7*86400)::reltime)",
        (title, description, language, copyright, mk_timestamp(pub_date), ttl, feed['id']))
    db.commit()

def write(self, buf):
    """Write to the large object, reading from the given buffer.

    Write as much data as the buffer provides. If the server stores less
    than that, stop and make no further SQL calls. The number of bytes
    actually written is returned.

    buf (bytes or bytearray): buffer from which to read data.

    return (int): the number of bytes written.

    raise (io.UnsupportedOperation): when the file is closed or not open
        for writes.

    """
    if self._fd is None:
        raise io.UnsupportedOperation("Large object is closed.")
    if not self._writable:
        raise io.UnsupportedOperation("Large object hasn't been "
                                      "opened in 'write' mode.")
    len_ = self._execute("SELECT lowrite(%(fd)s, %(buf)s);",
                         {'fd': self._fd,
                          'buf': psycopg2.Binary(buf)},
                         "Couldn't write to large object.")
    return len_

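# A standalone sketch of the SQL-level large-object route the method above
# relies on (lo_create/lo_open/lowrite/lo_close are standard PostgreSQL server
# functions; the DSN is a placeholder). Large-object descriptors are only
# valid inside the transaction that opened them, so all calls happen before
# the commit.
import psycopg2

INV_WRITE = 0x20000  # libpq flag: open the large object writable

def lo_write_once(dsn, payload):
    conn = psycopg2.connect(dsn)
    try:
        with conn, conn.cursor() as cur:
            cur.execute("SELECT lo_create(0);")  # let the server pick an OID
            oid = cur.fetchone()[0]
            cur.execute("SELECT lo_open(%s, %s);", (oid, INV_WRITE))
            fd = cur.fetchone()[0]  # transaction-local descriptor
            cur.execute("SELECT lowrite(%s, %s);",
                        (fd, psycopg2.Binary(payload)))
            written = cur.fetchone()[0]
            cur.execute("SELECT lo_close(%s);", (fd,))
        return oid, written
    finally:
        conn.close()
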
def write_blob(part_id, path_to_file, file_extension):
    """Insert a BLOB into table"""
    conn = None
    try:
        # Read the image data
        drawing = open(path_to_file, 'rb').read()
        # Read the database configuration
        params = config()
        # Connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # Create a new cursor
        cur = conn.cursor()
        # Execute the INSERT statement
        cur.execute(
            "INSERT INTO part_drawings(part_id, file_extension, drawing_data) "
            "VALUES (%s,%s,%s)",
            (part_id, file_extension, psycopg2.Binary(drawing)))
        # Save the changes in the database
        conn.commit()
        # Close the communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()

def append_image(doc_id, size):
    # append a new page image to the document
    content_type = request.headers['Content-Type']
    if content_type not in ("image/tiff", "image/jpeg", "application/pdf"):
        logging.error('Content-Type is not a valid image format')
        return Response(status=415)
    cursor = connect(cursor_factory=psycopg2.extras.DictCursor)
    try:
        cursor.execute('select max(page)+1 from documents where document_id=%(doc_id)s',
                       {"doc_id": doc_id})
        next_page_no = cursor.fetchone()
        if next_page_no[0] is None:
            return Response(status=404)
        cursor.execute("insert into documents (document_id, content_type, page, size, image) "
                       "values ( %(document_id)s, %(content_type)s, %(page)s, %(size)s, %(image)s )",
                       {"document_id": doc_id,
                        "content_type": content_type,
                        "page": next_page_no[0],
                        "size": size,
                        "image": psycopg2.Binary(request.data)})
        rowcount = cursor.rowcount
        complete(cursor)
    except:
        rollback(cursor)
        raise
    if rowcount == 0:
        return Response(status=404)
    return Response(status=201)

def save(self, session):
    uid = session.uid or None
    with self.get_cursor() as cr:
        sql_data = {
            'data': psycopg2.Binary(
                pickle.dumps(dict(session), pickle.HIGHEST_PROTOCOL)),
            'id': session.sid,
            'uid': uid,
        }
        if self.is_valid_key(session.sid):
            cr.execute("""
                UPDATE sessionstore
                SET data = %(data)s, uid=%(uid)s
                WHERE id = %(id)s;
                """, sql_data)
        else:
            cr.execute("""
                INSERT INTO sessionstore (id, data, uid)
                VALUES (%(id)s, %(data)s, %(uid)s);
                """, sql_data)
        # If uid is not None, delete other sessions with the same uid
        if uid is not None:
            cr.execute("""
                DELETE FROM sessionstore
                WHERE uid=%(uid)s AND id != %(id)s
                """, sql_data)

def testBinaryRoundTrip(self):
    # test to make sure buffers returned by psycopg2 are
    # understood by execute:
    s = bytes(range(256))
    buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),))
    buf2 = self.execute("SELECT %s::bytea AS foo", (buf,))
    self.assertEqual(s, buf2.tobytes())

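# The same round trip outside the test harness, as a minimal sketch assuming a
# reachable database behind the DSN placeholder: on Python 3, psycopg2 returns
# bytea columns as memoryview, which binds straight back as a bytea parameter.
import psycopg2

def binary_round_trip(dsn):
    payload = bytes(range(256))
    conn = psycopg2.connect(dsn)
    try:
        with conn, conn.cursor() as cur:
            cur.execute("SELECT %s::bytea", (psycopg2.Binary(payload),))
            buf = cur.fetchone()[0]  # memoryview on Python 3
            cur.execute("SELECT %s::bytea", (buf,))
            assert cur.fetchone()[0].tobytes() == payload
    finally:
        conn.close()
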
def upload(self, image_file):
    if image_file:
        path = os.path.join(self._in_dir, image_file)
        if os.path.isfile(path):
            base_name = os.path.basename(path)
            image_name, image_ext = os.path.splitext(base_name)
            fp = open(path, 'rb')
            image_data = fp.read()
            fp.close()
            hash_key = hashlib.md5(image_data).hexdigest()
            old_image_id = self._get_image_id(image_name, hash_key)
            if old_image_id:
                return old_image_id
            sqlcmd = """
                INSERT INTO spec.spec_image(image_name, image_type, image_blob, hash_key)
                VALUES (%s, %s, %s, %s) RETURNING image_id;
            """
            self._pg.execute2(sqlcmd, (image_name, image_ext,
                                       psycopg2.Binary(image_data), hash_key))
            image_id = self.fetch_id()
            self._pg.commit2()
            return image_id
        else:
            self._log.error('File does not exist: %s' % (image_file,))
            return None

def write(self, batch):
    """
    Batch save the data

    batch is a list of [(key, value), ...]
    """
    cursor = connection.cursor()
    while batch:
        values = ["(%s, %s)" % (psycopg2.Binary(k), psycopg2.Binary(v))
                  for k, v in batch[:self.BATCH_SIZE]]
        sql = "INSERT INTO %s(k,v) VALUES %s" % (self.table, ",".join(values))
        batch = batch[self.BATCH_SIZE:]
        cursor.execute(sql)
        cursor.execute("COMMIT")

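# The method above splices pre-quoted Binary literals into the SQL text, which
# works but bypasses parameter binding. psycopg2.extras.execute_values does
# the same multi-row batching with bound parameters; a sketch assuming the
# same module-level `connection` and a hypothetical table name "kv":
import psycopg2
import psycopg2.extras

def write_batch(connection, batch, page_size=1000):
    rows = [(psycopg2.Binary(k), psycopg2.Binary(v)) for k, v in batch]
    with connection.cursor() as cursor:
        psycopg2.extras.execute_values(
            cursor,
            "INSERT INTO kv (k, v) VALUES %s",
            rows,
            page_size=page_size)  # rows folded into each generated statement
    connection.commit()
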
def to_st_autocorr_table(cluster_no, analysis_ts, tetrode_no, session_name,
                         filename, n_drive_user, animal_id, session_ts,
                         st_autocorr, theta_idx, burst_idx1, burst_idx2):
    '''
    Write full entry to st_autocorr_tb:
        cluster_no SMALLINT NOT NULL,
        analysis_ts TIMESTAMP NOT NULL,
        tetrode_no SMALLINT NOT NULL,
        session_name VARCHAR NOT NULL,
        filename VARCHAR NOT NULL,
        n_drive_user VARCHAR NOT NULL,
        animal_id INTEGER NOT NULL,
        session_ts TIMESTAMP NOT NULL,
        st_autocorr BYTEA NOT NULL,
        theta_idx DOUBLE PRECISION NOT NULL,
        burst_idx1 DOUBLE PRECISION NOT NULL,
        burst_idx2 DOUBLE PRECISION NOT NULL
    '''
    # Serialize st_autocorr as a binary blob
    st_autocorr = cPickle.dumps(st_autocorr, -1)

    sql_command = """INSERT INTO st_autocorr_tb(cluster_no,analysis_ts,tetrode_no,session_name,filename,n_drive_user,animal_id,session_ts,st_autocorr,theta_idx,burst_idx1,burst_idx2)
        VALUES({},'{}',{},'{}','{}','{}','{}','{}',{},{},{},{}) RETURNING cluster_no;""".format(
        cluster_no, analysis_ts, tetrode_no, session_name, filename,
        n_drive_user, animal_id, session_ts, psycopg2.Binary(st_autocorr),
        theta_idx, burst_idx1, burst_idx2)
    id = execute_psql(sql_command, 'fetchone')
    return id

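# The format-string INSERT above leans on Binary's client-side quoting and on
# none of the VARCHAR arguments containing a quote character. A sketch of the
# same insert with server-side parameter binding, assuming direct access to a
# psycopg2 connection instead of the execute_psql helper:
import psycopg2

def to_st_autocorr_table_bound(conn, cluster_no, analysis_ts, tetrode_no,
                               session_name, filename, n_drive_user,
                               animal_id, session_ts, st_autocorr_blob,
                               theta_idx, burst_idx1, burst_idx2):
    with conn.cursor() as cur:
        cur.execute(
            "INSERT INTO st_autocorr_tb (cluster_no, analysis_ts, tetrode_no, "
            "session_name, filename, n_drive_user, animal_id, session_ts, "
            "st_autocorr, theta_idx, burst_idx1, burst_idx2) "
            "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) "
            "RETURNING cluster_no;",
            (cluster_no, analysis_ts, tetrode_no, session_name, filename,
             n_drive_user, animal_id, session_ts,
             psycopg2.Binary(st_autocorr_blob),
             theta_idx, burst_idx1, burst_idx2))
        row = cur.fetchone()
    conn.commit()
    return row
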
def insertImage(self, objectId, imageName, imageData, imageFileType):
    '''insert an image associated with an object'''
    sql = "INSERT INTO images (objectId, imageName, imageData, imageFileType) VALUES(%s, %s, %s, %s)"
    image = psycopg2.Binary(imageData)
    (res, self._dbcon) = self._try_execute(sql,
                                           data=(objectId, imageName, image, imageFileType),
                                           fetch_N='none')
    return (objectId, imageName)

def import_pdf():
    conn = get_db_connection()
    data = open("/tmp/kprog20151209-Kurzfassung.pdf", mode='rb').read()
    cur = conn.cursor()
    cur.execute("INSERT INTO binaryfiles_tst(filecontent) VALUES(%s)",
                (psycopg2.Binary(data),))
    conn.commit()

def testBinaryRoundTrip(self):
    # test to make sure buffers returned by psycopg2 are
    # understood by execute:
    s = ''.join([chr(x) for x in range(256)])
    buf = self.execute("SELECT %s::bytea AS foo", (psycopg2.Binary(s),))
    buf2 = self.execute("SELECT %s::bytea AS foo", (buf,))
    self.failUnless(str(buf2) == s, "wrong binary quoting")

def update(self, conn):
    cur = conn.cursor()
    if self.img is None:
        uuid_ = None
    else:
        uuid_ = psycopg2.extras.UUID_adapter(uuid.uuid4())
    try:
        cur.execute(
            "INSERT INTO table_upstream "
            "(isbn,lc,title,auths,publisher,edition,publish_date,abstract,img_uuid,tags) "
            "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            (self.isbn, self.lc, self.title, self.auths, self.publisher,
             self.edition, self.publish_date, self.abstract, uuid_, self.tags))
    except psycopg2.InternalError as e:
        conn.rollback()
        print(e)
        print('This book already exists')
        return
    if self.img is not None:
        try:
            cur.execute(
                "INSERT INTO table_image "
                "(uuid,img,mime) "
                "VALUES (%s,%s,%s)",
                (uuid_, psycopg2.Binary(self.img), self.img_mime))
        except psycopg2.InternalError as e:
            conn.rollback()
            print(e)
            print('An image with this uuid already exists')
    conn.commit()
    cur.close()
    return self.isbn

def create_new_place(self, user_id: str, content: dict) -> None:
    """ Creating a new place in DB """
    LOGGER.debug(msg=f'Creating new place. UserID: {user_id}.')
    try:
        if 'photo' in content.keys():
            self._cursor.execute(
                """
                insert into places (user_id, lat, long, place_description, photo)
                values (%(id)s, %(lat)s, %(long)s, %(desc)s, %(photo)s)
                """,
                {'id': user_id, 'lat': content['lat'], 'long': content['long'],
                 'desc': content['description'],
                 'photo': psycopg2.Binary(content['photo'])}
            )
            self._conn.commit()
        else:
            self._cursor.execute(
                """
                insert into places (user_id, lat, long, place_description)
                values (%(id)s, %(lat)s, %(long)s, %(desc)s)
                """,
                {'id': user_id, 'lat': content['lat'], 'long': content['long'],
                 'desc': content['description']}
            )
            self._conn.commit()
        LOGGER.debug(msg=f'Creating new place. UserID: {user_id} - Success.')
    except Exception as err:
        self._conn.rollback()
        LOGGER.error(msg=f'Problem creating new place. UserID: {user_id}. Error: {err}')

def create_object(self, job, activation, kindtags, metadata, str_data,
                  bytes_data, json_data, sentence):
    if str_data is not None:
        if bytes_data is not None:
            raise Exception(
                'create_object: str_data and bytes_data cannot have value at same time')
        bytes_data = str_data.encode('utf-8')
    if 'tags' not in kindtags:
        kindtags['tags'] = []
    if activation is None:
        avid = 0
    else:
        avid = activation.avid
    cur = self.conn.cursor()
    cur.execute(
        "INSERT INTO object (time, jid, avid, kindtags, metadata, bytes_data, json_data, sentence) "
        "VALUES (clock_timestamp(), %s, %s, %s, %s, %s, %s, %s) RETURNING oid;",
        [job.jid, avid, json.dumps(kindtags), json.dumps(metadata),
         psycopg2.Binary(bytes_data), json.dumps(json_data), sentence])
    oid = singlevalue(cur)
    return self.get_object(oid)

def to_autocorr_gs_table(cluster_no, analysis_ts, tetrode_no, session_name,
                         filename, n_drive_user, animal_id, session_ts,
                         autocorr, autocorr_overlap, grid_valid, grid_score):
    '''
    Write full entry to autocorr_gs_tb:
        cluster_no SMALLINT NOT NULL,
        analysis_ts TIMESTAMP NOT NULL,
        tetrode_no SMALLINT NOT NULL,
        session_name VARCHAR NOT NULL,
        filename VARCHAR NOT NULL,
        n_drive_user VARCHAR NOT NULL,
        animal_id INTEGER NOT NULL,
        session_ts TIMESTAMP NOT NULL,
        autocorr BYTEA NOT NULL,
        autocorr_overlap DOUBLE PRECISION NOT NULL,
        grid_valid BOOLEAN NOT NULL,
        grid_score DOUBLE PRECISION NOT NULL
    '''
    # Serialize autocorr as a binary blob
    autocorr = cPickle.dumps(autocorr, -1)

    sql_command = """INSERT INTO autocorr_gs_tb(cluster_no,analysis_ts,tetrode_no,session_name,filename,n_drive_user,animal_id,session_ts,autocorr,autocorr_overlap,grid_valid,grid_score)
        VALUES({},'{}',{},'{}','{}','{}','{}','{}',{},{},{},{}) RETURNING cluster_no;""".format(
        cluster_no, analysis_ts, tetrode_no, session_name, filename,
        n_drive_user, animal_id, session_ts, psycopg2.Binary(autocorr),
        autocorr_overlap, grid_valid, grid_score)
    id = execute_psql(sql_command, 'fetchone')
    return id

def convert_type(self, attr, value):
    try:
        if value is None or isinstance(value, Null):
            return value
        if isinstance(self._attr_type[attr], DtoInteger):
            return int(value)
        if isinstance(self._attr_type[attr], DtoNumeric):
            return float(value)
        if isinstance(self._attr_type[attr], DtoText):
            if not isinstance(value, str):
                raise ValueError()
        if isinstance(self._attr_type[attr], DtoBytea):
            return psycopg2.Binary(value)
        if isinstance(self._attr_type[attr], DtoBoolean):
            return bool(value)
        if isinstance(self._attr_type[attr], DtoTimestamp):
            if not isinstance(value, (datetime, date)):
                raise ValueError()
        if isinstance(self._attr_type[attr], DtoDate):
            if not isinstance(value, date):
                raise ValueError()
        if isinstance(self._attr_type[attr], DtoTimeDelta):
            if not isinstance(value, timedelta):
                raise ValueError()
        return value
    except Exception as e:
        dto_error = DtoError(e)
        dto_error.type_error = 'ValueError'
        dto_error.attribute = attr
        raise dto_error

def save():
    # content = request.files['content']
    # fname = content.read()
    f = open('ff.docx', 'rb')
    dat = f.read()
    binary = psycopg2.Binary(dat)
    connection = get_db()
    cursor = connection.cursor()
    cursor.execute("INSERT INTO save (file) VALUES (%s)", (binary,))
    cursor.close()
    connection.commit()
    return 'success'


@app.route("/v1/file", methods=["POST", "GET"])
#------------------To fetch file from db------------------------#
def file():
    source_id = 1
    connection = get_db()
    cursor = connection.cursor()
    cursor.execute("SELECT file FROM other_sources WHERE id = %s", (source_id,))
    file1 = cursor.fetchone()
    open("file.docx", 'wb').write(file1[0])
    cursor.close()
    return 'done'

def push_log_to_db(self, config, database_conn):
    """Push the log file to the database"""
    log_file_read, log_file_path = get_latest_file(config.log_dir)
    log_file_read = psycopg2.Binary(log_file_read)
    self.logger.debug(f"Reading log file {log_file_path}")
    query = self.form_query_push_log_file(config, log_file_read)
    print(f'query: {query}')
    if database_conn is not None:
        try:
            db = database_conn
            cursor = db.cursor()
            cursor.execute(query)
            self.logger.debug(
                f"Pushed log file for scrape_batch_id {config.scrape_batch_id}!")
            db.commit()
        except Exception as e:
            print(f"ERROR: {e}")
            error_found = True
            error_message = e
        finally:
            cursor.close()
            print("cursor_closed")
    else:
        raise ConnectionError("Postgres database is not connected!")

def write_blob(part_id, path_to_file, file_extension):
    """Insert a BLOB into table"""
    conn = None
    try:
        # Read image data
        drawing = open(path_to_file, 'rb').read()
        # Read the configuration of the Database
        params = config()
        # Connect to the PostgreSQL Database
        conn = psycopg2.connect(**params)
        # Create a new cursor
        cur = conn.cursor()
        # Execute the INSERT statement
        cur.execute("INSERT INTO part_drawings(part_id, file_extension, drawing_data) "
                    "VALUES (%s,%s,%s)",
                    (part_id, file_extension, psycopg2.Binary(drawing)))
        # Save the changes in the database
        conn.commit()
        # Close the communication with the database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()

def MakePictoBinary(self, fpath):
    FilePath = fpath
    self.pg.connect1()
    try:
        if self.pg.CreateTable1_ByName('rdb_guideinfo_pic_blob_bytea') == -1:
            self.pg.close1()
            return -1
        for files in DirWalker(FilePath):
            FileExt = os.path.splitext(files)
            FileName = os.path.split(FileExt[0])[1].encode("utf-8")
            jpgfile = open(files, 'rb')
            alldata = jpgfile.read()
            if self.pg.insert('rdb_guideinfo_pic_blob_bytea',
                              ('image_id', 'data'),
                              (FileName, psycopg2.Binary(alldata))) == -1:
                jpgfile.close()
                self.pg.close2()
                return -1
            # pgcur_Create.execute(sqlcmd1, (FileName, psycopg2.Binary(alldata),))
            jpgfile.close()
        self.pg.commit()
    except Exception, ex:
        print '%s:%s' % (Exception, ex)
        raise Exception, 'database operation failed'

def get_location_groups_for_country(country, start_with):
    cursor = connection.cursor()
    search_string = start_with.lower().encode()
    data_dict = {}
    data_dict['like'] = psycopg2.Binary(search_string + b'%')
    sql = """
        (select 'LEVEL4' as LEVEL, name_4||','||name_3||','||name_2||','||name_1 as NAME
           from location_locationlevel l where name_4 ILIKE CAST(%(like)s as TEXT) limit 10)
        union
        (select 'LEVEL3' as LEVEL, name_3||','||name_2||','||name_1 as NAME
           from location_locationlevel l where name_3 ILIKE CAST(%(like)s as TEXT) limit 10)
        union
        (select 'LEVEL2' as LEVEL, name_2||','||name_1 as NAME
           from location_locationlevel l where name_2 ILIKE CAST(%(like)s as TEXT) limit 5)
        union
        (select 'LEVEL1' as LEVEL, name_1 as NAME
           from location_locationlevel l where name_1 ILIKE CAST(%(like)s as TEXT) limit 5)
        order by LEVEL
    """
    cursor.execute(sql, data_dict)
    rows = cursor.fetchall()
    location_dict = defaultdict(list)
    for level, location in rows:
        location_dict[level].append(location)
    return location_dict

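# The Binary-plus-CAST detour above can be skipped entirely: an ILIKE pattern
# is ordinary text, so binding the string directly gives the same result. A
# minimal sketch for one level, assuming the same module-level Django-style
# `connection`; a literal % in a bound value needs no escaping because it
# never touches the SQL text.
def level4_matches(start_with):
    cursor = connection.cursor()
    cursor.execute(
        "select name_4 from location_locationlevel "
        "where name_4 ILIKE %(like)s limit 10",
        {'like': start_with.lower() + '%'})
    return [row[0] for row in cursor.fetchall()]
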
def write_blob(vehicleType, vehicleNo, path_to_file, decision):
    """ insert a BLOB into a table """
    conn = None
    try:
        # read data from a picture
        drawing = open(path_to_file, 'rb').read()
        # read database configuration
        #params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(host=hostname, user=username,
                                password=password, dbname=database)
        # create a new cursor object
        cur = conn.cursor()
        # execute the INSERT statement
        cur.execute(
            "INSERT INTO vehicleRecords(vehicleType, vehicleNo, vehicleImage, decision) VALUES(%s,%s,%s,%s)",
            (vehicleType, vehicleNo, psycopg2.Binary(drawing), decision))
        # commit the changes to the database
        conn.commit()
        # close the communication with the PostgreSQL database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()

def insert(self, taskmanager_id, generation_id, key, value, header, metadata):
    self._insert(ds.DataSource.dataproduct_table, {
        'taskmanager_id': taskmanager_id,
        'generation_id': generation_id,
        'key': key,
        'value': psycopg2.Binary(value)
    })
    self._insert(ds.DataSource.header_table, {
        'taskmanager_id': taskmanager_id,
        'generation_id': generation_id,
        'key': key,
        'create_time': header.get('create_time'),
        'scheduled_create_time': header.get('scheduled_create_time'),
        'creator': header.get('creator'),
        'schema_id': header.get('schema_id')
    })
    self._insert(ds.DataSource.metadata_table, {
        'taskmanager_id': taskmanager_id,
        'generation_id': generation_id,
        'key': key,
        'state': metadata.get('state'),
        'generation_time': metadata.get('generation_time'),
        'missed_update_count': metadata.get('missed_update_count')
    })

def runCmdWithInput(cmd, script, input_file, id):
    if cmd in okCmds and script == "rmg":
        script = "scripts/rmg.py"
        input_file = "temp/input.py"
        o = subprocess.call([cmd, script, input_file])
        if o == 0:
            # create the binary payload from the generated file
            f = open('temp/chemkin/chem.inp', 'rb')
            data = psycopg2.Binary(f.read())
            # connect to the db and update the row with the right id
            conn = get_db()
            cur = conn.cursor()
            cur.execute(
                """UPDATE job_result SET result=%s WHERE id=%s RETURNING id;""",
                (data, id))
            return_value = cur.fetchone()
            conn.commit()
            cur.close()
            return id
        else:
            return "Need your effort to make it right!!"
    else:
        return (cmd + ' is not an ok command.')

def update(self, taskmanager_id, generation_id, key, value, header, metadata):
    q = """
    UPDATE {}
    SET value=%s
    WHERE taskmanager_id=%s AND generation_id=%s AND key=%s
    """.format(ds.DataSource.dataproduct_table)
    self._update(q, (psycopg2.Binary(value), taskmanager_id, generation_id, key))

    q = """
    UPDATE {}
    SET create_time=%s, expiration_time=%s, scheduled_create_time=%s, creator=%s, schema_id=%s
    WHERE taskmanager_id=%s AND generation_id=%s AND key=%s
    """.format(ds.DataSource.header_table)
    self._update(q, (header.get('create_time'), header.get('expiration_time'),
                     header.get('scheduled_create_time'), header.get('creator'),
                     header.get('schema_id'), taskmanager_id, generation_id, key))

    q = """
    UPDATE {}
    SET state=%s, generation_time=%s, missed_update_count=%s
    WHERE taskmanager_id=%s AND generation_id=%s AND key=%s
    """.format(ds.DataSource.metadata_table)
    self._update(q, (metadata.get('state'), metadata.get('generation_time'),
                     metadata.get('missed_update_count'),
                     taskmanager_id, generation_id, key))

def add_descriptor(self, descriptor):
    """
    Add a descriptor to this index. Adding the same descriptor multiple
    times should not add multiple copies of the descriptor in the index
    (based on UUID). Added descriptors overwrite indexed descriptors
    based on UUID.

    :param descriptor: Descriptor to index.
    :type descriptor: smqtk.representation.DescriptorElement
    """
    if self.read_only:
        raise ReadOnlyError("Cannot add to a read-only index.")

    q = self.UPSERT_TMPL.format(
        table_name=self.table_name,
        uuid_col=self.uuid_col,
        element_col=self.element_col,
    )
    v = {
        'uuid_val': str(descriptor.uuid()),
        'element_val': psycopg2.Binary(
            cPickle.dumps(descriptor, self.pickle_protocol))
    }

    def exec_hook(cur):
        cur.execute(q, v)

    list(self._single_execute(exec_hook))

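# UPSERT_TMPL itself is not shown in this snippet. A plausible PostgreSQL 9.5+
# template matching the placeholders used above would be an INSERT ... ON
# CONFLICT keyed on the uuid column; this is an assumption about its shape,
# not the class's actual definition:
UPSERT_TMPL = (
    "INSERT INTO {table_name} ({uuid_col}, {element_col}) "
    "VALUES (%(uuid_val)s, %(element_val)s) "
    "ON CONFLICT ({uuid_col}) "
    "DO UPDATE SET {element_col} = EXCLUDED.{element_col}"
)
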
def save(self, session):
    with self.get_cursor() as cr:
        sql_data = {
            "data": psycopg2.Binary(
                pickle.dumps(dict(session), pickle.HIGHEST_PROTOCOL)),
            "id": session.sid,
        }
        if self.is_valid_key(session.sid):
            cr.execute(
                """
                UPDATE sessionstore
                SET data = %(data)s
                WHERE id = %(id)s;
                """,
                sql_data,
            )
        else:
            cr.execute(
                """
                INSERT INTO sessionstore (id, data)
                VALUES (%(id)s, %(data)s);
                """,
                sql_data,
            )
        cr.commit()

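# The load side of the same store, sketched under the assumption that the
# get_cursor helper and sessionstore schema above are available; the bytea
# column arrives as a memoryview, so it is flattened to bytes before
# unpickling.
import pickle

def load(self, sid):
    with self.get_cursor() as cr:
        cr.execute("SELECT data FROM sessionstore WHERE id = %(id)s;",
                   {"id": sid})
        row = cr.fetchone()
    if row is None:
        return None
    return pickle.loads(bytes(row[0]))  # the dict passed to pickle.dumps
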
def write_blob(part_id, path_to_file, file_extension):
    """ insert a BLOB into a table """
    conn = None
    try:
        # read data from a picture
        drawing = open(path_to_file, 'rb').read()
        # read database configuration
        params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # create a new cursor object
        cur = conn.cursor()
        # execute the INSERT statement
        cur.execute(
            "INSERT INTO part_drawings(part_id,file_extension,drawing_data) " +
            "VALUES(%s,%s,%s)",
            (part_id, file_extension, psycopg2.Binary(drawing)))
        # commit the changes to the database
        conn.commit()
        # close the communication with the PostgreSQL database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()

def writeImage(imgPath=None, email=None):
    """ Save image in img table of database """
    conn = None
    try:
        # read data from a picture
        pic = open(imgPath, 'rb').read()
        format_pic = (imgPath.split("/")[-1].split("."))[-1]
        # read database configuration
        params = config()
        # connect to the PostgreSQL database
        conn = psycopg2.connect(**params)
        # create a new cursor object
        cur = conn.cursor()
        # find id of user
        cur.execute("select id from users where email= %s", (email,))
        _id = cur.fetchone()[0]
        queryCode = """insert into img(owner_id, data_pic, format_pic, upload_at)
                       values(%s, %s, %s, now())"""
        cur.execute(queryCode, (_id, psycopg2.Binary(pic), format_pic,))
        conn.commit()
        # close the communication with the PostgreSQL database
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()

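# A read-back counterpart to writeImage, sketched against the same config()
# helper and img schema assumed above; the bytea column arrives as a
# memoryview, which bytes() flattens before writing to disk.
def readImage(owner_id, out_path):
    conn = psycopg2.connect(**config())
    try:
        cur = conn.cursor()
        cur.execute("select data_pic, format_pic from img "
                    "where owner_id = %s order by upload_at desc limit 1",
                    (owner_id,))
        row = cur.fetchone()
        if row is None:
            return None
        data_pic, format_pic = row
        with open("%s.%s" % (out_path, format_pic), 'wb') as fh:
            fh.write(bytes(data_pic))
        cur.close()
        return format_pic
    finally:
        conn.close()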