def acc2sql(): ''' This is just a test version to read the stateframe once a second from the ACC and send it to the SQL server. A more complete version of this is implemented in schedule.py for "production" work. ''' # Get stateframe structure and version accini = stateframe.rd_ACCfile() sf_version = accini['version'] brange, outlist = sfdef(accini['sf']) with pyodbc.connect("DRIVER={FreeTDS};SERVER=192.168.24.106,1433; \ DATABASE=eOVSA06;UID=aaa;PWD=I@bsbn2w;") as cnxn: cursor = cnxn.cursor() lineno = 0 while 1: lineno += 1 data, msg = stateframe.get_stateframe(accini) version = stateframe.extract(data,['d',8]) if version == sf_version: bufout = transmogrify(data, brange) try: cursor.execute('insert into fBin (Bin) values (?)',pyodbc.Binary(bufout)) print 'Record '+str(lineno)+' successfully written\r', cnxn.commit() except: # An exception could be an error, or just that the entry was already inserted pass else: print 'Error: Incompatible version in stateframe.' break time.sleep(1)
def sendSql(cmd, safe=1, out=1, file=None):
    """Execute an SQL command through the module-level cursor `cur`.

    cmd  -- SQL text; SELECTs are fetched and returned, other statements
            trigger a Qt confirmation dialog when `safe` is truthy.
    safe -- when truthy, ask the user before running non-SELECT commands.
    out  -- when truthy, print the command and elapsed time.
    file -- optional bytes/bytearray payload bound as a single parameter
            (wrapped in pyodbc.Binary); short-circuits the normal path.

    Returns the fetched rows for SELECTs, [] otherwise (None on the
    `file` path, as before).
    """
    startT = time.time()
    if file is not None:  # was `!= None` — identity check is the Python idiom
        f = None
        # isinstance replaces the `type(x) == T` comparisons; covers subclasses
        # too, which is at worst a harmless widening for a binary payload.
        if isinstance(file, (bytes, bytearray)):
            f = pyodbc.Binary(file)
        cur.execute(cmd, f)
        myapp.load()
        return
    x = []
    if not cmd.startswith("SELECT") and safe:
        # Confirmation dialog (Slovene UI strings preserved verbatim).
        reply = QtGui.QMessageBox.warning(
            None, 'Izvaja se sprememba ali brisanje!',
            "Ukaz: {}.\nAli želite nadeljevati?".format(cmd),
            QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
            QtGui.QMessageBox.No)
        if reply == QtGui.QMessageBox.Yes:
            cur.execute(cmd)
            myapp.load()
        else:
            QtGui.QMessageBox.information(None, "Prekinjeno", "Ukaz je prekinjen")
            return
    else:
        cur.execute(cmd)
        x = cur.fetchall()
    if out:
        print(cmd)
        print("Sql query time: {}".format(time.time() - startT))
    return x
def save_team_info_to_db(teams_list, con, country):
    """Store one row per team via the pSaveTeamImg stored procedure.

    teams_list maps a team name to a dict of optional attributes
    ('BigImg' (PIL image), 'YearFound', 'Color', 'Stadium',
    'StadiumNumber', 'Coach'); missing keys fall back to defaults.
    """
    for team, info in teams_list.items():
        # Serialize the crest to PNG bytes when present; '' otherwise.
        big_img = ''
        if 'BigImg' in info:
            buf = io.BytesIO()
            info['BigImg'].save(buf, format='PNG')
            big_img = pyodbc.Binary(buf.getvalue())
        year_found = info.get('YearFound', 0)
        color = info.get('Color', '')
        stadium = info.get('Stadium', '')
        stadium_number = info.get('StadiumNumber', '')
        coach = info.get('Coach', '')
        cursor = con.cursor()
        cursor.execute("exec pSaveTeamImg ?,?,?,?,?,?,?,?,?",
                       (str(team), country, big_img, 2, year_found,
                        color, stadium, stadium_number, coach))
        con.commit()
def insert_question(questions, cursor, game_id, cnxn):
    """Insert each question dict into the Question table, then dispatch its
    answers to the appropriate helper based on the (0-based) type id.

    Each question dict carries 'questionText', 'typeID' (1-based in the
    input), 'image' (path or ''), 'needDefaultAnswer', and 'answers'.
    """
    for q in questions:
        question_text = q["questionText"].strip()
        kind = int(q["typeID"]) - 1  # convert 1-based input to 0-based id
        img_path = q["image"]
        needs_default = q["needDefaultAnswer"]
        ablob = None
        if img_path != "":
            with open(img_path, 'rb') as fh:
                ablob = pyodbc.Binary(fh.read())
        query = "INSERT INTO Question (gameID, questionText, typeID, questionImage, defaultAnswer, " \
                "closed, numOfAnswered) VALUES ( ?, ?, ?, ?, ?, ?, ?)"
        cursor.execute(query, game_id, question_text, kind, ablob, needs_default, 0, 0)
        print(query)
        cnxn.commit()
        # Recover the id of the row just inserted.
        query = "SELECT max(questionID) FROM Question"
        cursor.execute(query)
        row = cursor.fetchone()
        print(row[0])
        if kind != 2:
            # Closed question: store its answer options (and the default one
            # when requested).
            insert_answer(q["answers"], cursor, row[0])
            if needs_default == 1:
                insert_default_answer(cursor, row[0])
        else:
            # Open question: no predefined answers.
            insert_open_answer(cursor, row[0])
def parse_blob(self, currRowDict, tablesDir):
    """Locate the blob file referenced by the current row and package it.

    Falls back to an empty binary (with a stderr message) when the file
    does not exist under tablesDir.
    """
    name, blob_type = self.get_blob_file_name(currRowDict, self.get_blob_patterns())
    path = os.path.join(tablesDir, name)
    if os.path.isfile(path):
        return self.make_blob([path], blob_type)
    sys.stderr.write("ERROR: Could not find any blob files named " + name + "!\n")
    return pyodbc.Binary(b"")
def parse_row_value(self, value, currRowDict, tablesDir):
    """Convert one textual cell dump back into its Python/DB value.

    "None" -> None; "<blob data>" -> the blob loaded from disk;
    "0x..." -> an 8-byte little-endian binary; anything else passes
    through unchanged as a string.
    """
    if value == "None":
        return None
    if value == "<blob data>":
        return self.parse_blob(currRowDict, tablesDir)
    if value.startswith("0x"):
        # Hex string: pack into a little-endian 64-bit binary value.
        return pyodbc.Binary(struct.pack('<Q', int(value, 16)))
    return value
def insert_song(artist_id, song_name, song_bytes):
    """Insert one song row (name, artist id, raw audio bytes).

    Database errors are printed rather than propagated.
    """
    sql = 'insert into Songs(Name, ArtistId, Song) values (?,?,?)'
    try:
        cursor.execute(sql, (song_name, artist_id, pyodbc.Binary(song_bytes)))
        conn.commit()
    except pyodbc.Error as err:
        print(err)
def make_blob(cls, blobPrefix, tablesDir):
    """Read every blob file matching *blobPrefix* under *tablesDir* and
    package their contents into a single pyodbc binary value.

    NOTE: as in the original, an empty match only logs to stderr and still
    packages an empty list — it does not abort.
    """
    blobFiles = cls.find_blob_files(blobPrefix, tablesDir)
    if len(blobFiles) == 0:
        sys.stderr.write(
            "ERROR: Could not find any blob files for prefix " + blobPrefix + "!\n")
    # Fix: the original opened each file without ever closing it; the
    # with-block guarantees the handles are released.
    blobs = []
    for fn in blobFiles:
        with open(fn, "rb") as fh:
            blobs.append(fh.read())
    blob_bytes = cls.package_blobs(blobs, blobPrefix)
    return pyodbc.Binary(blob_bytes)
def addEmployerLogo(self, employerId, Image):
    """Read the logo file at path *Image* and insert it as a BLOB row for
    *employerId* into dbo.EmployerLogo.

    Fixes vs. original: the file handle is now closed (with-block), and a
    pointless `.format(employerId, ablob)` call on a placeholder-free SQL
    string was removed (it was a no-op — the values are bound as
    parameters below).
    """
    with open(Image, 'rb') as fh:
        ablob = fh.read()
    SQLCommand = 'INSERT INTO dbo.EmployerLogo (EmployerID, LogoBLOB) VALUES (?, ?)'
    print(SQLCommand)
    conn = database(server='DESKTOP-F16R844', database='CVDatabase',
                    username='******', password='******')
    c = conn._openConnection()
    # NOTE(review): no explicit commit here — presumably the connection is
    # in autocommit mode; verify against the database() wrapper.
    c.execute(SQLCommand, employerId, pyodbc.Binary(ablob))
    c.close()
def get_song(song_id):
    """Look up a song by id and stream its bytes back as an MP3 download."""
    sql = 'select Name, Song from Songs where Id=?'
    row = cursor.execute(sql, song_id).fetchall()[0]
    name = row[0]
    audio = pyodbc.Binary(row[1])
    return send_file(io.BytesIO(audio),
                     attachment_filename=name,
                     mimetype='audio/mpeg')
def SaveNations(con, nations):
    """Download each nation's image from its URL, then persist every nation
    (name, URL, PNG bytes) via the pSaveNation stored procedure.

    Mutates *nations* in place: adds an 'Img' PIL image to each entry.
    """
    # Phase 1: fetch all images over HTTP.
    for name in nations:
        resp = req.get(nations[name]['URL'])
        nations[name]['Img'] = Image.open(BytesIO(resp.content))
    # Phase 2: serialize to PNG and write to the database.
    cursor = con.cursor()
    for name, info in nations.items():
        buf = io.BytesIO()
        info['Img'].save(buf, format='PNG')
        cursor.execute(
            "exec pSaveNation ?,?,?",
            (str(name), info['URL'], pyodbc.Binary(buf.getvalue())))
        con.commit()
def parse_team_update_img(images, con, country):
    """Download each team crest (keyed by its alt text) and store it via the
    pSaveTeamImg stored procedure for the given country."""
    # Phase 1: fetch all crest images over HTTP.
    crests = {}
    for img in images:
        resp = req.get('http:' + img['src'])
        crests[img['alt']] = Image.open(BytesIO(resp.content))
    # Phase 2: serialize to PNG and write to the database.
    cursor = con.cursor()
    for alt_text, pil_img in crests.items():
        buf = io.BytesIO()
        pil_img.save(buf, format='PNG')
        cursor.execute("exec pSaveTeamImg ?,?,?",
                       (str(alt_text), country, pyodbc.Binary(buf.getvalue())))
        con.commit()
def uploadPic(table_name, field_name, key_name, key, raw):
    """ Unified image-upload helper: writes *raw* bytes into an image column.
        Only image-typed columns are supported; other column types will
        raise a database error.
        (Docstring translated from the original Chinese.)
    """
    # NOTE(review): table/field/key names are concatenated into the SQL
    # string — safe only if they come from trusted code, never user input.
    # NOTE(review): no con.commit() and no connection close here — presumably
    # handled by getConn()'s configuration; verify.
    try:
        con = getConn()
        cur = con.cursor()
        sql = "update " + table_name + " set " + field_name + " = ? where " \
            + key_name + " = ?"
        cur.execute(sql, (pyodbc.Binary(raw), key))
    except Exception, e:
        # Re-raised as-is, so the bare return below only runs on success.
        raise e
    # NOTE(review): returns False even on success — callers apparently
    # ignore the return value; confirm before relying on it.
    return False
def update_allQRtable(self, QRcode, table, file_name):
    """Insert or update a QR code image in the given table.

    QRcode    -- raw QR image bytes (wrapped as a pyodbc binary).
    table     -- 'Course' (file_name = "<ClassName> <ClassNum>") or
                 'Club' (file_name = club name).
    Creates a new row via the insert_new* helpers when no matching row
    exists; otherwise updates the existing row's QRcode column.

    Fix vs. original: if self.conn.cursor() itself raised, the except/cleanup
    path referenced an unbound `cursor` (NameError). The cursor is now
    initialized to None and closed in a guarded finally block.
    """
    QRcode = pyodbc.Binary(QRcode)
    cursor = None
    try:
        cursor = self.conn.cursor()
        if table == 'Course':
            className = file_name.split()[0]
            classNum = file_name.split()[1]
            if cursor.execute(
                    'select * from Course where ClassName = ? and ClassNum = ?',
                    className, classNum).fetchall() == []:
                print('inserting new class')
                self.insert_newCourse([className, classNum], QRcode)
                print('inserted')
            else:
                print('update Course QRcode')
                cursor.execute(
                    """ update Course set QRcode = ? where ClassName = ? and ClassNum = ? """,
                    QRcode, className, classNum)
                cursor.commit()
                print('updated')
        elif table == 'Club':
            clubName = file_name
            if cursor.execute('select * from Club where ClubName = ?;',
                              clubName).fetchall() == []:
                self.insert_newClub(clubName, QRcode)
            else:
                print('update Club QRcode')
                cursor.execute(
                    """ update Club set QRcode = ? where ClubName = ? """,
                    QRcode, clubName)
                cursor.commit()
                print('updated')
    except pyodbc.Error as ex:
        logging.error('update the QRcode on ' + table + ' error msg: ' + str(ex))
    finally:
        if cursor is not None:
            cursor.close()
def insert_answer(answers, cursor, question_id):
    """Insert each answer dict as a row of the Answer table.

    Every row gets typeID 3 and zeroed isDefault/chosen/shown flags; an
    'answerImage' path, when non-empty, is read and stored as a blob.
    """
    sql = "INSERT INTO Answer (questionID, answerText, typeID, answerPic, isDefault, chosen, shown) " \
          "VALUES (?, ?, ?, ?, ?, ?, ?);"
    for answer in answers:
        answer_text = answer["answerText"].strip()
        pic_path = answer["answerImage"]
        blob = None
        if pic_path != "":
            with open(pic_path, 'rb') as fh:
                blob = pyodbc.Binary(fh.read())
        cursor.execute(sql, question_id, answer_text, 3, blob, 0, 0, 0)
        print(sql)
def _save(self, name, content):
    """Save 'content' as file named 'name'.

    @note '\\' in path will be converted to '/'.
    Returns the (normalized) name actually stored.

    Fix vs. original: the filename was interpolated directly into the
    UPDATE's WHERE clause ("... = '%s'"), which breaks on quotes in the
    name and is an SQL-injection vector; it is now bound as a parameter.
    Table/column identifiers still come from trusted instance config.
    """
    name = name.replace('\\', '/')
    binary = pyodbc.Binary(content.read())
    size = len(binary)
    #todo: check result and do something (exception?) if failed.
    if self.exists(name):
        self.cursor.execute(
            "UPDATE %s SET %s = ?, %s = ? WHERE %s = ?" % (
                self.db_table, self.blob_column, self.size_column, self.fname_column),
            (binary, size, name))
    else:
        self.cursor.execute(
            "INSERT INTO %s VALUES(?, ?, ?)" % (self.db_table),
            (name, binary, size))
    self.connection.commit()
    return name
def export(self):
    """Pickle the current workflow and insert it as a snapshot row.

    Builds the destination name from prefix/suffix/protocol, serializes
    the workflow through the compressing file wrapper, enforces the
    snapshot size limit, and commits one row into self.table.
    """
    self._destination = ".".join(
        (self.prefix, self.suffix, str(best_protocol)))
    buffer_ = BytesIO()
    self.info("Preparing the snapshot...")
    with self._open_fobj(buffer_) as fout:
        pickle.dump(self.workflow, fout, protocol=best_protocol)
    payload = buffer_.getvalue()
    self.check_snapshot_size(len(payload))
    binary = pyodbc.Binary(payload)
    self.info("Executing SQL insert into \"%s\"...", self.table)
    now = datetime.now()
    self._cursor_.execute(
        "insert into %s(timestamp, id, log_id, workflow, name, codec, data"
        ") values (?, ?, ?, ?, ?, ?, ?);" % self.table,
        now, self.launcher.id, self.launcher.log_id,
        self.launcher.workflow.name, self.destination, self.compression,
        binary)
    self._db_.commit()
    self.info("Successfully wrote %d bytes as %s @ %s",
              len(binary), self.destination, now)
def getFileName(self):
    """Prompt the user for an image file, base64-encode its bytes, and
    insert (new item) or update (existing item) the images table.

    Fixes vs. original: `base64.encodestring` was deprecated and removed in
    Python 3.9 — `encodebytes` is the drop-in replacement with identical
    output; the file handle is now closed via a with-block; `== None`
    replaced by the `is None` identity check.
    """
    self.filename = QFileDialog.getOpenFileName(self, "Выбрать файл")
    with open(str(self.filename[0]), 'rb') as image:  # binary read
        image_read = image.read()
    self.image_64_encode = pyodbc.Binary(base64.encodebytes(image_read))
    if self.item is None:
        # No current item: insert a fresh row keyed by self.id.
        insert = "insert into images(id,Photo) values(?,?)"
        self.cursor.execute(insert, (self.id, self.image_64_encode))
    else:
        # Existing item: replace its photo.
        insert = "update images set photo = (?) where id = (?)"
        self.cursor.execute(insert, (self.image_64_encode, self.id))
    self.cursor.commit()
    self.init_ui()
def params(self):
    """Read the package file and return its bytes wrapped for use as a
    pyodbc binary parameter."""
    with open(self._filename, "rb") as fh:
        return pyodbc.Binary(fh.read())
def put_entity(self, table_name, row_key, column_names, cell_values):
    """Insert or update one row keyed by *row_key*.

    column_names/cell_values are parallel lists; values are stored as
    binary blobs. Creates the table on first use. Returns a list whose
    first element is ERROR_TT (optionally with an appended error message)
    and whose last element is "0".
    NOTE(review): no explicit commit — presumably the connection
    autocommits; verify against get_connection().
    """
    self.logger.debug("beginning put_entity (%s, %s)" % (table_name, row_key))
    self.logger.debug(column_names)
    self.logger.debug(cell_values)
    elist = [ERROR_TT]
    client = None
    try:
        client = self.get_connection()
        cursor = client.cursor()
        # Some sanity checking: the two lists must be parallel.
        if len(column_names) != len(cell_values):
            elist[0] += "Error in put call |column_names| != |cell_values|"
            self.close_connection(client)
            return elist
        # Check if the table exists; create it lazily if not.
        if not self.__table_exist(table_name):
            self.create_table(table_name, column_names)
        # Wrap every cell value as a binary parameter.
        values = []
        for ii in range(0, len(cell_values)):
            values.append(pyodbc.Binary(cell_values[ii]))
        # Query the database for the existence of the row.
        command = "SELECT " + ROW_KEY + " FROM " + table_name + " WHERE " + ROW_KEY + " = ?"
        cursor.execute(command, tuple([row_key]))
        row = cursor.fetchone()
        # If the row does not exist, do an insert; otherwise an update.
        if row == None:
            # Build "INSERT INTO t (c1, c2, ..., ROW_KEY) VALUES(?, ..., ?)".
            command = "INSERT INTO " + table_name + " ("
            for ii in range(0, len(cell_values)):
                command += column_names[ii]
                command += ", "
            command += ROW_KEY + ") VALUES("
            for ii in range(0, len(cell_values)):
                command += "?, "
            command += "?)"
            if DEBUG_PRINT_SQL:
                self.logger.debug(command)
            cursor.execute(command, tuple(values + [row_key]))
        else:
            # Build "UPDATE t SET c1 = ?, ... WHERE ROW_KEY = ?".
            command = "UPDATE " + table_name + " SET "
            for ii in range(0, len(cell_values) - 1):
                command += column_names[ii] + " = ?, "
            command += column_names[len(cell_values) - 1] + " = ? WHERE " + ROW_KEY + " = ?"
            if DEBUG_PRINT_SQL:
                self.logger.debug(command)
            cursor.execute(command, tuple(values + [row_key]))
    except pyodbc.Error, e:
        self.logger.debug("ERROR FOR PUT")
        elist = [
            ERROR_TT + "put_entity: " + str(e.args[0]) + "--" + e.args[1]
        ]
    elist.append("0")
    self.logger.debug(elist[0])
    self.close_connection(client)
    return elist
def toRaw_mssql(x):
    """Wrap *x* as an MSSQL binary parameter; None passes through untouched."""
    return x if x is None else pyodbc.Binary(x)
'sec-fetch-mode': 'navigate', 'sec-fetch-site': 'same-origin', 'sec-fetch-user': '******', 'upgrade-insecure-requests': '1', 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36', 'x-client-data': 'CJe2yQEIprbJAQjEtskBCKmdygEI4qjKAQjLrsoBCMqvygEIzrDKARjEscoB' } cnxn = pyodbc.connect("Driver={ODBC Driver 17 for SQL Server};" "Server=oceanringtech.database.windows.net;" "Database=BillionActs;" "UID=db_billionacts;" "PWD=piecej@mf0und;") cursor = cnxn.cursor() for entity in query_iter: filename = '{}.png'.format(entity['Name']) image_key = entity[COLUMN] url = "https://console.cloud.google.com/m/blobstore/download?pid=billionacts&blob=" + image_key res = requests.get(url, headers=headers) sql = '''INSERT INTO [dbo].[FirebaseImages]([ImageName],[Image],[Kind]) values (?, ?, ?)''' cursor.execute(sql, (filename, pyodbc.Binary(res.content), KIND)) cursor.commit() cursor.close() cnxn.close()
def make_blob(cls, blobFiles, blobType):
    """Read every path in *blobFiles*, package the contents for *blobType*,
    and return them as a single pyodbc binary value.

    Fix vs. original: each file was opened in a list comprehension without
    ever being closed; the with-block guarantees the handles are released.
    """
    blobs = []
    for fn in blobFiles:
        with open(fn, "rb") as fh:
            blobs.append(fh.read())
    blob_bytes = cls.package_blobs(blobs, blobType)
    return pyodbc.Binary(blob_bytes)
def log2sql(log_file=None):
    ''' Transfers the named stateframe log file to the SQL database.
        This transfer can take a long time, so this should allow interruption
        of the transfer, and then a subsequent call on the same log file
        should find the place where it left off to resume the transfer.
        Returns True on success, False on any validation or database error.
    '''
    from util import Time
    if log_file is None:
        print 'Error: a stateframe log filename must be provided.'
        return False
    if not os.path.isfile(log_file):
        print 'Error: Named stateframe log file',log_file,'not found.'
        return False
    # Log file basename is expected to be in format 'sf_yyyymmdd_vxx.0.log',
    # where xx is the version number
    basename = os.path.basename(log_file)
    logname = basename.split('_')
    if logname[0] == 'sf':
        sfdate = logname[1]
        try:
            sftime = datetime.datetime.strptime(logname[1],'%Y%m%d')
            t = Time(str(sftime))
            sftimestamp = int(t.lv + 0.5)  # Start timestamp, as nearest integer
            sfver = int(logname[2].split('.')[0][1:])
        except:
            print 'Error: File ',basename,'does not have expected basename format sf_yyyymmdd_vxx.0.log'
            return False
    else:
        return False
    # At this point, the log file exists and the name is of the right format.
    # Connect to the database and see if there are any data already for this
    # date, and if so determine the time range.
    with pyodbc.connect("DRIVER={FreeTDS};SERVER=192.168.24.106,1433; \
DATABASE=eOVSA06;UID=aaa;PWD=I@bsbn2w;") as cnxn:
        cursor = cnxn.cursor()
        tblname = 'fV'+str(sfver)+'_vD1'
        cursor.execute("select top 1 Timestamp from "+tblname+" where Timestamp between "+str(sftimestamp)+" and "+str(sftimestamp+86400-2)+" order by Timestamp desc")
        rows = cursor.fetchall()
        if len(rows) == 1:
            # There are data for this date; resume at last entry + 1 s.
            # NOTE(review): sftimestamp2 is computed but the loop below still
            # compares against sftimestamp — looks like the resume point is
            # never actually used; confirm intended behavior.
            try:
                sftimestamp2 = int(rows[0].Timestamp + 1)
            except:
                print 'Error: Unexpected data returned from database. Returned value:',rows[0]
                return False
        elif len(rows) > 1:
            print 'Error: Unexpected data returned from database.'
            return False
        else:
            # No data returned from call, which means we should start with
            # current value of sftimestamp
            pass
        # We now know where to start, so open log file and read to start of
        # data. First need to find out record length from the 32-byte header.
        f = open(log_file,'rb')
        buf = f.read(32)
        recsize = struct.unpack_from('i', buf, 16)[0]
        version = struct.unpack_from('d', buf, 8)[0]
        f.close()
        if int(version) != sfver:
            print 'Error: Version in file name is',sfver,'but version in file itself is',int(version)
            return False
        # We need the "brange" variable, which is used by transmogrify() to
        # reformat the binary data, so the defining stateframe XML file is
        # needed. The correct XML file for this version must exist in the
        # same directory as the log file.
        xml_file = os.path.dirname(log_file)+'/'+'stateframe_v'+str(sfver)+'.00.xml'
        if not os.path.isfile(xml_file):
            print 'Error: Stateframe xml file',xml_file,'not found.'
            return False
        sf, version = rxml.xml_ptrs(xml_file)
        brange, outlist = sfdef(sf)
        lineno = 0
        with open(log_file,'rb') as f:
            bufin = f.read(recsize)
            # Walk the log record by record; a short read terminates the loop.
            while len(bufin) == recsize:
                lineno += 1
                if struct.unpack_from('d', bufin, 0)[0] >= sftimestamp:
                    # This is new data, so write to database
                    bufout = transmogrify(bufin, brange)
                    try:
                        cursor.execute('insert into fBin (Bin) values (?)',pyodbc.Binary(bufout))
                        print 'Record '+str(lineno)+' successfully written\r',
                        cnxn.commit()
                    except:
                        # An exception could be an error, or just that the
                        # entry was already inserted (duplicate) — tolerated.
                        pass
                bufin = f.read(recsize)
        print '\n'
    return True
cursor.execute( 'INSERT INTO Presencas(QtdPresenca, DtAula, IdAluno, IdTurma, IsDeleted, createdAt, updateDAt) VALUES (?, ?, ?, ?, ?, ?, ?)', (2, datetime.datetime.now(), id, turma, 0, datetime.datetime.now(), datetime.datetime.now())) cursor.commit() print('Presença atribuída com sucesso!!') #CONVERTE A IMAGEM PARA BINARIO f = open(diretorio, 'rb') hexdata = f.read() f.close() #INSERE A IMAGEM RECONHECIDA AO BANDO DE DADOS cursor.execute( 'INSERT INTO ImagemFaces (type, name, data, IsDeleted, IdAluno, createdAt, updatedAt) values (?,?,?,?,?,?,?)', ('image/jpeg', file3, pyodbc.Binary(hexdata), 0, id, datetime.datetime.now(), datetime.datetime.now())) cursor.commit() #MOVE A IMAGEM CAPTURADA PARA A PASTA DE TREINAMENTO shutil.copy2(diretorio, newDiretorio) #RENOMEIA A IMAGEM CONCATENANDO O ID DO ALUNO RECONHECIDO os.rename(newDiretorio + file, newDiretorio + file3) #REMOVE A IMAGEM RECONHECIDA DO DIRETORIO os.remove(diretorio) break if s != 'sim': print('Por favor, retire uma nova foto') # REMOVE A IMAGEM NÃO RECONHECIDA DO DIRETORIO
def nbinary(value, field):
    """Validate *value* as bytes within the nbinary size limit for *field*
    and wrap it as a pyodbc binary parameter."""
    checked = _type_check(value, (bytes, ), field)
    return pyodbc.Binary(_limit_check(checked, "nbinary"))
def handleMysqlStatus(upLogRow):
    """Relay QR-code blob data from MySQL (qrcode_table) to MSSQL
    (MessageDownLog) for the device identified by *upLogRow*.

    When fresh rows exist for this data id, forwards up to three blobs and
    marks the row status=3; otherwise re-sends the cached/last successful
    row, logs an API error, and marks status=4. Updates the module-level
    lastSuccessRow cache.
    NOTE(review): dataId is interpolated into the UPDATE strings — safe
    only if upLogRow comes from trusted data; verify.
    """
    dataId = upLogRow[0]
    deviceId = upLogRow[1]
    # Flip the port: 4000 -> 4001, anything else -> 4000.
    port = choose(upLogRow[2] == 4000, 4001, 4000)
    rows = findStatus2(dataId)
    global lastSuccessRow
    if len(rows) > 0:
        # Fresh data available: forward the newest row's blobs.
        row = rows[-1]
        blobData = row['data_blob']
        length = len(blobData)
        blobData = pyodbc.Binary(blobData)
        #odbcCursor.execute("insert into MessageDownLog(DeviceId, Port, Length, Data) values(?, ?, ?, convert(VARBINARY(max), ?, 2))", deviceId, port, length, blobData)
        odbcCursor.execute(
            "insert into MessageDownLog(DeviceId, Port, Length, Data) values(?, ?, ?, ?)",
            deviceId, port, length, blobData)
        odbcCursor.commit()
        # Optional secondary/tertiary blobs are only sent when present.
        blobData1 = row['data_blob1']
        if blobData1:
            length1 = len(blobData1)
            blobData1 = pyodbc.Binary(blobData1)
            odbcCursor.execute(
                "insert into MessageDownLog(DeviceId, Port, Length, Data) values(?, ?, ?, ?)",
                deviceId, port, length1, blobData1)
            odbcCursor.commit()
        blobData2 = row['data_blob2']
        if blobData2:
            length2 = len(blobData2)
            blobData2 = pyodbc.Binary(blobData2)
            odbcCursor.execute(
                "insert into MessageDownLog(DeviceId, Port, Length, Data) values(?, ?, ?, ?)",
                deviceId, port, length2, blobData2)
            odbcCursor.commit()
        # Mark the MySQL source row as relayed (status 3).
        sqlStr = "update qrcode_table set status = 3 where data_id = {}".format(
            dataId)
        mysqlConn.cursor().execute(sqlStr)
        logger.info("running handleMysqlStatus: {}".format(sqlStr))
        mysqlConn.commit()
        lastSuccessRow = row
    else:
        # No fresh data: fall back to the last successfully relayed row,
        # loading it from MySQL if the in-process cache is empty.
        sqlStr = "SELECT * FROM qrcode_table WHERE status=3 ORDER BY data_id DESC LIMIT 0,1"
        cursor = mysqlConn.cursor()
        if not lastSuccessRow:
            cursor.execute(sqlStr)
            result = cursor.fetchall()
            if len(result) == 0:
                # Nothing ever succeeded — nothing to resend.
                return
            logger.info("running handleMysqlStatus: {}".format(sqlStr))
            lastSuccessRow = result[0]
        blobData = lastSuccessRow['data_blob']
        length = len(blobData)
        blobData = pyodbc.Binary(blobData)
        logger.info("blobData length: mysql = {}, mssql = {}".format(
            length, len(blobData)))
        odbcCursor.execute(
            "insert into MessageDownLog(DeviceId, Port, Length, Data) values(?, ?, ?, ?)",
            deviceId, port, length, blobData)
        odbcCursor.commit()
        # Record that this request was served from the fallback path.
        doWriteDownLogForApiError(upLogRow, "QR Error")
        blobData1 = lastSuccessRow['data_blob1']
        if blobData1:
            length1 = len(blobData1)
            blobData1 = pyodbc.Binary(blobData1)
            odbcCursor.execute(
                "insert into MessageDownLog(DeviceId, Port, Length, Data) values(?, ?, ?, ?)",
                deviceId, port, length1, blobData1)
            odbcCursor.commit()
        blobData2 = lastSuccessRow['data_blob2']
        if blobData2:
            length2 = len(blobData2)
            blobData2 = pyodbc.Binary(blobData2)
            odbcCursor.execute(
                "insert into MessageDownLog(DeviceId, Port, Length, Data) values(?, ?, ?, ?)",
                deviceId, port, length2, blobData2)
            odbcCursor.commit()
        # Mark the MySQL row as served-from-fallback (status 4).
        sqlStr = "update qrcode_table set status = 4 where data_id = {}".format(
            dataId)
        mysqlConn.cursor().execute(sqlStr)
        logger.info("running handleMysqlStatus: {}".format(sqlStr))
        mysqlConn.commit()
    logger.info("finish handleMysqlStatus!")
def image(value, field):
    """Validate *value* as bytes within the image size limit for *field*
    and wrap it as a pyodbc binary parameter."""
    checked = _type_check(value, (bytes, ), field)
    return pyodbc.Binary(_limit_check(checked, "image"))
def draw_and_show_pic(field_value_id): highlighted_pic_info_sqlstr = 'SELECT * FROM [xtr].[Doc_Field_Pic_Info] WHERE [Doc_Field_Value_ID] = ' + str( field_value_id) + ';' # highlighed_pic_info_df = execute_pure_query_to_pddf(highlighted_pic_info_sqlstr); with connection.cursor() as cursor: cursor.execute(highlighted_pic_info_sqlstr) highlighed_pic_info_df = cursor.fetchall() highlighed_pic_info_df = list(highlighed_pic_info_df) if len(highlighed_pic_info_df) > 0: highlighed_pic_info_df[0] = list(highlighed_pic_info_df[0]) doc_page_pic_id = highlighed_pic_info_df[0][1] doc_field_value_id = highlighed_pic_info_df[0][2] rotation_degree = highlighed_pic_info_df[0][3] minpoint_X = highlighed_pic_info_df[0][4] minpoint_Y = highlighed_pic_info_df[0][5] maxpoint_X = highlighed_pic_info_df[0][6] maxpoint_Y = highlighed_pic_info_df[0][7] if doc_page_pic_id == None: doc_page_pic_id = 0 page_pic_sqlstr = 'SELECT * FROM [xtr].[Doc_Page_Pic] WHERE [Doc_Page_Pic_ID] = ' + str( doc_page_pic_id) + ';' with connection.cursor() as cursor: cursor.execute(page_pic_sqlstr) page_pic_df = cursor.fetchall() page_pic_df = list(page_pic_df) if len(page_pic_df) > 0: page_pic_df[0] = list(page_pic_df[0]) ablob = page_pic_df[0][2] dir_path = os.path.dirname(os.path.realpath(__file__)) # print dir_path; temp_jpg_file = dir_path + '\static\\TempPics\\' + str( field_value_id) + '_temp.jpg' print temp_jpg_file with open(temp_jpg_file, 'wb') as output_file: output_file.write(ablob) im = Image.open(temp_jpg_file) print im.mode im = im.convert('RGB') print im.mode x, y = im.size print im.size draw = ImageDraw.Draw(im) draw.line(((minpoint_X, minpoint_Y), (minpoint_X, maxpoint_Y)), width=4, fill='red') draw.line(((minpoint_X, maxpoint_Y), (maxpoint_X, maxpoint_Y)), width=4, fill='red') draw.line(((maxpoint_X, maxpoint_Y), (maxpoint_X, minpoint_Y)), width=4, fill='red') draw.line(((maxpoint_X, minpoint_Y), (minpoint_X, minpoint_Y)), width=4, fill='red') im = im.rotate(rotation_degree, expand=1) 
im.save(temp_jpg_file) # os.startfile(temp_jpg_file); # os.remove(temp_jpg_file); output_file.close() bindata = open(temp_jpg_file, 'rb').read() binparams = pyodbc.Binary(bindata) os.remove(temp_jpg_file) binpic = base64.b64encode(bindata) return binpic else: print 'Nothing returned for this doc page id.' special_pic_sqlstr = 'SELECT * FROM [xtr].[Doc_Special_Pic] WHERE [Field_Value_ID] = ' + str( doc_field_value_id) + ' ;' special_pic_df = execute_pure_query_to_pddf(special_pic_sqlstr) if len(special_pic_sqlstr) > 0: ablob = special_pic_df.iloc[0]['Doc_Special_Pic_Bin'] temp_jpg_file = 'Pics\\' + str(field_value_id) + '.jpg' with open(temp_jpg_file, 'wb') as output_file: output_file.write(ablob) os.startfile(temp_jpg_file) else: print special_pic_sqlstr else: print 'Nothing returned for this field value id.'