def loadCredentials(db):
    try:
        json_file = open('%s/credentials.json' %
                         os.path.dirname(os.path.realpath(__file__)),
                         encoding='utf-8')
        try:
            credentials = json.load(json_file)
            Log('credentials.json loaded successfully', 'noprint')
            sleep(1.1)
        except AttributeError:
            Log('AttributeError on credentials.json, exiting')
            sleep(1.1)
            exit()
        except json.decoder.JSONDecodeError:
            Log('json.decoder.JSONDecodeError on credentials.json, exiting')
            sleep(1.1)
            exit()
        json_file.close()
    except FileNotFoundError:
        Log('credentials.json file not found, exiting')
        sleep(1.1)
        exit()
    # log every credential except the password
    for key in credentials[db]:
        if key == 'password':
            continue
        Log(key.ljust(10) + credentials[db][key].ljust(16), 'noprint')
    return credentials[db]

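# Usage sketch (an assumption about how the module is wired up, based on the
# imports in the connector classes below): credentials.json lives next to this
# file and holds one block per database, e.g. 'get' and 'post', each with
# 'server', 'port', 'database', 'user' and 'password' entries.
#
#     from credentials import loadCredentials
#     credentialsGet = loadCredentials('get')    # dict for the store database
#     credentialsPost = loadCredentials('post')  # dict for the data warehouse
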
def __init__(self):
    from credentials import loadCredentials
    from driver import loadDriver
    credentials = loadCredentials()
    driver = loadDriver()
    if 'windows' in driver:
        Log('Platform: Windows', 'noprint')
        self.cnxn = pyodbc.connect(
            'DRIVER={%s};SERVER=%s;DATABASE=%s;UID=%s;PWD=%s' %
            (driver['windows'], credentials['server'],
             credentials['database'], credentials['user'],
             credentials['password']))
    elif 'linux' in driver:
        Log('Platform: Linux', 'noprint')
        self.cnxn = pyodbc.connect(
            'DRIVER={FreeTDS};SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s' %
            (credentials['server'], credentials['port'],
             credentials['database'], credentials['user'],
             credentials['password']))
    self.cursor = self.cnxn.cursor()
    self.cursor.execute('SET LANGUAGE NORWEGIAN')
    self.YYYYMMDD = self.cursor.execute(
        'SELECT CONVERT(VARCHAR(10),CURRENT_TIMESTAMP,112)').fetchone()[0]
    self.weekNum = self.cursor.execute(
        'SELECT DATENAME(WEEK, CURRENT_TIMESTAMP)-1').fetchone()[0]
    self.weekday = self.cursor.execute(
        'SELECT DATENAME(WEEKDAY, CURRENT_TIMESTAMP)').fetchone()[0]
    self.timestamp = self.cursor.execute(
        'SELECT CONVERT(VARCHAR(20),CURRENT_TIMESTAMP,20)').fetchone()[0]
    self.time = self.cursor.execute(
        'SELECT CONVERT(VARCHAR(16),GETDATE(),20)').fetchone()[0]

def runbuild(self):
    sessions = []
    for fileName in os.listdir(self.sessionPath):
        if os.path.splitext(fileName)[-1] == '.csv':
            Log(f'Fetching data.json from {fileName}')
            sessions.append(int(fileName[:-4]))
    for file in sessions:
        sessionFile = os.path.join(self.sessionPath, str(file))
        with open('%s.csv' % sessionFile, 'r') as csvfile:
            reader = csv.reader(csvfile)
            for row in reader:
                item = row[0]
                shelf = row[1]
                # collect every shelf this item has been scanned on
                self.data.setdefault(item, []).append(shelf)
    with open(self.dataPath, 'w', encoding='utf-8') as jsonLoad:
        Log('Saving data.json')
        json.dump(self.data, jsonLoad, indent=2)

def createCredentials():
    try:
        json_file = open('%s/credentials.json' % credentialsPath,
                         encoding='utf-8')
        try:
            credentials = json.load(json_file)
            Log('credentials.json loaded successfully', 'noprint')
            sleep(1.1)
        except AttributeError:
            Log('AttributeError on credentials.json, exiting')
            sleep(1.1)
            exit()
        except json.decoder.JSONDecodeError:
            Log('json.decoder.JSONDecodeError on credentials.json, exiting')
            sleep(1.1)
            exit()
        json_file.close()
    except FileNotFoundError:
        Log('credentials.json file not found, creating new')
        credentials = {}
        sleep(1.1)
        print('\t1. For Post\n\t2. For Get')
        db = input()
        if db == '1':
            db = 'post'
            credentials['post'] = {}
        elif db == '2':
            db = 'get'
            credentials['get'] = {}
        else:
            exit()
        attributeNames = ['server', 'port', 'database', 'user', 'password']
        inputAccept = False
        while inputAccept == False:
            for i in attributeNames:
                value = input(i + ': ')
                credentials[db][i] = value
            isOK = input('is this correct?\n1. yes\n2. no\ntype: ')
            if isOK == '1':
                inputAccept = True
        json_file = open('%s/credentials.json' % credentialsPath, 'w',
                         encoding='utf-8')
        json.dump(credentials, json_file, indent=2)
        json_file.close()
        for key in credentials[db]:
            if key == 'password':
                continue
            Log(key.ljust(10) + credentials[db][key].ljust(16), 'noprint')

def initialize():
    P = Postconnect()
    G = Getconnect()
    P.makeTables()  # if tables for data warehouse do not exist, create them
    # get datetimes and check if weekly and/or monthly execute = True
    data['Tider'] = G.fetchTime()
    # get highest id number for article id and brand id
    brand_idMax = P.brandsGetMax()
    article_idMax = P.articlesGetMax()
    # fetch new brands and articles from retail
    brandsPost = []
    for row in G.getBrands(brand_idMax):
        brandsPost.append(tuple(row))
    if brandsPost != []:
        Log(f'Updating brands from brand_id: {brand_idMax}')
        P.brandsPost(brandsPost)
    else:
        Log(f'No new brands from brand_id: {brand_idMax}')
    articlesPost = []
    for row in G.getArticles(article_idMax):
        articlesPost.append(tuple(row))
    if articlesPost != []:
        Log(f'Updating articles from article_id: {article_idMax}')
        P.articlesPost(articlesPost)
    else:
        Log(f'No new articles from article_id: {article_idMax}')
    # update barcodes
    if data['Tider']['monthly'] == True:
        barcodes = G.getBarcodes()
        '''
        Fetch new barcodes from retail. Barcodes can be changed, so as an
        easy workaround, instead of updating incrementally we delete all
        records and insert all barcodes again.
        '''
        barcodePost = []
        for row in barcodes:
            barcodePost.append(tuple(row))
        P.barcodesDel()
        P.barcodesPost(barcodePost)
    G.close()
    P.close()

def soldoutDaily(self):
    data = []
    result = self.cursor.execute('''
        SELECT Article.articleId,
               Brands.brandLabel AS Merke,
               Article.articleName AS Navn,
               CAST(stockQty AS INT) AS Antall_Lager,
               articleStock.StorageShelf AS Lager,
               CONVERT(VARCHAR(10), articleStock.lastReceivedFromSupplier, 105) AS Siste_Importdato,
               Article.suppliers_art_no AS Leverandor
        FROM ((Article
               INNER JOIN articleStock ON Article.articleId = articleStock.articleId)
               INNER JOIN Brands ON Article.brandId = Brands.brandId)
        WHERE DATEPART(WEEKDAY, articleStock.lastSold) = DATEPART(WEEKDAY, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND DATEPART(WEEK, articleStock.lastSold) = DATEPART(WEEK, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND DATEPART(YEAR, articleStock.lastSold) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND ArticleStatus = '0'
          AND Article.articleName NOT LIKE '[.]%'
          AND stockQty <= '0'
          AND [articleName] NOT LIKE '%REPOSE DESIGNFOREVIG%'
          AND [articleName] NOT LIKE 'Retain 24 gavekort%'
          AND [articleName] NOT LIKE 'Diverse Vinding%'
          AND [articleName] NOT LIKE 'Diverse Glass%'
          AND [articleName] NOT LIKE 'Diverse SERVISE%'
        ORDER BY brandLabel
        ''', self.yesterday, self.yesterday, self.yesterday).fetchall()
    for row in result:
        data.append(row)
    Log('Getconnect: Fetching from soldoutDaily')
    return data

def salesDaily(self):
    data = []
    result = self.cursor.execute('''
        SELECT Article.articleId AS Vare_Id,
               Brands.brandLabel AS Merke,
               Article.articleName AS Navn,
               CAST(noOfArticles AS INT) AS Antall_Solgt,
               CONVERT(VARCHAR(10), CustomerSaleHeader.salesDate, 23) AS Dato,
               CONVERT(VARCHAR(5), CustomerSaleHeader.salesDate, 8) AS Tid,
               usedPricePerUnit AS Pris,
               CustomerSales.disCount AS Rabatt,
               CustomerSaleHeader.additionalInfo AS Betalingsmate
        FROM (((CustomerSales
                FULL JOIN Article ON CustomerSales.articleId = Article.articleId)
                FULL JOIN CustomerSaleHeader ON CustomerSales.customerSaleHeaderId = CustomerSaleHeader.customerSaleHeaderId)
                FULL JOIN Brands ON Brands.brandId = Article.brandId)
        WHERE Article.articleId IS NOT NULL
          AND DATEPART(DAYOFYEAR, [salesdate]) = DATEPART(DAYOFYEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND DATEPART(YEAR, [salesdate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
        ORDER BY salesDate
        ''', self.yesterday, self.yesterday).fetchall()
    for row in result:
        data.append(row)
    Log('Getconnect: Fetching from salesDaily')
    return data

def getBarcodes(self):
    result = self.cursor.execute('''
        SELECT articleId, eanCode
        FROM ArticleEAN
        ''').fetchall()
    Log('Getconnect: Fetching from getBarcodes')
    return result

def mainLoop():
    '''
    This function loops while you add item and shelf values.
    It ends (and restarts if mode.json has the live parameter set to true)
    when you scan the excUpdate barcode located in the ekstra directory.
    '''
    # initialize inventory
    inventory = Inventory()
    # start scanning barcodes from items and shelves
    while True:
        ledBlink('item')
        item = input('\n\t\033[96mscan item: ')
        if item.isnumeric():
            ledBlink('shelf')
            shelf = input('\n\tscan shelf: ')
            if '-' in shelf:
                inventory.sessionAdd(item, shelf)
                continue
            else:
                if shelf.upper() == 'EXCUPDATE':
                    ledBlink('update')
                    inventory.sessionExecuteUpdate()
                    del inventory
                    return True
                elif shelf.upper() == 'EXIT':
                    return False
                else:
                    Log(shelf + ' is not a valid shelf barcode', 2)
                    sleep(0.5)
        else:
            if item.upper() == 'EXCUPDATE':
                ledBlink('update')
                inventory.sessionExecuteUpdate()
                del inventory
                return True
            elif item.upper() == 'EXIT':
                return False
            else:
                Log(item + ' is not a valid item barcode', 2)
                sleep(0.5)
                continue

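# Input sketch (illustrative values, not taken from a real session): the loop
# above treats a purely numeric scan as an item barcode, a scan containing '-'
# as a shelf label, and the control barcodes EXCUPDATE / EXIT as commands.
#
#     scan item:  7035620024986   -> numeric, accepted as an item barcode
#     scan shelf: A1-03           -> contains '-', stored via sessionAdd()
#     scan item:  EXCUPDATE       -> runs sessionExecuteUpdate(), returns True
#     scan item:  EXIT            -> returns False
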
def wipeSessions(self):
    allSessions = [
        f for f in os.listdir('%s/inventory/sessions' %
                              os.path.dirname(os.path.realpath(__file__)))
        if os.path.splitext(f)[-1] == '.csv'
    ]
    for session in allSessions:
        Log('deleting ' + session + ' from ./inventory/sessions/', '2')
        os.remove(
            os.path.join(os.path.dirname(os.path.realpath(__file__)),
                         'inventory', 'sessions', session))

def getBrands(self, val):
    result = self.cursor.execute('''
        SELECT brandId, brandLabel
        FROM Brands
        WHERE brandId > (?)
        ORDER BY brandId
        ''', val).fetchall()
    Log('Getconnect: Fetching from getBrands')
    return result

def getArticles(self, val):
    result = self.cursor.execute('''
        SELECT articleId, brandId, articleName
        FROM Article
        WHERE articleId > (?)
        ORDER BY articleId
        ''', val).fetchall()
    Log('Getconnect: Fetching from getArticles')
    return result

def importsWeekly(self):
    data = []
    data.append(['Vareimport'])
    data.append([
        'Uke-' + str(self.weekNumYesterday) + ' ' +
        str(self.yesterdayYYYMMDD[:4])
    ])
    data.append(columnNames['imports'])
    articleList = self.cursor.execute('''
        SELECT articleId, stockAdjustmenId
        FROM StockAdjustment
        WHERE DATEPART(WEEK, [adjustmentDate]) = DATEPART(WEEK, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND DATEPART(YEAR, [adjustmentDate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND adjustmentCode = '41'
        ORDER BY adjustmentDate
        ''', self.yesterday, self.yesterday).fetchall()
    for article in articleList:
        for importInfo in self.cursor.execute('''
            SELECT Article.articleId,
                   Brands.brandLabel AS Merke,
                   Article.articleName AS Navn,
                   CAST(StockAdjustment.adjustmentQty AS INT) AS Antall_Importert,
                   CAST(stockQty AS INT) AS Antall_Lager,
                   articleStock.StorageShelf AS Lager_plass,
                   Article.suppliers_art_no AS LeverandorID
            FROM Article
                 INNER JOIN articleStock ON Article.articleId = articleStock.articleId
                 INNER JOIN Brands ON Article.brandId = Brands.brandId
                 INNER JOIN StockAdjustment ON Article.articleId = StockAdjustment.articleId
            WHERE Article.articleId = (?)
              AND adjustmentCode = '41'
              AND stockAdjustmenId = (?)
            ''', article[0], article[1]).fetchall():
            data.append(importInfo)
    Log('Getconnect: Fetching from importsWeekly')
    return data

def createCredentials():
    credentials = {}
    attributeNames = ['server', 'port', 'database', 'user', 'password']
    inputAccept = False
    while inputAccept == False:
        for i in attributeNames:
            value = input(i + ': ')
            credentials[i] = value
        isOK = input('is this correct?\n1. yes\n2. no\ntype: ')
        if isOK == '1':
            inputAccept = True
    credFile = open(credPath, 'w', encoding='utf-8')
    json.dump(credentials, credFile, indent=2)
    credFile.close()
    for key in credentials:
        if key == 'password':
            continue
        Log(key.ljust(10) + credentials[key].ljust(16), 'noprint')

def turnoverDaily(self):
    result = []
    rows = []
    total = self.cursor.execute('''
        SELECT CASE
                   WHEN CAST(SUM(Brto_Salg_Kr) AS INT) IS NULL THEN 0
                   ELSE CAST(SUM(Brto_Salg_Kr) AS INT)
               END
        FROM view_HIP_salesInfo_10
        WHERE DATEPART(DAYOFYEAR, [salesdate]) = DATEPART(DAYOFYEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND DATEPART(YEAR, [salesdate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND isGiftCard = '0'
        ''', self.yesterday, self.yesterday).fetchone()
    rows.append(total[0])
    query = '''
        SELECT CASE
                   WHEN CAST(SUM(Brto_Salg_Kr) AS INT) IS NULL THEN 0
                   ELSE CAST(SUM(Brto_Salg_Kr) AS INT)
               END
        FROM view_HIP_salesInfo_10
        WHERE DATEPART(HOUR, [salesdate]) = (?)
          AND DATEPART(DAYOFYEAR, [salesdate]) = DATEPART(DAYOFYEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND DATEPART(YEAR, [salesdate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
          AND isGiftCard = '0'
        '''
    for hour in range(24):
        # append the turnover for each hour of the day
        hourly = self.cursor.execute(query, hour, self.yesterday,
                                     self.yesterday).fetchone()
        rows.append(hourly[0])
    Log('Getconnect: Fetching from turnoverDaily')
    result.append(rows)
    return result

def fetchTime(self):
    data = {}
    data['today'] = {}
    data['yesterday'] = {}
    data['today']['time'] = self.time
    data['today']['timestamp'] = self.timestamp
    data['today']['YYYYMMDD'] = self.YYYYMMDD
    data['today']['weekNum'] = self.weekNum
    data['today']['weekday'] = self.weekday
    data['today']['human'] = self.dateHuman
    data['yesterday']['YYYYMMDD'] = self.yesterdayYYYMMDD
    data['yesterday']['weekNum'] = self.weekNumYesterday
    data['yesterday']['weekday'] = self.weekdayYesterday
    data['yesterday']['month'] = self.monthYesterday
    data['yesterday']['human'] = self.dateYesterdayHuman
    data['yesterday']['YYYY-weekNum'] = self.yesterdayYYYMMDD[:4] + '-' + self.weekNumYesterday
    data['weekly'] = self.cursor.execute(
        'SELECT DATEPART(WEEKDAY, CURRENT_TIMESTAMP)').fetchone()[0] == 1
    data['monthly'] = self.cursor.execute(
        'SELECT DATEPART(DAY, CURRENT_TIMESTAMP)').fetchone()[0] == 1
    Log('Getconnect: Fetching from fetchTime')
    return data

def loadCredentials():
    modeFile = open(modePath, encoding='utf-8')
    mode = json.load(modeFile)
    modeFile.close()
    try:
        credFile = open(credPath, encoding='utf-8')
        try:
            credentials = json.load(credFile)
            Log('credentials.json loaded successfully', 'noprint')
            sleep(1.1)
        except AttributeError:
            Log('AttributeError on credentials.json, shutting down')
            sleep(1.1)
            call("sudo nohup shutdown -h now", shell=True)
        except json.decoder.JSONDecodeError:
            Log('json.decoder.JSONDecodeError on credentials.json, shutting down')
            sleep(1.1)
            call("sudo nohup shutdown -h now", shell=True)
        credFile.close()
    except FileNotFoundError:
        Log('credentials.json file not found, shutting down')
        sleep(1.1)
        call("sudo nohup shutdown -h now", shell=True)
    for key in credentials:
        if key == 'password':
            if mode['passwordhide'] == True:
                continue
            else:
                Log(key.ljust(10) + credentials[key].ljust(16), 'noprint')
        else:
            Log(key.ljust(10) + credentials[key].ljust(16), 'noprint')
    return credentials

        isOK = input('is this correct?\n1. yes\n2. no\ntype: ')
        if isOK == '1':
            inputAccept = True
    if db == 'cloud':
        while inputAccept == False:
            for i in cloudAttributes:
                value = input(i + ': ')
                credentials[db][i] = value
            isOK = input('is this correct?\n1. yes\n2. no\ntype: ')
            if isOK == '1':
                inputAccept = True
    json_file = open('%s/credentials.json' %
                     os.path.dirname(os.path.realpath(__file__)), 'w',
                     encoding='utf-8')
    json.dump(credentials, json_file, indent=2)
    json_file.close()
    for key in credentials[db]:
        if key == 'password':
            continue
        Log(key.ljust(10) + credentials[db][key].ljust(16), 'noprint')


if __name__ == '__main__':
    Log(f'executing {__file__}')
    Log('Creating new credentials')
    createCredentials()
    Log('New credentials were stored')

validFlags = [
    'mode', 'passwordhide', 'build', 'shutdown', 'sql', 'led',
    'credentials', 'wipesessions', 'live', 'showcred', 'usb'
]

# put file directory into a variable
cwd = os.path.dirname(os.path.realpath(__file__))

# check for correct date before initializing
dateOK = checkDate()
if dateOK == True:
    pass
    '''do something'''
else:
    '''do something else'''
    Log(
        'Could not load the correct date, ' +
        'creating a temp session for this instance', 1)

Log(f'executing {__file__}', '5')

# open mode and check parameters
modeFile = open('%s/mode.json' % cwd, encoding='utf-8')
mode = json.load(modeFile)
modeFile.close()

# check modes in mode.json
if 'mode' in sys.argv:
    for key in mode:
        print(f'{key} = {mode[key]}')
    exit()

# apply all arguments

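# An illustrative mode.json (only keys this project demonstrably reads --
# 'sql', 'shutdown', 'live', 'usb' and 'passwordhide' -- are shown; the
# values are examples, not defaults, and the real file may carry more of the
# flags listed in validFlags):
#
#     {
#       "sql": true,
#       "shutdown": false,
#       "live": true,
#       "usb": false,
#       "passwordhide": true
#     }
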
def sessionExecuteUpdate(self):
    '''
    Read all values from the current session and run queries
    to update the SQL server.
    '''
    # if sql is disabled, pyodbc and mariadb will not be imported
    if self.mode['sql'] == True:
        try:
            import pyodbc
        except ModuleNotFoundError:
            Log('pyodbc module was not found', 1)
            if self.mode['shutdown'] == True:
                from subprocess import call
                try:
                    call("echo", shell=True)  # will trigger exception on error
                    sleep(2)
                    call("sudo nohup shutdown -h now", shell=True)
                except NameError:
                    exit()
        try:
            import mariadb
        except ModuleNotFoundError:
            Log('mariadb module was not found', 1)
            if self.mode['shutdown'] == True:
                from subprocess import call
                try:
                    call("echo", shell=True)  # will trigger exception on error
                    sleep(2)
                    call("sudo nohup shutdown -h now", shell=True)
                except NameError:
                    return None
    Log('executing sessionExecuteUpdate')
    if self.mode['sql'] == False:
        Log('sql is not activated, skipping update', 2)
        return None
    # connect to store
    try:
        cnxnGet = pyodbc.connect(
            'DRIVER={FreeTDS};SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s' %
            (self.credentialsGet['server'], self.credentialsGet['port'],
             self.credentialsGet['database'], self.credentialsGet['user'],
             self.credentialsGet['password']))
        Log('Get database connected successfully')
        cursorGet = cnxnGet.cursor()
    except pyodbc.OperationalError:
        Log('Get database connection failed with pyodbc.OperationalError', 1)
        return None
    # connect to data warehouse
    try:
        cnxnPost = mariadb.connect(
            user=self.credentialsPost['user'],
            password=self.credentialsPost['password'],
            host=self.credentialsPost['server'],
            port=int(self.credentialsPost['port']),
            database=self.credentialsPost['database'])
        Log('Post database connected successfully')
        cursorPost = cnxnPost.cursor()
    except mariadb.Error:
        Log('Post database connection failed with mariadb.Error', 1)
        return None
    articleIdGet = '''
        SELECT Article.articleId
        FROM ArticleEAN
             INNER JOIN Article ON ArticleEAN.articleId = Article.articleId
        WHERE ArticleEAN.eanCode = (?)
        '''
    updateShelfGet = '''
        UPDATE articleStock
        SET StorageShelf = (?)
        FROM articleStock
             JOIN ArticleEAN ON articleStock.articleId = ArticleEAN.articleId
        WHERE ArticleEAN.eanCode = (?)
        '''
    deDuplicate = []
    prepareValidPost = []
    prepareInvalidPost = []
    Log('reading values from ' + self.intDate + '.csv')
    with open('%s.csv' % self.sessionPath, 'r') as csvfile:
        reader = csv.reader(csvfile)
        Log('updating database ' + self.credentialsGet['database'] + ' at ' +
            self.credentialsGet['server'])
        try:
            for i, row in enumerate(reader):
                # commented out 06.01.2021 and pulled the block under back 1 indent
                # if [row[0], row[1]] not in deDuplicate:
                #     deDuplicate.append([row[0], row[1]])
                # append rows with timestamp and date for data warehouse
                add = cursorGet.execute(articleIdGet, row[0]).fetchone()
                prep = []
                if add != None:
                    prep.append(add[0])  # article_id
                    prep.append(row[1])  # shelf
                    prep.append(row[2])  # timestamp
                    prep.append(int(self.intDate))
                    prepareValidPost.append(prep)
                else:
                    prep.append(row[0])  # scanned barcode
                    prep.append(row[1])  # shelf
                    prep.append(row[2])  # timestamp
                    prep.append(int(self.intDate))
                    prepareInvalidPost.append(prep)
                # append to store
                print(f'Updating shelf for {row[0]} to {row[1]}')
                cursorGet.execute(updateShelfGet, row[1], row[0])
                cnxnGet.commit()
                sleep(0.2)
        except csv.Error:
            Log(f'CSV error when looping through {self.sessionPath} ' +
                f'on row {str(i + 2)}, possibly because of a corrupt ' +
                'row due to the device not being shut off properly')
    cursorGet.close()
    cnxnGet.close()
    # update data warehouse
    # get list to compare from placement
    placementSelectPost = '''
        SELECT *
        FROM `placement`
        WHERE yyyymmdd = (?);
        '''
    selectShelf = cursorPost.execute(placementSelectPost, (self.intDate, ))
    result = cursorPost.fetchall()
    for row in result:
        if list(row) in prepareValidPost:
            prepareValidPost.remove(list(row))
    # after comparison, insert new distinct values
    placementInsertShelfPost = '''
        INSERT INTO `placement` (article_id, stock_location, timestamp, yyyymmdd)
        VALUES (?, ?, ?, ?);
        '''
    if prepareValidPost != []:
        Log('Updating database ' + 'Table: placement on ' +
            self.credentialsPost['database'] + ' at ' +
            self.credentialsPost['server'])
        cursorPost.executemany(placementInsertShelfPost, prepareValidPost)
        cnxnPost.commit()
    # get list to compare from placement_invalid
    placement_invalidSelectPost = '''
        SELECT *
        FROM `placement_invalid`
        WHERE yyyymmdd = (?);
        '''
    selectShelf = cursorPost.execute(placement_invalidSelectPost,
                                     (self.intDate, ))
    result = cursorPost.fetchall()
    for row in result:
        if list(row) in prepareInvalidPost:
            prepareInvalidPost.remove(list(row))
    # after comparison, insert new distinct values
    placement_invalidInsertShelfPost = '''
        INSERT INTO `placement_invalid` (barcode, stock_location, timestamp, yyyymmdd)
        VALUES (?, ?, ?, ?);
        '''
    if prepareInvalidPost != []:
        Log('Updating database ' + 'Table: placement_invalid on ' +
            self.credentialsPost['database'] + ' at ' +
            self.credentialsPost['server'])
        cursorPost.executemany(placement_invalidInsertShelfPost,
                               prepareInvalidPost)
        cnxnPost.commit()
    cnxnPost.close()
    if self.mode['usb'] == True:
        os.system("sh backup_usb.sh")
        Log('Backing up inventory to USB')
    if self.mode['live'] == True:
        Log('Finished updating, live mode = True -> keep running', 5)
        return None
    # power off if enabled or exit if not
    try:
        if self.mode['shutdown'] == True:
            from subprocess import call
            call("echo", shell=True)
            Log('powering off', 5)
            sleep(2)
            call("sudo nohup shutdown -h now", shell=True)
    except NameError:
        Log('exiting', 5)
        exit()

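# Illustrative session CSV layout (example values only; the exact timestamp
# format written by sessionAdd() is not shown here, but the commented-out
# strftime in Inventory.__init__ suggests something like the third column):
#
#     7035620024986,A1-03,2021-01-06_12:34:56.78
#     7035620024993,B2-11,2021-01-06_12:35:10.42
#
# Column 1 is the scanned barcode (row[0]), column 2 the shelf (row[1]) and
# column 3 the timestamp (row[2]) that sessionExecuteUpdate() reads above.
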
def turnoverMonthly(self):
    data = []
    rowSum = [0] * 25  # adding values while iterating
    rowSum.insert(0, 'SUM')
    data.append(['Omsetning'])
    data.append([
        self.monthYesterday.title() + ' ' + self.yesterdayYYYMMDD[:4]
    ])
    data.append(columnNames['clockHoursMonthly'])
    daysTotal = self.cursor.execute(
        'SELECT DAY(DATEADD(DD,-1,DATEADD(mm, DATEDIFF(mm, 0, GETDATE()), 0)))'
    ).fetchone()[0]
    for i in range((-daysTotal), 0, 1):
        record = []
        record.append(i + daysTotal + 1)
        total = self.cursor.execute('''
            SELECT CASE
                       WHEN CAST(SUM(Brto_Salg_Kr) AS INT) IS NULL THEN 0
                       ELSE CAST(SUM(Brto_Salg_Kr) AS INT)
                   END
            FROM view_HIP_salesInfo_10
            WHERE DATEPART(DAYOFYEAR, [salesdate]) = DATEPART(DAYOFYEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
              AND DATEPART(YEAR, [salesdate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
              AND isGiftCard = '0'
            ''', i, self.yesterday).fetchone()
        record.append(total[0])
        for hour in range(24):
            # append the turnover for each hour of the day
            hourly = self.cursor.execute('''
                SELECT CASE
                           WHEN CAST(SUM(Brto_Salg_Kr) AS INT) IS NULL THEN 0
                           ELSE CAST(SUM(Brto_Salg_Kr) AS INT)
                       END
                FROM view_HIP_salesInfo_10
                WHERE DATEPART(HOUR, [salesdate]) = (?)
                  AND DATEPART(DAYOFYEAR, [salesdate]) = DATEPART(DAYOFYEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
                  AND DATEPART(YEAR, [salesdate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
                  AND isGiftCard = '0'
                ''', hour, i, self.yesterday).fetchone()
            record.append(hourly[0])
        # after appending values, accumulate the sum for the last row
        for col in range(len(record)):
            if col != 0:
                rowSum[col] += record[col]
        data.append(record)
    Log('Getconnect: Fetching from turnoverMonthly')
    data.append(rowSum)
    return data

def writeSpreadsheet():
    import openpyxl
    from openpyxl import Workbook
    from openpyxl.utils import get_column_letter
    from openpyxl.styles import Alignment
    credentials = loadCredentials('cloud')

    def exportXLSX(fileName: str):

        def cloudUpload(fileName: str):

            def commandAsmbl(cmd):
                Log(f'Cloud Upload: uploading {category} {when} to {credentials["server"]}')
                subprocess.Popen(cmd, shell=True, executable='/bin/bash')

            command = 'curl -u '
            command += credentials['user']
            command += ':'
            command += credentials['password']
            command += ' -T '
            command += fileName
            command += ' https://'
            command += credentials['server']
            command += '/remote.php/dav/files/'
            command += credentials['user']
            command += '/'
            command += category
            command += '/'
            command += when
            command += '/'
            commandAsmbl(command)

        # load spreadsheet
        wb = Workbook()
        ws = wb.active
        # apply default col length
        cellLength = {}
        for i in range(10):
            cellLength[i] = 5
        # set length of col based on length of longest cell value
        for row in data[category][when]:
            for i, cell in enumerate(row):
                if i not in cellLength:
                    cellLength[i] = 1
                try:
                    if cellLength[i] < len(str(cell)):
                        cellLength[i] = len(str(cell))
                except KeyError:
                    continue
        # apply col length from values in cellLength to cells A-J
        ws.column_dimensions['A'].width = cellLength[0] + 5
        ws.column_dimensions['B'].width = cellLength[1] + 5
        ws.column_dimensions['C'].width = cellLength[2] + 5
        ws.column_dimensions['D'].width = cellLength[3] + 5
        ws.column_dimensions['E'].width = cellLength[4] + 5
        ws.column_dimensions['F'].width = cellLength[5] + 5
        ws.column_dimensions['G'].width = cellLength[6] + 5
        ws.column_dimensions['H'].width = cellLength[7] + 5
        ws.column_dimensions['I'].width = cellLength[8] + 5
        ws.column_dimensions['J'].width = cellLength[9] + 5
        freeze = None
        # find freeze point for titles while appending
        for i, row in enumerate(data[category][when]):
            if len(row) > 2 and freeze == None:
                freeze = 'A' + str(i + 2)
            if category == 'Salg' and i > 2:
                ws.append(list(row)[:-1])
            else:
                ws.append(list(row))
        for col in ws.columns:
            # align all fields with a value to center
            for cell in col:
                cell.alignment = Alignment(horizontal='center',
                                           vertical='center')
        if freeze != None:
            ws.freeze_panes = ws[freeze]
        # save spreadsheet
        wb.save(fileName)
        # send wb to cloud
        cloudUpload(fileName)

    # give column names and add date info before exporting to spreadsheet
    colName = [
        'Totalt', '00-01', '01-02', '02-03', '03-04', '04-05', '05-06',
        '06-07', '07-08', '08-09', '09-10', '10-11', '11-12', '12-13',
        '13-14', '14-15', '15-16', '16-17', '17-18', '18-19', '19-20',
        '20-21', '21-22', '22-23', '23-24'
    ]
    data['Omsetning']['Daglig'].insert(0, colName)
    data['Omsetning']['Daglig'].insert(0, [
        data['Tider']['yesterday']['weekday'].title() + ' Uke-' +
        data['Tider']['yesterday']['weekNum']
    ])
    data['Omsetning']['Daglig'].insert(0, [data['Tider']['yesterday']['human']])
    data['Omsetning']['Daglig'].insert(0, ['Omsetning'])
    colName = [
        'Artikkel ID', 'Merke', 'Navn', 'Importert', 'Antall Lager',
        'Lagerplass', 'Lev.ID'
    ]
    data['Import']['Daglig'].insert(0, colName)
    data['Import']['Daglig'].insert(0, [
        data['Tider']['yesterday']['weekday'].title() + ' Uke-' +
        data['Tider']['yesterday']['weekNum']
    ])
    data['Import']['Daglig'].insert(0, [data['Tider']['yesterday']['human']])
    data['Import']['Daglig'].insert(0, ['Vareimport'])
    colName = [
        'Artikkel ID', 'Merke', 'Navn', 'Antall Lager', 'Lagerplass',
        'Sist Importert', 'Lev. ID'
    ]
    data['Utsolgt']['Daglig'].insert(0, colName)
    data['Utsolgt']['Daglig'].insert(0, [
        data['Tider']['yesterday']['weekday'].title() + ' Uke-' +
        data['Tider']['yesterday']['weekNum']
    ])
    data['Utsolgt']['Daglig'].insert(0, [data['Tider']['yesterday']['human']])
    data['Utsolgt']['Daglig'].insert(0, ['Utsolgte Varer'])
    colName = [
        'Artikkel ID', 'Merke', 'Navn', 'Antall Solgt', 'Dato', 'Klokketime',
        'Pris', 'Rabatt', 'Betalingsmate'
    ]
    data['Salg']['Daglig'].insert(0, colName)
    data['Salg']['Daglig'].insert(0, [
        data['Tider']['yesterday']['weekday'].title() + ' Uke-' +
        data['Tider']['yesterday']['weekNum']
    ])
    data['Salg']['Daglig'].insert(0, [data['Tider']['yesterday']['human']])
    data['Salg']['Daglig'].insert(0, ['Varesalg'])
    # loop through results in data and export spreadsheets
    for category in data:
        if category != 'Tider':
            for when in data[category]:
                if when == 'Daglig':
                    fileName = os.path.join(
                        dirs[category], when,
                        data['Tider']['yesterday']['YYYYMMDD'] + '.xlsx')
                elif when == 'Ukentlig':
                    fileName = os.path.join(
                        dirs[category], when,
                        data['Tider']['yesterday']['YYYY-weekNum'] + '.xlsx')
                elif when == 'Maanedlig':
                    fileName = os.path.join(
                        dirs[category], when,
                        data['Tider']['yesterday']['YYYYMMDD'][:6] + '.xlsx')
                else:
                    fileName = False
                if fileName != False:
                    exportXLSX(fileName)
                    Log(f'Spreadsheet: exporting {fileName}')

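# For illustration, with placeholder values the command assembled by
# cloudUpload() comes out roughly as below (the /remote.php/dav/files/ path
# suggests a Nextcloud/ownCloud-style WebDAV endpoint; server, user, password
# and the local file path are examples only, not real values):
#
#     curl -u user:password -T <dirs[category]>/Daglig/20210106.xlsx \
#         https://cloud.example.com/remote.php/dav/files/user/Omsetning/Daglig/
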
def __init__(self):
    os.makedirs('%s/inventory' % os.path.dirname(os.path.realpath(__file__)),
                exist_ok=True)
    os.makedirs('%s/inventory/sessions' %
                os.path.dirname(os.path.realpath(__file__)),
                exist_ok=True)
    self.getTime = datetime.now()
    self.intDate = self.getTime.strftime("%Y%m%d")
    # self.timestamp = self.getTime.strftime(
    #     "%Y-%m-%d_%H:%M:%S.%f")[:-4]
    self.file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             'inventory')
    self.sessionPath = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), 'inventory/sessions',
        self.intDate)
    # if no session file, touch to create empty
    self.sessionFile = os.path.join(self.sessionPath + '.csv')
    if not os.path.isfile(self.sessionFile):
        Log('First session run, creating session: ' + self.intDate)
        with open(self.sessionFile, 'a'):
            os.utime(self.sessionFile, None)
    with open(
            '%s/mode.json' %
            os.path.join(os.path.dirname(os.path.realpath(__file__))),
            'r') as mode:
        self.mode = json.load(mode)
    if self.mode['shutdown'] == True:
        from subprocess import call
    # load credentials for get server
    self.credentialsGet = loadCredentials('get')
    # load credentials for post server
    self.credentialsPost = loadCredentials('post')
    # read, add, save session csv
    self.sessions = [
        file for file in os.listdir(
            '%s/inventory/sessions' %
            os.path.dirname(os.path.realpath(__file__)))
        if os.path.splitext(file)[-1] == '.csv'
    ]
    # check for existing sessions
    if self.sessions == []:
        Log('sessions directory /inventory/sessions/ is empty' +
            ', new session with stamp ' + self.intDate +
            '.csv will be created')
        open(r'%s.csv' % self.sessionPath, 'a', newline='')
    else:
        if int(self.intDate) < int(max(self.sessions)[0:8]):
            Log('datestamp: ' + self.intDate +
                ' is not up to date compared to latest session file')
        else:
            Log('session'.ljust(10) + self.intDate)

# example use
if __name__ == '__main__':
    Log(f'executing {__file__}', '5')
    dataFile = Build()
    dataFile.runbuild()

def turnoverWeekly(self):
    data = []
    rowSum = [0] * 25  # adding values while iterating
    rowSum.insert(0, 'SUM')
    data.append(['Omsetning'])
    data.append([
        'Uke-' + str(self.weekNumYesterday) + ' ' +
        str(self.yesterdayYYYMMDD[:4])
    ])
    data.append(columnNames['clockHoursWeekly'])
    for i in range(-7, 0, 1):
        record = []
        record.append(self.cursor.execute(self.weekdayGet, i).fetchone()[0])
        total = self.cursor.execute('''
            SELECT CASE
                       WHEN CAST(SUM(Brto_Salg_Kr) AS INT) IS NULL THEN 0
                       ELSE CAST(SUM(Brto_Salg_Kr) AS INT)
                   END
            FROM view_HIP_salesInfo_10
            WHERE DATEPART(WEEKDAY, [salesdate]) = DATEPART(WEEKDAY, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
              AND DATEPART(WEEK, [salesdate]) = DATEPART(WEEK, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
              AND DATEPART(YEAR, [salesdate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
              AND isGiftCard = '0'
            ''', i, self.yesterday, self.yesterday).fetchone()
        record.append(total[0])
        for hour in range(24):
            # append the turnover for each hour of the day
            hourly = self.cursor.execute('''
                SELECT CASE
                           WHEN CAST(SUM(Brto_Salg_Kr) AS INT) IS NULL THEN 0
                           ELSE CAST(SUM(Brto_Salg_Kr) AS INT)
                       END
                FROM view_HIP_salesInfo_10
                WHERE DATEPART(HOUR, [salesdate]) = (?)
                  AND DATEPART(DAYOFYEAR, [salesdate]) = DATEPART(DAYOFYEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
                  AND DATEPART(YEAR, [salesdate]) = DATEPART(YEAR, DATEADD(DAY, (?), CURRENT_TIMESTAMP))
                  AND isGiftCard = '0'
                ''', hour, i, self.yesterday).fetchone()
            record.append(hourly[0])
        # after appending values, accumulate the sum for the last row
        for col in range(len(record)):
            if col != 0:
                rowSum[col] += record[col]
        data.append(record)
    data.append(rowSum)
    Log('Getconnect: Fetching from turnoverWeekly')
    return data

def __init__(self):
    from credentials import loadCredentials
    credentials = loadCredentials('get')
    driver = loadDriver()
    # try:
    if 'windows' in driver:
        try:
            Log('Getconnect: Platform: Windows')
            self.cnxn = pyodbc.connect(
                'DRIVER={%s};SERVER=%s;DATABASE=%s;UID=%s;PWD=%s' %
                (driver['windows'], credentials['server'],
                 credentials['database'], credentials['user'],
                 credentials['password']))
            Log(f'Getconnect: Connected to {credentials["database"]}')
        except pyodbc.ProgrammingError:
            Log(f'Getconnect: Could not connect to {credentials["database"]}')
            exit()
    elif 'linux' in driver:
        Log('Getconnect: Platform: Linux')
        try:
            self.cnxn = pyodbc.connect(
                'DRIVER={FreeTDS};SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s' %
                (credentials['server'], credentials['port'],
                 credentials['database'], credentials['user'],
                 credentials['password']))
            Log(f'Getconnect: Connected to {credentials["database"]}')
        except pyodbc.ProgrammingError:
            Log(f'Getconnect: Could not connect to {credentials["database"]}')
            exit()
    Log(f'Getconnect: Connected to {credentials["database"]}')
    self.timeGet = 'SELECT CONVERT(VARCHAR(16),GETDATE(),20)'
    self.timestampGet = 'SELECT CONVERT(VARCHAR(20),CURRENT_TIMESTAMP,20)'
    self.YYYYMMDDGET = '''SELECT CONVERT(VARCHAR(10),DATEADD(DAY, (?),CURRENT_TIMESTAMP),112)'''
    self.weekNumGet = '''SELECT DATENAME(WEEK, DATEADD(DAY, (?), CURRENT_TIMESTAMP))'''
    self.dateMonthGet = '''SELECT DATENAME(DAY, DATEADD(DAY, (?), CURRENT_TIMESTAMP))'''
    self.weekdayGet = '''SELECT DATENAME(WEEKDAY, DATEADD(DAY, (?), CURRENT_TIMESTAMP))'''
    self.monthGet = '''SELECT DATENAME(MONTH, DATEADD(DAY, (?), CURRENT_TIMESTAMP))'''
    self.yesterday = -1  # subtract 1 day from today's date
    # self.days = [-1, -8, -15]  # last day, same weekday the week before and the week before that
    self.cursor = self.cnxn.cursor()
    self.cursor.execute('SET LANGUAGE NORWEGIAN')
    self.time = self.cursor.execute(self.timeGet).fetchone()[0][11:16]
    self.timestamp = self.cursor.execute(self.timestampGet).fetchone()[0]
    self.YYYYMMDD = self.cursor.execute(self.YYYYMMDDGET, 0).fetchone()[0]
    self.weekNum = self.cursor.execute(self.weekNumGet, 0).fetchone()[0]
    self.weekday = self.cursor.execute(self.weekdayGet, 0).fetchone()[0]
    self.weekdayYesterday = self.cursor.execute(
        self.weekdayGet, self.yesterday).fetchone()[0]
    self.yesterdayYYYMMDD = self.cursor.execute(
        self.YYYYMMDDGET, self.yesterday).fetchone()[0]
    self.weekNumYesterday = self.cursor.execute(
        self.weekNumGet, self.yesterday).fetchone()[0]
    self.monthYesterday = self.cursor.execute(
        self.monthGet, self.yesterday).fetchone()[0]
    self.dateHuman = humanYYYYMMDD(self.YYYYMMDD)
    self.dateYesterdayHuman = humanYYYYMMDD(self.yesterdayYYYMMDD)