def createTableNE(nomenclature='', dicNation='', endYear='', fileLog='',
                  indicatorSpi='', compteEurostat='', tableName='', dicsize=''):
    # For the list of NE NACE codes, create a record with '-' as the vector value
    dicNaceNE = spiLib.defSelectdicNaceNE(nomenclature, compteEurostat)
    countrySort = dicNation.keys()
    countrySort.sort()
    keyTotal = indicatorSpi + '_TOTAL'
    for country in countrySort:
        naceSort = list(dicNaceNE.keys())
        naceSort.sort()
        for nace in naceSort:
            try:
                # Indicators that carry a size breakdown
                sizeLst = dicsize.values()
                sizeSort = set(sizeLst)  # keep unique values
                sizeSort = list(sizeSort)  # convert to a list
                for size in sizeSort:
                    indicatorSize = indicatorSpi + '_' + size
                    DBAccess.majDBtable(tableName, indicatorSize, country,
                                        str(endYear), nace, nomenclature, '-')
            except:
                # No size breakdown
                DBAccess.majDBtable(tableName, indicatorSpi, country,
                                    str(endYear), nace, nomenclature, '-')

def Register():
    login = input("Choose a login\n")
    while 1:
        if DBAccess.FreeLogin(login):
            break
        else:
            print("That login is already taken. Please choose another one")
            login = input()
    password = getpass.getpass("Choose a password\n")
    while 1:
        if len(password) >= 5:
            password1 = getpass.getpass("Confirm the password\n")
            if password == password1:
                break
            else:
                print("The passwords do not match")
                # Re-prompt instead of leaving password as None, which would
                # crash the length check on the next iteration
                password = getpass.getpass("Choose a password\n")
        else:
            print("The password must be at least 5 characters long")
            password = getpass.getpass("Choose a password\n")
    date = input("Enter your date of birth in DD-MM-YYYY format\n")
    while 1:
        if len(date) == 10:
            break
        else:
            date = input("Invalid date! Please try again\n")
    DBAccess.AddUser(login, password, date)
    return login

def createTableCompetitionImportpen(nomenclature, dicGO, dicX, dicM, startYear,
                                    fileLog, tableName):
    res = []
    refX = []
    refM = []
    refGO = []
    for country in dicGO:
        for code in dicGO[country]:
            res = []
            refGO = dicGO[country][code]
            try:
                refX = dicX[country][code]
                refM = dicM[country][code]
            except:
                fileLog.write('No export or import data for code ' + code +
                              ' and country ' + country + '.\n')
                continue
            for i in range(0, len(refGO)):
                if refGO[i] == ':' or refX[i] == ':' or refM[i] == ':':
                    res.append(':')
                elif refGO[i] == '~' or refX[i] == '~' or refM[i] == '~':
                    res.append('~')
                else:
                    ac = float(refGO[i]) * 1000000 + float(refM[i]) - float(refX[i])
                    try:
                        res.append('{0:.8f}'.format(float(refM[i]) / ac))
                    except:
                        res.append('~')
            DBAccess.majDBtable(tableName, 'importpen', country, str(startYear),
                                code, nomenclature, ','.join(res))

def createTableExternalGeoShare(nomenclature, dicWorld, dicDestor, indicator,
                                startYear, fileLog, tableName):
    res = []
    refNum = []
    refDen = []
    for country in dicDestor:
        for code in dicDestor[country]:
            try:
                refDen = dicWorld[country][code]
            except:
                fileLog.write('No world data for code ' + code +
                              ' and country ' + country + ' .\n')
                continue
            for partner in dicDestor[country][code]:
                res = []
                refNum = dicDestor[country][code][partner]
                for i in range(0, len(refNum)):
                    if refDen[i] == ':' or refNum[i] == ':':
                        res.append(':')
                    elif refDen[i] == '~' or refNum[i] == '~' or float(refDen[i]) == 0:
                        res.append('~')
                    else:
                        res.append('{0:.8f}'.format(float(refNum[i]) / float(refDen[i])))
                DBAccess.majDBtableGeo(tableName, indicator, country,
                                       str(startYear), code, nomenclature,
                                       ','.join(res), partner)

def createTableCompetitionOpen(nomenclature, dicVa, dicX, dicM, startYear,
                               fileLog, tableName):
    res = []
    refX = []
    refM = []
    refVa = []
    for country in dicVa:
        for code in dicVa[country]:
            res = []
            refVa = dicVa[country][code]
            try:
                refX = dicX[country][code]
                refM = dicM[country][code]
            except:
                fileLog.write('No export or import data for code ' + code +
                              ' and country ' + country + '.\n')
                continue
            for i in range(0, len(refVa)):
                if refVa[i] == ':' or refX[i] == ':' or refM[i] == ':':
                    res.append(':')
                elif refVa[i] == '~' or refX[i] == '~' or refM[i] == '~' or float(refVa[i]) == 0:
                    res.append('~')
                else:
                    res.append('{0:.8f}'.format(
                        (float(refX[i]) + float(refM[i])) /
                        (2 * float(refVa[i]) * 1000000)))
            DBAccess.majDBtable(tableName, 'open', country, str(startYear),
                                code, nomenclature, ','.join(res))

def createTableNomenclatureBasic(dicIndicator, indicator, nomenclature,
                                 startYear, tableName):
    for country in dicIndicator:
        for code in dicIndicator[country]:
            DBAccess.majDBtable(tableName, indicator, country, str(startYear),
                                code, nomenclature,
                                ','.join(dicIndicator[country][code]))

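# A minimal usage sketch (not part of the original module). It assumes, from the
# loops above, that dicIndicator maps country -> code -> a list of yearly values
# stored as strings, with ':' marking a missing value (the convention used
# throughout these functions). The indicator, nomenclature, start year and table
# name below are placeholders.
def _exampleCreateTableNomenclatureBasic():
    dicIndicator = {
        'BE': {'C10': ['12.5', '13.1', ':'],
               'C11': ['4.0', '4.2', '4.3']},
    }
    # Writes one record per (country, code) pair through DBAccess.majDBtable,
    # with the yearly values joined into a comma-separated vector.
    createTableNomenclatureBasic(dicIndicator, 'va', 'nace2', 2008, 'indicator_table')
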
def createTableNacePercentage(nomenclature, dicIndicatorA, dicIndicatorB,
                              indicator, startYear, fileLog, tableName):
    '''This function creates database records that are the result of a ratio:
    indicator A over indicator B.'''
    res = []
    a = []
    b = []
    for country in dicIndicatorA:
        for nace in dicIndicatorA[country]:
            res = []
            a = []
            b = []
            try:
                a = dicIndicatorA[country][nace]
            except:
                continue
            try:
                b = dicIndicatorB[country][nace]
            except:
                continue
            for i in range(0, len(a)):
                if a[i] == ':' or b[i] == ':':
                    res.append(':')
                elif float(b[i]) == 0:
                    res.append('~')
                else:
                    res.append(str((float(a[i]) / float(b[i])) * 100))
            DBAccess.majDBtable(tableName, indicator, country, str(startYear),
                                nace, nomenclature, ','.join(res))

def createTableExternalXMShare(indicator, nomenclature, minYear, dicIndicator,
                               tableName, fileLog):
    res = []
    ref = {}
    refTotal = {}
    for country in dicIndicator:
        try:
            refTotal = dicIndicator[country]['TOTAL']
        except:
            fileLog.write('Missing TOTAL reference for country ' + country + '.\n')
            continue
        for code in dicIndicator[country]:
            if code != 'TOTAL':
                res = []
                ref = dicIndicator[country][code]
                for i in range(0, len(ref)):
                    if ref[i] == ':' or refTotal[i] == ':':
                        res.append(':')
                    elif float(refTotal[i]) == 0:
                        res.append('~')
                    else:
                        res.append(str((float(ref[i]) / float(refTotal[i])) * 100))
                DBAccess.majDBtable(tableName, indicator, country, str(minYear),
                                    code, nomenclature, ','.join(res))

def createTableExternalSpecialisation(indicator, nomenclature, dicIndicator,
                                      startYear, reference, fileLog, tableName):
    res = []
    refNum = []
    refDen = []
    for country in dicIndicator:
        for code in dicIndicator[country]:
            res = []
            refNum = dicIndicator[country][code]
            try:
                refDen = dicIndicator[reference][code]
            except:
                fileLog.write('No ' + reference + ' for code ' + code +
                              ' and country ' + country + ' .\n')
                continue
            for i in range(0, len(refNum)):
                if refDen[i] == ':' or refNum[i] == ':':
                    res.append(':')
                elif refDen[i] == '~' or refNum[i] == '~':
                    res.append('~')
                else:
                    res.append('{0:.8f}'.format(
                        (float(refNum[i]) / 100) / (float(refDen[i]) / 100)))
            DBAccess.majDBtable(tableName, indicator, country, str(startYear),
                                code, nomenclature, ','.join(res))

def createTableTradeTrbalRbal(nomenclature, dicX, dicM, startYear, fileLog,
                              tableName):
    trbal = []
    rbal = []
    x = []
    m = []
    for country in dicX:
        for code in dicX[country]:
            trbal = []
            rbal = []
            x = dicX[country][code]
            try:
                m = dicM[country][code]
            except:
                continue
            for i in range(0, len(x)):
                if x[i] == ':' or m[i] == ':':
                    trbal.append(':')
                    rbal.append(':')
                else:
                    tmp = float(x[i]) - float(m[i])
                    trbal.append('{0:.8f}'.format(tmp))
                    try:
                        tmp = float(tmp) / (float(x[i]) + float(m[i]))
                        rbal.append('{0:.8f}'.format(tmp))
                    except:
                        rbal.append('~')
            DBAccess.majDBtable(tableName, 'trbal', country, str(startYear),
                                code, nomenclature, ','.join(trbal))
            DBAccess.majDBtable(tableName, 'rbal', country, str(startYear),
                                code, nomenclature, ','.join(rbal))

def createTableExternalXM(indicator, nomenclature, minYear, dicIndicator,
                          tableName, fileLog):
    for country in dicIndicator:
        for code in dicIndicator[country]:
            res = []
            refIntra = {}
            refExtra = {}
            try:
                refIntra = dicIndicator[country][code]['EU27_INTRA']
            except:
                fileLog.write('Missing intra EU27 reference for country ' +
                              country + ' and code ' + code + '.\n')
                continue
            try:
                refExtra = dicIndicator[country][code]['EU27_EXTRA']
            except:
                fileLog.write('Missing extra EU27 reference for country ' +
                              country + ' and code ' + code + '.\n')
                continue
            for i in range(0, len(refIntra)):
                if refIntra[i] == ':' or refExtra[i] == ':':
                    res.append(':')
                else:
                    res.append(str(int(refIntra[i]) + int(refExtra[i])))
            DBAccess.majDBtable(tableName, indicator, country, str(minYear),
                                code, nomenclature, ','.join(res))

def defSelectdicNace(nomenclature, compteEurostat):
    dicNace = {}
    if nomenclature == 'nace1':
        dicNace = DBAccess.lectureNace1(dicNace, compteEurostat)
    else:
        dicNace = DBAccess.lectureNace2(dicNace, compteEurostat)
    return dicNace

def _apply_btn_clickked(self):
    if self.ComboBox.current() == 0:
        try:
            duration = int(self.durationEntry.get()) + 1
            requestID = DB.AddRequest(self.user[0],
                                      datetime.today().strftime('%d-%m-%Y'),
                                      duration)
            DataSet = Bio.CalculateBiorhythmsInterval(self.user[0], self.user[3],
                                                      datetime.today(), duration,
                                                      requestID)
        except:
            tm.showerror("Date error", "Invalid forecast duration")
            return
    else:
        try:
            dateS = datetime.strptime(self.startEntry.get(), '%d-%m-%Y')
            dateF = datetime.strptime(self.finishEntry.get(), '%d-%m-%Y')
            d = timedelta(days=1)
            dateF += d
            duration = dateF - dateS
            duration = duration.days
            if duration < 0:
                tm.showerror("Date error", "Invalid forecast interval")
                return
            requestID = DB.AddRequest(self.user[0], dateS.strftime('%d-%m-%Y'),
                                      duration)
            DataSet = Bio.CalculateBiorhythmsInterval(self.user[0], self.user[3],
                                                      dateS, duration, requestID)
        except:
            tm.showerror("Date error", "Invalid forecast interval")
            return
    DB.WriteData(DataSet)
    tm.showinfo("Forecast success", "Forecast created successfully")
    self.master.destroy()

def _register_btn_clickked(self):
    if not DB.FreeLogin(self.userEntry.get()):
        tm.showerror("Login error", "That username is already taken")
        return
    if len(self.passEntry.get()) < 5:
        tm.showerror("Password error", "The password must be at least 5 characters long")
        return
    if self.passEntry.get() != self.confpassEntry.get():
        tm.showerror("Password error", "The passwords do not match")
        return
    if len(self.dateEntry.get()) != 10:
        tm.showerror("Date error", "Invalid date")
        return
    lines = self.dateEntry.get().split("-")
    # Day must be 1..31 and month 1..12
    if int(lines[0]) < 1 or int(lines[0]) > 31:
        tm.showerror("Date error", "Invalid date")
        return
    if int(lines[1]) < 1 or int(lines[1]) > 12:
        tm.showerror("Date error", "Invalid date")
        return
    DB.AddUser(self.userEntry.get(), self.passEntry.get(), self.dateEntry.get())
    self.master.destroy()

def createTableTotalShare(dicIndicator, startYear, indicator, nomenclature,
                          fileLog, tableName):
    res = []
    total = []
    ref = []
    for country in dicIndicator:
        try:
            total = dicIndicator[country]['TOTAL']
        except:
            fileLog.write('No total for country ' + country + '\n')
            continue
        for code in dicIndicator[country]:
            res = []
            ref = dicIndicator[country][code]
            for i in range(0, len(ref)):
                try:
                    curTotal = total[i]
                except:
                    res.append(':')
                    continue
                if ref[i] == ':' or curTotal == ':':
                    res.append(':')
                elif ref[i] == '~' or curTotal == '~' or float(curTotal) == 0:
                    res.append('~')
                else:
                    res.append('{0:.8f}'.format(
                        (float(ref[i]) / float(curTotal)) * 100))
            DBAccess.majDBtable(tableName, indicator, country, str(startYear),
                                code, nomenclature, ','.join(res))

def defSelectdicNaceSkillTech(nomenclature):
    dicNace = {}
    if nomenclature == 'nace1':
        dicNace = DBAccess.lectureNace1SkillTech(dicNace)
    else:
        dicNace = DBAccess.lectureNace2SkillTech(dicNace)
    return dicNace

def createTableCountryLevelFdi(dicIndicator, dicGdp, spiIndicator, startYear,
                               fileLog, tableName):
    res = []
    fdi = []
    gdp = []
    for country in dicIndicator:
        res = []
        gdp = []
        fdi = dicIndicator[country]
        try:
            gdp = dicGdp[country]
        except:
            fileLog.write('No gdp data for country ' + country + '\n')
            continue
        for i in range(0, len(fdi)):
            if gdp[i] == ':' or fdi[i] == ':':
                res.append(':')
            else:
                res.append(str((float(fdi[i]) / float(gdp[i])) * 100))
        DBAccess.majDBtable(tableName, spiIndicator, country, str(startYear),
                            'default', 'default', ','.join(res))

def defDicTotalNaceGrowth(dicTotalNace, indicatorSpi, startYear, nomenclature,
                          tableName, growthTime):
    # Write the total NACE vectors
    vectorInit = ''
    for i in range(0, growthTime):
        vectorInit = ':,' + vectorInit
    for country in dicTotalNace:
        for nace in dicTotalNace[country]:
            nbrVector = len(dicTotalNace[country][nace])
            vector = dicTotalNace[country][nace]
            vectorElement = vectorInit
            for i in range(growthTime, nbrVector):
                try:
                    valeurVector = float(vector[i])
                except:
                    # If the field is not numeric, store ':'
                    valeurVector = ':'
                if growthTime != 0:
                    try:
                        valeurVectorOld = float(vector[i - growthTime])
                        valeurVector = defGrowthTime(growthTime, valeurVector,
                                                     valeurVectorOld)
                    except:
                        valeurVector = ':'
                vectorElement = vectorElement + str(valeurVector) + ','
            DBAccess.deleteRecTable(tableName, indicatorSpi, country,
                                    str(startYear), nace, nomenclature)
            DBAccess.majDBtable(tableName, indicatorSpi, country, str(startYear),
                                nace, nomenclature, vectorElement[:-1])

def defAgregatTableTotal(dicTotalNace, indicatorSpi, startYear, nomenclature,
                         tableName):
    # Write the total NACE vectors
    for country in dicTotalNace:
        totalCountryExist = 1
        if nomenclature == 'nace1':
            try:
                # Key of the country-level total
                vectorTotalNace = dicTotalNace[country]['C-K_X_J']
            except:
                totalCountryExist = 0
        else:
            try:
                # Key of the country-level total
                vectorTotalNace = dicTotalNace[country]['B-N_X_K']
            except:
                totalCountryExist = 0
        if totalCountryExist:
            for nace in dicTotalNace[country]:
                vector = dicTotalNace[country][nace]
                # The returned va value is not used in this case
                vectorOutput, va = defCalculAllVectors(vector, vectorTotalNace)
                DBAccess.deleteRecTable(tableName, indicatorSpi, country,
                                        str(startYear), nace, nomenclature)
                DBAccess.majDBtable(tableName, indicatorSpi, country,
                                    str(startYear), nace, nomenclature,
                                    vectorOutput)

def createTableNaceGrowth(nomenclature, dicIndicator, indicator, startYear,
                          growthTime, fileLog, tableName):
    res = []
    ref = []
    for country in dicIndicator:
        for nace in dicIndicator[country]:
            res = []
            ref = dicIndicator[country][nace]
            if ref[0] != '-':
                for i in range(growthTime, len(ref)):
                    current = ref[i]
                    old = ref[i - growthTime]
                    if current == ':' or old == ':':
                        res.append(':')
                    elif current == '~' or old == '~':
                        res.append(':')
                    else:
                        res.append(str(defGrowthTime(growthTime, float(current),
                                                     float(old))))
                DBAccess.majDBtable(tableName, indicator, country,
                                    str(startYear + growthTime), nace,
                                    nomenclature, ','.join(res))
            else:
                DBAccess.majDBtable(tableName, indicator, country,
                                    str(startYear + growthTime), nace,
                                    nomenclature, '-')

def createTable(nomenclature, dicIndicator, fileLog, minStartYear, dicNace,
                indicatorSpi, compteEurostat, tableName, fileExt=''):
    lstTotal = spiLibTotal.defSelectLstTotal(nomenclature, compteEurostat, fileExt)
    #dicIndicator = spiLib.addValueMissing(dicIndicator, dicNace, minStartYear)
    dicIndicator = spiLib.addValueMissing(dicIndicator, lstTotal, minStartYear)
    startYear = minStartYear
    dicTotalNace = {}
    countrySort = dicIndicator.keys()
    countrySort.sort()
    for country in countrySort:
        dicTotalNace[country] = {}
        naceSort = dicIndicator[country].keys()
        naceSort.sort()
        for nace in naceSort:
            vector = dicIndicator[country][nace]
            nbrVector = len(vector)
            # Return the vector with the gaps filled and the starting index for
            # the total, which corresponds to the number of gaps filled
            vectorElement = ''
            vectorTotal = [':'] * nbrVector
            for i in range(0, nbrVector):
                valeurVector = vector[i].split(';')
                try:
                    elementVector = float(valeurVector[0])
                except:
                    elementVector = ':'
                try:
                    flag = ';' + valeurVector[1]
                except:
                    flag = ''
                vectorElement = vectorElement + str(elementVector) + flag + ','
                vectorTotal[i] = elementVector
            # Compute the totals for the indicator
            if lstTotal.count(nace):
                dicTotalNace = spiLibTotal.defTotalNace(
                    dicTotalNace, indicatorSpi, nace, nomenclature, country,
                    compteEurostat, vectorTotal, lstTotal, '', fileExt)
            # Select the NACE codes for the indicator before writing to the database
            if dicNace.has_key(nace):
                DBAccess.majDBtable(tableName, indicatorSpi, country,
                                    str(startYear), nace, nomenclature,
                                    vectorElement[:-1])
            #if indicatorSpi == 'emp':
                # Also create the indicator for the growth table
                #DBAccess.majDBtable('growth', 'emp', country, str(startYear), nace, nomenclature, vectorElement[:-1])
    # Write the total NACE records; no need to filter the NACE codes here,
    # these are only the totals
    defDicTotalNace(dicTotalNace, indicatorSpi, startYear, nomenclature, tableName)
    # Return the dictionary of TOTALS to handle the cases where a total indicator
    # derives from another indicator, e.g. vabussh which comes from vabus
    return dicTotalNace

def createTableNomenclaturePartner(dicIndicator, indicator, nomenclature,
                                   startYear, tableName):
    for country in dicIndicator:
        for code in dicIndicator[country]:
            for partner in dicIndicator[country][code]:
                DBAccess.majDBtableGeo(
                    tableName, indicator, country, str(startYear), code,
                    nomenclature, ','.join(dicIndicator[country][code][partner]),
                    partner)

def MockAddEvent(title1, datee, desc1):
    sqlx = DBAccess.bld_add_sql(title1, datee, desc1)
    list_result = DBAccess.add_event(sqlx)
    if list_result[0] == 'error':
        print "MockAddEvent: error - {}".format(list_result[1])
    else:
        print "MockAddEvent: success - {}".format(list_result[1])
    return list_result[0]

def _login_btn_clickked(self):
    username = self.userEntry.get()
    password = self.passEntry.get()
    if DB.Control(username, password):
        tm.showinfo("Login success", "Welcome, %s" % username)
        self.master.destroy()
        bio.Run(DB.GetDateOfBirth(username))
    else:
        tm.showerror("Login error", "Incorrect username or password")

def defSelectdicNace(nomenclature, compteEurostat):
    dicNace = {}
    if nomenclature == 'nace1':
        dicNace = DBAccess.lectureNace1(dicNace, compteEurostat)
    else:
        dicNace = DBAccess.lectureNace2(dicNace, compteEurostat)
    if compteEurostat == 'bd':
        dicNace['C15'] = 'C'
        dicNace['C13_C14'] = 'C'
    return dicNace

def createTableSize(nomenclature, dicIndicator, fileLog, minStartYear, dicNace,
                    indicatorSpi, compteEurostat, tableName, fileExt=''):
    # List of NACE codes to select for the totals to compute, depending on the
    # nomenclature (nace1 or nace2) and the national account code (nama or sbs)
    lstTotal = spiLibTotal.defSelectLstTotal(nomenclature, compteEurostat, fileExt)
    dicIndicator = spiLib.addValueMissingSize(dicIndicator, lstTotal, minStartYear)
    startYear = minStartYear
    dicAgregatNace = {}
    countrySort = dicIndicator.keys()
    countrySort.sort()
    keyTotal = indicatorSpi + '_TOTAL'
    for country in countrySort:
        dicAgregatNace[country] = {}
        #totalCountryExist = 1
        naceSort = dicIndicator[country].keys()
        naceSort.sort()
        for nace in naceSort:
            sizeSort = dicIndicator[country][nace].keys()
            sizeSort.sort()
            try:
                # Key of the country-level total
                vectorTotalNace = dicIndicator[country][nace][keyTotal]
            except:
                fileLog.write('no total for country : ' + country + ' nace ' +
                              nace + ' indicator ' + indicatorSpi + '\n')
                continue
            for size in sizeSort:
                # Indicator + size
                vector = dicIndicator[country][nace][size]
                vectorOutput, vectorAgregat = defCalculAllVectors(vector,
                                                                  vectorTotalNace)
                # Select the NACE codes for the indicator before writing to the
                # database; the computed totals are not handled here
                if dicNace.has_key(nace):
                    DBAccess.majDBtable(tableName, size, country, str(startYear),
                                        nace, nomenclature, vectorOutput)
                # Handle the aggregates: build the dict keyed by country and
                # keyTotal (the aggregate key)
                if lstTotal.count(nace):
                    # The size information must be carried in the field
                    sizeTotal = ':' + size
                    dicAgregatNace = spiLibTotal.defTotalNace(
                        dicAgregatNace, indicatorSpi, nace, nomenclature, country,
                        compteEurostat, vectorAgregat, lstTotal, sizeTotal, fileExt)
    defTableTotalSize(dicAgregatNace, indicatorSpi, startYear, nomenclature,
                      tableName)

def createTableCountryLevelBtechBtechgdp(dicIndicator, dicGdp, startYear,
                                         fileLog, tableName):
    res = []
    resGdp = []
    x = []
    m = []
    gdp = []
    for country in dicIndicator:
        gdp = []
        x = []
        m = []
        res = []
        resGdp = []
        try:
            gdp = dicGdp[country]
        except:
            pass
        try:
            x = dicIndicator[country]['EXP']
        except:
            pass
        try:
            m = dicIndicator[country]['IMP']
        except:
            pass
        if not x or not m:
            fileLog.write('no btech data for country : ' + country + '\n')
            continue
        else:
            if not gdp:
                fileLog.write('no btechgdp data for country : ' + country + '\n')
            for i in range(0, len(x)):
                if x[i] == ':' or m[i] == ':':
                    res.append(':')
                    resGdp.append(':')
                else:
                    resCalc = float(x[i]) - float(m[i])
                    res.append(str(resCalc))
                    if gdp:
                        try:
                            resCalcGdp = (resCalc / float(gdp[i])) * 100
                            resGdp.append(str(resCalcGdp))
                        except:
                            resGdp.append(':')
            DBAccess.majDBtable(tableName, 'btech', country, str(startYear),
                                'default', 'default', ','.join(res))
            if gdp:
                DBAccess.majDBtable(tableName, 'btechgdp', country,
                                    str(startYear), 'default', 'default',
                                    ','.join(resGdp))

def _MockGetEvents(start_date, end_date, desc):
    """ test the web url "/get_events" """
    sqlx, sqlx_count = DBAccess.bld_query_sql(start_date, end_date, desc)
    list_result = DBAccess.get_events(sqlx, sqlx_count)
    if list_result[0] == 'error':
        sj = {"events_error": list_result[1]}
    else:
        sj = {"events_details": list_result[1]}
    print sj
    print "MockGetEvent ended"

def _MockAddEvent(data_json):
    """ test the web url "/add_event" """
    title1, date1, desc1 = Tg1Srvr.get_events_values(data_json)
    sqlx = DBAccess.bld_add_sql(title1, date1, desc1)
    list_result = DBAccess.add_event(sqlx)
    if list_result[0] == 'error':
        print "MockAddEvent: error - {}".format(list_result[1])
    else:
        print "MockAddEvent: success - {}".format(list_result[1])
    return list_result[0]

def _MockDeleteEvent(data_json):
    """ test the web url "/delete_event" """
    id1, title1, date1, desc1 = Tg1Srvr.get_events_values(data_json, idp='yes')
    sqlx = DBAccess.bld_delete_sql(id1, title1, date1, desc1)
    list_result = DBAccess.delete_event(sqlx)
    if list_result[0] == 'error':
        print "MockDeleteEvent: error - {}".format(list_result[1])
    else:
        print "MockDeleteEvent: succeeded - {}".format(list_result[1])
    return list_result[0]

def _MockUpdateEvents(data_json):
    """ test the web url "/update_event" """
    id1, title1, date1, desc1 = Tg1Srvr.get_events_values(data_json, idp='yes')
    sqlx = DBAccess.bld_update_sql(id1, title1, date1, desc1)
    list_result = DBAccess.update_event(sqlx)
    if list_result[0] == 'error':
        sj = {"update_event_error": list_result[1]}
    else:
        sj = {"update_event succeeded": list_result[1]}
    print sj
    print "mock_update_event ended"

def printDataIndex(connection, dataIndex):
    sortedUniqueSyncTimestamps = sorted(DBAccess.getUniqueSyncTimestamps(connection))
    if dataIndex > len(sortedUniqueSyncTimestamps):
        print(f"dataIndex {dataIndex} greater than length of timestamps "
              f"{len(sortedUniqueSyncTimestamps)}")
        sys.exit(2)
    timestamp = sortedUniqueSyncTimestamps[-dataIndex]
    printRadiatorSummary(
        connection,
        DBAccess.getTemperatureDataFrameForTimestamp(connection, timestamp))

def createTableDomesticIndex(nomenclature, dicIndicatorA, dicIndicatorB,
                             indicator, startYear, baseYear, fileLog, tableName):
    res = []
    a = []
    b = []
    if startYear <= baseYear:
        # The start year must be no later than the year the index is based on,
        # otherwise the computation makes no sense
        indexYear = baseYear - startYear
    else:
        fileLog.write('The start year is above the index year, '
                      'the computation is impossible.')
        sys.exit()
    for country in dicIndicatorA:
        for nace in dicIndicatorA[country]:
            res = []
            a = []
            b = []
            try:
                a = dicIndicatorA[country][nace]
            except:
                continue
            try:
                b = dicIndicatorB[country][nace]
            except:
                continue
            for i in range(0, len(a)):
                if a[i] == ':' or b[i] == ':':
                    res.append(':')
                elif float(b[i]) == 0:
                    res.append('~')
                else:
                    res.append(float(a[i]) / float(b[i]))
            refYear = res[indexYear]
            for i in range(0, len(res)):
                if res[i] == ':' or refYear == ':':
                    res[i] = ':'
                elif res[i] == '~' or refYear == '~':
                    res[i] = '~'
                elif refYear == 0:
                    res[i] = '~'
                else:
                    res[i] = str((res[i] / refYear) * 100)
            DBAccess.majDBtable(tableName, indicator, country, str(startYear),
                                nace, nomenclature, ','.join(res))

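# Worked illustration (not part of the original module) of the two-pass index
# computation above, using made-up numbers: the first pass builds the ratio a/b
# per year, the second rebases every year on the base-year ratio (index year = 100).
def _exampleDomesticIndexLogic():
    a = ['10', '12.5', '15']
    b = ['5', '5', '5']
    ratios = [float(ai) / float(bi) for ai, bi in zip(a, b)]  # [2.0, 2.5, 3.0]
    base = ratios[0]                                          # rebase on indexYear 0
    return [str(r / base * 100) for r in ratios]              # ['100.0', '125.0', '150.0']
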
def traitementFichierTXT(nomenclature, fileLog, tableName):
    infoX = DBAccess.lectureNaceIndicatorData('x', nomenclature, tableName)
    infoM = DBAccess.lectureNaceIndicatorData('m', nomenclature, tableName)
    refDicX = infoX[0]
    startYearX = infoX[1]
    refDicM = infoM[0]
    startYearM = infoM[1]
    if startYearX != startYearM:
        fileLog.write('Start years for indicators x and m are different.\n')
        return
    spiLibCreateTable.createTableTradeTrbalRbal(nomenclature, refDicX, refDicM,
                                                startYearM, fileLog, tableName)

def get_events():
    """ Get events function, filtered by dates and/or description """
    req = request
    start_date = request.args.get("start_date")
    end_date = request.args.get("end_date")
    desc = request.args.get("event_desc")
    sqlx, sqlx_count = DBAccess.bld_query_sql(start_date, end_date, desc)
    list_result = DBAccess.get_events(sqlx, sqlx_count)
    if list_result[0] == 'error':
        sj = jsonify({"events_error": list_result[1]})
    else:
        sj = jsonify({"events_details": list_result[1]})
    return sj

def update_event():
    """ Update event function """
    req = request
    data_json = json.loads(request.args.get('update_data'))
    id1, title1, date1, desc1 = get_events_values(data_json, idp='yes')
    sqlx = DBAccess.bld_update_sql(id1, title1, date1, desc1)
    list_result = DBAccess.update_event(sqlx)
    if list_result[0] == 'error':
        sj = jsonify({"update_event_error": list_result[1]})
    else:
        sj = jsonify({"update_event successeded": list_result[1]})
    rmsg = DBAccess.save_new_diary()
    return sj

def traitementFichierTXT(nomenclature, indicatorDestor, indicatorWorld,
                         indicatorSpi, worldTableName, fileLog, tableName):
    infoWorld = DBAccess.lectureNaceIndicatorData(indicatorWorld, nomenclature,
                                                  worldTableName)
    infoDestor = DBAccess.lectureNomGeoIndicatorData(indicatorDestor,
                                                     nomenclature, tableName)
    refDicWorld = infoWorld[0]
    startYearWorld = infoWorld[1]
    refDicDestor = infoDestor[0]
    startYearDestor = infoDestor[1]
    if startYearWorld != startYearDestor:
        worldVec = refDicDestor
        while type(worldVec) is not list:
            worldVec = worldVec[worldVec.keys()[0]]
        refDicWorld = spiLib.normalizeDicSize(refDicWorld, startYearWorld,
                                              startYearDestor,
                                              startYearDestor + len(worldVec) - 1)
    spiLibCreateTable.createTableExternalGeoShare(nomenclature, refDicWorld,
                                                  refDicDestor, indicatorSpi,
                                                  startYearDestor, fileLog,
                                                  tableName)

def add_event():
    """ Add event function, POST request through a form """
    # add_data_json = request.args.get('add_data')
    # data_json = json.loads(add_data_json)
    title = request.form.get('event_title')
    date1 = request.form.get('event_date')
    desc = request.form.get('event_desc')
    # title, date1, desc = get_events_values(data_json)
    sqlx = DBAccess.bld_add_sql(title, date1, desc)
    list_result = DBAccess.add_event(sqlx)
    if list_result[0] == 'error':
        sj = jsonify({"add_event": list_result[1]})
    else:
        sj = jsonify({"add_event": list_result[1]})
    rmsg = DBAccess.save_new_diary()
    return sj

def main():
    "The main method that gets invoked at command line. This method validates command line arguments and calls the ImportS57ToDB method"
    # Gather start time
    logger.info("******************** Process Started ***********************************")
    StartTime = time.time()
    FmtStartTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    # Workflow steps based on settings
    section5 = config["Workflow"]
    # Begin the import process, passing the options
    Initialize()
    try:
        File = ""
        if "Yes" in section5["downloadFile"]:
            File = Utils.downloadFile()
        inputDirectory = ""
        if "Yes" in section5["extract"]:
            inputDirectory = Utils.extract(File).split()
        S57Files = []
        if "Yes" in section5["gatherFilesToProcess"]:
            section3 = config["S57"]
            if not inputDirectory:
                inputDirectory = section3["directory"].split()
            filemasks = section3["filemasks"].split()
            S57Files = gatherFilesToProcess(inputDirectory, filemasks)
        if "Yes" in section5["dropDB"]:
            DBAccess.dropDB()
        if "Yes" in section5["createDB"]:
            DBAccess.createDB()
        if "Yes" in section5["createAndImportTables"]:
            DBAccess.createAndImportTables(S57Files, len(S57Files))
        if "Yes" in section5["prepareDB"]:
            DBAccess.prepareDB()
        # Log end and elapsed time
        ElapsedTime = time.time() - StartTime
        logger.info("Total time taken in HH:MM:SS.ms: %s",
                    str(datetime.timedelta(seconds=ElapsedTime)))
        # Send email
        if section5["sendemail"] == "Yes":
            logger.info("Sending notification email")
            message = {
                "Start DateTime for Processing": FmtStartTime,
                "End DateTime for Processing": strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                "Total Time Taken to Process in HH:MM:SS.ms": str(datetime.timedelta(seconds=ElapsedTime)),
                "Total Number of files Processed": len(S57Files),
                "Link to Log file": "https://srclogix.dlinkddns.com/logs/vic.txt",
            }
            Utils.noticeEMail(message)
        logger.info("******************** Process Finished ***********************************")
        return True
    except Exception, e:
        logger.error(e)
        logger.error("******************** Process Errored ***********************************")
        return False

def main():
    "The main method that gets invoked at command line. This method validates command line arguments and calls the ImportS57ToDB method"
    # Gather start time
    logger.info("******************** Process Started ***********************************")
    StartTime = time.time()
    FmtStartTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    # Workflow steps based on settings
    section5 = config['Workflow']
    # Begin the import process, passing the options
    Initialize()
    File = ""
    if "Yes" in section5['downloadFile']:
        File = Utils.downloadFile()
    inputDirectory = ""
    if "Yes" in section5['extract']:
        inputDirectory = Utils.extract(File).split()
    if "Yes" in section5['dropDB']:
        DBAccess.dropDB()
    if "Yes" in section5['createDB']:
        DBAccess.createDB()
    if "Yes" in section5['createAndImportTables']:
        S57Files = []
        Chunks = []
        section3 = config['S57']
        inputDirectory = section3['directory'].split()
        filemasks = section3['filemasks'].split()
        if nProcs < 0:
            # Single processor server ***Test
            S57Files = gatherFilesToProcess(inputDirectory, filemasks)
            DBAccess.createAndImportTables(S57Files, len(S57Files))
        elif nProcs < 2:
            # Single processor server
            Chunks = gatherFilesToProcessInChunks(inputDirectory, filemasks)
            DBAccess.createDBObjects(sum(Chunks, []))
            DBAccess.importData(Chunks[0], nProcs)
        else:
            # Multi processor server
            Chunks = gatherFilesToProcessInChunks(inputDirectory, filemasks)
            # Create the database schema
            logger.info("Creating S57 database -- started")
            # Flatten the individual lists
            AllS57Files = sum(Chunks, [])
            # Create the schema objects (tables, columns).
            # This has to run on a single core
            DBAccess.createDBObjects(AllS57Files)
            jobs = []
            for i in range(nProcs):
                queue = Queue()
                process = multiprocessing.Process(target=DBAccess.importData,
                                                  args=(Chunks[i], i))
                jobs.append(process)
                process.start()
            for job in jobs:
                job.join()
    if "Yes" in section5['prepareDB']:
        DBAccess.prepareDB()
    # Log end and elapsed time
    ElapsedTime = time.time() - StartTime
    logger.info('Total time taken in HH:MM:SS.ms: %s',
                str(datetime.timedelta(seconds=ElapsedTime)))
    # Send email
    if section5['sendemail'] == "Yes":
        logger.info("Sending notification email")
        message = {'Start DateTime for Processing': FmtStartTime,
                   'End DateTime for Processing': strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
                   'Total Time Taken to Process in HH:MM:SS.ms': str(datetime.timedelta(seconds=ElapsedTime)),
                   'Total Number of files Processed': len(S57Files),
                   'Link to Log file': 'https://srclogix.dlinkddns.com/logs/vic.txt'}
        Utils.noticeEMail(message)
    logger.info("******************** Process Finished ***********************************")
    return True

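# Hypothetical settings file consistent with the option names read by the two
# main() variants above (section and key names come from the code; the values
# and paths are placeholders). Sketched with Python 3's configparser for brevity.
def _exampleWorkflowConfig():
    import configparser
    config = configparser.ConfigParser()
    config.read_string("""
[Workflow]
downloadFile = Yes
extract = Yes
gatherFilesToProcess = Yes
dropDB = No
createDB = Yes
createAndImportTables = Yes
prepareDB = Yes
sendemail = No

[S57]
directory = /data/s57
filemasks = *.000
""")
    return config
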
def _mock_save_diary():
    """ test saving the diary """
    rmsg = DBAccess.save_new_diary()
    print 'mock_save_diary: ended: ', rmsg

app = Flask(__name__, static_folder='www', template_folder='www')


@app.route('/')
def main_index_html():
    req = request  # debug only
    args = req.args  # debug only
    return send_file("www/templates/index.html")


@app.route('/add_event', methods=["POST"])
def add_event():
    title = request.args.get('title')
    date1 = request.args.get('date')
    desc = request.args.get('desc')
    sqlx = DBAccess.bld_add_sql(title, date1, desc)
    list_result = DBAccess.add_event(sqlx)
    if list_result[0] == 'error':
        sj = jsonify({"add_event_error": list_result[1]})
    else:
        sj = jsonify({"add_event successeded": list_result[1]})
    return sj


@app.route('/get_events', methods=["GET"])
def get_events():
    req = request
    start_date = request.args.get("start_date")
    end_date = request.args.get("end_date")
    context = request.args.get("context")

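# A minimal client-side sketch (not part of the original module) of how the two
# routes above might be exercised, assuming the app runs locally on Flask's
# default port 5000; the payload values and date format are placeholders, and
# /get_events is assumed to return JSON like the earlier get_events variant.
def _example_client_calls():
    import requests
    base = "http://localhost:5000"
    # /add_event reads title, date and desc from the query string (request.args above)
    resp = requests.post(base + "/add_event",
                         params={"title": "Dentist", "date": "2020-01-15",
                                 "desc": "Checkup"})
    print(resp.json())
    # /get_events filters on the query parameters read above
    resp = requests.get(base + "/get_events",
                        params={"start_date": "2020-01-01",
                                "end_date": "2020-01-31",
                                "context": "test"})
    print(resp.status_code)
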