def getUserName(self):
    """Return the first user name stored via DataAccess."""
    return DataAccess().getUserName()[0]
def avgofffastlap():
    """Scatter-plot each race's rating against the average gap to the fastest lap.

    Fixes over the original: the save path is now a raw string (the literal
    contained unrecognised escapes like "\\S" and "\\A", which trigger
    SyntaxWarning/DeprecationWarning), and the local `cmp` no longer shadows
    the builtin.
    """
    raceratings = da.getraceratings()
    laptimes = da.laptimes()
    racelapratings = pd.merge(raceratings, laptimes, how="right", on="raceId")
    racelapratings = racelapratings.dropna(subset=["rating"])
    # Mean lap time per driver per race, then averaged over drivers per race.
    avglaptimerace = racelapratings.groupby(["raceId", "driverId"]).mean().groupby("raceId").mean()
    # Each driver's best lap per race, then averaged over drivers per race.
    avgbestlaptime = racelapratings.groupby(["raceId", "driverId"]).min().groupby("raceId").mean()
    avgdeltarace = (avglaptimerace["milliseconds"] - avgbestlaptime["milliseconds"])
    colormap = makecmp(
        raceratings,
        racelapratings.drop_duplicates(subset=["raceId"]).name.to_numpy())
    plt.scatter(avgdeltarace, avglaptimerace["rating"], alpha=0.8, color=colormap.colors)
    plt.ylabel("Score")
    plt.xlabel("Average ms off fastest lap")
    plt.title("Rating (1-10) compared to average delta per driver per race")
    plt.tight_layout()
    # Raw string: the Windows path contains backslash sequences that must stay literal.
    plt.savefig(r"D:\Semester3\ADS-A\Challenge\Charts\AvgOffFastLapBig.png")
def crashrat():
    """Scatter-plot DNF count per race against the race's average rating.

    Fix over the original: the save path is now a raw string (the literal
    contained "\\C", an unrecognised escape that triggers SyntaxWarning).
    """
    raceratings = da.getraceratings()
    results = da.results()
    raceresults = pd.merge(raceratings, results, how='right', on="raceId")
    raceresults = raceresults.dropna(subset=["rating"])
    # statusId outside 1 (and outside 10-14) is treated as a DNF here -
    # presumably 1 = finished and 10-14 = lapped finishers; TODO confirm
    # against the status reference table.
    crashers = raceresults[(raceresults["statusId"] > 1) & (
        (raceresults["statusId"] < 10) | (raceresults["statusId"] > 14))]
    ccount = crashers.groupby("raceId").count()
    rmean = raceresults.groupby("raceId").mean()
    racecrash = raceresults.drop_duplicates(subset=["raceId"])
    # Keep only column index 24 of the grouped counts and name it "crashcount".
    cols = np.delete(ccount.columns.to_numpy(), 24)
    ccount = ccount.drop(columns=cols)
    ccount.columns = ["crashcount"]
    racecrash = pd.merge(racecrash, ccount, how="left", on="raceId")
    # Keep only column index 4 of the grouped means as the average rating.
    cols = np.delete(rmean.columns.to_numpy(), 4)
    rmean = rmean.drop(columns=cols)
    rmean.columns = ["averagerating"]
    racecrash = pd.merge(racecrash, rmean, how="left", on="raceId")
    racecrash = racecrash.fillna(0)
    cmp = makecmp(
        raceratings,
        raceresults.drop_duplicates(subset=["raceId"]).name.to_numpy())
    plt.scatter(racecrash["crashcount"], racecrash["averagerating"], alpha=0.8, color=cmp.colors)
    plt.ylabel("Score")
    plt.xlabel("DNF's")
    plt.title("Rating (1-10) compared to DNF's per race")
    plt.tight_layout()
    # Raw string: keep the Windows path's backslashes literal.
    plt.savefig(r"D:\Semester3\ADS-A\Challenge\Charts\CrashRatRace.png")
def get(self, collectionName, sid):
    """Return the document with the given sid from *collectionName*, or 404."""
    dao = DataAccess('TheBoss', collectionName)
    cursor = dao.select({'sid': sid})
    if cursor.count() == 0:
        abort(404, message=" {} doesn't exist".format(sid))
    else:
        document = cursor.next()
        return JSONEncoder().encode(document), 201
def get(self, collectionName, sid):
    """Look up a single document by sid; aborts with 404 when none matches."""
    store = DataAccess('TheBoss', collectionName)
    matches = store.select({'sid': sid})
    if matches.count() == 0:
        abort(404, message=" {} doesn't exist".format(sid))
    else:
        found = matches.next()
        return JSONEncoder().encode(found), 201
def getGraphData(self, timeStart):
    """Return the HL dataset starting at *timeStart*, for graphing."""
    return DataAccess().getHL(timeStart)
def __init__(self):
    """Initialise empty connection/command state and the data helpers."""
    # Connection parameters (filled in later by the caller).
    self.ip = ""
    self.username = ""
    self.password = ""
    # Pending command/dispatch state.
    self.command = ""
    self.db_array = ""
    self.method = ""
    # Helper objects.
    self.data = builder()
    self.dataAccess = DataAccess()
def put(self, collectionName): commonDo = DataAccess('TheBoss',collectionName) content = request.data jsonContent = json.loads(content) sid = getRandomID() jsonContent['sid'] = sid print "before==" print jsonContent commonDo.insert(jsonContent) return JSONEncoder().encode(jsonContent), 201
def put(self, collectionName): commonDo = DataAccess('TheBoss', collectionName) content = request.data jsonContent = json.loads(content) sid = getRandomID() jsonContent['sid'] = sid print "before==" print jsonContent commonDo.insert(jsonContent) return JSONEncoder().encode(jsonContent), 201
def generate(startTime, endTime):
    '''
    Build the daily summary e-mail body (HTML) for the given date range.

    startTime/endTime: date strings; expanded below to cover the whole days.
    Returns the formatted HTML text. The template's user-facing text is
    intentionally left in Chinese.
    '''
    # Widen the range to full days.
    startTime = '{} 00:00:00'.format(startTime)
    endTime = '{} 23:59:59'.format(endTime)
    dataAccess = DataAccess()
    time = '{} 至 {}'.format(startTime, endTime)
    # Sales orders: count, total amount, detail table.
    orderNum, OrderpriceNum, detailsail = dataAccess.getdetailSailOrders(
        startTime, endTime)
    # Purchase orders: count, total amount, detail table.
    purchaseNum, purchasepriceNum, detailpur = dataAccess.getdetailPurchases(
        startTime, endTime)
    # Money received (bills) and money paid (pays), each with details.
    billprice, detailbills = dataAccess.getdetaillBills(startTime, endTime)
    purchaseprice, detailpays = dataAccess.getdetailPays(startTime, endTime)
    # Overdue receivables and overdue shipments (no date filter).
    saleOrdersMoneyExpired = dataAccess.getSaleOrders_MoneyExpired()
    productsShipExpired = dataAccess.getProducts_ShipExpired()
    text = '''时间:{0}<br/><b>今日销售订单情况</b><br/>共<a style="color:blue ">{1}</a>单,合计金额<a style="color:blue ">{2}</a>元。
<br><b>今日采购情况订单</b><br/>共<a style="color:red ">{3}</a>单,合计金额<a style="color:red ">{4}</a>元。<br/>
<b>今日收付款情况</b><br/>合计付款金额<a style="color:red ">{5}</a>元<br/>合计收款金额<a style="color:blue ">{6}</a>元。<br /><h3>销售清单</h3><br>
{7}<br /><h3>采购清单</h3><br>{8}<br /><h3>收款清单</h3><br>{9}<br /><h3>付款清单</h3><br>{10}<br />
<h3>到期未收款订单信息</h3><br>{11}<br /> <h3> 到期未发货的产品信息</h3>{12}
'''.format(time, orderNum, OrderpriceNum, purchaseNum, purchasepriceNum,
           purchaseprice, billprice, detailsail, detailpur, detailbills,
           detailpays, saleOrdersMoneyExpired, productsShipExpired)
    return text
def run(self):
    """For each index in self.Seq: run the BonnMotion model named in the
    generated params file, read back the gzip'd movements output, and save
    identifier/params/checksums/user/timestamp to the database.
    (Python 2 code: `print >>` statements.)"""
    for i in self.Seq:
        modelname = readModelnameFromParamsFile(os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputparamsfile').replace('INDEX', str(i))))
        runBonnmotionModel(Config().readConfigEntry('bonnmotionvalidatepath'), i, modelname)
        paramsfilename = os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputparamsfile').replace('INDEX', str(i)))
        movementsfilename = os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputmovementsfile').replace('INDEX', str(i)))
        try:
            # open movements file (BonnMotion writes it gzip-compressed)
            f = gzip.open(movementsfilename, 'rb')
            movements_content = f.read()
            f.close()
        except IOError as ex:
            # Missing movements file means BonnMotion produced no output;
            # dump the params file for diagnosis and abort the whole run.
            print >> sys.stderr, ex
            print >> sys.stderr, "that means bonnmotion has not generated any output"
            print >> sys.stderr, "parameter file:"
            fd = open(os.path.join(Config().readConfigEntry('bonnmotionvalidatepath'), Config().readConfigEntry('tempoutputparamsfile').replace('INDEX', str(i))))
            print >> sys.stderr, fd.read()
            fd.close()
            sys.exit(1);
        p = {}
        p['identifier'] = modelname
        # read parameters
        f2 = open(paramsfilename)
        p['bmparamsfile'] = f2.read()
        f2.close()
        # create checksum over params + movements content
        Hashes().calcHashes(p, movements_content)
        p['user'] = getpass.getuser()
        p['datetime'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        # save in DB
        DataAccess().save(p)
def __init__(self):
    """Load the raw train/test splits via DataAccess and normalize both into
    vectorized form (see convert_data)."""
    print("Start normalizing data")
    # Cursor for sequential batch access; reset to the beginning.
    self.last_batch_index = 0
    self.dal = data_access.DataAccess()
    self.train = self.convert_data(self.dal.train_data)
    self.test = self.convert_data(self.dal.test_data)
    print("End normalizing data")
def update_lenguage(self, Lenguage):
    """Update name and type of the Lenguage row identified by its id."""
    values = (Lenguage.name_lenguage, Lenguage.type_lenguage, Lenguage.id_lenguage)
    sql = """ UPDATE Lenguage SET name = %s, type = %s WHERE id = %s; """
    return DataAccess().execute_query(sql, values)
def delete_lengauje(self, id):
    """Delete the Lenguage row with the given id."""
    print(id)  # debug echo of the id being removed
    sql = """ DELETE FROM Lenguage WHERE id = %s; """
    return DataAccess().execute_query(sql, (id, ))
def saveHC(self, HC_Name, HC_Email, lbMsgName, lbMsgEmail):
    """Validate and persist the health-counselor name/email, surfacing
    validation errors on the given message labels."""
    if HC_Name == "" or HC_Email == "":
        lbMsgName.setText("<font style='color: #FF3E96;'>Please fill in all fields</font>")
        return
    lbMsgName.setText("")
    # Profile is kept on self, exactly as the original does.
    self.objProfile = Profile()
    if not self.objProfile.validateEmail(HC_Email):
        lbMsgEmail.setText("<font style='color: #FF3E96;'>Please provide a valid Email</font>")
        return
    lbMsgEmail.setText("")
    DataAccess().saveHC_Data(HC_Name, HC_Email)
def run(self):
    """For each index in self.Seq and each test case in self.Cases: run the
    BonnMotion app, concatenate its per-extension output files, and save
    params/checksums/metadata to the database."""
    for i in self.Seq:
        for case in self.Cases:
            paramsfilename = os.path.join(
                Config().readConfigEntry('bonnmotionvalidatepath'),
                Config().readConfigEntry(
                    'tempoutputparamsfile').replace('INDEX', str(i)))
            # NOTE(review): movementsfilename is computed but never used below.
            movementsfilename = os.path.join(
                Config().readConfigEntry('bonnmotionvalidatepath'),
                Config().readConfigEntry(
                    'tempoutputmovementsfile').replace(
                        'INDEX', str(i)))
            outputfilename = os.path.join(
                Config().readConfigEntry('bonnmotionvalidatepath'),
                Config().readConfigEntry('tempoutputname') + str(i))
            # Run the app with the case's parameters, or none.
            if 'appparams' in case:
                runBonnmotionApp(
                    Config().readConfigEntry('bonnmotionvalidatepath'), i,
                    self.App, case['appparams'])
            else:
                runBonnmotionApp(
                    Config().readConfigEntry('bonnmotionvalidatepath'), i,
                    self.App, '')
            # Concatenate the output files, recording the extension order.
            ordering = []
            content = ''
            for ext in case['extensions']:
                ordering.append(ext)
                # open file ('NULL' marks an extension with no file to read)
                if ext != 'NULL':
                    f = open(outputfilename + '.' + ext)
                    content = content + f.read()
                    f.close()
            # read parameters
            f2 = open(paramsfilename)
            params = f2.read()
            f2.close()
            p = {}
            if 'appparams' in case:
                p['appparameters'] = case['appparams']
            else:
                p['appparameters'] = ''
            p['identifier'] = self.App
            p['bmparamsfile'] = params
            # Checksum over params + concatenated output.
            Hashes().calcHashes(p, content)
            p['user'] = getpass.getuser()
            p['datetime'] = datetime.datetime.now().strftime(
                "%Y-%m-%d %H:%M")
            # Comma-joined extension order (trailing comma trimmed).
            tmp = ''
            for y in ordering:
                tmp = tmp + y + ','
            p['ordering'] = tmp[0:-1]
            # save in DB
            DataAccess().save(p)
def getdata():
    """Assemble per-race features joined onto the race ratings.

    Returns a cleaned DataFrame with columns
    ['name', 'rating', 'crashcount', 'averagestops', 'delta'].
    """
    raceratings = da.getraceratings()
    raceratings = raceratings.drop(
        columns=['year', 'round', 'circuitId', 'date', 'time', 'race'])
    results = da.results()
    raceresults = pd.merge(raceratings, results, how='right', on="raceId")
    # DNFs: statusId 1 presumably means "finished" and 10-14 lapped
    # finishers - TODO confirm against the status reference table.
    crashers = raceresults[(raceresults["statusId"] > 1) & (
        (raceresults["statusId"] < 10) | (raceresults["statusId"] > 14))]
    ccount = crashers.groupby("raceId").count()
    # Keep only column index 18 of the grouped counts as the crash count.
    cols = np.delete(ccount.columns.to_numpy(), 18)
    ccount = ccount.drop(columns=cols)
    ccount.columns = ["crashcount"]
    ccount = ccount.fillna(0)
    raceratings = pd.merge(raceratings, ccount, how="right", on="raceId")
    raceratings["crashcount"] = raceratings["crashcount"].fillna(0)
    stops = da.pitstops()
    racestops = pd.merge(raceratings, stops, how='left', on="raceId")
    # Pit stops per driver per race, averaged over drivers per race.
    racestopsraceavg = racestops.groupby(["raceId", "driverId"
                                          ]).count().groupby("raceId").mean()
    racestopsraceavg = racestopsraceavg.drop(columns=[
        'name', 'rating', 'crashcount', 'lap', 'time', 'duration',
        'milliseconds'
    ])
    racestopsraceavg.columns = ["averagestops"]
    raceratings = pd.merge(raceratings, racestopsraceavg, how="right", on="raceId")
    laptimes = da.laptimes()
    racelapratings = pd.merge(raceratings, laptimes, how="right", on="raceId")
    racelapratings = racelapratings.dropna(subset=[
        "rating",
    ])
    # Mean lap time per driver per race, averaged per race ...
    avglaptimerace = racelapratings.groupby(
        ["raceId", "driverId"]).mean().groupby("raceId").mean()
    # ... and each driver's best lap per race, averaged per race.
    avgbestlaptime = racelapratings.groupby(["raceId", "driverId"
                                             ]).min().groupby("raceId").mean()
    # Average gap between mean lap time and best lap time.
    avgdeltarace = (avglaptimerace["milliseconds"] -
                    avgbestlaptime["milliseconds"])
    raceratings = pd.merge(raceratings, avgdeltarace, how="left", on="raceId")
    raceratings.columns = [
        'raceId', 'name', 'rating', 'crashcount', 'averagestops', 'delta'
    ]
    raceratings = raceratings.drop(columns=['raceId'])
    return raceratings.dropna()
def generateByWeek(startTime, endTime):
    '''
    Build the weekly summary e-mail body (HTML) for the given date range.
    User-facing template text is intentionally left in Chinese.
    '''
    # Widen the range to full days.
    startTime = '{} 00:00:00'.format(startTime)
    endTime = '{} 23:59:59'.format(endTime)
    dataAccess = DataAccess()
    sailOrderNum,sailOrderPrice,_ = dataAccess.getdetailSailOrders(startTime,endTime)  # sales order count and total
    purchaseNum,purchasePrice,_ =dataAccess.getdetailPurchases(startTime,endTime)  # purchase order count and total
    paysAmount,_= dataAccess.getdetailPays(startTime,endTime)  # total paid out
    billsAmount,_ = dataAccess.getdetaillBills(startTime,endTime)  # total received
    records = dataAccess.getSaleRecordOfPerson(startTime,endTime)  # per-employee sales performance
    time = '{} 至 {}'.format(startTime,endTime)
    text = '''
    时间:{0}<br/> <b>本周销售订单情况</b><br/>共<a style="color:blue ">{1}</a>单, 合计金额<a style="color:blue ">{2:,}</a>元。 <br>
    <b>本周采购情况订单</b><br/>共<a style="color:red ">{3}</a>单, 合计金额<a style="color:red ">{4:,}</a>元。 <br/>
    <b>本周收付款情况</b><br/> 合计付款金额<a style="color:red ">{5:,}</a>元<br/>合计收款金额<a style="color:blue ">{6:,}</a>元。 <br />
    <h3>人员销售业绩</h3><br>{7}
    '''.format(time,sailOrderNum,sailOrderPrice,purchaseNum,purchasePrice,paysAmount,billsAmount,records)
    return text
def calculateDayHL(self):
    """Compute yesterday's average HL score and persist it.

    Fix over the original: `count` was initialised to 1 and never
    incremented, so the division was a no-op and the stored value was the
    SUM of the scores rather than their average. The counter is now
    incremented per record, and an empty result set stores 0.
    """
    objDA = DataAccess()
    ds = objDA.getHL_forDayHL_Cal()
    totDayHL = 0
    count = 0
    for r in ds:
        totDayHL += r[2]  # assumes index 2 holds the HL score - TODO confirm
        count += 1
    dayHL = totDayHL / count if count else 0
    # Key the saved value by yesterday's date (first 8 chars of the
    # formatted timestamp).
    t = formatTime(subtractTime('d', 1))[0:8]
    print(t)
    objDA.saveDayHL(t, dayHL)
def __init__(self, id=None):
    """Populate this object from the Lenguage row with the given integer id.

    A non-integer (or missing) id leaves the instance unpopulated, matching
    the original behaviour.
    """
    if isinstance(id, int):
        query = ("SELECT id, name, type FROM Lenguage WHERE id = %(int)s;")
        rows = DataAccess().get_query(query, {'int': id})
        first = rows[0]
        self.id_lenguage = first[0]
        self.name_lenguage = first[1]
        self.type_lenguage = first[2]
def insert_lenguage(self, Lenguage):
    """Insert a new Lenguage row (id, name, type)."""
    row = (Lenguage.id_lenguage, Lenguage.name_lenguage, Lenguage.type_lenguage)
    sql = """ INSERT INTO Lenguage (id, name, type) VALUES (%s, %s, %s); """
    return DataAccess().execute_query(sql, row)
def __init__(self, inst=1000, data=1000): self.InsMEM = [0 for i in range(inst)] # 주소 : 0x400000 self.DataMEM = [0 for i in range(data)] # 주소 : 0x10000000 self.StackMEM = [2] self.Regis = [0 for i in range(32)] self.Regis[29] = 0x7ffff52c self.PC = 0 self.DecodeAssem = DecodeAssem(self) self.DecodeBinary = DecodeBinary(self) self.DataAccess = DataAccess(self)
def trainModel(self):
    """Fetch all training documents from the database and (re)build the
    classifier model, keeping a timestamped backup copy."""
    db_url = parameter.get("database", "url")
    db_key = parameter.get("database", "masterkey")
    db_name = parameter.get("database", "db")
    coll_name = parameter.get("database", "collection")
    store = DataAccess(db_url, db_key, db_name)
    documents = store.findAll(coll_name)
    # Unwrap the '$v' envelopes into plain {domain, keywords} records.
    training_set = [{'domain': doc['$v']['domain']['$v'],
                     'keywords': doc['$v']['keywords']['$v']}
                    for doc in documents]
    print('***train data***', training_set)
    now = datetime.datetime.now()
    stamp_parts = (now.day, now.month, now.year, now.hour, now.minute, now.second)
    dateTime = '_'.join(str(part) for part in stamp_parts)
    # Timestamped backup first, then the live model.
    classifier.saveModel(training_set, '../model/backup/wbg' + '_' + dateTime)
    classifier.saveModel(training_set, '../model/wbg')
    return 'process completed'
def stoprat():
    """Scatter-plot average pit stops per race against the race's rating.

    Fix over the original: the save path is now a raw string (the literal
    contained "\\S", an unrecognised escape that triggers SyntaxWarning).
    """
    raceratings = da.getraceratings()
    stops = da.pitstops()
    racestops = pd.merge(raceratings, stops, how='right', on="raceId")
    racestops = racestops.dropna(subset=["rating"])
    # Stops per driver per race, then averaged over drivers per race.
    racestopsdriverrace = racestops.groupby(["raceId", "driverId"]).count()
    racestopsraceavg = racestopsdriverrace.groupby("raceId").mean()
    nracestops = racestops.groupby("raceId").mean()
    cmap = makecmp(
        raceratings,
        racestops.drop_duplicates(subset=["raceId"]).name.to_numpy())
    plt.scatter(racestopsraceavg["stop"], nracestops["rating"], alpha=0.8, color=cmap.colors)
    plt.ylabel("Score")
    plt.xlabel("Pit stops")
    plt.title("Rating (1-10) compared to average pit stops per race")
    plt.tight_layout()
    # Raw string: keep the Windows path's backslashes literal.
    plt.savefig(r"D:\Semester3\ADS-A\Challenge\Charts\StopRatingRace.png")
def get_lengauges(self, id=None):
    """Return Lenguage rows; when *id* is given, restrict to that single row."""
    parameters = {}
    where_clause = ''
    if id != None:
        parameters['int'] = id
        where_clause = "WHERE id = %(int)s"
    sql = "SELECT id, name, type FROM Lenguage " + where_clause + ";"
    return DataAccess().get_query(sql, parameters)
def calculateSentScore(self):
    """Score the filtered texts' sentiment on a 1-10 scale and persist it.

    Each "pos" classification contributes 2, each other classification 0.2;
    the mean is scaled by 10. Fixes over the original: the bare `except:`
    around the division is narrowed to ZeroDivisionError (the only failure
    that guard existed for, on an empty result set), and the dead
    `if ds == []: pass` statement is removed.
    """
    sentList = []
    count = 0
    pos_negCount = 0
    sentScore = 0
    objDA = DataAccess()
    ds = objDA.getFilteredTxt()
    for record in ds:
        sentList.append(count)
        print('text '+str(record))
        sentList[count] = ss(str(record))
        if sentList[count] == "pos":
            pos_negCount += 2
            print('pos sent')
        else:
            pos_negCount += 0.2
            print('neg sent')
        count += 1
    print('pos count ', pos_negCount)
    print('count ', count)
    try:
        sentScore = round((pos_negCount/count)*10, 1)
    except ZeroDivisionError:
        # No records: sentinel -1, stored via the else branch below.
        sentScore = -1
    print('sentscore ', sentScore)
    if sentScore == 0:
        sentScore = 1  # floor the scale at 1
    # Store under a minute-resolution timestamp key.
    if sentScore >= 1:
        formatedTime = formatTime(str(datetime.now()))
        print('If')
        formatedTime = formatedTime[0:12]
        objDA.saveHL(formatedTime, sentScore)
    else:
        formatedTime = formatTime(str(datetime.now()))
        print('Else')
        formatedTime = formatedTime[0:12]
        print(formatedTime + ' ' + str(sentScore))
        objDA.saveHL(formatedTime, sentScore)
def login(self, ipEmail, ipPassword, paramMsgLogin, paramMsgEmail):
    """Validate the typed credentials against the stored login data.

    Writes validation errors to the given message labels and returns True
    only when both e-mail and password match the stored values.
    """
    objDA = DataAccess()
    stored = objDA.getLoginData()
    stored_email = stored[1].replace('\n', '')
    stored_password = stored[2].replace('\n', '')
    if ipEmail == "" or ipPassword == "":
        paramMsgEmail.setText("<font style='color: #FF3E96;'>Please fill in all fields</font>")
        return False
    if not self.validateEmail(ipEmail):
        paramMsgEmail.setText("<font style='color: #FF3E96;'>Please provide a valid Email</font>")
        return False
    if stored_email == ipEmail and stored_password == ipPassword:
        return True
    paramMsgLogin.setText("<font style='color: #FF3E96;'>Please provide correct Email & Password</font>")
    return False
def check(self): threading.Timer(60, self.check).start() # start the new round of checks self.runner.increment_round() # take note of the round now # if the current round does not finish before next one starts, we will enter the wrong round current_round = self.runner.round database = DataAccess() # Get teams from DB, loop through and check each service teams = database.get_teams() for team in teams: for service in team.services: self.runner.check_service(service) # after all teams are checked, deposit into DB (whole round at once) database.add_check_round(teams, current_round) # generate chart for web interface teams = database.get_scores() self.chart_gen.generate_chart(current_round, teams)
def post(self, collectionName, sid):
    """Merge the JSON request body into the document with the given sid.

    Responds 404 when no document matches; otherwise returns the merged
    document with status 201. Fix over the original: a duplicate
    `commonDo.update(...)` statement sat AFTER the return and was
    unreachable dead code; it has been removed.
    """
    currentContent = {}
    commonDo = DataAccess('TheBoss', collectionName)
    records = commonDo.select({'sid': sid})
    if records.count() == 0:
        abort(404, message=" {} doesn't exist".format(sid))
    else:
        currentContent = records.next()
        # Drop Mongo's internal _id so it is not re-written on update.
        del currentContent['_id']
        content = request.data
        updateContent = json.loads(content)
        result = mergeDict(currentContent, updateContent)
        commonDo.update({'sid': sid}, {"$set": result})
        return JSONEncoder().encode(result), 201
def post(self, collectionName, sid):
    """Merge the JSON request body into the document with the given sid.

    Responds 404 when no document matches; otherwise returns the merged
    document with status 201. Fix over the original: an unreachable
    duplicate `commonDo.update(...)` after the return was removed.
    """
    currentContent = {}
    commonDo = DataAccess('TheBoss', collectionName)
    records = commonDo.select({'sid': sid})
    if records.count() == 0:
        abort(404, message=" {} doesn't exist".format(sid))
    else:
        currentContent = records.next()
        # Drop Mongo's internal _id so it is not re-written on update.
        del currentContent['_id']
        content = request.data
        updateContent = json.loads(content)
        result = mergeDict(currentContent, updateContent)
        commonDo.update({'sid': sid}, {"$set": result})
        return JSONEncoder().encode(result), 201
def avratcir():
    """Horizontal bar chart of the average rating per grand prix, saved to disk.

    Fixes over the original: raw-string save path (the literal contained
    "\\A", an unrecognised escape that triggers SyntaxWarning), and the
    local `cmp` no longer shadows the builtin.
    """
    raceratings = da.getraceratings()
    avrr = raceratings.groupby('name').mean()
    labels = avrr.index.values
    # 6 units of spacing between bars.
    y_pos = np.arange(0, len(labels) * 6, 6)
    avrr["labels"] = labels
    avrr = avrr.sort_values("rating", ascending=False)
    palette = makecmp(raceratings, avrr.index)
    plt.barh(y_pos, avrr['rating'], align='center', alpha=0.8, height=4.5, color=palette.colors)
    plt.yticks(y_pos, avrr["labels"], rotation='horizontal')
    plt.xlim(5, 9)
    plt.xlabel('Score')
    plt.title('Average rating (1-10) per grand prix')
    plt.tight_layout()
    # Raw string: keep the Windows path's backslashes literal.
    plt.savefig(r"D:\Semester3\ADS-A\Challenge\Charts\AvgRatGP.png")
def my_index_post():
    """Handle the search-form POST: query services for the chosen date and
    render the results page."""
    raw_text = request.form['text']
    date_picked = request.form['date_picked']
    print("DATEPICKED: " + date_picked + " ")
    query = raw_text.upper()
    rows = DataAccess(query, date_picked)
    services = []
    for row in rows:
        values = []
        for key, value in row.items():
            values.append(value)
            print(key)
        service = CityService(values)
        services.append(service)
        print(service.SERVICE_NAME)
    if len(rows) == 0:
        return render_template('logged_in.html', M_List=[])
    return render_template('logged_in.html', S_List=services)
def test_active_regions(ar_id, grav_long, grav_lat):
    # Active regions position testing
    #
    # NOTE(review): the parameters are immediately shadowed (ar_id is
    # reassigned below), and `ar_carr_synthesis` is not defined in this
    # scope - presumably a module-level result of the synthesis step;
    # verify before relying on this function.
    if __name__ == '__main__':
        ar_data = DataAccess('2003-09-26T00:00:00', '2003-09-26T01:00:00', 'AR', 'SOHO', 'MDI')
        ar_chain_encoded = prep.decode_and_split(ar_data.get_chain_code())
        ar_id = ar_data.get_ar_id()
        ar_centers_lon = ar_data.get_grav_center_long()
        ar_centers_lat = ar_data.get_grav_center_lat()
        # Pick one synthesized active region and display it.
        ar = ar_carr_synthesis[3]
        prep.display_object([ar], "")
        ar2 = np.array(ar)
        x = ar2[0]  # assumes row 0 holds one coordinate axis - TODO confirm
        y = ar2[1]  # assumes row 1 holds the other axis - TODO confirm
        length = len(ar2[0]) + len(ar2[1])
        # Centroid as the mean of each coordinate row.
        centroid = (sum(x) / len(ar2[0]), sum(y) / len(ar2[1]))
        print(centroid)
def test_load_data_from_db_returns_list(self):
    """Loading from the DB should yield a non-empty collection."""
    loaded = DataAccess().load_data_from_db()
    self.assertTrue(len(loaded) > 0)
def test_load_data_from_file_returns_list(self):
    """Loading from the JSON file should yield a non-empty collection."""
    loaded = DataAccess().load_data_from_file('ingredients.json')
    self.assertTrue(len(loaded) > 0)
def test_load_data_returns_ingredient_objects(self):
    """File loading should produce Ingredient instances."""
    loaded = DataAccess().load_data_from_file('ingredients.json')
    self.assertIsInstance(loaded[0], Ingredient)
# if the longitude difference is smaller than width of ar # and latitude is smaller than height of ar then is success # otherwise fail if lon_diff < width and lat_diff < height: print("SUCCESS", lon_diff) success += 1 else: print("FAIL", lon_diff) print("long_center:", long) print("calculated:", centroid[0]) fail += 1 # prep.display_object(carr_coords, []) else: ignored += 1 print("successes = ", success) print("fail = ", fail) print("ignored = ", ignored) # Active regions position testing if __name__ == '__main__': ar_data = DataAccess('2003-09-28T00:00:00', '2003-10-23T01:00:00', 'AR', 'SOHO', 'MDI') ar_chain_encoded = prep.decode_and_split(ar_data.get_chain_code()) ar_id = ar_data.get_ar_id() ar_centers_lon = ar_data.get_grav_center_long() ar_centers_lat = ar_data.get_grav_center_lat() test_active_regions(ar_id, ar_centers_lon, ar_centers_lat)
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.preprocessing.text import Tokenizer
import DataAccess.DataAccess as data_access
import string

dal = data_access.DataAccess()


def convert_data(data):
    """Vectorize a dataset of samples (with .x word and .y label attributes)
    into parallel numpy arrays."""
    features, targets = [], []
    for sample in data:
        features.append(convert_word_to_vec(sample.x))
        targets.append(convert_label_to_vec(sample.y))
    return {"x": np.array(features), "y": np.array(targets)}


def convert_word_to_vec(word):
    """Return a 26-element letter-frequency vector for *word*.

    Raises ValueError (via .index) when the word contains a character
    outside a-z after lowercasing, matching the original behaviour.
    """
    counts = np.zeros(26)
    for ch in word.lower():
        counts[string.ascii_lowercase.index(ch)] += 1
    return counts
def default(self, obj):
    """JSON-encode numpy scalars and arrays as native Python values,
    deferring everything else to the base encoder."""
    if isinstance(obj, np.integer):
        return int(obj)
    if isinstance(obj, np.floating):
        return float(obj)
    if isinstance(obj, np.ndarray):
        return obj.tolist()
    return super(Encoder, self).default(obj)


if __name__ == '__main__':
    # DataAccess + Database testing
    from DataAccess import DataAccess
    ar = DataAccess('2003-10-06T08:54:09', '2003-10-09T10:54:09', 'AR', 'SOHO', 'MDI')
    sp = DataAccess('2003-10-06T08:54:09', '2003-10-09T10:54:09', 'SP', 'SOHO', 'MDI')
    fil = DataAccess('2003-10-06T08:54:09', '2003-10-09T10:54:09', 'FIL', 'MEUDON', 'SPECTROHELIOGRAPH')
    ar_id = ar.get_ar_id()[0]
    sp_id = sp.get_sp_id()[0]
    fil_id = fil.get_fil_id()[0]
    print("load_ar_from_database() TEST", load_ar_from_database(ar_id))
    print("load_sp_from_database() TEST", load_sp_from_database(sp_id))
    print("load_fil_from_database() TEST", load_fl_from_database(fil_id))
    print("----------------------------------------------------------------")
def loadSettings(self):
    """Return the persisted settings record."""
    return DataAccess().loadSettingsData()
def getDiag_HT(self, hl):
    """Return the diagnosis dataset for the given HL value."""
    return DataAccess().getDiag_HT(hl)
def signUp(self, paramUserName, paramEmail, paramPwd, paramAge):
    """Persist a new user's registration details."""
    DataAccess().saveUserData(paramUserName, paramEmail, paramPwd, paramAge)
def __init__(self, host="localhost", user="******", passwd="test", db="avetti"):
    """Create the DAO with a DataAccess connection to the given database.

    NOTE(review): the default credentials are hard-coded placeholders -
    confirm real deployments always pass explicit values instead of
    relying on these defaults.
    """
    self.db = DataAccess(host, user, passwd, db)
def delete(self, collectionName, sid):
    """Delete the document with the given sid; always responds 201."""
    dao = DataAccess('TheBoss', collectionName)
    dao.delete({'sid': sid})
    return 201
def setLoggedStatus(self):
    """Persist the logged-in status flag."""
    DataAccess().saveLogStatus()
# active region then sunspot is added to array proportion = len(result) / len(sp_zip) if proportion == 1.0: sunspots.append(sp) break return sunspots if __name__ == '__main__': # Active region + Sunspot testing from DataAccess import DataAccess import ActiveRegion as ar # setting active regions data = DataAccess('2003-10-21T00:00:00', '2003-10-24T00:00:00', 'AR', 'SOHO', 'MDI') chain_encoded = prep.decode_and_split(data.get_chain_code()) ar_carr_synthesis, ar_pix_synthesis = ar.get_shapes( chain_encoded, data.get_pixel_start_x(), data.get_pixel_start_y(), data.get_filename(), data.get_noaa_number(), data.get_ar_id(), data.get_date()) # setting sunspots sp_data = DataAccess('2003-10-21T00:00:00', '2003-10-24T00:00:00', 'SP', 'SOHO', 'MDI') sp_chain_encoded = prep.decode_and_split(sp_data.get_chain_code()) sp_carr, sp_pix = get_shapes(sp_chain_encoded, sp_data.get_pixel_start_x(), sp_data.get_pixel_start_y(), sp_data.get_filename(), sp_data.get_sp_id(),
def main():
    """Entry point: build the repository from the requested file and walk
    its tree in post-order."""
    source = request_file()
    repository = Repository(DataAccess(source))
    repository.get_post_order_tree()
minor=True) ax.set_xticks(np.arange(longitude_start, longitude_end, break_between)) ax.set_yticks(np.arange(latitude_start, latitude_end, break_between)) ax.grid(which='both') ax.add_collection(p) # push grid lines behind the elements ax.set_axisbelow(True) plt.show() if __name__ == '__main__': from DataAccess import DataAccess data = DataAccess('2010-01-01T00:03:02', '2010-01-01T04:03:02') chain_encoded = encode_and_split(data.get_chain_code()) cords2 = get_shapes(chain_encoded, data.get_pixel_start_x(), data.get_pixel_start_y(), "2.fits") display_object(cords2) # # coordinates - numpy array with coordinates of the contour of the object # # Function convets from pixel coordinates to carrington # # Return - numpy array with carrington coordinates # def convert_to_carrington(coordinates, filename): # np_carrington_array = [] # # for c in coordinates:
for chains in chain_codes: if type(chains) is bytes: chains = chains.decode("utf-8") splitted_chain = list(map(int, str(chains))) codes.append(splitted_chain) return codes if __name__ == '__main__': # http://voparis-helio.obspm.fr/hfc-gui/showmap.php?date=2010-01-01%2000:03:02&feat=ar&style=pixel # http://voparis-helio.obspm.fr/helio-hfc/HelioQueryService?FROM=VIEW_AR_HQI&STARTTIME=2010-01-01T00:00:00&ENDTIME=2010-01-01T01:00:00&WHERE=OBSERVAT,SOHO;INSTRUME,MDI from DataAccess import DataAccess data = DataAccess('2010-01-01T00:00:00', '2010-01-01T02:59:00', 'AR') chain_encoded = encode_and_split([data.get_chain_code()[0]]) sp_data = DataAccess('2010-01-01T00:00:00', '2010-01-01T02:59:00', 'SP') sp_chain = encode_and_split([sp_data.get_chain_code()[0]]) ar = chain_code(chain_encoded[0], data.get_pixel_start_x()[0], data.get_pixel_start_y()[0]) sp = chain_code(sp_chain[0], sp_data.get_pixel_start_x()[0], sp_data.get_pixel_start_y()[0]) draw(ar, sp)
def getLoggedStatus(self):
    """Return the persisted logged-in status flag."""
    return DataAccess().getLogStatus()
def train(self, data, labels, sparse_data=None):
    """Run stochastic training (SGD / SAG / SAG with line search) over the data.

    data, labels: the training set, served in minibatches by DataAccess;
    sparse_data: optional sparse representation forwarded to DataAccess.

    Fixes over the original: `n_seen` was referenced as a bare local in the
    line-search update and in the progress printout (a guaranteed NameError
    at runtime) - it is now consistently `self.n_seen`; and the np.floor(...)
    sizes passed to resize() are cast to int, as resize requires.
    """
    max_updates = self.max_updates
    minibatch = self.minibatch
    method = self.method
    stepsize = self.stepsize
    display = self.display
    solver = self.solver
    # Losses for all batches; grown lazily because the number of batches in
    # the dataset is not known in advance.
    all_losses = np.zeros(0)
    data_getter = DataAccess(data, labels, self.max_data, self.type, sparse_data)
    for i_batch in range(max_updates):
        # Draw a batch.
        datum, label = data_getter.get(minibatch)
        current_batch = data_getter.current_batch
        # Compute the loss and the gradient for that batch.
        loss, gradient, gradient_sufficient_statistics = self.solver.compute_loss_and_gradient(datum, label)
        # Grow "seen" and "all_losses" as new batch indices appear.
        if current_batch >= self.seen.shape[0]:
            new_size = int(np.floor(1.2 * current_batch)) + 1
            self.seen.resize(new_size)
            all_losses.resize(new_size)
        # Similarly extend "all_sufficient_statistics".
        if current_batch >= len(self.all_sufficient_statistics):
            self.all_sufficient_statistics.extend(np.zeros(current_batch + 1 - len(self.all_sufficient_statistics)))
        if self.seen[current_batch] == 1 and (method == 'sag' or method == 'sag-ls'):
            # Remove this batch's previous gradient from the running sum.
            old_gradient = self.solver.get_gradient_from_sufficient_statistics(self.all_sufficient_statistics[current_batch], datum)
            self.remove_gradient(old_gradient, self.sum_gradient)
        # Count unseen batches now; the "seen" flag itself is only flipped
        # after the update because some methods read it.
        if self.seen[current_batch] == 0:
            self.n_seen += 1
        if method == 'sgd':
            self.sgd_update(gradient)
        elif method == 'sag':
            self.sag_update(datum, gradient, i_batch, current_batch)
        elif method == 'sag-ls':
            self.sag_ls_update(datum, label, loss, gradient, i_batch, current_batch)
        else:
            print("Unknown method.")
        # Regularize the model.
        solver.regularize(stepsize, self.n_seen)
        # Update the seen status now that all readers of it have run.
        if self.seen[current_batch] == 0:
            self.seen[current_batch] = 1
        # Record the loss on that batch.
        all_losses[current_batch] = np.mean(loss)
        # Line search: L is multiplied by 2^(-1/n) where n is the number of
        # batches; since that count is unknown beforehand, n_seen is used.
        if method == 'sag-ls':
            self.L_max *= 2 ** (-1. / float(self.n_seen))
            stepsize = 1. / (self.L_max + self.solver.l2_regularizer)
        if (i_batch + 1) % display == 0:
            print('Example {}/{} - Average train loss = {}'.format((i_batch + 1) * minibatch, max_updates * minibatch, np.sum(all_losses) / self.n_seen))
# Window-title substrings whose keystrokes get recorded. "gmail" windows are
# matched by the original code but deliberately NOT logged (the append was
# commented out), so gmail is intentionally absent from this list.
LOGGED_WINDOW_KEYWORDS = ("facebook", "skype", "viber", "yahoo", "googleplus",
                          "evernote", "onenote", "outlook", "hotmail")


def OnKeyboardEvent(event):
    """Keyboard hook: buffer keystrokes typed into watched windows.

    The key is appended to the module-level ``text`` buffer once per keyword
    matching the window title (same semantics as the original's nine
    duplicated regex branches, now data-driven). Every 50 characters the
    buffer is flushed: backspaces are applied, the text is timestamped and
    persisted, and the buffer is reset. Always returns True so the event is
    passed on to other handlers.
    """
    global text
    window = str(event.WindowName)
    for keyword in LOGGED_WINDOW_KEYWORDS:
        if re.search(keyword, window, re.I) is not None:
            text += chr(event.Ascii)
    if len(text) == 50:
        result = list(text)
        # "\b" here is the literal ASCII backspace character (chr(8)), NOT a
        # regex word boundary: each backspace and the character before it are
        # stripped from the buffer.
        for (start, end) in [(m.start(), m.end()) for m in re.finditer("\b", text)]:
            text = text.replace("".join(result[start - 1:end]), "")
        timeFormatted = formatTime(str(datetime.now()))
        print("Log Text " + text)
        DataAccess().saveFilteredText(timeFormatted, text)
        text = ""
    return True
def saveSettings(self, notifications, updates, sync, autoresponse, HealthCounselor):
    """Persist the user's settings flags, echoing them first for debugging."""
    flags = (notifications, updates, sync, autoresponse, HealthCounselor)
    print(' '.join(str(flag) for flag in flags))
    DataAccess().saveSettingsData(notifications, updates, sync, autoresponse, HealthCounselor)
def getHistoryData(self, selectedDate):
    """Return the historical HL dataset for the selected date."""
    return DataAccess().getHHL(selectedDate)
class OrderManagerDao(object):
    """Data-access object for reading orders and mapping DB rows onto the
    Order/Address/Payment domain objects.

    Python 2 code (uses print statements).  Wraps a single DataAccess
    connection; each public query method calls connect()/release() around
    its work.

    NOTE(review): row fields are addressed by numeric index (row[0],
    row[18], ...).  The column order of `orderdata`, `orderaddress`,
    `orderitem`, etc. is assumed but not visible here -- confirm against
    the schema before changing any index.
    """

    def __init__(self, host="localhost", user="******", passwd="test", db="avetti"):
        # Shared connection wrapper used by every method below.
        self.db = DataAccess(host, user, passwd, db)

    def getOrderCountSinceDate(self, code, lastdate):
        # Count active (archived=0, orderstate=1) orders for vendor `code`
        # updated strictly after `lastdate` (a date/datetime object).
        rows = []
        params = {}
        params["vid"] = code
        params["updatetime"] = lastdate.strftime("%Y-%m-%d")
        try:
            self.db.connect()
            query = "select count(*) from orderdata where vid=%(vid)s and to_days(updatetime) > to_days(%(updatetime)s) and archived=0 and orderstate=1"
            rc, rows = self.db.execute(query, params)
            # rc == 1 and rows[0][0] is the answer
            return rows[0][0]
        finally:
            self.db.release()

    def getOrderCountAfterId(self, code, orderId):
        # Count active orders with orderdataid greater than `orderId`.
        rows = []
        params = {}
        params["vid"] = code
        params["orderid"] = orderId
        try:
            self.db.connect()
            query = "select count(*) from orderdata where vid=%(vid)s and orderdataid > %(orderid)s and archived=0 and orderstate=1"
            rc, rows = self.db.execute(query, params)
            # rc == 1 and rows[0][0] is the answer
        finally:
            self.db.release()
        # NOTE(review): reached only when execute() succeeded; an empty
        # result set would raise IndexError here.
        return rows[0][0]

    def getAllOrdersCount(self, code):
        # Total active orders for a vendor.
        params = {}
        params["vid"] = code
        try:
            self.db.connect()
            query = "select count(*) from orderdata where vid=%(vid)s and archived=0 and orderstate=1"
            rc, rows = self.db.execute(query, params)
            # rc == 1 and rows[0][0] is the answer
        finally:
            self.db.release()
        return rows[0][0]

    def getOrdersAfterDate(self, date, code, batchsize=None):
        # Build full Order objects for orders updated after `date`
        # (a "DD/Mon/YYYY" string), oldest first; optional LIMIT via
        # `batchsize`.  Returns them wrapped in a SETIOrders envelope.
        retv = []
        params = {}
        params["vid"] = code
        params["lastdate"] = datetime.strptime(date, "%d/%b/%Y").strftime("%Y-%m-%d")
        try:
            self.db.connect()
            query = "select * from orderdata where vid=%(vid)s and to_days(updatetime) > to_days(%(lastdate)s) and archived=0 and orderstate=1 order by orderid ASC"
            if batchsize:
                query = "%s %s %s" % (query, "limit ", batchsize)
            print "executing:", query
            rc, rows = self.db.execute(query, params)
            print "got rc=%s back " % rc, rows
            for row in rows:
                # Assemble the nested domain objects for each order row.
                shipping, billing = self.getOrderAddress(row[0])
                totals = self.getTotals(row)
                other = self.getOther(row)
                payment = self.getPayment(row[0])
                print other
                retv.append(Order(row[0], row[18], billing, shipping, payment, totals, None, None, other))
        finally:
            self.db.release()
        return SETIOrders(Response(), retv)

    def getOrdersAfterId(self, orderId, code, batchsize=None):
        # Same assembly as getOrdersAfterDate, but keyed on
        # orderdataid > `orderId` instead of an update date.
        retv = []
        params = {}
        params["vid"] = code
        params["orderid"] = orderId
        try:
            self.db.connect()
            query = "select * from orderdata where vid=%(vid)s and orderdataid > %(orderid)s and archived=0 and orderstate=1 order by orderid ASC"
            if batchsize:
                query = "%s %s %s" % (query, "limit ", batchsize)
            print "executing:", query
            rc, rows = self.db.execute(query, params)
            print "got rc=%s back " % rc, rows
            for row in rows:
                shipping, billing = self.getOrderAddress(row[0])
                totals = self.getTotals(row)
                other = self.getOther(row)
                payment = self.getPayment(row[0])
                retv.append(Order(row[0], row[18], billing, shipping, payment, totals, None, None, other))
        finally:
            self.db.release()
        return SETIOrders(Response(), retv)

    def getOrderAddress(self, orderdataid):
        # Return a (shipping, billing) pair for an order; either element
        # stays None when the corresponding address row is absent.
        shipping = None
        billing = None
        params = {}
        params["orderdataid"] = orderdataid
        query = "select firstname, lastname, company, phone, email, address1, address2, city, p.name, postal, i.name, addressdesc from orderaddress a join isocountry i on a.countryid=i.id join isoprovince p on a.provinceid=p.id where orderdataid=%(orderdataid)s"
        rc, rows = self.db.execute(query, params)
        for row in rows:
            print row
            address = Address(row[5], row[6], row[7], row[8], row[9], row[10])
            print "address type", row[11]
            # addressdesc (row[11]) distinguishes shipping from billing.
            if row[11].lower() == "shipping address":
                products = self.getProducts(orderdataid)
                print "shipping address: ", row[11]
                shipping = Shipping("%s %s" % (row[0], row[1]), row[2], row[3], row[4], address, products)
            else:
                print "length is ", len(row)
                billing = Billing("%s %s" % (row[0], row[1]), row[2], row[3], row[4], address)
        return (shipping, billing)

    def getTotals(self, orderRow):
        # Map the total-related columns of an orderdata row into Totals;
        # only the discount amount (orderRow[12]) is carried in Discount.
        discount = Discount(None, None, None, orderRow[12], None)
        return Totals(orderRow[7], None, discount, orderRow[9], orderRow[13], None, orderRow[14])

    def getProducts(self, orderid):
        # One Product (with its attribute options) per orderitem row.
        retv = []
        params = {"id": orderid}
        query = "select * from orderitem where orderdataid=%(id)s"
        rc, rows = self.db.execute(query, params)
        for row in rows:
            options = self.getOptions(row[0])
            retv.append(Product(row[6], row[8], row[3], row[13], None, None, None, None, row[20], None, None, options))
        return retv

    def getOptions(self, orderitemid):
        # Item-level attribute rows mapped onto OrderOption objects.
        retv = []
        params = {"itemid": orderitemid}
        query = "select attname, deltaprice, atttype, attvalue from orderitemattribs where orderitemid=%(itemid)s"
        rc, rows = self.db.execute(query, params)
        for row in rows:
            retv.append(OrderOption(row[0], row[2], row[1], row[3], None, None, None))
        return retv

    def getOther(self, orderRow):
        # Placeholder: no extra per-order data is extracted yet.
        return Other()

    def getPayment(self, orderid):
        # Build the Payment for an order.  Only credit-card ("cc") rows
        # are mapped onto CreditCard; any other payment type leaves the
        # method as the sentinel string "Error".
        query = "select * from orderpayment where orderdataid=%(orderid)s"
        params = {"orderid": orderid}
        method = "Error"
        rc, rows = self.db.execute(query, params)
        for row in rows:
            if row[3].lower() == "cc":
                method = CreditCard(
                    row[17],
                    row[7],
                    None,
                    None,
                    None,
                    None,
                    None,
                    row[11],
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                    None,
                )
        return Payment(method)