def __init__(self, parent_window, caller_window, modify_type, clicked_time): self.database_interface = DatabaseInterface() # Get the window of the caller self.caller_window = caller_window # Initial values of times if modify_type == "update_or_delete": self.init_begin_time = self.database_interface.get_begin_row_from_time( clicked_time) self.init_end_time = self.database_interface.get_end_row_from_time( clicked_time) current_project_name = self.init_begin_time["project_name"] elif modify_type == "add": current_project_name = None # Create the window begin_printed_time, end_printed_time = self.get_printed_times( clicked_time, modify_type) project_names = self.database_interface.get_project_names() self.modify_calendar_window = ModifyCalendarWindow( parent_window, modify_type, begin_printed_time, end_printed_time, project_names, current_project_name) # Modify buttons if modify_type == "update_or_delete": self.modify_calendar_window.set_update_button_command( self.update_time_row) self.modify_calendar_window.set_delete_button_command( self.delete_time_row) elif modify_type == "add": self.modify_calendar_window.set_add_button_command( self.add_time_row)
def random():
    """Flask view: draw a random sample of exercises for checked body parts.

    On POST, validates the requested amount, draws that many distinct rows
    matching the checked body parts and renders them; otherwise renders the
    empty form (optionally with an error message).
    """
    message = ''
    # All distinct body parts from the DB, one per entry, sorted
    all_body_part_from_db = generate_all_body_part_from_db_lst('exercise.db')
    if request.method == 'POST':
        # Body parts whose checkboxes were ticked
        body_parts_lst_from_checkbox = generate_body_parts_lst_from_checkbox_lst(
            all_body_part_from_db)
        # SELECT query restricted to the ticked body parts
        select_str = generate_select_string_for_random(
            body_parts_lst_from_checkbox)
        # Validate the requested amount: must be a positive integer.
        # BUG FIX: the original tested `amount != 0`, comparing the form
        # *string* against the int 0 — always true, so "0" slipped through.
        amount = request.form.get('amount')
        if amount.isdigit() and int(amount) != 0:
            connect_to_db = DatabaseInterface('exercise.db')
            table = connect_to_db.select_query(select_str)
            wanted = int(amount)
            if wanted <= len(table):
                # BUG FIX: collect into a set until we have `wanted`
                # *distinct* rows. The original deduplicated after the
                # loop and could return fewer rows than requested.
                chosen = set()
                while len(chosen) < wanted:
                    chosen.add(choice(table))
                return render_template('random.html',
                                       items=all_body_part_from_db,
                                       random_list=list(chosen),
                                       table=1)
            else:
                message = (f'IN DATABASE YOU HAVE ONLY {len(table)} STRINGS. '
                           f'PLEASE ENTER LESS THAN {len(table) + 1}.')
        else:
            message = "You need to enter only positive digits"
    return render_template('random.html', items=all_body_part_from_db,
                           message=message)
def __init__(self, parent_window, caller_window, window_type): self.database_interface = DatabaseInterface() # Get the window of the caller self.caller_window = caller_window # Create the window project_names = self.database_interface.get_all_project_names() self.modify_project_names_window = ModifyProjectNamesWindow( parent_window, window_type, project_names) if window_type == "modify": self.update_is_hidden() # Add and delete buttons if window_type == "add": self.modify_project_names_window.set_modify_button_command( self.add_project) elif window_type == "modify": self.modify_project_names_window.set_update_is_hidden_command( self.update_is_hidden) self.modify_project_names_window.set_show_hide_button_command( self.show_hide_project) self.modify_project_names_window.set_modify_button_command( self.delete_project) # Click enter in the entry field if window_type == "add": self.modify_project_names_window.set_modify_project_entry_command( self.add_project)
def __init__(self):
    """Initialise the brewing-controller state, PID, stepper motor and the
    temperature-control thread (which starts running immediately)."""
    self.TemperatureSetPoint = float(0.0)  # target temperature
    self.Temperature = 0.0  # last measured temperature
    self.StepsPerRevolution = 0
    self.brewageId = None  # measurements are tagged with this id
    self.db = DatabaseInterface(databaseName)
    # self.db.createDefaultConfiguration()
    # self.db.createDefaultMashing()
    # self.db.createDefaultBrewage()
    # PID tuning constants (re-applied each control cycle in run())
    self.P = 10
    self.I = 1
    self.D = 1
    self.PID_Output = 0
    self.outputPV = 0  # clamped 0-100 value sent to the motor
    self.regelaarActive = False  # "regelaar" = controller (Dutch); gates the PID
    self.prevOutputPV = 0
    self.pid = PID.PID(self.P, self.I, self.D)
    self.pid.setSampleTime(1)
    self.motor = StepperMotor()
    self.thread = threading.Thread(target=self.run, args=())
    self.runGetTempThread = True  # set False to stop the run() loop
    self.thread.start()  # Start the execution
def get_step(self, key):
    """Return the value stored under *key* in the 'currentinfo' collection."""
    database = DatabaseInterface()
    record = database.getone_db(self.db_name, 'currentinfo', {}, [key])
    return record[key]
def run(self):
    """Temperature-control loop: read the sensor once a second and, while
    the controller is active, run the PID and drive the stepper motor.

    Runs until self.runGetTempThread becomes falsy.  The motor is only
    touched (and a measurement row logged) when the clamped output changes.

    Fixes: removed the unused local ``prevOutputPv`` (the code actually
    uses ``self.prevOutputPV``) and the non-idiomatic ``== True`` test.
    """
    dbInterface = DatabaseInterface(databaseName)
    while self.runGetTempThread:
        self.Temperature = self.ReadDS18B20("28-000008717fea")
        if self.regelaarActive:
            # Re-apply the gains each cycle so live tuning takes effect
            self.pid.setKp(self.P)
            self.pid.setKi(self.I)
            self.pid.setKd(self.D)
            self.pid.SetPoint = self.TemperatureSetPoint
            self.pid.update(float(self.Temperature))
            self.PID_Output = self.pid.output
            # Clamp the PID output to the motor's 0-100 range
            self.outputPV = max(min(int(self.PID_Output), 100), 0)
            if self.prevOutputPV != self.outputPV:
                # Only change motor when the output changed
                self.motor.setOutput(self.outputPV)
                dbInterface.insertMeasurement(self.brewageId,
                                              self.TemperatureSetPoint,
                                              self.Temperature,
                                              self.outputPV)
                self.prevOutputPV = self.outputPV
        # print ( "Target: %.1f C | Current: %.1f C | OutputPV: %d" % (self.TemperatureSetPoint, self.Temperature, self.outputPV))
        time.sleep(1)
def updatesignal(pair, timeframe, usedb, exchangename=None):
    """Raise the data-update signal flag ('sig' = '1') for a pair/timeframe
    in the DATAUPDATESIG collection (upserting the row if missing)."""
    database = DatabaseInterface(usedb)
    pair, timeframe = format_input(pair, timeframe, exchangename)
    selector = {'timeframe': timeframe, 'pair': pair}
    database.db_updateone(selector, {'sig': '1'}, 'DATAUPDATESIG',
                          upserts=True)
def start_message(message):
    """Telegram handler: send every row of the exercise DB as one message.

    FIX: the original called select_from_db() twice (discarding the first
    result) and built the reply with quadratic string concatenation.
    """
    connect_to_db = DatabaseInterface('exercise.db')
    lines = []
    for row in connect_to_db.select_from_db():
        # One line per row; columns separated (and terminated) by a space
        lines.append('\n' + ''.join(str(col) + ' ' for col in row))
    bot.send_message(message.chat.id, ''.join(lines))
def update_currentinfo(self):
    """Refresh the cached ticker list stored in 'currentinfo' and stamp
    the 'ticker' update date."""
    database = DatabaseInterface()
    fetcher = StockGetAll(self.token)
    tickers = fetcher.get_api_tickers()
    database.update_db(self.db_name, 'currentinfo', {},
                       {'$set': {'currentTickerList': tickers}})
    database.update_db_date(self.db_name, 'ticker')
def update_new_ticker(self):
    """Fetch base info for tickers not yet in 'stockbase' and store them."""
    fetcher = StockGetAll(self.token)
    database = DatabaseInterface()
    # Tickers present in the market list but missing from our collection
    new_tickers = self.get_newticker()
    # Pull their base records and write them in one batch
    new_records = fetcher.get_api_stockbase(new_tickers)
    database.write_db(self.db_name, 'stockbase', new_records)
def getdata(pair, bolllen):
    """Load the most recent *bolllen* 5-minute candles for *pair*, oldest
    first, indexed by datetime."""
    database = DatabaseInterface('data_bitmex')
    candles = database.db_find([], 'KLINE5m', filter_dic={'pair': pair},
                               sort=[('time', -1)], limit=bolllen)
    candles.index = [base.timestamp_toDatetime(float(ts))
                     for ts in candles['time']]
    return candles.sort_values('time')
def update_stockbase_Equ(self,fields):
    """Refresh the given Equ-interface *fields* for every ticker in the
    'stockbase' collection (Python 2 module: print statements)."""
    col_name='stockbase'
    stkif=StockInterface(self.token)
    db=DatabaseInterface()
    tickers=db.get_db_tickers()
    for t in tickers:
        # Fetch the field dict for this ticker (first record only)
        Equ_dics=stkif._getEqu(fields,t)[0]
        db.update_db(self.db_name,col_name,{"ticker":t},{"$set":Equ_dics})
        print 'update stockbase data with ticker.no'+t+'....'
def write_trade_data(self): stockbase_data=['ticker','secShortName','industryID1', 'industryID2','industryID3'] #在tradedata中的字段 stocktrade_data=['date','close'] db=DatabaseInterface() tickers=db.get_db_tickers() for t in tickers: db.getone_db(self.db_name,'stockbase',{'ticker':t}, stockbase_data)
def update_stockbase_SecTips(self,tips):
    """Set tipsTypeCD = *tips* for every ticker the SecTips API reports in
    that state ('H' = suspended, 'R' = resumed, per the caller's comments
    in update_stockbase). Python 2 module."""
    col_name='stockbase'
    fields='ticker'
    stkif=StockInterface(self.token)
    db=DatabaseInterface()
    # Tickers reported by the SecTips API for this state
    tickers_dic=stkif._getSecTips([fields],tips)
    tickers=self.unpack_dic(fields,tickers_dic)
    updates={"$set":{'tipsTypeCD':tips}}
    for t in tickers:
        print t+'today turn to'+tips
        db.update_db(self.db_name,col_name,{'ticker':t},updates)
def generate_all_body_part_from_db_lst(db):
    """Return a sorted list of all distinct body parts stored in *db*.

    Each DB row holds a comma-separated body-part string in column 0.
    """
    connect_to_db = DatabaseInterface(db)
    rows = connect_to_db.select_parts()
    distinct_parts = {part for row in rows for part in row[0].split(',')}
    return sorted(distinct_parts)
def load_metadata():
    """Periodic task: compute orphan counts from DB metadata, dump them via
    *li*, and reschedule itself on scheduler *s*."""
    di = DatabaseInterface()
    meta = di.getMetaData()
    # Groups not attributable to a complete match (2 groups per match)
    # NOTE(review): raises ZeroDivisionError if matches_complete_count == 0
    meta['orphaned_groups'] = meta['groups_count'] % (
        meta['matches_complete_count'] * 2)
    # Players not attributable to a group (5 players per group)
    # NOTE(review): likewise fails when groups_count == 0
    meta['orphaned_players'] = meta['players_count'] % (meta['groups_count'] * 5)
    li.dump_meta(meta)
    del di
    s.enter(schedule_time, 1, load_metadata)
class WebServer(object):
    """Simulated web server wiring the recommender components together.

    NOTE(review): this variant differs from the other WebServer in this
    file — start() calls increment() before building the engine, and the
    engine is fed db.connTable[...] rather than db.extract(...); confirm
    which wiring is canonical.
    """
    logging.basicConfig(level=logging.INFO)

    def __init__(self, configMap):
        self.db = DatabaseInterface(configMap['data_dir'])
        self.numberToServe = configMap['numberToServe']
        self.log = logging.getLogger(__name__)

    # numberToServe: the number of items finally served to the users
    def start(self):
        """Start the DB engine, build the pipeline, and run the first
        offline training pass (via increment())."""
        # each object here simulates the API calls through network
        # passing an object A to the constructor of B means A will communication to B
        self.db.startEngine()
        self.ranker = Ranker(self.numberToServe, self.db)
        self.user_analyzer = UserAnalyzer()
        self.model_store = ModelStore()
        self.online_learner = OnlineLearner(self.db, self.model_store)
        self.offline_learner = OfflineLearner(self.db, self.model_store)
        self.increment()
        self.rec_engine = RecEngine(
            self.user_analyzer, self.model_store,
            self.db.connTable[DatabaseInterface.USER_ACTIVITY_KEY])

    def getAction(self, action):
        """Feed a user action to the online learner; persist it when the
        user is registered."""
        assert (isinstance(action, Action))
        # analyze user type
        user_type = self.user_analyzer.analyzeAction(action)
        self.online_learner.trainModel(action)
        if user_type == "registered":
            self.log.info("Recording action %s", action)
            self.db.putAction(action)

    def provideRecommendation(self, request):
        """Return the reranked item ids recommended for *request*."""
        # return the ID's for the recommended items
        assert (isinstance(request, Request))
        recommendations = self.rec_engine.provideRecommendation(request)
        item_ids = self.ranker.rerank(recommendations)
        return item_ids

    def renderRecommendation(self, request):
        """Return the inventory rows for the recommended items, id-sorted."""
        assert (isinstance(request, Request))
        item_ids = self.provideRecommendation(request)
        return self.getFromInventory(item_ids).sort_index()

    def increment(self):
        """Advance one 'day': reset online models, retrain offline models."""
        self.log.info("incrementing the system, update the models")
        # increment the whole system by one day, trigger offline training
        self.model_store.cleanOnlineModel()
        self.offline_learner.trainModel()

    def getFromInventory(self, itemId):
        """Look up item rows by id in the inventory table."""
        return self.db.extract(DatabaseInterface.INVENTORY_KEY).loc[itemId]
def get_newticker(self):
    """Return tickers present in the live market list but not yet stored
    in the 'stockbase' collection (Python 2: print statements)."""
    col_name='stockbase'
    key='ticker'
    db=DatabaseInterface()
    # Current full ticker list from the market
    tickers_new=db.get_db_tickers()
    print 'getting new tickers with length:'+str(len(tickers_new))+'....'
    clause={}
    field=[key]
    # Tickers already stored locally
    stockbase_data=db.get_db(self.db_name,col_name,clause,field)
    tickers_old=self.unpack_dic(key,stockbase_data)
    print 'getting old tickers with length:'+str(len(tickers_old))+'....'
    tickers_addition=list(set(tickers_new)-set(tickers_old))
    return tickers_addition
def select_parts():
    """Flask view: list all distinct body parts, numbered from 1.

    FIX: builds the (id, name) pairs with enumerate() instead of a manual
    range loop plus zip; rendered output is unchanged.
    """
    connect_to_db = DatabaseInterface('exercise.db')
    # Rows hold comma-separated body parts in column 0; flatten and dedupe
    body_parts = set()
    for row in connect_to_db.select_parts():
        body_parts.update(row[0].split(','))
    numbered_parts = list(enumerate(sorted(body_parts), start=1))
    return render_template('index.html', table=2, items=numbered_parts)
def initdata_from_okex(pair, timeframe, usedb):
    """Seed the KLINE collection with ~3 months of quarterly-contract
    candles from OKEx.  Returns True if any data was written."""
    db = DatabaseInterface(usedb)
    api = OKCoinFuture()
    fpair, ftimeframe = format_input(pair, timeframe)
    # Fetch up to 5000 candles starting 3 months back
    kline = api.future_kline(pair, 'quarter', timeframe=timeframe,
                             period={'months': -3}, size=5000)
    if not kline.empty:
        # Keep only the canonical columns and tag with the formatted pair
        kline = kline[[
            'high', 'low', 'open', 'close', 'time', 'volume', 'pct_change'
        ]]
        kline['pair'] = fpair
        db.db_insertdataframe(kline, 'KLINE' + ftimeframe)
        return True
    else:
        return False
def update_stockbase(self):
    """Full refresh of the 'stockbase' collection (Python 2).

    Steps: insert newly listed tickers, refresh share-count fields via the
    Equ interface, refresh suspended ('H') / resumed ('R') states, then
    stamp the collection's update date.
    """
    col_name='stockbase'
    db=DatabaseInterface()
    # Write info for the newly added tickers
    self.update_new_ticker()
    print 'new tickers update!'
    # Refresh stockbase share data
    fields=["nonrestFloatShares","totalShares"
            ,"nonrestfloatA"]
    # Update fields from the Equ interface
    self.update_stockbase_Equ(fields)
    print 'shares data update!'
    # Update suspended / resumed states
    self.update_stockbase_SecTips('H')
    print 'H state update!'
    self.update_stockbase_SecTips('R')
    print 'R state update!'
    db.update_db_date(self.db_name,col_name)
    print 'stockbase update finished....'
class WebServer(object):
    """Simulated web server wiring the recommender components together:
    database, learners, model store, recommendation engine and ranker."""
    logging.basicConfig(level=logging.INFO)

    def __init__(self, configMap):
        self.db = DatabaseInterface(configMap['data_dir'])
        # numberToServe: the number of items finally served to the users
        self.numberToServe = configMap['numberToServe']
        self.log = logging.getLogger(__name__)

    def start(self):
        """Start the DB engine, train the offline models, build the engine."""
        # each object here simulates the API calls through network
        # passing an object A to the constructor of B means A will communication to B
        self.db.startEngine()
        self.ranker = Ranker(self.numberToServe, self.db)
        self.userAnalyzer = UserAnalyzer()
        self.modelStore = ModelStore()
        self.offlineLearner = OfflineLearner(self.db, self.modelStore)
        self.onlineLearner = OnlineLearner(self.db, self.modelStore)
        self.offlineLearner.trainModel()
        # when we start the webserver, we should let offline learner to train the models,
        # such that, after the start(), we can start to give recommendation
        self.recEngine = RecEngine(self.userAnalyzer, self.modelStore,
                                   self.db.extract(DatabaseInterface.USER_ACTIVITY_KEY))

    def getAction(self, action):
        """Feed a user action to the online learner; persist it when the
        user is registered."""
        assert (isinstance(action, Action))
        # taking the action from users
        self.onlineLearner.trainModel(action)
        # analyze action type, and save the registered user's action
        actionType = self.userAnalyzer.analyzeAction(action)
        if actionType == "registered":
            self.log.info("Recording action %s" % action)
            self.db.putAction(action)

    def provideRecommendation(self, request):
        """Return the reranked list of recommended item ids."""
        # return the ID's for the recommended items
        assert (isinstance(request, Request))
        # provide recommendations to user
        self.log.info("responding to request: %s" % request)
        recommendations = self.recEngine.provideRecommendation(request)
        recsReranked = self.ranker.rerank(recommendations)
        return recsReranked  # a list of item ids

    def renderRecommendation(self, request):
        """Return the inventory rows for the recommendation, ordered by id."""
        assert (isinstance(request, Request))
        recsReranked = self.provideRecommendation(request)
        # for the purpose of testing, we sort the index, output item names
        # output is ordered by the id value
        return self.db.extract(DatabaseInterface.INVENTORY_KEY).loc[recsReranked].sort_index()

    def increment(self):
        """Advance one 'day': retrain offline, clear online models, reset cache."""
        self.log.info("incrementing the system, update the models")
        # increment the whole system by one day, trigger offline training
        self.offlineLearner.trainModel()
        self.modelStore.cleanOnlineModel()
        self.recEngine.resetCache()

    def getFromInventory(self, itemId):
        """Look up item rows by id in the inventory table."""
        return self.db.extract(DatabaseInterface.INVENTORY_KEY).loc[itemId]
def write_currentinfo(self):
    """Rebuild the 'currentinfo' collection from the API snapshot.

    NOTE(review): this calls db.update_date(...) while the sibling
    update_currentinfo() calls db.update_db_date(...) — confirm both
    methods exist on DatabaseInterface (one may be a typo).
    """
    col_name='currentinfo'
    api=StockGetAll(self.token)
    db=DatabaseInterface()
    # Drop-and-rewrite: clear the collection, then insert a fresh snapshot
    db.drop_db_docs(self.db_name,col_name)
    currentinfo_data=api.get_api_currentinfo()
    db.write_db_withlog(self.db_name,col_name,currentinfo_data)
    db.update_date(self.db_name,'ticker')
def __init__(self, parent_window):
    """Build the calendar window and wire week-navigation buttons and
    project-block click handlers."""
    # Database interface
    self.database_interface = DatabaseInterface()
    # Create the window
    project_names = self.database_interface.get_project_names()
    times_df = self.database_interface.get_dataframe_times()
    self.calendar_window = CalendarWindow(parent_window, project_names,
                                          times_df)
    # Prev and next week buttons
    self.calendar_window.set_prev_week_button_command(self.set_prev_week)
    self.calendar_window.set_next_week_button_command(self.set_next_week)
    # Click inside the canvas
    self.calendar_window.set_project_blocks_command(
        self.update_or_delete_project_block)
    self.calendar_window.set_add_project_blocks_command(
        self.add_project_block)
def updateklinedata(pair, timeframe, exchangename, usedb, rl, tf_asminute):
    """Incrementally update the KLINE collection for a pair/timeframe.

    Starts one second after the newest stored candle and fetches up to now
    (shifted back one bar when rl == 'l', i.e. only fully closed bars).
    Falls back to a full OKEx backfill when the collection is empty.
    Returns True when new rows were inserted, False otherwise.
    """
    # Start from the latest stored candle's time + 1s, fetch up to now
    db = DatabaseInterface(usedb)
    fpair, ftimeframe = format_input(pair, timeframe, exchangename)
    lastrow = db.db_findone('KLINE' + ftimeframe,
                            filter_dic={'pair': fpair},
                            sel_fields=[],
                            sort=[("time", -1)])
    print(ftimeframe + fpair + usedb)
    if lastrow is None:
        if exchangename == 'okex':
            return initdata_from_okex(pair, timeframe, usedb)
        else:
            print(ftimeframe + fpair + usedb)
            assert False, '数据库中没有找到数据'
    print(type(tf_asminute))
    # 'l' = only fetch fully closed bars: shift the end back one bar
    if rl == 'l':
        shiftsec = 60 * int(tf_asminute) * 1000  # one bar, in milliseconds
    else:
        shiftsec = 0
    lasttime = base.timestamp_toStr(float(lastrow['time']) + 1000,
                                    dateformat="%Y%m%d %H:%M:%S")
    endtime = base.timestamp_toStr(
        time.time() * 1000 - shiftsec,
        dateformat="%Y%m%d %H:%M:%S") if rl == 'l' else None
    # Exchange k-line fetch
    # print(lasttime)
    # NOTE(review): this print raises TypeError when endtime is None
    # (i.e. whenever rl != 'l') — str + None
    print(lasttime + ',' + endtime)
    df = getkline(pair, timeframe, exchangename, lasttime, end=endtime)
    # NOTE(review): df.shape raises AttributeError if getkline returned
    # None; the None check below comes too late for this print
    print(df.shape[0])
    # df=None
    # No new data -> no update
    if not df is None:
        # Patch the first row's pct_change against the last stored close
        df['pct_change'].iloc[0] = (df['close'].iloc[0] -
                                    lastrow['close']) * 100 / lastrow['close']
        # if rl=='l':
        #     df['time']=[t+shiftsec for t in df['time']]
        db.db_insertdataframe(df, 'KLINE' + ftimeframe)
        return True
    else:
        return False
def insert_exercise():
    """Flask view: add an exercise row from the POSTed form.

    FIX: collapses four duplicated render_template exits into a single
    return; the messages and control flow are unchanged.
    """
    message = ''
    if request.method == 'POST':
        name = request.form.get('name')
        body_part = request.form.get('body_part')
        about = request.form.get('about')
        pic_link = request.form.get('pic_link')
        connect_to_db = DatabaseInterface('exercise.db')
        all_rows = connect_to_db.select_for_insert()
        if name == '' or body_part == '' or about == '' or pic_link == '':
            message = 'You need to enter all values'
        elif (name, body_part, about, pic_link) not in all_rows:
            # New row: insert it and confirm
            connect_to_db.add_exercise(name, body_part, about, pic_link)
            message = f'String with {name}, {body_part}, {about}, {pic_link} added to DB'
        else:
            message = 'This data is already in table'
    return render_template('insert.html', message=message)
def get_db_stocktradeadj(self,step,beginDate,endDate=''):
    """Generator yielding adjusted trade data per ticker, resuming after
    the first *step*+1 tickers (Python 2)."""
    # Default the end date to today when not supplied
    if not endDate:
        endDate=self.today_as_str()
    # Ticker list
    db=DatabaseInterface()
    tickers=db.get_db_tickers()
    # Resume after the last interrupted step
    tickers=tickers[step+1:]
    print 'started from ticker: '+tickers[0]
    col_name='stockbase'
    for t in tickers:
        ticker_trade={}
        ticker_trade['ticker']=t
        ticker_trade['tradedata']=self.stkif._getTradeDataAdj(t,beginDate=beginDate,endDate=endDate)
        # Attach the industry classification from 'stockbase'
        ind=db.getone_db(self.db_name,col_name,{"ticker":t},['industryID3'])
        ticker_trade['industryID3']=ind['industryID3']
        print 'get ticker:'+t+'.....'
        yield ticker_trade
def __init__(self, root):
    """Build the main window and wire every button to its controller
    callback."""
    # Variables
    self.database_interface = DatabaseInterface()
    self.root = root
    # Time format
    self.time_format = '%Y-%m-%d %H:%M:%S'
    # Get variables to create the main window
    last_action = self.database_interface.get_last_action()
    if last_action is None:
        # Placeholder shown before any work has been recorded
        last_action = {
            "time": "N/A N/A",
            "project_name": "N/A",
            "action_type": "N/A"
        }
    project_names = self.database_interface.get_project_names()
    # Create the main window
    self.main_window = MainWindow(self.root, project_names, last_action)
    # Start and stop buttons
    self.main_window.set_start_button_command(self.add_work_time)
    self.main_window.set_stop_button_command(self.end_work_time)
    # Add and delete project buttons
    self.main_window.set_add_project_button_command(
        self.create_add_project_window)
    self.main_window.set_modify_project_button_command(
        self.create_modify_project_window)
    # self.main_window.set_delete_project_button_command(self.create_delete_project_window)
    # Summary button
    self.main_window.set_summary_button_command(self.create_summary_window)
    # Calendar button
    self.main_window.set_calendar_button_command(
        self.create_calendar_window)
def load_upcoming():
    """Periodic task: fetch upcoming matches and write the new ones to the
    database, then reschedule itself on scheduler *s*."""
    di = DatabaseInterface()
    try:
        up_matches = getUpcomingMatches(20)
        for m in up_matches:
            match_id = m[0]
            start_time = m[1]  # NOTE(review): unused
            if(not di.checkUpcomingMatchInDatabase(match_id)):
                #li.log(match_id.split('/')[2] + " available", type="success")
                # weird that the except warrants success. That's fine though
                try:
                    di.writeMatch(match_id)
                except LineupIncompleteException as err:
                    # Incomplete lineups are expected; skip this match silently
                    #li.log(traceback.format_exc(), type='traceback')
                    pass
                except WriteMatchException as err:
                    li.log(traceback.format_exc(), type='traceback')
                    pass
            else:
                li.log("already collected " + match_id)
    except Exception as err:
        # Catch-all so the scheduler keeps running; log and continue
        li.log(traceback.format_exc(), type='traceback')
        li.log(type(err).__name__, type='error')
    del di
    s.enter(schedule_time, 1, load_upcoming)
def delete_exercise():
    """Flask view: delete an exercise row by its numeric id.

    FIX: the original called select_id() twice, discarding the first
    result; also corrects the 'does not exists' grammar in the message.
    """
    message = ''
    if request.method == 'POST':
        id_number = request.form.get('id_number')
        connect_to_db = DatabaseInterface('exercise.db')
        if id_number.isdigit():
            # select_id() returns rows of 1-tuples of ids
            if (int(id_number),) in connect_to_db.select_id():
                connect_to_db.delete_exercise(id_number)
                message = f'String with ID = {id_number} deleted from DB'
            else:
                message = f'String with ID = {id_number} does not exist!'
        else:
            message = 'You need to enter positive digit!'
    return render_template('delete.html', message=message)
class WebServer(object):
    """Skeleton of the simulated web server.

    NOTE(review): most method bodies and several string literals appear to
    have been stripped/redacted (the "******" fragments below) — this
    block does NOT parse as-is and needs restoring from the full
    implementation elsewhere in this file.
    """
    logging.basicConfig(level=logging.INFO)
    def __init__(self, configMap):
        self.db = DatabaseInterface(configMap['data_dir'])
        self.numberToServe = configMap['numberToServe']
        self.log = logging.getLogger(__name__)
    # numberToServe: the number of items finally served to the users
    def start(self):
        # each object here simulates the API calls through network
        # passing an object A to the constructor of B means A will communication to B
    def getAction(self,action):
    def provideRecommendation(self, request):
        # return the ID's for the recommended items
    def renderRecommendation(self, request):
    def increment(self):
        self.log.info("incrementing the system, update the models")
        # increment the whole system by one day, trigger offline training
    def getFromInventory(self, itemId):
        return self.db.extract(DatabaseInterface.INVENTORY_KEY).loc[itemId]

# simulate a web request
class Request(object):
    def __init__(self, userId):
        self.userId = userId
    def __str__(self):
        # NOTE(review): the "******" run below is redaction residue fused
        # with what looks like a second class's __str__ — restore manually
        return "request for user: "******"user: %s, item: %s, rating %s" %(self.userId, self.itemId, self.rating)
def write_stocktradeadj(self,beginDate,endDate=''):
    """Scrape adjusted trade data into 'stocktrade', resuming from the
    last recorded step (Python 2)."""
    col_name='stocktrade'
    api=StockGetAll(self.token)
    db=DatabaseInterface()
    # Step where the previous run stopped
    key=col_name+'step'
    step=self.get_step(key)
    # If a run was interrupted, continue from there; otherwise (step == -1)
    # re-scrape from scratch and clear the old data first
    if step==-1:
        db.drop_db_docs(self.db_name,col_name)
        print col_name+' all clear!'
    print 'scraping from'+str(step+1)+' step....'
    data=api.get_db_stocktradeadj(step,beginDate=beginDate,endDate=endDate)
    db.write_db_withlog(self.db_name,col_name,data)
    db.update_db_date(self.db_name,col_name)
class SummaryController:
    """Controller that builds and displays the per-month/per-project
    summary of hours worked."""

    def __init__(self, parent_window):
        # Database interface
        self.database_interface = DatabaseInterface()
        self.summary_window = SummaryWindow(parent_window,
                                            self.create_summary_table())

    def create_summary_table(self):
        """Return a DataFrame with one row per project and one column per
        month, holding the hours worked (rounded to 2 decimals).  Returns
        the raw (empty) frame unchanged when there is no data."""
        df = self.database_interface.get_dataframe_times()
        if df.empty:
            return df
        df["time"] = pd.to_datetime(df["time"])
        # Pair each row with the previous row's timestamp; the difference
        # on an END row is the duration of that work session
        df["last_time"] = df["time"].shift()
        df["time_range"] = df["time"] - df["last_time"]
        # Keep only END rows and the columns of interest
        df = df.loc[df["action_type"] == "END",
                    ["project_name", "time", "time_range"]]
        # Sum the durations per (month, project)
        df = (df.set_index("time")
                .groupby([pd.Grouper(freq='M'), "project_name"])
                .sum())
        # Convert the summed durations to hours
        df["time_range"] = (df["time_range"] / pd.Timedelta(hours=1)).round(2)
        df = df.reset_index()
        # Label each month like "Jan (h)"
        df["time"] = df['time'].dt.month.apply(
            lambda x: "%s (h)" % (calendar.month_abbr[x]))
        df = df.rename(columns={"time": "month",
                                "project_name": "Project Name"})
        # One column per month
        df = df.pivot(index='Project Name', columns='month',
                      values='time_range')
        return df.reset_index().fillna(0)
def write_fundstockinfo(self,beginYear,endYear=''):
    """Rebuild the 'stockfunds' collection from the funds API.

    NOTE(review): the fetched data is never written — unlike the sibling
    write_* methods there is no db.write_db_withlog(...) call here, yet
    the collection has already been dropped.  Looks unfinished; confirm.
    """
    col_name='stockfunds'
    api=StockGetAll(self.token)
    db=DatabaseInterface()
    db.drop_db_docs(self.db_name,col_name)
    data=api.get_db_fundstocksinfo(beginYear=beginYear,endYear=endYear)
# dataId, clusterId # 1 1 # 2 1 # 3 2 # 4 2 # and we want {1:[1,2],2:[3,4]}, called self.groups for k, v in zip(self.labels, itemFeatures.index.tolist()): self.groups.setdefault(k, []).append(v) self.trained = True def predict(self, itemFeatures): centers = self.model.predict(itemFeatures) # based on the predicted centers, find the corresponding cluster members return centers, [self.groups[c] for c in centers] if __name__ == "__main__": from DatabaseInterface import DatabaseInterface db = DatabaseInterface("../DATA") db.startEngine() itemFeatureTable = db.extract( DatabaseInterface.ITEM_FEATURE_KEY).loc[:, "unknown":] model = ClusteringModel() model.train(itemFeatureTable) print model.predict(itemFeatureTable.loc[1].values.reshape(1, -1)) print itemFeatureTable.loc[[1, 422]] print model.labels[:20]
# X must be a dataframe, with the second key as itemID, and third key as ratings itemID = list(history)[1] ratings = list(history)[2] # what if only an item only got rated by one user, and the rating is 5, are we confident it is most popular? nLimit = int(history.shape[0] * self.N_Freq_limit) itemRatingGrouped = history.groupby(itemID) itemRatingGroupedCount = itemRatingGrouped[ratings].count() # print itemRatingGrouped[ratings].mean() self.mostPopular = itemRatingGrouped[ratings].mean()[ itemRatingGroupedCount > nLimit].sort_values(ascending=False) def predict(self, X): # X can only be a list of itemID's return [self.mostPopular.index.get_loc(x) for x in X] def provideRec(self): return self.mostPopular.index.tolist() if __name__ == "__main__": from DatabaseInterface import DatabaseInterface db = DatabaseInterface("DATA") db.startEngine() df = db.extract("history") print df.head() model = MostPopularModel() model.train(df) print model.mostPopular print model.predict([408]) print model.provideRec()
#!/usr/bin/env python #!/usr/bin/python import cgi, cgitb import sys import json sys.path.append('/var/www/elo/database_interface') from DatabaseInterface import DatabaseInterface import elo form = cgi.FieldStorage() player_id = form.getvalue('player_id') league_id = form.getvalue('league_id') db = DatabaseInterface() playerId = db.addPlayerToLeague(player_id, league_id) print "Content-type:text/plain\n\n"
def __init__(self, configMap):
    """Store the config-driven collaborators: DB handle, serving size,
    and module logger."""
    self.db = DatabaseInterface(configMap['data_dir'])
    self.log = logging.getLogger(__name__)
    # numberToServe: the number of items finally served to the users
    self.numberToServe = configMap['numberToServe']
#!/usr/bin/env python #!/usr/bin/python import cgi, cgitb import sys import json import os sys.path.append(os.path.join(os.path.dirname(sys.path[0]),'database_interface')) from DatabaseInterface import DatabaseInterface from constants import LEAGUE_ID form = cgi.FieldStorage( ) p1_id = int( form.getvalue( 'p1_id' ) ) p2_id = int( form.getvalue( 'p2_id' ) ) p3_id = int( form.getvalue( 'p3_id' ) ) p4_id = int( form.getvalue( 'p4_id' ) ) #league_id = form.getvalue( 'league_id' ) league_id = LEAGUE_ID; print "Content-type:text/plain\n\n" db = DatabaseInterface( ) print json.dumps( db.getExpectedScore( p1_id, p2_id, p3_id, p4_id, league_id ) )
@author: warriorzhai
"""
import os
# NOTE(review): hard-coded developer path; breaks on any other machine
os.chdir('C:/Users/warriorzhai/Desktop/project/stockAPI/stockAPI')
from DatabaseInterface import DatabaseInterface

# To switch databases, edit the following three values
user_name='root'
psw='root'
# NOTE(review): this URI looks mangled by redaction ("%[email protected]") —
# "%[" is not a valid format spec, so the % interpolation below will raise
# ValueError. Restore the real "mongodb://%s:%s@host:port/db" form.
MONGODB_URI ='mongodb://%s:%[email protected]:41394/tempdb'
MONGODB_URI=MONGODB_URI % (user_name,psw)
dbin=DatabaseInterface(MONGODB_URI=MONGODB_URI)
db_name='tempdb'
col_name='tempcol'
# Test connect_db
connection=dbin.connect_db(db_name,col_name)
connection.count()
# Test update_db
clause={"fund_name":"test"}
updates={"$set":{"fund_return":"2"}}
dbin.update_db(db_name,col_name,clause,updates,upsert=False,
               multi=False)
ratings = list(history)[2] # what if only an item only got rated by one user, and the rating is 5, are we confident it is most popular? nLimit = int(history.shape[0] * self.N_Freq_limit) itemRatingGrouped = history.groupby(itemID) itemRatingGroupedCount = itemRatingGrouped[ratings].count() # print itemRatingGrouped[ratings].mean() self.mostPopular = itemRatingGrouped[ratings].mean()[itemRatingGroupedCount > nLimit].sort_values( ascending=False) def predict(self, X): # X can only be a list of itemID's return [self.mostPopular.index.get_loc(x) for x in X] def provideRec(self): return self.mostPopular.index.tolist() if __name__ == "__main__": from DatabaseInterface import DatabaseInterface db = DatabaseInterface("DATA") db.startEngine() df = db.extract("history") print df.head() model = MostPopularModel() model.train(df) print model.mostPopular print model.predict([408]) print model.provideRec()
def get_api_stockbase_all(self,step):
    """Fetch base info for every ticker after the resume point *step*."""
    # Full current ticker list, skipping the first step+1 entries
    remaining = DatabaseInterface().get_db_tickers()[step+1:]
    return self.get_api_stockbase(remaining)
#!/usr/bin/env python import sys from DatabaseInterface import DatabaseInterface x = DatabaseInterface() print x.addLeague(sys.argv[1])
# NOTE(review): fragment — the head of train() (signature and the
# rating/THRESHOLD context) lies outside this chunk.
        indices = self.clustering_model.predict(itemFeature)
        # Only recommend similar items when the seed item was rated highly
        if rating >= self.THRESHOLD:
            self.recommendations = indices
        else:
            self.recommendations = []

    def predict(self, itemFeature):
        """Return items in the same cluster as the single-record
        *itemFeature*."""
        # X should be item's category feature, only single record
        # return the similar items
        # itemFeature = itemFeature.values.reshape(1, -1)
        indices = self.clustering_model.predict(itemFeature)
        return indices

    def provideRec(self):
        """Return the recommendations chosen during train()."""
        return self.recommendations

if __name__ == "__main__":
    # Smoke test (Python 2 prints)
    connector = DatabaseInterface("../DATA")
    connector.startEngine()
    itemFeatures = connector.connTable["item_feature"]
    cluster_model = ClusteringModel()
    cluster_model.train(itemFeatures)
    simularity_item_model = SimilarItemModel(cluster_model)
    item_feature = itemFeatures[0:1]
    simularity_item_model.train(item_feature, 3)
    print simularity_item_model.provideRec()
#!/usr/bin/env python #!/usr/bin/python import cgi, cgitb import sys import json sys.path.append('/var/www/elo/database_interface') from DatabaseInterface import DatabaseInterface import elo form = cgi.FieldStorage( ) name = form.getvalue( 'name' ) db = DatabaseInterface( ) playerId = db.addPlayer( name ) print json.dumps( playerId )
# NOTE(review): fragment — the head of crawldata() (the retry loop and
# the fetch that can raise IndexError) lies outside this chunk.
        except IndexError:
            # No data in this window: push the start forward a month, retry
            print('时间段数据不存在,起始时间向后推迟一个月,重试...')
            start = base.date_togapn(start, dateformat="%Y%m%d %H:%M:%S",
                                     months=1)
            time.sleep(10)
            continue
        break
    # res['time']=[str(base.datetime_toTimestamp(t)) for t in res.index]
    # Keep only the configured fields and store the batch
    res = res[selectfields]
    res = res.reset_index(drop=True)
    db.db_insertdataframe(res, conf.collnam)

if __name__ == '__main__':
    # api=BitfinexAPI()
    # api=BitmexAPI()
    db = DatabaseInterface(conf.usedb)
    pairs = [conf.pair]
    # allpairs=['t'+p.upper() for p in api.symbols() if ('usd' in p)]
    # allpairs=[p for p in allpairs if (not p in pairs)]
    selectfields = conf.selectfields
    timeframe = conf.timeframe
    contract_type = conf.contract_type
    period = conf.period
    start, end = conf.start, conf.end
    for p in pairs:
        crawldata(p, timeframe, start, end, selectfields, contract_type,
                  period)
class CalendarController:
    """Wires a CalendarWindow to the time-tracking database.

    Handles week navigation and translates canvas clicks into
    add / update-or-delete actions on project time blocks.
    """

    def __init__(self, parent_window):
        # Database interface
        self.database_interface = DatabaseInterface()
        # Create the window
        project_names = self.database_interface.get_project_names()
        times_df = self.database_interface.get_dataframe_times()
        self.calendar_window = CalendarWindow(
            parent_window, project_names, times_df)
        # Prev and next week buttons
        self.calendar_window.set_prev_week_button_command(self.set_prev_week)
        self.calendar_window.set_next_week_button_command(self.set_next_week)
        # Click inside the canvas
        self.calendar_window.set_project_blocks_command(
            self.update_or_delete_project_block)
        self.calendar_window.set_add_project_blocks_command(
            self.add_project_block)

    # Prev and next week buttons
    def set_prev_week(self):
        """Shift the calendar view one week back and redraw it."""
        reference_day = self.calendar_window.reference_day - timedelta(days=7)
        times_df = self.database_interface.get_dataframe_times()
        self.calendar_window.update_calendar_week(reference_day, times_df)

    def set_next_week(self):
        """Shift the calendar view one week forward and redraw it."""
        reference_day = self.calendar_window.reference_day + timedelta(days=7)
        times_df = self.database_interface.get_dataframe_times()
        self.calendar_window.update_calendar_week(reference_day, times_df)

    # Add, update and remove project blocks
    def get_clicked_time(self, event):
        """Map a canvas click (event.x/event.y in pixels) to a datetime.

        The day comes from the horizontal position relative to the displayed
        week; the time of day comes from linear interpolation of the vertical
        position between the grid's first and last hour.
        """
        # Get clicked time
        x, y = event.x, event.y
        # Get the current day: midnight of the Monday of the displayed week,
        # then advance by the day column the click falls in (int() truncates).
        start_of_week = self.calendar_window.reference_day.replace(
            hour=0, minute=0, second=0, microsecond=0) - timedelta(
            days=self.calendar_window.reference_day.weekday())
        current_day_in_week = int(
            (x - self.calendar_window.left_width_offset) /
            self.calendar_window.between_days_range)
        clicked_day = start_of_week + timedelta(days=current_day_in_week)
        # Get the current hour: interpolate the y offset across the grid
        # between first_hour and last_hour.
        first_hour = timedelta(hours=self.calendar_window.first_hour)
        last_hour = timedelta(hours=self.calendar_window.last_hour)
        clicked_hour = first_hour + (
            (last_hour - first_hour) *
            (y - self.calendar_window.up_heigth_offset)) / (
            self.calendar_window.grid_heigth)
        # Merge in the current time
        clicked_time = clicked_day + clicked_hour
        return clicked_time

    def add_project_block(self, event):
        """Open the modify dialog in 'add' mode at the clicked time."""
        clicked_time = self.get_clicked_time(event)
        # The controller registers its own callbacks on construction; the
        # local binding is not used afterwards.
        modify_calendar_controller = ModifyCalendarController(
            self.calendar_window.w, self.calendar_window, "add", clicked_time)

    def update_or_delete_project_block(self, event):
        """Open the modify dialog in 'update_or_delete' mode at the clicked time."""
        clicked_time = self.get_clicked_time(event)
        modify_calendar_controller = ModifyCalendarController(
            self.calendar_window.w, self.calendar_window, "update_or_delete",
            clicked_time)
# 1 1 # 2 1 # 3 2 # 4 2 # and we want {1:[1,2],2:[3,4]}, called self.groups for k, v in zip(self.labels, itemFeatures.index.tolist()): self.groups.setdefault(k, []).append(v) self.trained = True def predict(self, itemFeatures): centers = self.model.predict(itemFeatures) # based on the predicted centers, find the corresponding cluster members return centers, [self.groups[c] for c in centers] if __name__ == "__main__": from DatabaseInterface import DatabaseInterface db = DatabaseInterface("../DATA") db.startEngine() itemFeatureTable = db.extract(DatabaseInterface.ITEM_FEATURE_KEY).loc[:, "unknown":] model = ClusteringModel() model.train(itemFeatureTable) print model.predict(itemFeatureTable.loc[1].values.reshape(1, -1)) print itemFeatureTable.loc[[1, 422]] print model.labels[:20]
distances, indices = self.knnModel.kneighbors(userFeature) # indices are the nearest neighbors' index in the matrix, which is different from userId. return self.userIds[indices[0]] def provideRec(self, userId): # data is a tuple of (user feature, item feature) userIds = self.predict(self.userFeatureTable.loc[userId].as_matrix().reshape(1, -1)) # remove himself as a nearest neighbor userIds = np.array(list(set(userIds) - set([userId]))) # for all nearest neighbors, compute the the average score, sorted from large to small # then report the item ids return self.ratingsMat[userIds - 1].mean(axis=0).argsort()[::-1] + 1 if __name__ == "__main__": from DatabaseInterface import DatabaseInterface from Learners.OfflineLearner import OfflineLearner db = DatabaseInterface("../DATA") db.startEngine() history = db.extract("history") userFeatureTable = db.extract(DatabaseInterface.USER_FEATURE_KEY).loc[:, "age":] ratingsMat = OfflineLearner.transformToMat(history) model = KNNmodel() model.train(userFeatureTable, ratingsMat) print model.provideRec(97)[:20] print ratingsMat[96, model.provideRec(97) - 1][:20]
# --- continuation of a Ranker method assembling the final recommendation
# list (the method header is outside this chunk) ---
        if "online" in recommendations:
            # online exists as long as user has been active
            results.extend(recommendations["online"][:self.numberToServe])  # should only has one
        if "offline" in recommendations:
            # offline exist only if user are registered, the recs could be from CF or LR
            results.extend(recommendations["offline"][:self.numberToServe])
        if "popular" in recommendations:
            # most popular should always exist
            # if there is no personalized recs, the remaining should be filled by most popular
            results.extend(recommendations["popular"][:self.numberToServe])
        else:
            self.log.error("recommendations do not contain popular items")
        try:
            # remove the already visited items
            results = np.random.choice(list(set(results) - usedItems),
                                       self.numberToServe, replace=False)
        except ValueError:
            # sometimes the user may watched a lot
            # this is apparently not a good strategy, why?
            # NOTE(review): falls back to sampling WITHOUT removing visited
            # items, so already-seen items can be served again.
            results = np.random.choice(results, self.numberToServe, replace=False)
        return results


if __name__ == "__main__":
    # Smoke test: print the items user 1 has already used.
    from DatabaseInterface import DatabaseInterface
    db = DatabaseInterface("DATA")
    db.startEngine()
    ranker = Ranker(numberToServe=10, database=db)
    print sorted(ranker._getUsedItems(1))