async def lazy_sessions(bot: Bot):
    """ Process deferred (lazy) sessions. """
    # Fetch all pending sessions first, closing the result proxy before
    # dispatching, so the fetch connection is released early.
    async with database.connection() as conn:
        proxy = await conn.execute(SQL_FETCH_SESSIONS)
        rows = [i for i in await proxy.fetchall()]
        await proxy.close()
    if not rows:
        return
    # Dispatch each session to its registered handler, then delete the row.
    async with database.connection() as conn:
        for row in rows:
            # Row layout: (id, chat, msg, member, session_type) -- matches
            # the unpacking below; confirm against SQL_FETCH_SESSIONS.
            id = row[0]
            chat = row[1]
            msg = row[2]
            member = row[3]
            session_type = row[4]
            func = manager.events.get(session_type)
            if func and callable(func):
                await func(bot, chat, msg, member)
            # Delete even when no handler is registered, so unknown types
            # do not pile up.
            await conn.execute("delete from lazy_sessions where id=$1", (id, ))
            logger.info(f"lazy session is touched:{id} {session_type}")
        await conn.commit()
def STsearch():
    """Search a teacher by email or, failing that, a student by USN.

    Reads ``email`` from the POSTed form; if that field is absent, falls
    back to ``usn``.  Renders the matching removal page, or flashes a
    message and redirects when nothing matches or the database fails.

    SQL is parameterized (the original interpolated user input with
    str.format, which was injectable).
    """
    try:
        try:
            email = request.form["email"]
            try:
                c, conn = connection()
                # Parameterized query: never interpolate user input into SQL.
                c.execute(
                    "SELECT * FROM teachers WHERE email = %s and secret_key != %s",
                    (email, "super"))
                x = c.fetchall()
                if len(x) > 0:
                    return render_template('remove_teachers.html', teachers=x)
                else:
                    flash("No teacher with an email {}".format(email))
                    return redirect('/show_teachers')
            except Exception:
                flash("Something wrong with database server please try again")
                return redirect('/show_teachers')
        except Exception:
            # No "email" field in the form: treat it as a student USN search.
            usn = request.form["usn"]
            try:
                c, conn = connection()
                c.execute("SELECT * FROM student_data WHERE usn = %s", (usn,))
                x = c.fetchall()
                if len(x) > 0:
                    return render_template('remove_students.html', students=x)
                else:
                    flash("No Student with an USN {}".format(usn))
                    return redirect('/show_students')
            except Exception:
                flash("Something wrong with database server please try again")
                return redirect('/show_students')
    except Exception:
        flash("Something went wrong try again")
        return redirect('/show_students')
def update_job(context):
    """Periodic job: persist fresh country stats and notify subscribed users."""
    with db.connection() as cursor:
        # Diff the last stored snapshot against freshly fetched data.
        updates = jobs.check_updates(
            jobs.get_last_data(db.get(cursor, "*", "country", multiple=True)),
            jobs.get_current(),
        )
    for country in updates.keys():
        for data in updates[country].keys():
            # Only persist absolute fields; keys containing "difference" or
            # "today" are derived values and are not stored.
            if "difference" not in data and "today" not in data:
                with db.connection() as cursor:
                    db.update(
                        cursor,
                        "country",
                        data,
                        updates[country][data]["total"],
                        "name",
                        country,
                    )
    with db.connection() as cursor:
        users = db.get_users(cursor)
    messages = jobs.generate_update_message(updates)
    for user in users:
        # Presumably user[0] is the telegram chat id and user[1] the
        # subscribed country name -- TODO confirm against db.get_users.
        if user[1] in messages.keys():
            try:
                context.bot.send_message(chat_id=user[0], text=messages[user[1]])
            except telegram.error.Unauthorized:
                # The user blocked the bot: drop the subscription.
                with db.connection() as cursor:
                    db.delete_user(cursor, user[0])
def subscribe(update, context):
    """Telegram handler for the subscribe flow: menus plus final subscription."""
    query = update.callback_query
    tid = update.effective_user.id
    callback_to = "subscribe"
    # Show continents menu (initial command, no callback yet)
    if query is None:
        continents_menu(update, context, query, "subscribe")
    else:
        # Callback payload format: "<prefix>_<value>".
        query_data = query.data.split("_")[1]
        # Show continents menu (back-to-main button)
        if query_data == "main":
            continents_menu(update, context, query, "subscribe")
        # Antarctica has a dedicated menu
        elif query_data == "Antarctica":
            antarctica_menu(context, query, callback_to)
        # Show countries in selected continent the user is NOT yet subscribed to
        elif query_data in CONTINENTS:
            with db.connection() as cursor:
                nonsubscribed = db.get_nonsubscribed_by_continent(
                    cursor, tid, query_data)
            countries_menu(context, query, query_data, nonsubscribed, callback_to)
        # Subscribe to selected country and confirm in-place
        elif query_data in COUNTRIES:
            with db.connection() as cursor:
                db.save_subscription(cursor, tid, query_data)
            context.bot.edit_message_text(
                chat_id=query.message.chat_id,
                message_id=query.message.message_id,
                text=texts.after_subscription(query_data),
                reply_markup=keyboards.after_subscription(),
            )
def update_stock_factor_specific_return(df_sret, last_date=None):
    """Replace stock_factor_specific_return rows from ``last_date`` onward.

    When ``last_date`` is None it defaults to the latest trade_date already
    stored ('1900-01-01' for an empty table), so only new rows are written.
    """
    if last_date is None:
        db = database.connection('asset')
        Session = sessionmaker(bind=db)
        session = Session()
        record = session.query(
            func.max(stock_factor_specific_return.trade_date)).first()
        last_date = record[0].strftime(
            '%Y-%m-%d') if record[0] is not None else '1900-01-01'
        session.commit()
        session.close()
    # Keep only the rows that need (re)writing.
    df_sret = df_sret[df_sret.index >= last_date]
    # Delete the overlapping date range before re-inserting it.
    db = database.connection('asset')
    Session = sessionmaker(bind=db)
    session = Session()
    session.query(stock_factor_specific_return).filter(
        stock_factor_specific_return.trade_date >= last_date).delete()
    session.commit()
    session.close()
    # Reshape wide -> long: one row per (stock_id, trade_date).
    df_sret = df_sret.stack()
    df_sret = df_sret.reset_index()
    df_sret.columns = ['trade_date', 'stock_id', 'sret']
    df_sret = df_sret.set_index(['stock_id', 'trade_date'])
    db = database.connection('asset')
    t = Table('stock_factor_specific_return', MetaData(bind=db), autoload=True)
    # Empty df_old means everything in df_sret is inserted.
    database.batch(db, t, df_sret, pd.DataFrame())
def give():
    """Take today's attendance for the logged-in teacher's class.

    GET renders the attendance form; POST stores one row per student with a
    present flag (1/0) unless attendance was already taken today.  On a
    missing form field (teacher skipped a student) the form is re-rendered.

    SQL is parameterized (the original interpolated values with str.format,
    which was injectable); the two identical INSERT branches are merged
    into a single statement with a computed present flag.
    """
    email = session['email']
    try:
        c, conn = connection()
        c.execute("SELECT * FROM teachers where email = %s", (email,))
        teacher = c.fetchone()
        classname = teacher["classname"]
        subject = teacher["subject"]
        teachername = teacher["name"]
        teacheremail = teacher["email"]
        c.execute("SELECT * FROM student_data WHERE classname = %s",
                  (classname,))
        students_data = c.fetchall()
        if request.method == 'POST':
            c.execute(
                "SELECT * FROM students WHERE date = CURRENT_DATE and subject = %s and classname = %s ",
                (subject, classname))
            x = c.fetchall()
            if len(x) > 0:
                flash("Attendence is already taken")
                return redirect('/dashboard')
            else:
                for i, student in enumerate(students_data):
                    # Form fields are 1-based, one per student row.
                    check = request.form[f"{i + 1}"]
                    usn = student["usn"]
                    name = student["name"]
                    classname = student["classname"]
                    present = 1 if "present" in check else 0
                    c.execute(
                        "INSERT INTO students (usn,name,classname,present,subject,date,teacheremail,teachername) VALUES (%s,%s,%s,%s,%s,CURRENT_DATE,%s,%s)",
                        (usn, name, classname, present, subject,
                         teacheremail, teachername))
                conn.commit()
                flash("attendence submitted")
                return redirect('/dashboard')
        else:
            return render_template('give_attendence.html',
                                   students=students_data)
    except Exception as e:
        # Most likely a missing checkbox (KeyError): re-render the form.
        try:
            c, conn = connection()
            c.execute("SELECT * FROM teachers where email = %s", (email,))
            teacher = c.fetchone()
            classname = teacher["classname"]
            c.execute("SELECT * FROM student_data WHERE classname = %s",
                      (classname,))
            students_data = c.fetchall()
            flash("Please give the attendence for everyone")
            return render_template('give_attendence.html',
                                   students=students_data)
        except Exception:
            flash("something went wrong")
            return redirect('/dashboard')
def newuser():
    """Admin-only: create a new hoster account plus its tempid row.

    Renders 404 for non-admins, redirects non-POST requests, and redirects
    to the 500 page on any failure.
    """
    if checkAdmin() == 0:
        return render_template('404.html')
    if request.method != "POST":
        return redirect('dashboard')
    # if checkAdmin() == 0:
    #     return redirect('dashboard')
    # adminname = session['user']
    try:
        c, conn = connection()
        hostname = escaper(request.form['hostname'])
        username = escaper(request.form['hostlogin'])
        hostrank = escaper(request.form['hostrank'])
        # Salted SHA-512 of the password; the salt is stored alongside.
        salt = uuid.uuid4().hex
        password = hashlib.sha512(str(request.form['password'] + salt)).hexdigest()
        # NOTE(review): hostname/username are already escaped above, so the
        # escaper() calls in these queries double-escape them.
        x = c.execute(
            "SELECT * FROM hoster WHERE hostName = (%s) OR loginName = (%s)",
            (escaper(hostname), escaper(username)))
        # MySQLdb's execute returns the affected/selected row count here.
        if int(x) > 0:
            flash("That username is already taken, please choose another!")
            return redirect('admin')
        else:
            c.execute(
                "INSERT INTO hoster (hostName, loginName, hostPassword, hostSalt, hostRank) VALUES (%s, %s, %s, %s, %s)",
                (escaper(hostname), escaper(username), escaper(password),
                 escaper(salt), escaper(hostrank)))
            conn.commit()
            newhostid = getHostId(escaper(hostname))
            # Seed the tempid row used by the auth-token check.
            c.execute("INSERT INTO tempid (hostid, tempid) VALUES (%s, %s)",
                      (int(newhostid), str("temp")))
            conn.commit()
            conn.close()
            return redirect('admin')
    except Exception as e:
        return redirect('500')
def run(self):
    """Task body: register the run, extract products, write the marker
    output, then close out the run record."""
    # Create the DB connection
    conn = db.connection()
    cur = conn.cursor()
    # Create the run information
    cur.execute('call amazon_review.proc_create_rundata(%s)', ('ExtractProduct', ))
    conn.commit()
    # Get the run id generated for the job.  fetchone() returns a row
    # tuple, so take its first column -- the original passed the whole
    # tuple to proc_update_rundata (the sibling tasks use [0]).
    cur.execute('SELECT * FROM amazon_review.func_return_runid(%s)', ('ExtractProduct', ))
    run_id = cur.fetchone()[0]
    prod.extract_product()
    with self.output().open('w') as outfile:
        outfile.write('Product extraction task ExtractProduct is done!\n')
    # Update the run status
    cur.execute('call amazon_review.proc_update_rundata(%s)', (run_id, ))
    conn.commit()
    # closing db connection
    db.close(conn, cur)
def run(self):
    """Accept TCP clients forever; each payload is a JSON reminder message."""
    # tcpCliSock, addr = tcpSerSock.accept()
    # print('Connected from: {}'.format(addr))
    # while True:
    #     data = tcpCliSock.recv(BUFSIZ)
    #     data.decode('utf8')
    #     if data:
    #         print(data)
    #     if data == b'1':
    #         self.f.message.emit()
    #     if data == b'Moscow':
    #         self.f.message2.emit('Moscow')
    cur, con = db.connection()
    while True:
        # Blocks until the next client connects on the module-level socket.
        c, addr = s.accept()
        print("\nconnection successful with " + str(addr) + "\n\n")
        data = c.recv(1024)
        # decoded_data=data.decode("utf-8")
        decoded_data = data.decode()
        # Expected payload schema: {"text": ..., "destination": ...}
        # -- TODO confirm against the sender.
        recieved_dict = json.loads(decoded_data)
        db.update_reminders(cur, con, recieved_dict['text'],
                            recieved_dict['destination'])
        if not decoded_data:
            print("connection with " + str(addr) + " broken\n")
        else:
            print("-> " + decoded_data + "\n")
def load_ohlcavntt(globalid):
    """Load daily price/volume data for one stock from the caihui source.

    Returns a DataFrame indexed by tradedate with open/high/low/close,
    volume, amount, market caps and turnover (converted to a fraction).
    """
    secode = asset.StockAsset.secode_dict()[globalid]
    engine = database.connection('caihui')
    session = sessionmaker(bind=engine)()
    fields = (
        tq_qt_skdailyprice.tradedate,
        tq_qt_skdailyprice.topen,
        tq_qt_skdailyprice.thigh,
        tq_qt_skdailyprice.tlow,
        tq_qt_skdailyprice.tclose,
        tq_qt_skdailyprice.vol,
        tq_qt_skdailyprice.amount,
        tq_qt_skdailyprice.negotiablemv,
        tq_qt_skdailyprice.totmktcap,
        tq_qt_skdailyprice.turnrate,
    )
    stmt = (session.query(*fields)
            .filter(tq_qt_skdailyprice.secode == str(secode))
            .statement)
    df = pd.read_sql(stmt, session.bind, index_col=['tradedate'],
                     parse_dates=['tradedate'])
    session.commit()
    session.close()
    # The source reports turnover in percent; convert to a fraction.
    df.turnrate = df.turnrate / 100
    return df
def load(gid, included_online_id=False):
    """Load fund allocation rows for one online portfolio.

    Returns a DataFrame indexed by on_date (plus on_online_id when
    ``included_online_id`` is set), or None when ``gid`` is None.
    """
    if gid is None:
        return None
    db = database.connection('asset')
    t1 = Table('on_online_fund', MetaData(bind=db), autoload=True)
    cols = [t1.c.on_date, t1.c.on_fund_type, t1.c.on_fund_code,
            t1.c.on_fund_ratio]
    idx = ['on_date']
    if included_online_id:
        cols.insert(0, t1.c.on_online_id)
        idx.insert(0, 'on_online_id')
    stmt = select(cols).where(t1.c.on_online_id == gid)
    return pd.read_sql(stmt, db, index_col=idx, parse_dates=['on_date'])
def load(timings):
    """Load timing signals as a wide frame: index tc_date, one column per
    tc_timing_id, forward-filled."""
    db = database.connection('asset')
    t1 = Table('tc_timing_signal', MetaData(bind=db), autoload=True)
    stmt = select([t1.c.tc_timing_id, t1.c.tc_date, t1.c.tc_signal])
    if timings is not None:
        # Accept either a single id or any non-string iterable of ids.
        if hasattr(timings, "__iter__") and not isinstance(timings, str):
            stmt = stmt.where(t1.c.tc_timing_id.in_(timings))
        else:
            stmt = stmt.where(t1.c.tc_timing_id == timings)
    frame = pd.read_sql(stmt, db, index_col=['tc_date', 'tc_timing_id'],
                        parse_dates=['tc_date'])
    frame = frame.unstack().fillna(method='pad')
    frame.columns = frame.columns.droplevel(0)
    return frame
def load(gids=None, codes=None, xtype=5):
    """Load fund fee rows of one fee type (default 5), indexed by ff_fund_id.

    A missing ff_max_value is treated as unbounded (np.inf).
    """
    db = database.connection('base')
    t = Table('fund_fee', MetaData(bind=db), autoload=True)
    stmt = select([
        t.c.ff_fund_id,
        t.c.ff_code,
        t.c.ff_max_value,
        t.c.ff_fee,
        t.c.ff_fee_type,
    ]).where(t.c.ff_type == xtype)
    if gids is not None:
        stmt = stmt.where(t.c.ff_fund_id.in_(gids))
    if codes is not None:
        stmt = stmt.where(t.c.ff_code.in_(codes))
    frame = pd.read_sql(stmt, db, index_col=['ff_fund_id'])
    frame['ff_max_value'].fillna(np.inf, inplace=True)
    return frame
def where_highlow_id(highlow_id, risks=None, xtypes=None):
    """Query mz_highlow_alloc rows for one highlow id, optionally filtered
    by risk levels and types."""
    db = database.connection('asset')
    t1 = Table('mz_highlow_alloc', MetaData(bind=db), autoload=True)
    stmt = select([
        t1.c.globalid,
        t1.c.mz_type,
        t1.c.mz_risk,
        t1.c.mz_highlow_id,
        t1.c.mz_markowitz_id,
        t1.c.mz_name,
    ])
    if highlow_id is not None:
        stmt = stmt.where(t1.c.mz_highlow_id == highlow_id)
    if risks is not None:
        stmt = stmt.where(t1.c.mz_risk.in_(risks))
    if xtypes is not None:
        stmt = stmt.where(t1.c.mz_type.in_(xtypes))
    return pd.read_sql(stmt, db)
def load_product_also_bought_dimension(run_id):
    """Stage 'also bought' parquet data into the DWH temp table, then call
    the stored proc that loads d_product_also_bought.

    run_id: identifier of the current ETL run, forwarded to the stored proc.
    """
    # Creating the spark session
    spark = SparkSession \
        .builder \
        .appName("Loading product also bought dimension") \
        .config("spark.driver.extraClassPath", spark_driver_extraClassPath) \
        .config("spark.executor.extraClassPath", spark_executor_extraClassPath) \
        .getOrCreate()
    # Source data file path
    path = input_file
    # Read product also bought parquet files
    productAlsoBoughtDF = spark.read.parquet(path)
    # Loading data into temp table in DWH for further processing
    # (overwrite: the temp table only ever holds the current run's data)
    productAlsoBoughtDF.select('asin', 'also_bought').write \
        .format(conn_format) \
        .option("url", url) \
        .option("dbtable", "amazon_review.temp_product_also_bought") \
        .option("user", user) \
        .option("password", password) \
        .option("driver", driver) \
        .mode("overwrite") \
        .save()
    # Calling the stored proc to load d_product_also_bought
    conn = db.connection()
    cur = conn.cursor()
    cur.execute('call amazon_review.proc_load_product_also_bought(%s)', (run_id, ))
    conn.commit()
    db.close(conn, cur)
def deleteCallback():
    """GUI callback: delete the student whose id was entered in the delete
    form and report the outcome via statusVar_delete."""
    #creating a registration object of StudentRegistration
    registrationObj = StudentRegistration()
    #Storing studentID value entered by the user in the studentID of the registrationObj
    registrationObj.studentId = studentIdVar_delete.get()
    #creating a database connection and storing in a db connection variable
    dbConnection = database.connection()
    #quering the database records with the studentID and storing query results in the 'records' variable
    records = database.recordQuery(dbConnection, "id", registrationObj.studentId)
    if (len(records) > 0):
        # If the above condition is met, we are deleting the record in the database by using the above dbConnection variable and registrationObj
        status = database.recordDelete(dbConnection, registrationObj)
        #If the record deletion db operation returns a successful status, a status alert message will display a success message if not it displays error message
        if (status == True):
            statusVar_delete.set("Successfully deleted student id: " + registrationObj.studentId)
        else:
            statusVar_delete.set("Error in deleting details of student id: " + registrationObj.studentId)
    #if the above if condition is not met, a status message called "Invalid Student id" will be displayed
    else:
        statusVar_delete.set("Invalid Student id: " + registrationObj.studentId)
    # Once all the above db operations are completed, we are closing the database connection
    database.disConnection(dbConnection)
def load_roe_roa(globalid):
    """Load roedilutedcut and roa columns for one stock, indexed by publish
    date, keeping the latest row when publish dates repeat."""
    compcode = asset.StockAsset.compcode_dict()[globalid]
    engine = database.connection('caihui')
    session = sessionmaker(bind=engine)()
    stmt = session.query(
        tq_fin_prottmindic.publishdate,
        tq_fin_prottmindic.roedilutedcut,
        tq_fin_prottmindic.roa,
    ).filter(and_(tq_fin_prottmindic.reporttype == 3,
                  tq_fin_prottmindic.compcode == compcode)).statement
    frame = pd.read_sql(stmt, session.bind, index_col=['publishdate'],
                        parse_dates=['publishdate'])
    session.commit()
    session.close()
    # Deduplicate publish dates, drop pre-1990 noise, normalize NaNs.
    frame = frame.loc[~frame.index.duplicated(keep='last')]
    frame = frame[frame.index > '1990']
    return frame.fillna(np.nan)
def load(gid, included_portfolio_id=False):
    """Load portfolio position ratios as a wide frame (ra_date rows,
    ra_asset_id columns, missing ratios filled with 0.0).

    Returns None when ``gid`` is None.
    """
    if gid is None:
        return None
    db = database.connection('asset')
    t1 = Table('ra_portfolio_pos', MetaData(bind=db), autoload=True)
    cols = [t1.c.ra_date, t1.c.ra_asset_id, t1.c.ra_ratio]
    idx = ['ra_date', 'ra_asset_id']
    if included_portfolio_id:
        cols.insert(0, t1.c.ra_portfolio_id)
        idx.insert(0, 'ra_portfolio_id')
    stmt = select(cols).where(t1.c.ra_portfolio_id == gid)
    frame = pd.read_sql(stmt, db, index_col=idx, parse_dates=['ra_date'])
    frame = frame.unstack().fillna(0.0)
    frame.columns = frame.columns.droplevel(0)
    return frame
def load_ccdfg(globalid):
    """Load selected financial indicator columns for one stock, indexed by
    first publish date, keeping the latest row for duplicate dates."""
    compcode = asset.StockAsset.compcode_dict()[globalid]
    engine = database.connection('caihui')
    session = sessionmaker(bind=engine)()
    stmt = session.query(
        tq_fin_proindicdata.firstpublishdate,
        tq_fin_proindicdata.currentrt,
        tq_fin_proindicdata.cashrt,
        tq_fin_proindicdata.ltmliabtota,
        tq_fin_proindicdata.equtotliab,
        tq_fin_proindicdata.sgpmargin,
    ).filter(tq_fin_proindicdata.reporttype == 3).filter(
        tq_fin_proindicdata.compcode == compcode).statement
    frame = pd.read_sql(stmt, session.bind, index_col=['firstpublishdate'],
                        parse_dates=['firstpublishdate'])
    session.commit()
    session.close()
    # Deduplicate publish dates, drop pre-1990 noise, normalize NaNs.
    frame = frame.loc[~frame.index.duplicated(keep='last')]
    frame = frame[frame.index > '1990']
    return frame.fillna(np.nan)
def load(gids):
    """Load markowitz asset configuration rows for the given markowitz ids
    (all rows when ``gids`` is None)."""
    db = database.connection('asset')
    t1 = Table('mz_markowitz_asset', MetaData(bind=db), autoload=True)
    stmt = select([
        t1.c.mz_markowitz_id,
        t1.c.mz_asset_id,
        t1.c.mz_markowitz_asset_id,
        t1.c.mz_asset_type,
        t1.c.mz_upper_limit,
        t1.c.mz_lower_limit,
        t1.c.mz_sum1_limit,
        t1.c.mz_sum2_limit,
        t1.c.mz_asset_name,
        t1.c.mz_markowitz_asset_name,
    ])
    if gids is not None:
        stmt = stmt.where(t1.c.mz_markowitz_id.in_(gids))
    return pd.read_sql(stmt, db)
def save(gid, risk, df):
    """Overwrite mz_highlow_limit rows for one (highlow id, risk) pair.

    Reads the currently stored rows for the pair and lets database.batch
    apply the insert/update/delete delta between ``df`` and them.
    """
    # fmt_columns = ['mz_nav', 'mz_inc']
    # fmt_precision = 6
    # if not df.empty:
    #     df = database.number_format(df, fmt_columns, fmt_precision)
    #
    # Persist the result to the database.
    #
    db = database.connection('asset')
    t2 = Table('mz_highlow_limit', MetaData(bind=db), autoload=True)
    # Select exactly the columns ``df`` carries so old and new frames align.
    columns = [literal_column(c) for c in (df.index.names + list(df.columns))]
    s = select(columns, (t2.c.mz_highlow_id == gid)).where(t2.c.mz_risk == risk)
    df_old = pd.read_sql(
        s, db,
        index_col=['globalid', 'mz_highlow_id', 'mz_risk', 'mz_date'],
        parse_dates=['mz_date'])
    # if not df_old.empty:
    #     df_old = database.number_format(df_old, fmt_columns, fmt_precision)
    # Apply the delta to the database.
    # print df_new.head()
    # print df_old.head()
    database.batch(db, t2, df, df_old, timestamp=False)
def load(gids, xtypes=None):
    """Load risk-manager configuration rows, optionally filtered by id
    and/or type."""
    db = database.connection('asset')
    t = Table('rm_riskmgr', MetaData(bind=db), autoload=True)
    stmt = select([
        t.c.globalid,
        t.c.rm_type,
        t.c.rm_algo,
        t.c.rm_asset_id,
        t.c.rm_timing_id,
        t.c.rm_start_date,
        t.c.rm_argv,
        t.c.rm_name,
    ])
    if gids is not None:
        stmt = stmt.where(t.c.globalid.in_(gids))
    if xtypes is not None:
        stmt = stmt.where(t.c.rm_type.in_(xtypes))
    return pd.read_sql(stmt, db)
def load_series(gid, xtype, reindex=None, begin_date=None, end_date=None):
    """Load one portfolio's nav series, optionally date-bounded and
    reindexed (forward-filled onto ``reindex``)."""
    db = database.connection('asset')
    t1 = Table('ra_portfolio_nav', MetaData(bind=db), autoload=True)
    stmt = (select([t1.c.ra_date, t1.c.ra_nav])
            .where(t1.c.ra_portfolio_id == gid)
            .where(t1.c.ra_type == xtype))
    if begin_date is not None:
        stmt = stmt.where(t1.c.ra_date >= begin_date)
    if end_date is not None:
        stmt = stmt.where(t1.c.ra_date <= end_date)
    frame = pd.read_sql(stmt, db, index_col=['ra_date'],
                        parse_dates=['ra_date'])
    if reindex is not None:
        frame = frame.reindex(reindex, method='pad')
    return frame['ra_nav']
def load_ack(gids=None, codes=None, xtype=5):
    """Load per-fund acknowledgement timings.

    Returns a DataFrame indexed by fi_globalid with columns
    ['code', 'buy', 'redeem'] where buy/redeem are day counts.
    """
    db = database.connection('base')
    t = Table('fund_infos', MetaData(bind=db), autoload=True)
    stmt = select([
        t.c.fi_globalid,
        t.c.fi_code,
        t.c.fi_yingmi_confirm_time,
        t.c.fi_yingmi_to_account_time,
    ])
    if gids is not None:
        stmt = stmt.where(t.c.fi_globalid.in_(gids))
    if codes is not None:
        stmt = stmt.where(t.c.fi_code.in_(codes))
    frame = pd.read_sql(stmt, db, index_col=['fi_globalid'])
    # In practice shares become redeemable one day after order confirmation.
    frame['buy'] = frame['fi_yingmi_confirm_time'] + 1
    # Arrival at yingmibao is two days before arrival at the bank card,
    # floored at one day.
    to_account = frame['fi_yingmi_to_account_time'] - 2
    frame['redeem'] = to_account.where(to_account >= 1, 1)
    frame['code'] = frame['fi_code'].apply(lambda x: "%06d" % x)
    return frame[['code', 'buy', 'redeem']]
def load(gids, xtypes=None):
    """Load highlow configuration rows, optionally filtered by id and/or
    type."""
    db = database.connection('asset')
    t1 = Table('mz_highlow', MetaData(bind=db), autoload=True)
    stmt = select([
        t1.c.globalid,
        t1.c.mz_type,
        t1.c.mz_algo,
        t1.c.mz_markowitz_id,
        t1.c.mz_high_id,
        t1.c.mz_low_id,
        t1.c.mz_persistent,
        t1.c.mz_name,
    ])
    if gids is not None:
        stmt = stmt.where(t1.c.globalid.in_(gids))
    if xtypes is not None:
        stmt = stmt.where(t1.c.mz_type.in_(xtypes))
    return pd.read_sql(stmt, db)
def save(gid, criteria_id, df):
    """Overwrite is_investor_criteria rows for one (investor, criteria)
    pair by diffing ``df`` against the stored rows."""
    fmt_columns = ['is_value']
    fmt_precision = 6
    if not df.empty:
        # Round values so the diff against stored rows is stable.
        df = database.number_format(df, fmt_columns, fmt_precision)
    #
    # Persist the result to the database.
    #
    db = database.connection('asset')
    t2 = Table('is_investor_criteria', MetaData(bind=db), autoload=True)
    # Select exactly the columns ``df`` carries so old and new frames align.
    columns = [literal_column(c) for c in (df.index.names + list(df.columns))]
    s = select(
        columns,
        (t2.c.is_investor_id == gid)).where(t2.c.is_criteria_id == criteria_id)
    df_old = pd.read_sql(
        s, db,
        index_col=['is_investor_id', 'is_criteria_id', 'is_date'],
        parse_dates=['is_date'])
    if not df_old.empty:
        df_old = database.number_format(df_old, fmt_columns, fmt_precision)
    # Apply the insert/update/delete delta.
    # print df_new.head()
    # print df_old.head()
    database.batch(db, t2, df, df_old, timestamp=False)
def run(self):
    """Task body: register the run, load the product-review fact table,
    write the marker output, then close out the run record."""
    # Create the DB connection
    conn = db.connection()
    cur = conn.cursor()
    # Create the run information
    cur.execute('call amazon_review.proc_create_rundata(%s)', ('LoadProductReviewFact', ))
    conn.commit()
    # get the run id generated for the job (first column of the row tuple)
    cur.execute('SELECT * FROM amazon_review.func_return_runid(%s)', ('LoadProductReviewFact', ))
    run_id = cur.fetchone()[0]
    print(run_id)
    # Call the program for data loading into DWH
    p_review_fact.load_product_review_fact(run_id)
    with self.output().open('w') as outfile:
        outfile.write('The task LoadProductReviewFact is done')
    # Update the run status
    cur.execute('call amazon_review.proc_update_rundata(%s)', (run_id, ))
    conn.commit()
    # closing db connection
    db.close(conn, cur)
def run(self):
    """Task body: register the run, load the buy-after-viewing dimension,
    write the marker output, then close out the run record."""
    # Create the DB connection
    conn = db.connection()
    cur = conn.cursor()
    # Create the run information
    cur.execute('call amazon_review.proc_create_rundata(%s)',
                ('LoadProdBuyAfterViewingDimension', ))
    conn.commit()
    # Get the run id generated for the job.  fetchone() returns a row
    # tuple; take its first column -- the original passed the whole tuple
    # to the loader and the stored proc (the sibling task uses [0]).
    cur.execute('SELECT * FROM amazon_review.func_return_runid(%s)',
                ('LoadProdBuyAfterViewingDimension', ))
    run_id = cur.fetchone()[0]
    p_buy_after_view_dim.load_product_buy_after_viewing(run_id)
    with self.output().open('w') as outfile:
        outfile.write('The task LoadProdBuyAfterViewingDimension is done')
    # Update the run status
    cur.execute('call amazon_review.proc_update_rundata(%s)', (run_id, ))
    conn.commit()
    # closing db connection
    db.close(conn, cur)
def load(globalids=None, codes=None):
    """Load basic fund metadata rows, optionally filtered by global id
    and/or fund code."""
    db = database.connection('base')
    t = Table('ra_fund', MetaData(bind=db), autoload=True)
    stmt = select([
        t.c.globalid,
        t.c.ra_code,
        t.c.ra_name,
        t.c.ra_type,
        t.c.ra_type_calc,
        t.c.ra_regtime,
        t.c.ra_volume,
    ])
    if globalids is not None:
        stmt = stmt.where(t.c.globalid.in_(globalids))
    if codes is not None:
        stmt = stmt.where(t.c.ra_code.in_(codes))
    return pd.read_sql(stmt, db)
def save(gid, df):
    """Overwrite ra_portfolio_pos rows for one portfolio by diffing ``df``
    against the stored rows."""
    fmt_columns = ['ra_fund_ratio']
    fmt_precision = 4
    if not df.empty:
        df['ra_fund_type'] = df['ra_fund_type'].astype(int)
        # Round ratios so the diff against stored rows is stable.
        df = database.number_format(df, fmt_columns, fmt_precision)
    #
    # Persist the result to the database.
    #
    db = database.connection('asset')
    t2 = Table('ra_portfolio_pos', MetaData(bind=db), autoload=True)
    # Select exactly the columns ``df`` carries so old and new frames align.
    columns = [literal_column(c) for c in (df.index.names + list(df.columns))]
    s = select(columns, (t2.c.ra_portfolio_id == gid))
    df_old = pd.read_sql(
        s, db,
        index_col=['ra_portfolio_id', 'ra_date', 'ra_pool_id', 'ra_fund_id'],
        parse_dates=['ra_date'])
    if not df_old.empty:
        df_old = database.number_format(df_old, fmt_columns, fmt_precision)
    # Apply the insert/update/delete delta.
    # print df_new.head()
    # print df_old.head()
    database.batch(db, t2, df, df_old, timestamp=True)
def load(timings, xtypes=None):
    """Load active timing configurations (tc_method != 0), optionally
    filtered by id and/or type."""
    db = database.connection('asset')
    t1 = Table('tc_timing', MetaData(bind=db), autoload=True)
    stmt = select([
        t1.c.globalid,
        t1.c.tc_type,
        t1.c.tc_method,
        t1.c.tc_index_id,
        t1.c.tc_begin_date,
        t1.c.tc_argv,
        t1.c.tc_name,
    ])
    if timings is not None:
        stmt = stmt.where(t1.c.globalid.in_(timings))
    if xtypes is not None:
        stmt = stmt.where(t1.c.tc_type.in_(xtypes))
    # Method 0 marks a disabled timing.
    stmt = stmt.where(t1.c.tc_method != 0)
    return pd.read_sql(stmt, db)
def getHostId(hostname):
    """Return the hostid for ``hostname``, or 0 when no such host exists."""
    c, conn = connection()
    # Parameters are passed as an explicit tuple per DB-API; the original
    # passed a bare scalar, which only works on some drivers.
    x = c.execute("SELECT hostid FROM hoster WHERE hostName=%s",
                  (str(hostname),))
    # MySQLdb's execute returns the selected row count here.
    if int(x) > 0:
        hostid = c.fetchone()
        conn.close()
        return hostid[0]
    conn.close()
    return 0
def uploadhost(win, arena):
    """Record one game's bets for the logged-in host.

    win: "1" when the host's side won, anything else for a loss.
    arena: arena identifier stored with every bet row.

    Reads up to 13 bettor/bet/pot triples from the POSTed form, updates or
    creates each bettor's aggregates, and inserts per-bet rows linked to a
    new totalbet row.  Returns a debug string ("gg" on a failed login
    check, "No bet" when the posted total bet is not positive).

    All DB-API parameters are passed as explicit tuples (the original
    passed bare scalars for single-parameter queries, which only works on
    some drivers).
    """
    checker = checkLogin()
    if checker == 2:
        session.clear()
        return "gg"
    hostid = session['hostid']
    totalBet = float(request.form["totalbet"])
    totalPot = float(request.form["totalpot"])
    if totalBet <= 0.0:
        return "No bet"
    lose = 0 if win == "1" else 1
    ourTimeStamp = datetime.datetime.strftime(datetime.datetime.now(), '%m/%d/%Y')
    final = ""
    c, conn = connection()
    c.execute("INSERT INTO totalbet (hostId) VALUES (%s)", (int(hostid),))
    totalbetid = c.lastrowid
    for i in range(1, 14):
        bettername = str(request.form["bettor" + str(i)]).lower()
        if bettername == "":
            continue
        betamount = request.form["bet" + str(i)]
        potamount = request.form["pot" + str(i)]
        if betamount == "":
            continue
        x = c.execute("SELECT betterid FROM better WHERE betterName=%s",
                      (str(bettername),))
        if int(x) > 0:
            # Known bettor: bump their aggregates.
            betterid = (c.fetchone())[0]
            if win == "1":
                c.execute("UPDATE better SET totalGames = totalGames + 1, totalWins = totalWins + 1, totalBet = totalBet + %s WHERE betterid=%s",
                          (float(betamount), int(betterid)))
            else:
                c.execute("UPDATE better SET totalGames = totalGames + 1, totalLoses = totalLoses + 1, totalBet = totalBet + %s WHERE betterid=%s",
                          (float(betamount), int(betterid)))
        else:
            # New bettor: create the row, then fetch its generated id.
            c.execute("INSERT INTO better (betterName, totalBet, totalGames, totalWins, totalLoses) VALUES (%s, %s, 1, %s, %s)",
                      (str(bettername), float(betamount), int(win), int(lose)))
            y = c.execute("SELECT betterid FROM better WHERE betterName=%s",
                          (str(bettername),))
            betterid = (c.fetchone())[0]
        # Our profit on this bet: pot minus stake on a win, minus the
        # stake on a loss.
        if win == "1":
            lose = 0
            ourprofit = float(potamount) - float(betamount)
        else:
            lose = 1
            ourprofit = 0 - float(betamount)
        c.execute("INSERT INTO bet (betAmount, betResult, betProfit, arena, timestamp) VALUES (%s, %s, %s, %s, %s)",
                  (float(betamount), int(win), float(ourprofit), str(arena), str(ourTimeStamp)))
        betid = c.lastrowid
        c.execute("INSERT INTO bettotalbet (totalbetid, betid) VALUES (%s, %s)",
                  (int(totalbetid), int(betid)))
        c.execute("INSERT INTO betterbet (betterid, hostid, betid) VALUES (%s, %s, %s)",
                  (int(betterid), int(hostid), int(betid)))
        final = final + " NEXT ID " + str(betterid) + " LAST BET ID WAS " + str(betid)
    conn.commit()
    conn.close()
    return final + " TIMESTAMP " + str(ourTimeStamp) + " TOTAL " + str(totalBet) + " POT " + str(totalPot)
def checkRandom():
    """Validate the per-session auth token against the tempid table.

    Returns 1 when the stored token matches the session's, 2 on a mismatch
    (the session is cleared), and 0 when no 'auth' key is in the session.
    """
    c, conn = connection()
    if 'auth' in session:
        # Parameters are passed as an explicit tuple per DB-API; the
        # original passed a bare scalar, which only works on some drivers.
        c.execute("SELECT tempid FROM tempid WHERE hostid=%s",
                  (str(session['hostid']),))
        datatemp = c.fetchone()
        if datatemp[0] != session['auth']:
            # Token mismatch: invalidate the whole session.
            session.clear()
            conn.close()
            return 2
        else:
            conn.close()
            return 1
    conn.close()
    return 0
def cpuid(db):
    """Rebuild the cpuid table by parsing /proc/cpuinfo over SSH for every
    host with port 22 open and connect_ssh=1."""
    conn = database.connection(db)
    cursor = conn.execute('select ip,host_id from hosts join ports on hosts.id=ports.host_id and portid=22 and hosts.connect_ssh=1')
    conn.execute('delete from cpuid')
    conn.commit()
    # Field matchers for /proc/cpuinfo lines ("key : value"); group(3) is
    # the value.
    re_vendor_id = re.compile('(^vendor_id)([\s]*):(.*)$')
    re_model_name = re.compile('(^model\ name)([\s]*):(.*)$')
    re_cpuMhz = re.compile('(^cpu\ MHz)([\s]*):(.*)$')
    re_cache = re.compile('(^cache\ size)([\s]*):(.*)$')
    re_physical_id = re.compile('(^physical\ id)([\s]*):(.*)$')
    for host in cursor:
        ncores = 0
        # -1 marks "no physical id line seen yet".
        physicalid = -1
        ip = host[0]
        host_id = host[1]
        try:
            ssh = ssh_connect.ssh_22(ip)
        except:
            # Host unreachable: skip it.
            continue
        stdin, stdout, stderr = ssh.exec_command('cat /proc/cpuinfo')
        for line in stdout.readlines():
            vendor_id = re_vendor_id.match(line)
            model_name = re_model_name.match(line)
            cpuMhz = re_cpuMhz.match(line)
            cache = re_cache.match(line)
            physical_id = re_physical_id.match(line)
            if vendor_id:
                vendorid = vendor_id.group(3)
            if model_name:
                modelname = model_name.group(3)
            if cpuMhz:
                cpumhz = cpuMhz.group(3)
            if cache:
                ccache = cache.group(3)
            if physical_id:
                # A "physical id" line closes one logical-CPU section:
                # same package as the previous section -> bump its core
                # count; new package -> insert a fresh row.
                old_phyid = physicalid
                physicalid = physical_id.group(3)
                if (old_phyid == physicalid):
                    ncores += 1
                    conn.execute('update cpuid set ncore=? where host_id=? and physical_id=?', (ncores, host_id, physicalid,))
                else:
                    ncores = 1
                    conn.execute('insert into cpuid (host_id, vendor_id, model_name,cpuMhz, cache, ncore,physical_id) values (?,?,?,?,?,?,?)', (host_id, vendorid, modelname, cpumhz, ccache, ncores, physicalid,))
        if physicalid == -1:
            # No "physical id" lines at all (e.g. some VMs): record a
            # single synthetic package with id 1.
            ncores = 1
            conn.execute('insert into cpuid (host_id, vendor_id, model_name,cpuMhz, cache, ncore,physical_id) values (?,?,?,?,?,?,?)', (host_id, vendorid, modelname, cpumhz, ccache, ncores, 1,))
    conn.commit()
    cursor.close()
def loadavg(db):
    """Rebuild the loadavg table with 1/5/15-minute load averages gathered
    over SSH from every reachable host."""
    conn = database.connection(db)
    host_rows = conn.execute('select ip,host_id from hosts join ports on hosts.id=ports.host_id and portid=22 and hosts.connect_ssh=1')
    conn.execute('delete from loadavg')
    conn.commit()
    for ip, host_id in host_rows:
        try:
            ssh = ssh_connect.ssh_22(ip)
        except:
            # Unreachable host: skip it.
            continue
        _, stdout, _ = ssh.exec_command('cat /proc/loadavg| cut -f1-3 -d" "')
        one_min, five_min, fifteen_min = stdout.read().split()
        conn.execute('insert into loadavg (host_id, load1, load5, load15) values (?,?,?,?)',
                     (host_id, one_min, five_min, fifteen_min,))
    conn.commit()
    host_rows.close()
def update(db):
    """Refresh distro/release/uname/hostname info for each host over SSH,
    marking unreachable hosts with connect_ssh=0."""
    conn = database.connection(db)
    cursor = conn.execute('select ip,osdistribution, osrelease, uname, hostname from hosts join ports on hosts.id=ports.host_id and portid=22')
    ## Some check for open ports or not
    for host in cursor:
        ip, osdistribution, osrelease, uname, hostname = host
        try:
            ssh = ssh_connect.ssh_22(ip)
        except:
            conn.execute('update hosts set connect_ssh=0 where ip=?', (ip,))
            print("Host not accesible: ", ip)
            continue
        try:
            conn.execute('update hosts set connect_ssh=1 where ip=?', (ip,))
            # Debian detection: /etc/debian_version readable -> distro "2".
            stdin, stdout, stderr = ssh.exec_command('cat /etc/debian_version')
            if not stderr.read():
                new_osrelease = stdout.read().strip()
                if (new_osrelease and (osrelease != new_osrelease or "2" != osdistribution)):
                    conn.execute('update hosts set osdistribution=?, osrelease=? where ip=?', ("2", new_osrelease, ip,))
                else:
                    # Debian and unchanged: nothing more to do for this host.
                    continue
            # RedHat detection: /etc/redhat-release -> distro "1".
            stdin, stdout, stderr = ssh.exec_command('cat /etc/redhat-release')
            new_osrelease = stdout.read().strip()
            if not stderr.read():
                if (new_osrelease and (osdistribution != "1" or osrelease != new_osrelease)):
                    conn.execute('update hosts set osdistribution=?, osrelease=? where ip=?', ("1", new_osrelease, ip,))
            conn.commit()
            stdin, stdout, stderr = ssh.exec_command('uname -a')
            new_uname = stdout.read().strip()
            stdin, stdout, stderr = ssh.exec_command('uname -m')
            new_arch = stdout.read().strip()
            stdin, stdout, stderr = ssh.exec_command('hostname -f')
            new_hostname = stdout.read().strip()
            # NOTE(review): 'arch' is never selected/unpacked above, so when
            # uname and hostname are unchanged this comparison raises
            # NameError, which the blanket except below swallows -- the
            # arch-only update can never fire.  Add arch to the SELECT.
            if (uname != new_uname or hostname != new_hostname or arch != new_arch):
                conn.execute('update hosts set uname=?, hostname=?,arch=? where ip=?', (new_uname, new_hostname, new_arch, ip,))
            conn.commit()
            # NOTE(review): missing call parentheses -- this references the
            # method without invoking it; should be ssh.close().
            ssh.close
        except:
            None
    cursor.close()
def backup(db):
    """Rebuild the backup table: active=1 when the caagentd process is
    running on the host, otherwise 0."""
    conn = database.connection(db)
    host_rows = conn.execute('select ip,id from hosts where (osdistribution=1 or osdistribution=2) or connect_ssh=1')
    conn.execute('delete from backup')
    conn.commit()
    for ip, host_id in host_rows:
        try:
            ssh = ssh_connect.ssh_22(ip)
        except:
            # Unreachable host: skip it.
            continue
        _, stdout, _ = ssh.exec_command('pgrep caagentd')
        # pgrep prints nothing when no process matches.
        active = 1 if stdout.read() else 0
        conn.execute('insert into backup (hostid, active) values (?,?)',
                     (host_id, active,))
    conn.commit()
    host_rows.close()
def add_user(**args):
    """Add a user to the database.

    Required keyword arguments: username, password, email.
    Raises UserDetailException when a required field is missing or fails
    validation; re-raises any database error after logging it.

    The connection and cursor are now closed in a finally block (the
    original leaked both when the INSERT failed).
    """
    required = ["username", "password", "email"]
    for r in required:
        if r not in args:
            raise UserDetailException("A required field is missing: %s" % r)
    validate_username(args["username"])
    validate_email(args["email"])
    validate_password(args["password"])
    conn = database.connection()
    cur = database.cursor(conn)
    try:
        # generate_hash returns the (hash, salt) pair stored below.
        pwhash = generate_hash(args["password"])
        screen_name = args["username"]
        query = """
        INSERT INTO Member (username, screen_name, password_hash, hash_salt, email)
        VALUES (%s, %s, %s, %s, %s)
        """
        try:
            cur.execute(query, (args["username"], screen_name, pwhash[0],
                                pwhash[1], args["email"]))
        except Exception as e:
            print("Can't insert user: " + str(e))
            raise
        conn.commit()
    finally:
        cur.close()
        conn.close()
def attempt():
    """Login handler: verify credentials, populate the session, record the
    login and rotate the per-session auth token.

    Redirects GETs to the dashboard and any unexpected failure to the 500
    page.
    """
    if request.method == "GET":
        return redirect('dashboard')
    else:
        username = request.form['username']
        password = request.form['password']
        try:
            c, conn = connection()
            # Parameters are passed as an explicit tuple per DB-API; the
            # original passed a bare scalar, which only works on some
            # drivers.
            x = c.execute("SELECT * FROM hoster WHERE loginName = (%s)",
                          (escaper(username),))
            if int(x) == 0:
                return render_template('login.html', result='User not found! Please contact an admin!')
            else:
                datalogin = c.fetchone()
                # Row layout (from usage below): 0=hostid, 1=display name,
                # 3=stored password hash, 4=salt, 5=rank.
                hashed_pass = hashlib.sha512(str(password + datalogin[4])).hexdigest()
                if str(hashed_pass) != datalogin[3]:
                    return render_template('login.html', result='Invalid username/password. Please try again!')
                else:
                    session.clear()
                    session['user'] = datalogin[1]
                    session['hostid'] = datalogin[0]
                    session['rank'] = datalogin[5]
                    if datalogin[5] == 'admin':
                        session['admin'] = 1
                    ts = time.time()
                    st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
                    c.execute("INSERT INTO logintracking (hostid, ip, timestamp) VALUES (%s, %s, %s)",
                              (int(datalogin[0]), str(request.remote_addr), str(st)))
                    # Rotate the auth token checked by checkRandom().
                    randomstring = uuid.uuid4().hex
                    c.execute("UPDATE tempid SET tempid=%s WHERE hostid=%s",
                              (str(randomstring), int(datalogin[0])))
                    session['auth'] = randomstring
                    conn.commit()
                    c.close()
                    conn.close()
                    return redirect('calculator')
        except Exception as e:
            return redirect('500')
def update(db):
    """
    Refresh the `packages` table from every Debian host
    (osdistribution="2") that accepts SSH.

    The table is wiped first, then each reachable host is queried with
    dpkg and its installed package name/version pairs are upserted keyed
    on (host id, package name).  Unreachable hosts are skipped.
    """
    conn = database.connection(db)
    conn.execute('delete from packages')
    conn.commit()
    # Renamed from `cursor`: the original shadowed this iterator with the
    # inner per-package cursor, which was confusing even though Python's
    # for-loop keeps its own reference.
    hosts = conn.execute(
        'select ip,id from hosts where osdistribution="2" and connect_ssh=1')
    for ip, host_id in hosts:
        try:
            ssh = ssh_connect.ssh_22(ip)
            stdin, stdout, stderr = ssh.exec_command(
                "dpkg -l | grep ^ii | awk '{print $2, $3}'")
            output = stdout.read()
        except Exception:
            # Best-effort: skip unreachable hosts.  The original bare
            # `except: None` also hid parsing/DB programming errors.
            continue
        for line in output.split("\n"):
            if not line:
                continue
            parts = line.split(" ")
            if len(parts) != 2:
                # malformed line -- skip instead of crashing
                continue
            pkgname, pkgversion = parts
            row = conn.execute(
                'select * from packages where pkg_host_id=? and pkg_name=?',
                (host_id, pkgname,))
            if not row.fetchone():
                conn.execute(
                    'insert into packages (pkg_name, pkg_version, pkg_host_id) values (?,?,?)',
                    (pkgname, pkgversion, host_id,))
            else:
                conn.execute(
                    'update packages set pkg_version=? where pkg_name=? and pkg_host_id=?',
                    (pkgversion, pkgname, host_id,))
    conn.commit()
def disk_use(db):
    """
    Rebuild the `disks` table with `df -Ph` output from every host that
    has port 22 open and accepts SSH.

    The table is wiped, then each reachable host contributes one row per
    mounted filesystem (device, size, used, available, use%% stripped of
    the '%' sign, mount point).  Unreachable hosts are skipped.
    """
    conn = database.connection(db)
    cursor = conn.execute('select ip,host_id from hosts join ports on hosts.id=ports.host_id and portid=22 and hosts.connect_ssh=1')
    conn.execute('delete from disks')
    conn.commit()
    for ip, host_id in cursor:
        try:
            ssh = ssh_connect.ssh_22(ip)
        except Exception:
            # unreachable host -- best-effort, skip it
            continue
        stdin, stdout, stderr = ssh.exec_command('df -Ph | grep ^/')
        output = stdout.read()
        for line in output.split("\n"):
            if not line:
                continue
            fields = line.split()
            if len(fields) != 6:
                # malformed/wrapped df line -- skip instead of crashing
                continue
            device, size, used, available, use, mount = fields
            # store the bare number, not "85%"
            usenpc = use.split("%")[0]
            conn.execute('insert into disks (host_id, device, size,used, available, use,mount) values (?,?,?,?,?,?,?)',
                         (host_id, device, size, used, available, usenpc, mount,))
    conn.commit()
    cursor.close()
def meminfo(db):
    """
    Rebuild the `mem` table with /proc/meminfo figures from every host
    that has port 22 open and accepts SSH.

    Stores MemTotal/MemFree/SwapTotal/SwapFree (numeric part only, in
    whatever unit the kernel reports -- normally kB; the unit suffix is
    matched but deliberately discarded).  Unreachable hosts are skipped.
    """
    conn = database.connection(db)
    cursor = conn.execute('select ip,host_id from hosts join ports on hosts.id=ports.host_id and portid=22 and hosts.connect_ssh=1')
    conn.execute('delete from mem')
    conn.commit()
    # One compiled pattern per field of interest; group(3) is the numeric
    # value (int() tolerates the stray whitespace [0-9\s]+ may capture).
    patterns = {
        'MemFree': re.compile(r'(^MemFree:)([\s]*)([0-9\s]+)([kmKMBbytesYTES]+)$'),
        'MemTotal': re.compile(r'(^MemTotal:)([\s]*)([0-9\s]+)([kmKMBbytesYTES]+)$'),
        'SwapFree': re.compile(r'(^SwapFree:)([\s]*)([0-9\s]+)([kmKMBbytesYTES]+)$'),
        'SwapTotal': re.compile(r'(^SwapTotal:)([\s]*)([0-9\s]+)([kmKMBbytesYTES]+)$'),
    }
    for ip, host_id in cursor:
        try:
            ssh = ssh_connect.ssh_22(ip)
        except Exception:
            # unreachable host -- best-effort, skip it
            continue
        stdin, stdout, stderr = ssh.exec_command('cat /proc/meminfo')
        # fields default to 0 when the line is absent (e.g. no swap)
        values = {'MemFree': 0, 'MemTotal': 0, 'SwapFree': 0, 'SwapTotal': 0}
        for line in stdout.readlines():
            for key in values:
                m = patterns[key].match(line)
                if m:
                    values[key] = int(m.group(3))
        conn.execute('insert into mem (host_id, mem_total, mem_free,swap_total,swap_free) values (?,?,?,?,?)',
                     (host_id, values['MemTotal'], values['MemFree'],
                      values['SwapTotal'], values['SwapFree'],))
    conn.commit()
    cursor.close()
def setUp(self):
    """Open a fresh database connection before each test case."""
    # The original wrapped this in a try/except that only re-raised,
    # which is a no-op; letting the exception propagate is equivalent.
    self.conn = database.connection()
def process_xml(filexml, db):
    """
    Import an nmap XML report into the inventory database.

    For every scanned host (excluding hosts whose status reason is
    'reset'):
      * insert the host (with its reverse-DNS name when available) if
        the IP is not already present, plus an empty `nmap` row for it,
      * update the host's `nmap` row with OS-detection details,
      * insert ports not seen before (existing ports are not updated).

    :param filexml: path to the nmap XML file
    :param db: database identifier passed to database.connection()
    """
    tcusers = ET.parse(filexml)
    hosts = tcusers.findall('/host/')
    conn = database.connection(db)
    # BUG FIX: the original first looped over all hosts inserting a bare
    # (NULL, ip) row for each and then called conn.close(), which both
    # duplicated every host (defeating the "is it new?" check below) and
    # crashed the real import loop on a closed connection.  That loop is
    # removed and the connection is closed only at the end.
    for a in hosts:
        reason = a.find('status').get('reason')
        if reason == 'reset':
            continue
        ip = a.find('address').get('addr')
        cursor = conn.execute('select * from hosts where ip=?', (ip,))
        if not cursor.fetchone():
            try:
                hostname = a.find('hostnames').find('hostname').get('name')
            except AttributeError:
                # no reverse-DNS entry in the report: fall back to the IP
                hostname = ip
            cursor = conn.execute(
                "insert into hosts (id, ip, server_name) values (NULL, ?,?);",
                (ip, hostname,))
            print("New Host: " + ip)
            conn.commit()
            cursor = conn.execute("select id from hosts where ip=?", (ip,))
            host_id = cursor.fetchone()[0]
            cursor = conn.execute("insert into nmap (hostid) values (?);",
                                  (host_id,))
            conn.commit()
        ## Check host_os
        ## Update if os_name or vendor have changed
        host_os = a.find('os')
        vendor = osfamily = osgen = name = ostype = None
        try:
            host_osclass = host_os.find('osclass')
            vendor = host_osclass.get('vendor')
            osfamily = host_osclass.get('osfamily')
            osgen = host_osclass.get('osgen')
            ostype = host_osclass.get('type')
            host_osmatch = host_os.find('osmatch')
            name = host_osmatch.get('name')
        except AttributeError:
            # host has no OS-detection section; leave the fields as None
            pass
        cursor = conn.execute("select id from hosts where ip=?", (ip,))
        host_id = cursor.fetchone()[0]
        cursor = conn.execute(
            "update nmap set vendor=?, os_family=?, os_gen=?, os_type=?, os_name=? where hostid=?",
            (vendor, osfamily, osgen, ostype, name, host_id,))
        conn.commit()
        ## Check ports
        ## Just insert new ports, no updates
        host_port = a.find('ports')
        for i in host_port:
            portid = name = product = version = extrainfo = ostype = None
            portid = i.get('portid')
            cursor = conn.execute(
                "select portid from ports where portid=? and host_id=?",
                (portid, host_id,))
            if cursor.fetchone():
                continue
            service = i.find('service')
            try:
                name = service.get('name')
                product = service.get('product')
                version = service.get('version')
                extrainfo = service.get('extrainfo')
                ostype = service.get('ostype')
            except AttributeError:
                # e.g. <extraports> elements have no <service> child
                pass
            if portid:
                print("%s %s" % (host_id, portid))
                cursor = conn.execute(
                    "insert into ports (host_id, portid, name, product,version, extrainfo,ostype) values (?, ?, ?, ?, ?, ?, ?);",
                    (host_id, portid, name, product, version, extrainfo, ostype))
                conn.commit()
                print("New port: " + ip + " " + portid)
    conn.close()
def __init__(self):
    # Open a database handle at construction time.
    # NOTE(review): the attribute is named `cursor` but db.connection()
    # presumably returns a connection, not a cursor -- confirm against
    # the db module and consider renaming for clarity.
    self.cursor = db.connection()