class Meta:
    # Configuration holder wiring the ORM declarative base and a DB session.
    # NOTE(review): `session` is created once at class-definition time and is
    # shared by everything that touches Meta — confirm a single long-lived
    # session is intended rather than a per-use session factory call.
    declarative_base = Base
    session = SessionLocal()
"""Smoke tests for the auth endpoints, exercised through FastAPI's TestClient."""
import warnings

import schemas
from database import SessionLocal

# Importing the app emits DeprecationWarnings from its dependencies; silence
# them only for the duration of that import.
with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    from main import app

from fastapi.testclient import TestClient

client = TestClient(app)
sess = SessionLocal()

# Test credentials are redacted placeholders.
username = "******"
password = "******"
token_headers = {}


def test_logout_without_auth():
    # Logging out without credentials must be rejected as unauthorized.
    response = client.get("/api/logout")
    assert response.status_code == 401
    #assert response.json() == dict(message="ok")


def test_login_missing_username():
    # A login payload missing a required field triggers FastAPI's 422
    # request-validation error.
    response = client.post("/api/login", json=dict(password=password))
    assert response.status_code == 422
from sqlalchemy.orm import Session
from database import SessionLocal, engine
import proccess
import models, schemas

# Module-level sessions used by this worker script.
db = SessionLocal()
session = Session()
import time
import wget

# Historical retry loop, kept commented out for reference.
#while True:
#    try:
#        result = db.query(models.PdfInfo).filter(models.PdfInfo.status =="In Proccess")
#        for row in result :
#            r=db.query(models.PdfInfo).filter(models.PdfInfo.id ==row.id).first()
#            r.status=None
#            db.commit()
#    except:
#        pass
#------------------------------------------------
#time.sleep(1)

print("Procceing......")
#try:
try:
    # NOTE(review): the second assignment discards the status filter, so the
    # final `result` is the unfiltered PdfInfo query — confirm which query is
    # actually intended.
    result = db.query(models.PdfInfo).filter(models.PdfInfo.status == None)
    result = db.query(models.PdfInfo)
except Exception:
    # Fix: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    print('error')
import datetime
import sys
from datetime import date
from random import randint

import models
import updates
from answers import answers
from api_handler import send_message
from database import SessionLocal

session = SessionLocal()


def notify():
    """Message each user the word list that is due right now.

    Selects every saved list dated today whose pending NotifyTime falls
    within two minutes of the current time, then sends the list's words
    to the owning user's Telegram id.
    """
    now_time = datetime.datetime.now()
    window = datetime.timedelta(minutes=2)
    due_lists = (
        session.query(models.List, models.NotifyTime)
        .add_columns(models.User.t_id)
        .join(models.NotifyTime, models.List.id == models.NotifyTime.list_id)
        .join(models.User, models.User.id == models.List.user_id)
        .filter(models.NotifyTime.status == 'pending')
        .filter(models.List.status == 'saved')
        .filter(models.List.date == datetime.date.today())
        .filter(models.NotifyTime.time.between(now_time - window,
                                               now_time + window))
    )
    for row in due_lists.all():
        words = '\n'.join(f'[{w.word}]' for w in row.List.words)
        send_message(row.t_id, 'The today\'s list of the words:\n' + words)
import pandas as pd
from models import COVID19News
from database import SessionLocal
from datetime import datetime

# Load the news corpus and turn every CSV row into an ORM record.
df = pd.read_csv("resource/corpus/news.csv")
news_list = []
for row in df.itertuples():
    title = getattr(row, "title")
    publish_date_str = getattr(row, "publish_date")
    publish_date = datetime.strptime(publish_date_str, "%Y-%m-%d %H:%M:%S")
    text = getattr(row, "text")
    news = COVID19News(title=title, publish_date=publish_date, text=text)
    news_list.append(news)

# Write to the database.
session = SessionLocal()
try:
    session.add_all(news_list)
except Exception as e:
    print(e)
else:
    session.commit()
finally:
    # Fix: the original closed the session only on failure, leaking the
    # connection every time the import succeeded.
    session.close()
def write_video_comments_sql(isVideoExists, video_info, comments_json_array):
    """Persist a Bilibili video and its top-level comments to the database.

    If the video already exists its previous rows are deleted first.
    Returns True on success, False when the delete or the inserts fail.
    """
    if isVideoExists:
        result = delete_video_by_oid(video_info['oid'])
        if not result:
            return False
    # Fix: create the session outside the try so a constructor failure cannot
    # cause a NameError in the cleanup below.
    session = SessionLocal()
    try:
        # Insert the bilibiliVideos row.
        video_record = BilibiliVideo(oid=video_info['oid'],
                                     url=video_info['url'],
                                     type=video_info['type'],
                                     title=video_info['title'])
        session.add(video_record)
        session.flush()  # flush so the autogenerated primary key is available
        # Insert the bilibiliVideoComments rows.
        vid = video_record.id
        commentRecords = []
        for page_json in comments_json_array:
            page_comment = json.loads(page_json)
            top_level_replies = page_comment['data']['replies']
            for reply in top_level_replies:
                # Strip emoticon markup such as "[doge]" from the message.
                text = re.sub(r'\[\S+\]', '', reply['content']['message'])
                comment = BilibiliVideoComment(vid=vid, text=text)
                commentRecords.append(comment)
        session.add_all(commentRecords)
    except Exception as e:
        print(e)
        return False
    else:
        session.commit()
        return True
    finally:
        # Fix: the original closed the session only on failure, leaking a
        # connection on every successful write.
        session.close()
import numpy as np
import tensorflow as tf
import pandas as pd
import crud
from sqlalchemy.orm import Session
from database import SessionLocal
import models

m_id = 1
db: Session = SessionLocal()


# Original methods from DL Notebook
def testAug1(testSet):
    """Segment and window the input frame for the model (input ~4097 x N).

    Takes the first 5 columns, splits each into four 1024-sample segments,
    then cuts three half-overlapping 512-sample windows out of every segment.
    Returns a normalized DataFrame whose columns are the windows.
    """
    segments = [
        testSet.iloc[:, col][part * 1024:1024 + part * 1024].values.flatten()
        for col in range(5)
        for part in range(4)
    ]
    windows = [
        segment[shift * 256:512 + 256 * shift]
        for segment in segments
        for shift in range(3)
    ]
    return pd.DataFrame(tf.keras.utils.normalize(np.array(windows).T))
async def get_db():
    """FastAPI dependency: yield a database session, closing it afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
import csv
import datetime
import models.item_model
from database import SessionLocal, engine

# Open a session and make sure the Item table exists before importing.
db = SessionLocal()
models.item_model.Base.metadata.create_all(bind=engine)

# Load every row of Item.csv into the items table.
with open("Item.csv", "r") as f:
    csv_reader = csv.DictReader(f)
    for row in csv_reader:
        db_record = models.item_model.Item(
            id=row["id"],
            name=row["name"],
            location=row["location"],
            description=row["description"],
            date=datetime.datetime.strptime(row["date"], "%Y-%m-%d"),
            #pic=row["pic"],
        )
        db.add(db_record)

# NOTE(review): the flattened source makes the original indentation of the two
# statements below ambiguous — the commit may have been per-row inside the
# loop; verify against the original file.
db.commit()
db.close()
from database import SessionLocal, engine
from sqlalchemy.sql import func
from globals import *
from model import *

# Set up logging.
# NOTE(review): `logging` and `LOG_DIR` are not imported here — presumably they
# come in via the star imports above; confirm.
LOGFILE = 'main.log'
logging.basicConfig(filename=LOG_DIR + LOGFILE,
                    datefmt='%Y-%m-%d %H:%M:%S',
                    level=logging.DEBUG,
                    format='%(asctime)s %(levelname)s: %(message)s',
                    filemode='w')

# Set up database: open a session and create any missing tables.
db = SessionLocal()
Base.metadata.create_all(bind=engine)

# Get app globals (first AppGlobals row) and the list of active processes.
appglobals = db.query(AppGlobals).first()
procs_active = []

############################################################################################
###                                                                                      ###
###  mine() - compares number of mined comets to minimum required, mines them as needed  ###
###                                                                                      ###
############################################################################################

def mine():
def hw_update_voo(request):
    """Replace the stored flight identified by request["id"] with new data.

    `request` is a dict with keys id, data ("%d/%m/%Y %H:%M:%S"), destino,
    companhia, capacidade, ocupacao and preco.  Returns the JSON produced
    by hw_get_voo for the rewritten record.
    """
    session = SessionLocal()
    try:
        voo_old = session.query(Voo).filter_by(id=request["id"]).first()
        # Fix: the original called session.delete(None) when the id did not
        # exist, raising an unhandled error; deleting only an existing row
        # makes the call behave as an upsert.
        if voo_old is not None:
            session.delete(voo_old)
        voo = Voo(
            id=request["id"],
            data=datetime.strptime(request["data"], "%d/%m/%Y %H:%M:%S"),
            destino=request["destino"],
            companhia=request["companhia"],
            capacidade=request["capacidade"],
            ocupacao=request["ocupacao"],
            preco=request["preco"],
        )
        session.add(voo)
        session.commit()
        voo_json = hw_get_voo(voo.id)
    finally:
        # Fix: the session leaked whenever any step above raised.
        session.close()
    return voo_json
def hw_get_voo(id):
    """Return the flight rows matching `id`, serialized by populate_voo."""
    session = SessionLocal()
    try:
        voo = session.query(Voo).filter_by(id=id).all()
        voo_json = populate_voo(voo)
    finally:
        # Fix: close the session even when the query or serialization raises.
        session.close()
    return voo_json
def hw_get_voos_aeroportos():
    """Return all flights ordered by ascending date, serialized for airports."""
    session = SessionLocal()
    try:
        voos = session.query(Voo).order_by(Voo.data.asc()).all()
        voos_json = populate_voo_aeroporto(voos)
    finally:
        # Fix: close the session even when the query or serialization raises.
        session.close()
    return voos_json
import models
from database import SessionLocal, engine
import json

# Creating local db session
db = SessionLocal()

# Creating all the tables in model
models.Base.metadata.create_all(bind=engine)

# Load the seed data.
# NOTE(review): 'path_to_file' looks like a placeholder path — point it at the
# real person.json before running.
with open('path_to_file/person.json') as f:
    data = json.load(f)
def fetch_stock_data(symbol: str):
    """Pull the option chain for *symbol* from Yahoo Finance and persist the
    stock price plus one Strike row per (expiration, strike).

    NOTE(review): reconstructed from a flattened source; loop boundaries
    follow statement order — verify against the original formatting.
    """
    db = SessionLocal()
    stock = db.query(Stock).filter(Stock.symbol == symbol).first()
    # Presumably every requested symbol exists; a missing row would raise
    # AttributeError on the next line — TODO confirm.
    yahoo_data = yfinance.Ticker(stock.symbol)
    exp_list = list(yahoo_data.options)
    strike = {}
    contract = {}
    price = {}       # NOTE(review): initialized as dict but later rebound to a float
    profit = {}      # NOTE(review): initialized as dict but later rebound to a list
    in_the_money = {}
    for i in exp_list:
        opt = yahoo_data.option_chain(i)
        strike[i] = opt.calls['strike']
        # The dicts accumulate across iterations, so these lists contain every
        # expiration processed so far, not only the current one — verify that
        # is intended.
        strike_value = strike.values()
        strike_list = list(strike_value)
        strike_price = [element * 100 for element in strike_list]
        contract[i] = opt.calls['ask']
        contract_value = contract.values()
        contract_list = list(contract_value)
        # NOTE(review): contracts scale by 10 while strikes scale by 100 —
        # confirm the differing factor is intentional.
        contract_price = [element * 10 for element in contract_list]
        price = yahoo_data.info['previousClose']
        price_total = price * 100
        price_to_execute = list(map(add, strike_price, contract_price))
        profit = [element - price_total for element in price_to_execute]
        # NOTE(review): `profit * 100` replicates the LIST 100 times rather
        # than scaling its values; map() then stops at len(price_to_execute),
        # so the net effect is element-wise profit/price_to_execute with no
        # percentage scaling. Probable bug — confirm intent.
        profit_large = profit * 100
        percent_profit = list(map(truediv, profit_large, price_to_execute))
        in_the_money[i] = opt.calls['inTheMoney']
        itm_value = in_the_money.values()
        itm_list = list(itm_value)
    # Values from the FINAL loop iteration are used below.
    stock.price = price_total
    exps = []
    for date in exp_list:
        exps.append(Expiration(symbol=stock.symbol, exp_list=date))
    # Convert each per-expiration Series into NaN-free nested lists.
    df = pd.DataFrame(strike_price)
    strike_listee = df.values.tolist()
    strike_final = [[x for x in y if not np.isnan(x)] for y in strike_listee]
    c_df = pd.DataFrame(contract_price)
    contract_listee = c_df.values.tolist()
    contract_final = [[x for x in y if not np.isnan(x)] for y in contract_listee]
    p2e_df = pd.DataFrame(price_to_execute)
    p2e_listee = p2e_df.values.tolist()
    p2e_final = [[x for x in y if not np.isnan(x)] for y in p2e_listee]
    p_profit_df = pd.DataFrame(percent_profit)
    p_profit_listee = p_profit_df.values.tolist()
    p_profit_final = [[x for x in y if not np.isnan(x)] for y in p_profit_listee]
    itm_df = pd.DataFrame(itm_list)
    itm_listee = itm_df.values.tolist()
    itm_final = [[x for x in y if not np.isnan(x)] for y in itm_listee]
    strk = []
    # Build one Strike row per (expiration, strike) pair.
    for strike_group, contract_group, p2e_group, p_profit_group, itm_group, date in zip(strike_final,
            contract_final, p2e_final, p_profit_final, itm_final, exp_list):
        for strikes, contracts, p2es, p_profits, itms in zip(strike_group, contract_group,
                p2e_group, p_profit_group, itm_group):
            strk.append(Strike(symbol=stock.symbol, exp_list=date, strike_price=strikes,
                               contract_price=contracts, price_to_execute=p2es,
                               percent_profit=p_profits, in_the_money=itms))
    instances = [stock]
    instances.extend(exps)
    instances.extend(strk)
    db.add_all(instances)
    db.commit()
def get_db():
    """Dependency that yields a DB session and guarantees it is closed."""
    # Fix: SessionLocal() was created inside the `try`; if the constructor
    # raised, the `finally` clause hit a NameError on the undefined `db`,
    # masking the original error.
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
music_json = json.load(music_file) except IOError as e: print('文件读取异常!', e.msg) else: song_list = [] for song in music_json: # 歌手 singer = song["singer"] # 歌名 song_name = song["song"] # 专辑名 album = song["album"] # 歌词 lyric = '\n'.join(song["geci"]) lyric = f"{song_name}\n\t{singer} - {album}\n{lyric}" song_list.append( ChineseLyric(singer=singer, song=song_name, album=album, text=lyric)) # 写入数据库 try: session = SessionLocal() session.add_all(song_list) except Exception as e: print(e) session.close() else: session.commit()