async def authenticate_user(token: str):
    """Extract the username from a JSON web token and return the
    corresponding user record from the database.

    Args:
        token: encoded JWT whose 'sub' claim carries the username.

    Returns:
        The user object found by get_user_by_username.

    Raises:
        credentials_exception: if the token is invalid, carries no
            'sub' claim, or no matching user exists.
    """
    # Decode first, outside any broad handler.  The original wrapped
    # everything in one try with a bare `except:`, which intercepted the
    # deliberate `raise credentials_exception` for a missing 'sub' claim
    # and logged it as 'Unknown error' before re-raising.
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[JWT_ALGORITHM])
    except jwt.PyJWTError:
        # logging.warn is deprecated; warning() is the supported spelling
        logging.warning('PyJWTError')
        raise credentials_exception
    username: str = payload.get('sub')  # the subject is the username
    if username is None:
        raise credentials_exception
    # open database session only for the lookup itself
    db = SessionLocal()
    try:
        user = get_user_by_username(db, username=username)
    finally:
        db.close()  # close database session even on failure
    if user is None:
        raise credentials_exception
    return user
def test_get_user(self):
    """Fetch a user by username and check the stored username field."""
    lookup_name = '******'
    session = SessionLocal()
    try:
        found = crud.get_user_by_username(session, lookup_name)
        assert found.username == 'thilo'
    finally:
        session.close()
def queryUncompleteShop():
    """Return the sellerName of the first shop whose fan count is still
    unfetched (numberOfFans == -1), or a prompt message when none exists.

    Returns:
        str: the seller's wangwang name, or the message
        '请获取更多淘宝店铺' when there is no pending shop.
    """
    session = SessionLocal()
    try:
        # .first() returns None when every shop already has fan data;
        # the original dereferenced .sellerName unconditionally and
        # crashed with AttributeError in that case.
        shop = session.query(Shops).filter(Shops.numberOfFans == -1).first()
        if shop is not None and shop.sellerName:
            return shop.sellerName
        return '请获取更多淘宝店铺'
    finally:
        # the original never closed the session it opened
        session.close()
def get_db():
    """Yield a database session for CRUD operations.

    FastAPI resolves this generator as a dependency and injects the
    session into the endpoints that declare it.  Yield-based
    dependencies need Python > 3.6 — see the FastAPI docs for a
    backport.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        # guarantee the session is closed even if the request raised
        session.close()
def test_add_user(self):
    """Create the schema and insert one user through the CRUD layer."""
    new_user = _user.UserCreate(
        username='******',
        password='******'
        # hashed_password='******'
    )
    # have to use the same Base that is imported there ???
    db_models.Base.metadata.create_all(bind=engine)
    session = SessionLocal()
    try:
        crud.create_user(session, new_user)
    finally:
        session.close()
def get_results_by_keywords(
        session: SessionLocal, keywords: str, owner_id: int
) -> List[Event]:
    """Return possible results for a search in the 'events' table.

    Args:
        keywords (str): search string
        owner_id (int): current user id

    Returns:
        list: events owned by the user whose text index matches the
        given keywords; empty on any database/parsing failure.

    Uses PostgreSQL's built-in 'Full-text search' feature (does not
    work with SQLite).
    """
    cleaned = get_stripped_keywords(keywords)
    try:
        matches = session.query(Event).filter(
            Event.owner_id == owner_id,
            Event.events_tsv.match(cleaned),
        )
        return matches.all()
    except (SQLAlchemyError, AttributeError):
        return []
def getShopInfoFromKDBSoup(wangwangName, driverContent):
    """Parse the shop-detail table ('.mt10') from a KDB page and persist
    the extracted fields onto the matching Shops row.

    Args:
        wangwangName: sellerName used to locate the Shops record.
        driverContent: raw HTML of the shop-detail page.

    Returns:
        0 on completion (also when no matching Shops row exists).
    """
    soup = BeautifulSoup(driverContent, 'lxml')
    mt10 = soup.select('.mt10')[0].find('tbody')
    # field label as rendered on the page -> parsed value (defaults kept
    # when a row is missing from the table)
    mt10Dic = {
        '店铺粉丝数': 0,
        '所在地区': None,
        '店铺宝贝数': 0,
        '创店时间': None,
        'DSR': 0,
        '主营类目': None
    }
    for row in mt10.find_all('tr'):
        for col in row.children:
            curGrid = str(col.string)
            if curGrid in mt10Dic:
                # the value lives in the first following <td> sibling
                for elem in col.next_siblings:
                    if str(elem).find('<td>') != -1:
                        mt10Dic[curGrid] = ''.join(elem.stripped_strings)
                        break
    # '店铺宝贝数' may still be the int default 0 when its row was absent;
    # the original passed it to findall() unguarded (TypeError) and then
    # indexed [0] unguarded (IndexError when no digits matched).
    pattern = re.compile(r'[\d]+')
    digits = pattern.findall(str(mt10Dic['店铺宝贝数']))
    mt10Dic['店铺宝贝数'] = digits[0] if digits else '0'
    if mt10Dic['所在地区'] == '':
        mt10Dic['所在地区'] = '-'
    session = SessionLocal()
    try:
        shop = session.query(Shops).filter(
            Shops.sellerName == wangwangName).first()
        if shop is None:
            # no matching record — nothing to update (the original
            # crashed with AttributeError here)
            return 0
        shop.numberOfFans = mt10Dic['店铺粉丝数']
        shop.shopZone = mt10Dic['所在地区']
        shop.numberOfItems = mt10Dic['店铺宝贝数']
        # NOTE(review): raises TypeError if '创店时间' was not found on the
        # page — confirm whether that row is always present
        shop.openingDate = datetime.strptime(
            mt10Dic['创店时间'], '%Y-%m-%d %H:%M:%S')
        shop.dsr_value = mt10Dic['DSR']
        shop.shopCategory = mt10Dic['主营类目']
        session.commit()
    finally:
        # the original never closed the session it opened
        session.close()
    return 0
def get_events_per_dates(session: SessionLocal,
                         user_id: int,
                         start: Optional[date],
                         end: Optional[date]) -> List[Event]:
    """Read from the db.  Return a list of all the user events between
    the relevant dates.

    Args:
        session: database session.
        user_id: owner whose events are fetched.
        start: inclusive lower bound of the range (may be None).
        end: inclusive upper bound of the range (may be None).

    Returns:
        list: matching events ordered by start time; empty when the
        range is invalid, a bound is missing, or the query fails.
    """
    # The parameters are Optional, but the original compared them with
    # `>` directly, raising TypeError on None.  Treat a missing bound as
    # "no results" — TODO confirm callers never expect an open range.
    if start is None or end is None or start > end:
        return []
    try:
        events = (session.query(Event)
                  .filter(Event.owner_id == user_id)
                  .filter(Event.start.between(start, end + timedelta(days=1)))
                  .order_by(Event.start)
                  .all())
    except SQLAlchemyError:
        return []
    return events
def getShopListFromTaobaoSoup(driverContent):
    """Parse Taobao's shop-search results page and insert shops that are
    not yet present (keyed by sellerUid) into the database.

    Args:
        driverContent: raw HTML of the search-results page.

    Returns:
        0 on success, or the string 'Login Error' when the result-list
        container is missing (typically a login wall).

    Raises:
        RuntimeError: when the expected page structure is absent.
    """
    soup = BeautifulSoup(driverContent, 'lxml')
    # soup = soup.find('shopsearch-shoplist')
    container = soup.find('ul', id='list-container')
    if container is None:  # was `== None` — identity check is correct here
        return 'Login Error'
    # strips the trailing '&fromid=...' tracking query from wangwang URLs
    pattern = re.compile(r'&fromid=.+')
    for item in container.select(".list-info"):
        try:
            shopH4 = item.find('h4')
            shopName = str(shopH4.a.string).replace(' ', '').replace('\n', '')
            shopUrl = 'https:' + shopH4.a['href']
            shopInfoList = item.select('.shop-info-list')[0]
            sellerName = str(shopInfoList.a.string).replace(' ', '').replace('\n', '')
            sellerUrl = 'https:' + shopInfoList.a['href']
            sellerUid = str(shopInfoList.a['trace-uid']).replace(' ', '').replace('\n', '')
            wangwangInfo = item.select('.ww-inline')[0]
            sellerWangWangIsOnline = str(wangwangInfo.span.string).replace(' ', '').replace('\n', '')
            sellerWangWangUrl = str(wangwangInfo['href']).replace(
                pattern.findall(wangwangInfo['href'])[0], '')
            valuation = item.select('.valuation')[0]
            goodCommentRatio = str(valuation.select('.good-comt')[0].string).replace(' ', '').replace('\n', '')
            isConsumerInsure = str(valuation).find('消费者保障') != -1
            isGoldenSeller = str(valuation).find('金牌卖家') != -1
        except IndexError:
            # the original `raise("...")` raised a plain str, which itself
            # fails with "exceptions must derive from BaseException"
            raise RuntimeError("Taobao Data Grasp Error")
        except AttributeError:
            raise RuntimeError("CSS Selector Error")
        shop = Shops(shopName=shopName, shopURL=shopUrl, sellerName=sellerName,
                     sellerURL=sellerUrl, sellerUid=sellerUid,
                     sellerWangWangOnlineStatus=sellerWangWangIsOnline,
                     sellerWangWangURL=sellerWangWangUrl,
                     goodCommentRatio=goodCommentRatio,
                     isConsumerInsure=isConsumerInsure,
                     isGoldSeller=isGoldenSeller)
        session = SessionLocal()
        try:
            # insert only when the seller is not already recorded
            isSellerExist = session.query(Shops).filter(
                Shops.sellerUid == sellerUid).first()
            if not isSellerExist:
                session.add(shop)
                session.commit()
        finally:
            # the original leaked the session if the query raised
            session.close()
    return 0
def get_db():
    """Yield a database session and always close it afterwards."""
    # Create the session BEFORE entering try: in the original, a failing
    # SessionLocal() left `db` unbound, so the finally clause raised
    # NameError and masked the real error.
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
def get_db():
    """Provide a database session to the caller and guarantee cleanup."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()  # pylint: disable=no-member
def create_dog(name) -> None:
    """Persist a new dog record, then pause briefly.

    Args:
        name: name of the dog to create.

    Returns:
        None.  (The original annotated `-> str` but always returned
        None; the annotation is corrected, the runtime value unchanged.)
    """
    db = SessionLocal()
    try:
        # close the session even if the insert fails — the original
        # leaked it on any exception from create_dog
        dog_crud.create_dog(name, db)
    finally:
        db.close()
    sleep(5)  # NOTE(review): purpose of this fixed delay is unclear — confirm
    return None
def session():
    """Fixture: create all tables, yield a session, then tear down.

    Yields:
        an open SessionLocal session bound to the test engine.
    """
    Base.metadata.create_all(bind=engine)
    db = SessionLocal()
    try:
        yield db
    finally:
        # run teardown even when the test body raises — the original
        # skipped close() and drop_all(), leaking schema state into
        # subsequent tests
        db.close()
        Base.metadata.drop_all(bind=engine)
from typing import List, Optional from fastapi import Depends, APIRouter, HTTPException, Query from sqlalchemy.orm import Session from yeelight import Bulb, BulbException from app.database.database import SessionLocal, get_db from . import schemas, crud router = APIRouter() bulbs = {} bulb_db = SessionLocal() try: bulb_lights = crud.get_lights(bulb_db) for bulb_light in bulb_lights: bulbs[bulb_light.name] = Bulb(bulb_light.ip) except: print('information cannot be retrieved') finally: bulb_db.close() def change_light(db, name: str, func: str, arguments=None): db_light = crud.get_light_by_name(db, name=name) if db_light is None: raise HTTPException(status_code=404, detail="Light not found") res = vars(db_light) try: if arguments: getattr(bulbs[db_light.name], func)(*arguments)