コード例 #1
0
def get_db():
    """Dependency that yields a database session and always closes it.

    Yields:
        An open SQLAlchemy session, closed when the caller is done.
    """
    # Open the session *before* the try block: if SessionLocal() itself
    # fails we must not reach the finally clause, where the old
    # `db = None` pattern called .close() on None and raised
    # AttributeError, masking the original error.
    db = SessionLocal()
    try:
        yield db
    finally:
        # Always return the connection to the pool, even on failure downstream.
        db.close()
コード例 #2
0
def bootstrap_new_stonk(ticker: str):
    """Create a stonk record for *ticker* and backfill its daily price history.

    Fetches the stonk's metadata and historical daily closes, stores the
    stonk row, then converts each day's USD close to BTC and persists it.

    Args:
        ticker: exchange ticker symbol of the stonk to bootstrap.
    """
    db = SessionLocal()
    try:
        stonk_data = get_stonk_data(ticker)

        stonk_db_obj = crud.create_stonk(db=db,
                                         stonk=schemas.StonkCreate(
                                             name=stonk_data['name'],
                                             ticker=ticker))

        # Converts all USD prices to BTC, one trading day at a time.
        for datestring in stonk_data['historical_daily_prices']['Close'].keys():
            # Coindesk is missing prices for some days, not a lot but a few -
            # Right now it's not worth solving this so a few gaps is OK
            try:
                price_datetime = datestring.to_pydatetime()
                price_in_usd = stonk_data['historical_daily_prices']['Close'][
                    datestring]
                price_in_btc = convert_usd_to_btc(
                    price_in_usd, get_bitcoin_price_on_day(price_datetime))
                print(
                    f'Saving Price for {ticker} on {datestring} @ {str(price_in_btc)}'
                )
                crud.create_price(db=db,
                                  price=schemas.PriceCreate(
                                      price=price_in_btc,
                                      datetime=price_datetime,
                                      stonk_id=stonk_db_obj.id))
            except Exception as e:
                # Best-effort: a single missing/bad day must not abort the backfill.
                print(e)
    finally:
        # Close the session even when fetching data or creating the stonk fails
        # (the original leaked it on any exception before db.close()).
        db.close()
コード例 #3
0
def start_db():
    """Create all tables (if missing), then yield a session and close it after use."""
    # Run schema creation and open the session *before* the try block so the
    # finally clause can never run against an unbound name: the original
    # raised UnboundLocalError from `db.close()` if create_all() failed,
    # masking the real error.
    db_structure.Base.metadata.create_all(bind=engine)
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
コード例 #4
0
def update_stonk(ticker: str):
    """Fetch the latest USD close for *ticker*, convert it to BTC, and persist it.

    Args:
        ticker: exchange ticker symbol of the stonk to refresh.
    """
    db = SessionLocal()
    try:
        # Drop the cached BTC price lookup so a fresh rate is fetched.
        file_name = 'btc_price_dict.pkl'
        if os.path.isfile(file_name):
            os.remove(file_name)

        stonk = yf.Ticker(ticker)
        stonk_obj = crud.get_stonk_by_ticker(db, ticker)
        prices = get_stonk_price_since_date_in_usd(
            stonk,
            datetime.datetime.now() - datetime.timedelta(days=4))
        # NOTE(review): if 'Close' is a pandas Series, integer-key indexing
        # falls back to positional lookup, which newer pandas removed —
        # presumably `.iloc[-1]` is intended; TODO confirm and migrate.
        price_in_usd = prices['Close'][len(prices['Close']) - 1]

        price_in_btc = convert_usd_to_btc(price_in_usd,
                                          get_current_bitcoin_price())
        # Capture one timestamp so the log line and the stored row agree
        # (the original called datetime.now() twice).
        now = datetime.datetime.now()
        print(
            f'Saving Price for {ticker} on {now} @ {str(price_in_btc)}'
        )
        crud.create_price(db=db,
                          price=schemas.PriceCreate(
                              price=price_in_btc,
                              datetime=now,
                              stonk_id=stonk_obj.id))
    finally:
        # Release the session even when the fetch/conversion fails.
        db.close()
コード例 #5
0
ファイル: routes.py プロジェクト: shreyas-s-k/Things-Platform
def get_db():
    """Yield a database session; on teardown, close it and log the elapsed time."""
    started = time.time()
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
        finished = time.time()
        print(f"time taken : {finished - started}")
コード例 #6
0
async def db_session_middleware(request: Request, call_next):
    """Attach a fresh DB session to ``request.state`` and guarantee it is closed.

    Returns the downstream response; the pre-built 500 response is kept as
    a fallback value only.
    """
    response = Response("Internal server error", status_code=500)
    # Open the session *before* the try block: in the original, a failing
    # SessionLocal() left request.state.db unset, so the finally clause
    # raised AttributeError and masked the real error.
    request.state.db = SessionLocal()
    try:
        response = await call_next(request)
    finally:
        request.state.db.close()
    return response
コード例 #7
0
ファイル: populate.py プロジェクト: DannyAziz/stonks-in-btc
def run():
    """Populate the database with one demo stonk and one demo price row."""
    db = SessionLocal()
    try:
        stonk = crud.create_stonk(
            db=db,
            stonk=schemas.StonkCreate(
                name='STONK',
                ticker='STONK'
            )
        )
        crud.create_price(
            db=db,
            price=schemas.PriceCreate(
                price=420.69,
                datetime=datetime.datetime.now(),
                stonk_id=stonk.id
            )
        )
    finally:
        # Return the session to the pool even if a create call fails.
        db.close()
コード例 #8
0
def startup_event():
    """On application startup: create tables and seed the default admin user."""
    print('startup event triggered')
    models.Base.metadata.create_all(bind=dbconf.engine)

    db = SessionLocal()
    try:
        uid = str(uuid.uuid4().hex)

        user = schemas.UserCreate(
            id=uid,
            email="admin",
            password="******",
            first_name="admin",
            is_admin=True,
            is_active=True,
        )

        # Seed the admin account only if it does not already exist.
        data = crud_base.get_user(email=user.email, db=db)
        if data is None:
            data = crud_users.create_user(user=user, created_by_userid=uid, db=db)
    finally:
        # The original leaked this session on every startup; always close it.
        db.close()
コード例 #9
0
async def run():
    """
    Application runs forever, algorithm is:
    1. Query the remote node for chaintip(most recent block)
    2. If the most recent block in the database(db_tip) is less than the chaintip.
           - the remote node is queried for the block results from db_tip to chaintip
           - these results are stored in postgresql
       else:
           - wait 1 second, then repeat from Step 1.
    """
    import asyncio  # local import: needed for the non-blocking sleep below

    remote_node = Node(os.environ['REMOTE_NODE'])
    chain_id = os.environ['CHAIN_ID']
    db = SessionLocal()
    while True:
        db_tip = crud.get_db_tip(db, chain_id)
        if db_tip:
            db_tip += 1
        else:
            db_tip = 1
        chain_tip = (await remote_node.get_chaintip())['chaintip']
        if db_tip < chain_tip:
            await remote_node.iter_blocks(db_tip, chain_tip, db, chain_id)
        else:
            # time.sleep() blocks the whole event loop inside a coroutine;
            # yield control to other tasks while waiting instead.
            await asyncio.sleep(1)
コード例 #10
0
import uuid
import models
import json
import re

from sqlalchemy import and_, func

from models import EvaluationRecord, EvaluationItems, OperationRecord, DevicesTag, SystemInfor
from db import SessionLocal, engine

# Create any missing tables, then open a module-level session for seeding.
models.Base.metadata.create_all(engine)
db = SessionLocal()

# Seed one evaluation record. Device/asset addresses are stored as
# stringified Python lists — presumably parsed back downstream; TODO
# confirm against the consumers of these columns.
record_1 = models.EvaluationRecord(
    uuid=str(uuid.uuid1()),
    count=0,
    name="HRM系统",
    level=2,
    devices_address="['200.0.0.1', '200.0.0.2', '200.0.0.3']",
    address="['192.168.0.1', '192.168.0.2']",
    status="finished",
    non_compliance=3,
    high_risk=1,
    is_delete=False)

record_1.evaluation_items = [
    models.EvaluationItems(eval="A0001",
                           device="AF",
                           device_address="200.0.0.1",
                           device_id="abcd",
                           conformity=3,
コード例 #11
0
async def startup_event():
    """Cache the free-qualified-teacher totals for levels 0 and 1 at startup."""
    db = SessionLocal()
    try:
        global_var.total_level0 = get_free_qualified_teacher(db, level_id=0)
        global_var.total_level1 = get_free_qualified_teacher(db, level_id=1)
    finally:
        # Close the session even if one of the queries raises.
        db.close()
コード例 #12
0
ファイル: populate.py プロジェクト: DannyAziz/stonks-in-btc
def teardown():
    """Delete all Price and Stonk rows (prices first, for the foreign key)."""
    db = SessionLocal()
    try:
        # Prices reference stonks, so they must be removed first.
        db.query(models.Price).delete()
        db.query(models.Stonk).delete()
        db.commit()
    finally:
        # Close even when a delete/commit fails (original leaked on error).
        db.close()
コード例 #13
0
class Acquirer:
    """Query/maintenance facade over the report-download queue of one acquirer.

    Holds a single SQLAlchemy session in ``self.session``; most methods close
    it in their ``finally`` clause.

    NOTE(review): every method both swallows exceptions with a broad
    ``except`` and uses ``return`` inside ``finally``, which silently
    discards any in-flight exception — confirm this best-effort behavior
    is intended before relying on the returned values.
    """

    def __init__(self, acquirer: str):
        """Remember the acquirer name and open a fresh database session."""
        self.acquirer = acquirer
        self.session = SessionLocal()
        # Replaced by a pandas DataFrame when load_merchants() runs.
        self.merchants = []

    def get_pendent_webhooks(self, acquirer: str) -> List["WebHooks"]:
        """Return valid webhooks of *acquirer* that have no QueueDetail row yet."""
        webhooks = []
        try:
            # Outer join plus "no detail row" filter = webhooks never queued.
            webhooks = self.session.query(WebHooks)\
                            .join(QueueDetail, isouter=True)\
                            .filter(QueueDetail.webhook_id==None)\
                            .filter(WebHooks.is_valid, WebHooks.acquirer==acquirer)\
                            .all()
        except Exception as error:
            print(str(error))
        finally:
            self.session.close()
            return webhooks

    def get_failed_download(self, acquirer: str):
        """Return retryable failed Adyen report downloads for *acquirer*.

        Unfinished entries with a non-success error code, retry count within
        the RETRYS env limit, and an Adyen download URL in the error text.
        """
        queue_files = []
        try:
            # NOTE(review): environ.get returns a *string* when RETRYS is set
            # in the environment — presumably coerced on the DB side; TODO
            # confirm the comparison semantics.
            queue_files = self.session.query(QueueDetail)\
                            .join(WebHooks)\
                            .filter(WebHooks.is_valid, 
                                    WebHooks.acquirer==acquirer,
                                    QueueDetail.is_done==False,
                                    QueueDetail.error.notin_(['0', '200']),
                                    QueueDetail.retrys <= environ.get('RETRYS', 10),
                                    QueueDetail.error.like('%https://ca-live.adyen.com/reports/download/MerchantAccount%'))\
                            .all()
        except Exception as error:
            print(str(error))
        finally:
            self.session.close()
            return queue_files 

    def get_reports_download(self, start_date: datetime, end_date: datetime):
        """Build per-acquirer, per-day download counts pivoted by status label.

        Returns ``(mapped_response, mapkeys)``: mapped_response maps
        acquirer -> list of per-day records; mapkeys maps acquirer -> the
        non-empty status columns seen in those records.
        """
        df = []
        mapped_response = {}
        # Shape of the payload this feeds (frontend contract):
        # {
        #     "data": [
        #         {
        #             "STONE": [{
        #                 "day": "01", 
        #                 "MerchantNotFound": 0, 
        #                 "Success": 0, 
        #                 "InternalServeErrors": 0
        #             }]
        #         }
        #     ],
        #     "mapKeys": ["MerchantNotFound", "Success", "InternalServeErrors"]
        # }
        try:
            queue_files = self.session.query(func.to_char(QueueDetail.createdAt, 'YYYY-MM-DD').label("day"), 
                                             func.count(QueueDetail.detailId).label("quantity"),
                                             QueueDetail.statusCode,
                                             QueueProcesses.acquirer)\
                                      .join(QueueProcesses, QueueDetail.processId==QueueProcesses.process_id)\
                                      .group_by("day", QueueDetail.statusCode, QueueProcesses.acquirer)\
                                      .filter(between(QueueDetail.createdAt, start_date, end_date))
            df = pd.read_sql(queue_files.statement, self.session.bind)
            # pivot table
            df['quantity'] = df['quantity'].astype(int)
            # print(df)
            df_pivot = df.pivot_table(values=['quantity'], index=df.index, columns=['status_code']).fillna('0').astype(int)
            df = df.merge(df_pivot, left_index=True, right_index=True, how='outer').sort_values('day')
            df = df.groupby(by=['acquirer', 'day']).sum().reset_index().drop(['quantity'], axis=1)
            # Human-readable labels for the status codes stored in the queue.
            responses = {
                '200': "BAIXADOS",
                '201': "BAIXADOS",
                '400': "BADREQUEST",
                '500': "ERRO INTERNO DA ADQUIRENTE",
                '401': "SEM CONCESSÃO",
                '403': "NÃO AUTORIZADO",
                '503': "TIMEOUT",
                '0': 'SEM CAD CONCIL',
                'REPC': 'AGUAR.REPROCESSAMENTO',
                'CANC': 'CANCELADO',
            }
            df.rename(columns={(name, status): responses[status] if status in responses else name for name, status  in df.columns[2:]}, inplace=True)
            acquirers = df['acquirer'].unique().tolist()
            mapped_response = {
                acquirer: df.loc[df['acquirer']==acquirer].to_dict(orient='records') for acquirer in acquirers
            }
            mapkeys = {}
            for acquirer in acquirers:
                mapkeys[acquirer] = []
                for data in mapped_response[acquirer]:
                    # Keep only status columns that actually carry counts.
                    mapkeys[acquirer].extend([key for key, value in data.items() if value and key not in ('day', 'acquirer')])
                    mapkeys[acquirer] = list(set(mapkeys[acquirer]))
        except Exception as error:
            print(str(error))
        finally:
            self.session.close()
            # NOTE(review): mapkeys is only bound inside the try block — an
            # early failure makes this line raise NameError; TODO hoist its
            # initialization next to mapped_response.
            return mapped_response, mapkeys  
    
    def make_search_query(self, filters: list, query):
        """Apply case-insensitive LIKE filters to *query*.

        'acquirer' targets QueueProcesses; every other field targets
        QueueDetail. Filters with an empty value are skipped.
        """
        for params in filters:
            if len(params['value']) and params['name'] == 'acquirer':
                query = query.filter(getattr(QueueProcesses, params['name']).ilike(f"%{params['value']}%"))
            elif len(params['value']):  # only fill values
                query = query.filter(getattr(QueueDetail, params['name']).ilike(f"%{params['value']}%"))
        return query

    def get_reports_download_detail(self, start_date: datetime, end_date: datetime, acquirer: str, filters: list, page:int, pageSize: int):
        """Return one page of queue-detail rows joined with their acquirer.

        Returns ``(records, column_names, meta)`` where meta carries the
        pagination bookkeeping (total, current, pageSize, page numbers).
        Pass acquirer='ALL' to skip the acquirer filter.
        """
        df = []
        keys = []
        meta= {
            "total": 0,
            "current": page,
            "pageSize": pageSize
        }
        try:
                
            queue_files = self.session.query(QueueDetail, QueueProcesses.acquirer.label("acquirer"))\
                                      .select_from(QueueDetail)\
                                      .join(QueueProcesses, QueueDetail.processId==QueueProcesses.process_id)\
                                      .filter(
                                          between(QueueDetail.createdAt, start_date, end_date)
                                        )
            if acquirer != 'ALL':
                queue_files= queue_files.filter(QueueProcesses.acquirer==acquirer)
            
            if filters:
                queue_files = self.make_search_query(filters, queue_files)
            paginator = Paginator(queue_files, meta["pageSize"])
            # `page` is rebound here from the int argument to the Page object.
            page = paginator.page(meta["current"])
            meta["total"] = page.paginator.count
            meta["countPages"] = page.paginator.total_pages
            meta["previous_page_number"] = page.previous_page_number
            meta["next_page_number"] = page.next_page_number
            # queue_files = queue_files.limit(50)
            # print(meta)
            # print(page.object_list)

            # df = pd.read_sql(queue_files.statement, self.session.bind)
            # Serialize the ORM rows into plain dicts (datetimes to ISO strings).
            objects = [    
                {
                    "detailId": item.detailId,
                    "clientCode": item.clientCode,
                    "shortName": item.shortName,
                    "storeCode": item.storeCode,
                    "merchantCode": item.merchantCode,
                    "text": item.text,
                    "error": item.error,
                    "statusCode": item.statusCode,
                    "fileDate": item.fileDate,
                    "createdAt": item.createdAt.isoformat() if item.createdAt else item.createdAt,
                    "endAt": item.endAt.isoformat() if item.endAt else item.endAt,
                    "isDone": item.isDone,
                    "isRunning": item.isRunning,
                    "retrys": item.retrys,
                    "webhookId": item.webhookId,
                    "acquirer": acquirer
                } for item, acquirer in page.object_list
            ]
            # print(objects)
            if not objects: return  # NOTE(review): overridden — the finally clause still returns (df, keys, meta)
            df = pd.DataFrame(objects)
            # df.rename(columns={name: "".join([name.split('_')[0], name.split('_')[1].capitalize()]) if len(name.split('_'))>1 else name for name in df.columns}, inplace=True)
            # print(len(df))
            df['webhookId'] = df['webhookId'].fillna(0).astype(int)
            keys = df.columns.tolist()
            df = df.to_dict(orient='records')
            # print(df)
        except Exception as error:
            print(str(error))
        finally:
            self.session.close()
            return df, keys, meta

    def describ_request(self, detail: QueueDetail):
        """Print a one-line human-readable summary of a queue detail entry.

        NOTE(review): this reads snake_case attributes (client_code,
        file_date, status_code, webhook_id) while the query methods above use
        camelCase (clientCode, fileDate, ...) — presumably one of the two is
        stale; confirm against the QueueDetail model.
        """
        print(f"{detail.text} -> Client_code: {detail.client_code}, Merchant: {detail.merchant_code}, File Date: {detail.file_date}, retrys: {detail.retrys}, status_code: {detail.status_code} webhooK_id {detail.webhook_id}")

    def reprocess_files(self, files: list, status: str) -> None:
        """Bulk-set statusCode to *status* for the given detail ids.

        Returns ``(rows, [])`` with the re-read rows.
        NOTE(review): the ``-> None`` annotation does not match the actual
        ``return files, []``.
        """
        try:
            # update() returns the affected row count; the value is unused here.
            queue_files = self.session.query(QueueDetail).filter(QueueDetail.detailId.in_(files)).\
                                       update({QueueDetail.statusCode: status}, synchronize_session=False)
            self.session.commit()
            # Re-read the rows so callers receive the updated state.
            files = self.session.query(QueueDetail).filter(QueueDetail.detailId.in_(files)).all()
        except Exception as error:
            print(str(error))
        finally:
            self.session.close()
            return files, []

    def get_acquirers_count(self, start_date: datetime, end_date: datetime):
        """Count successful (status '200') downloads per acquirer in the window.

        Returns ``(records, [])`` where each record carries a
        "<acquirer> <percent>%" display name for the frontend pie chart.
        """
        df = []
        # const data = [
        # { name: 'Adyen 25%', value: 25 },
        # { name: 'Group B', value: 25 },
        # { name: 'Group C', value: 25 },
        # { name: 'Group D', value: 25 },
        # ];
        try:
            queue_files = self.session.query(func.count(QueueDetail.detailId).label("quantity"),
                                             QueueProcesses.acquirer)\
                                      .join(QueueProcesses, QueueDetail.processId==QueueProcesses.process_id)\
                                      .group_by(QueueProcesses.acquirer)\
                                      .filter(between(QueueDetail.createdAt, start_date, end_date),
                                              QueueDetail.statusCode == '200')
            df = pd.read_sql(queue_files.statement, self.session.bind)
            # pivot table
            df['quantity'] = df['quantity'].fillna(0).astype(float)
            total = df['quantity'].sum()
            df['name'] = [f"{acquirer} {int((quantity/total*100))}%" for quantity, acquirer in df.itertuples(index=False)]
            df = df.drop('acquirer', axis=1).to_dict(orient="records")
        except Exception as error:
            print(str(error))
        finally:
            self.session.close()
            return df, []     
        
    def get_prevision_vs_current(self, start_date: datetime, end_date: datetime):
        """Compare actual successful downloads per acquirer against the
        projected count ("prev") derived from the historical daily average.
        """
        df = []
        try:
            interval = (end_date - start_date)
            queue_files = self.session.query(func.count(QueueDetail.detailId).label("baixado"),
                                             QueueProcesses.acquirer.label("name"))\
                                      .join(QueueProcesses, QueueDetail.processId==QueueProcesses.process_id)\
                                      .group_by(QueueProcesses.acquirer, func.to_char(QueueDetail.createdAt, 'YYYY-MM-DD'))\
                                      .filter(between(QueueDetail.createdAt, start_date, end_date))\
                                      .filter(QueueDetail.statusCode == '200')
            df = pd.read_sql(queue_files.statement, self.session.bind)
            # Guarantee every known acquirer appears, even with zero downloads.
            # NOTE(review): DataFrame.append was removed in pandas >= 2.0 —
            # confirm the pinned pandas version or migrate to pd.concat.
            for acquirer in ["AME", "STONE", "PAYPAL", "ADYEN"]:
                df = df.append({"name": acquirer, "baixado": 0}, ignore_index=True)
            df = df.groupby('name').sum().reset_index()
            df['prev'] = [self.get_average((interval.days+1) or 1, acquirer) for acquirer, baixado in df.itertuples(index=False)]
            # print(df)
            # df['baixado'] = df['baixado'] *1000
        except Exception as error:
            print(str(error))
        finally:
            self.session.close()
            # NOTE(review): if the query setup failed, df is still a plain
            # list and this .to_dict() call raises from the finally block.
            return df.to_dict(orient="records"), []

    def get_average(self, interval: int, acquirer: str):
        """Project the expected download count for *interval* days from the
        per-day average of distinct successful downloads for *acquirer*."""
        # nested querys
        sums = self.session.query(func.count(func.distinct(QueueDetail.detailId)).label("baixado"))\
                                      .join(QueueProcesses, QueueDetail.processId==QueueProcesses.process_id)\
                                      .group_by(func.to_char(QueueDetail.createdAt, 'YYYY-MM-DD'))\
                                      .filter(QueueDetail.statusCode == '200', QueueProcesses.acquirer==acquirer)\
                                      .subquery()
        average = self.session.query(func.avg(sums.c.baixado)).scalar() or 0
        return int(average * interval)

    def load_merchants(self, acquirer: str) -> list:
        """Load the establishments CSV for *acquirer* from S3 into
        ``self.merchants`` and return ``(records, [])`` with the
        merchantCode/clientCode columns.

        NOTE(review): on failure, the finally clause still indexes ``df``
        (a plain list at that point) and raises TypeError.
        """
        df = []
        try:
            region = 'us-east-1'
            # Per-acquirer overrides for the S3 bucket region.
            keys ={
                "AME": {
                    "region": "sa-east-1"
                }
            }
            if acquirer in keys:
                region = keys[acquirer]["region"]
            
            s3 = S3(p_bucket=f"concil-{environ.get('STAGE', 'dev')}-blackbox-{acquirer.lower()}", region_name=region)
            merchants = s3.get_object(p_key=f"establishment/CFR_ESTABELECIMENTOS_{acquirer.upper()}.csv")
            df = pd.read_csv(StringIO(merchants), sep=";")
            # Drop the aggregate "FILIAL_GERAL" pseudo-branch rows.
            df = df.query("FILIAL_CODIGO != 'FILIAL_GERAL'", engine="python")
            df['ESTABELECIMENTO'] = df['ESTABELECIMENTO'].map(str)
            self.merchants = df
            # self.merchants.to_csv('test_establishments.csv', sep=',')
            df.rename(columns={"ESTABELECIMENTO": "merchantCode", "PFJ_CODIGO": "clientCode"}, inplace=True)
        except Exception as error:
            print(str(error))
        finally:
            return df[["merchantCode", "clientCode"]].to_dict(orient="records"), []

    def get_available_files(self, acquirer: str, start_date: datetime, end_date: datetime, merchant_code: str):
        """List the files available at the acquirer for one merchant in the window.

        Loads the merchant table, narrows it to *merchant_code*, then
        dispatches to the acquirer-specific client (Adyen/Stone).
        """
        df = []
        acquirers = {
            "ADYEN": Adyen,
            "STONE": Stone
        }
        self.load_merchants(acquirer)
        # print(self.merchants)
        if acquirer in ["ADYEN"]:
            # Adyen additionally requires stored credentials on the merchant row.
            merchant = self.merchants.query(f"merchantCode=='{merchant_code}' and FILIAL_CODIGO != 'FILIAL_GERAL' and USERNAME.str.len() and PASSWORD.str.len()", engine="python")
        else:
            merchant = self.merchants.query(f"merchantCode=='{merchant_code}'", engine="python")
            # print(merchant)
        df = acquirers[acquirer]().get_available_files(
                                        start_date=start_date, 
                                        end_date=end_date,
                                        merchant_code=merchant_code, 
                                        df_merchants=merchant
                                        )
        # print(results)
        return df.to_dict(orient="records"), []

    def reprocess(self, acquirer: str, files):
        """Queue *files* for reprocessing at the acquirer and persist the queue.

        Returns ``(message, [])`` — a success message containing the process
        id, or the error text when anything fails.
        """
        reprocess = []
        message = "Arquivos processados com sucesso process id {queu_id}"
        try:
            # Only Adyen supports reprocessing at the moment.
            acquirers = {
                "ADYEN": Adyen
            }
            queue = acquirers[acquirer]().reprocess(files)
            self.session.add(queue)
            self.session.commit()
            message = message.format(queu_id=queue.process_id)
        except Exception as error:
            message = str(error)
        finally:
            self.session.close()
            return message, []
    
    """Get reports availables values columns based on query filters"""
    def get_names(self, column: str, start_date: datetime, end_date: datetime, filters: list) -> List:
        """Return the distinct values of *column* within the date window.

        Returns ``(meta, data, status)``; data is a list of {"value": ...}
        dicts and status is 200 or 500. (The bare string above this method is
        a stray class-level statement, not this method's docstring.)
        """
        #  start_time = time()
        # Per-column converters so the raw DB values become JSON-friendly.
        mapped_values = {
            "installment_amount": lambda x: float(x) if x else 0,
            "createdAt": lambda x: x.isoformat() if x else None,
            "endAt": lambda x: x.isoformat() if x else None
        }
        response = {
            "meta": {
                # "limit": data['limit'],
                # "offset": data['offset'],
                "column": column,  # NOTE(review): duplicate key below; the later one wins
                "start_date": start_date.isoformat(),
                "end_date": end_date.isoformat(),
                # "has_next": False,
                "filters": filters,
                "column": column
            },
            "data": []
        }
        status = 200
        try:
            # Only for conciliation_id status PENDENTE/CONCILIADO
            queue_files = self.session.query(func.distinct(getattr(QueueDetail if column != 'acquirer' else QueueProcesses, column)))\
                                      .select_from(QueueDetail)\
                                      .join(QueueProcesses, QueueDetail.processId==QueueProcesses.process_id)\
                                      .filter(between(QueueDetail.createdAt, start_date, end_date))
            if filters:
                queue_files = self.make_search_query(filters, queue_files)
            
            queue_files = queue_files.all()
            results = [mapped_values[column](x[0]) if column in mapped_values else x[0] for x in queue_files]
            response["data"] =  [{"value": value} for value in results]
        except Exception as error:
            print(error)
            status = 500
        finally:
            self.session.close()
            return response["meta"], response["data"], status
コード例 #14
0
def update_stonks():
    """Queue a background price update for every known stonk."""
    db = SessionLocal()
    try:
        stonks = crud.get_stonks(db)
        for stonk in stonks:
            # .delay() — presumably a Celery task dispatch; runs asynchronously.
            update_stonk.delay(stonk.ticker)
    finally:
        # The original never closed this session.
        db.close()
コード例 #15
0
from sqlalchemy import Column, Integer, String, ForeignKey
from db import Base, SessionLocal, engine


class User(Base):
    """ORM model for the ``users`` table."""
    __tablename__ = "users"
    # Surrogate primary key.
    id = Column('id', Integer, primary_key=True)
    # Display name; uniqueness enforced at the database level.
    name = Column('name', String, unique=True)


# Create any missing tables before opening the session.
Base.metadata.create_all(bind=engine)

session = SessionLocal()
try:
    # Example of inserting a row (intentionally disabled):
    # user = User(id=1, name="shemin")
    # session.add(user)
    # session.commit()
    users = session.query(User).all()

    for user in users:
        print(user.name)
finally:
    # The original left the session open (its close() call was commented out).
    session.close()
コード例 #16
0
 def __init__(self, acquirer: str):
     """Remember the acquirer name and open a fresh database session."""
     self.acquirer = acquirer
     self.session = SessionLocal()
     # Filled later by a merchant-loading step; starts empty.
     self.merchants = []
コード例 #17
0
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import asyncio
import time
import sys
from fastapi.encoders import jsonable_encoder

from db import SessionLocal
from deps import get_db
from databases import Database

from models import Process
from settings import settings
# Module-level session shared by the processing functions in this script.
db = SessionLocal()


async def sleep(val):
    """Pause for *val* seconds without blocking the event loop."""
    return await asyncio.sleep(val)


def logger(func):
    """Decorator that times *func* and prints the elapsed time per call.

    The wrapped callable must have the signature ``(instance, db)``; the
    printed line includes ``instance.id`` to identify which object was
    processed. The wrapped result is returned unchanged.
    """
    from functools import wraps  # local import keeps the snippet self-contained

    @wraps(func)  # preserve __name__/__doc__ for introspection and debugging
    def wrapper(instance, db):
        start = time.time()
        result = func(instance, db)
        end = time.time()
        print(
            f'{func.__name__} is completed in time: {end - start:.2f} sec for id {instance.id}\n'
        )
        return result

    return wrapper
コード例 #18
0
 async def db_session_middleware(request: Request, call_next):
     """Middleware: give each request its own DB session and always close it.

     The original never closed the session when the downstream handler
     raised; the try/finally guarantees cleanup on every code path.
     """
     request.state.db = SessionLocal()
     try:
         response = await call_next(request)
     finally:
         request.state.db.close()
     return response