Пример #1
0
async def create_user(request: SignupRequest):
    """Create a user via the ``create_user`` stored procedure and return its token row.

    A fresh uuid4 string is used as the bearer token.

    Raises:
        ValueError: if the user already exists (procedure returned nothing).
        LookupError: if the freshly created token row cannot be read back.

    NOTE: the original code raised bare strings, which is itself a
    ``TypeError`` in Python 3; replaced with real exceptions.
    """
    bearer_token: str = str(uuid4())
    hashed_password = hash_password(request.password)
    async with database.connection():
        result = await database.execute(
            query="""
                CALL create_user(:token, :email, :password, :first_name, :last_name)
            """,
            values={
                "token": bearer_token,
                "email": request.email,
                "password": hashed_password,
                "first_name": request.first_name,
                "last_name": request.last_name,
            })
    if result is None:
        raise ValueError("User already exists")

    async with database.connection():
        token = await database.fetch_one(
            query="""
                SELECT * FROM token WHERE bearer=:token
            """,
            values={"token": bearer_token})
    if token is None:
        raise LookupError("Token not found")
    return token
Пример #2
0
    def load_pool_nav(self, gids, sdate, edate):
        """Load pool NAV series for the given global asset ids into one DataFrame.

        :param gids: iterable of global asset ids; each id encodes
                     (type, pool id, category) in its decimal digits
        :param sdate: optional inclusive start-date filter (or None)
        :param edate: optional inclusive end-date filter (or None)
        :return: DataFrame indexed by ra_date with one NAV column per asset id
        """
        db = database.connection('asset')
        t = self.tabs.setdefault(1, Table('ra_pool_nav', MetaData(bind=db), autoload=True))

        data = {}  # BUG FIX: was never initialized -> NameError on first use
        for asset_id in gids:
            #
            # Pool asset: decode pool id / category / type from the global id.
            #
            asset_id %= 10000000
            # BUG FIX: use integer division; '/' yields floats in Python 3,
            # which would break the equality filters below.
            (pool_id, category) = (asset_id // 100, asset_id % 100)
            ttype = pool_id // 10000

            columns = [
                t.c.ra_date,
                t.c.ra_nav,
            ]

            # BUG FIX: the original filtered on undefined names `id_` and
            # `xtype`; the decoded pool_id / ttype are the intended values.
            s = select(columns) \
                .where(t.c.ra_pool == pool_id) \
                .where(t.c.ra_category == category) \
                .where(t.c.ra_type == ttype)

            if sdate is not None:
                s = s.where(t.c.ra_date >= sdate)
            if edate is not None:
                s = s.where(t.c.ra_date <= edate)

            df = pd.read_sql(s, db, index_col=['ra_date'], parse_dates=['ra_date'])

            data[asset_id] = df['ra_nav']

        return pd.DataFrame(data)
Пример #3
0
def corr_update(corr, codes):
    """Recompute fund-vs-index correlations for one corr record and persist them.

    :param corr: mapping with at least 'globalid', 'ra_index_id', 'ra_date_type'
    :param codes: optional iterable of fund codes restricting which funds update
    :return: False if the referenced index is unknown; otherwise None
    """
    ra_index = base_ra_index.find(corr['ra_index_id'])
    if ra_index is None:
        click.echo(click.style(
            "unknown index [%s]for calc corr!" % (corr['ra_index_id']), fg="yellow"))
        return False

    yesterday = (datetime.now() - timedelta(days=1));
    enddate = yesterday.strftime("%Y-%m-%d")

    #
    # Load index nav data (daily vs. periodic depends on ra_date_type)
    #
    index_code = ra_index['ra_code']
    if corr['ra_date_type'] == 1:
        df_nav_index = DBData.db_index_value_daily('2015-10-08', enddate, codes=[index_code])
    else:
        df_nav_index = DBData.db_index_value('2015-10-08', enddate, codes=[index_code])
    df_inc_index = df_nav_index.pct_change().fillna(0.0)

    #
    # Load the fund list
    #
    df_fund = base_ra_fund.load(codes=codes)

    data = []
    with click.progressbar(length=len(df_fund.index), label=('update corr for corr %d' % (corr['globalid'])).ljust(30)) as bar:
        for _,fund in df_fund.iterrows():
            bar.update(1)
            tmp = corr_update_fund(corr, fund, df_inc_index)
            if tmp is not None:
                # correlation stored as a 4-decimal string for stable DB diffing
                data.append([
                    corr['globalid'],
                    fund['globalid'],
                    fund['ra_code'],
                    "%.4f" % (tmp),
                ])

    df_new = pd.DataFrame(data, columns=['ra_corr_id', 'ra_fund_id', 'ra_fund_code', 'ra_corr'])
    df_new = df_new.set_index(['ra_corr_id', 'ra_fund_id'])

    db = database.connection('base')
    # Load existing rows so database.batch can diff new vs. old
    t2 = Table('ra_corr_fund', MetaData(bind=db), autoload=True)
    columns2 = [
        t2.c.ra_corr_id,
        t2.c.ra_fund_id,
        t2.c.ra_fund_code,
        t2.c.ra_corr,
    ]
    stmt_select = select(columns2, (t2.c.ra_corr_id == corr['globalid']))
    if codes is not None:
        stmt_select = stmt_select.where(t2.c.ra_fund_code.in_(codes))

    df_old = pd.read_sql(stmt_select, db, index_col=['ra_corr_id', 'ra_fund_id'])
    if not df_old.empty:
        # normalize stored values to the same 4-decimal string format as df_new
        df_old['ra_corr'] = df_old['ra_corr'].map("{:.4f}".format)

    # Write the diff back to the database
    database.batch(db, t2, df_new, df_old, timestamp=True)
def index_value(start_date, end_date, ra_index_id):
    """Load nav values for one index on week-end trade dates as a DataFrame.

    Rows are restricted to trade dates flagged week-end (td_type & 0x02)
    within [start_date, end_date], plus end_date itself; missing values
    are forward-filled.
    """
    db = database.connection('base')
    nav_t = Table('ra_index_nav', MetaData(bind=db), autoload=True)
    dates_t = Table('trade_dates', MetaData(bind=db), autoload=True)

    # Sub-select of the qualifying trade dates.
    day_sel = (
        select([dates_t.c.td_date])
        .where(between(dates_t.c.td_date, start_date, end_date))
        .where(dates_t.c.td_type.op('&')(0x02) | (dates_t.c.td_date == end_date))
        .alias('E')
    )

    fields = [nav_t.c.ra_date.label('date'), nav_t.c.ra_index_id, nav_t.c.ra_nav]
    stmt = (
        select(fields)
        .select_from(day_sel)
        .where(nav_t.c.ra_date == day_sel.c.td_date)
        .where(nav_t.c.ra_index_id == ra_index_id)
        .order_by(nav_t.c.ra_date)
    )
    logger.debug("index_value:" + str(stmt))

    frame = pd.read_sql(stmt,
                        db,
                        index_col=['date', 'ra_index_id'],
                        parse_dates=['date'])
    frame = frame.unstack().fillna(method='pad')
    frame.columns = frame.columns.droplevel(0)

    return frame
Пример #5
0
def ts_order_stream(ctx):
    """Export redeem/transfer ts_order rows (trade types 3-6) into MongoDB.

    Reads from the 'trade' database and writes each record, augmented with
    a combined ts_placed_datetime, into user_analysis.ts_order.
    """
    engine = database.connection('trade')
    Session = sessionmaker(bind=engine)
    session = Session()
    sql = session.query(
        trade.ts_order.ts_uid, trade.ts_order.ts_txn_id,
        trade.ts_order.ts_portfolio_id, trade.ts_order.ts_trade_type,
        trade.ts_order.ts_trade_status, trade.ts_order.ts_trade_date,
        trade.ts_order.ts_risk, trade.ts_order.ts_placed_percent,
        trade.ts_order.ts_placed_amount, trade.ts_order.ts_placed_date,
        trade.ts_order.ts_placed_time, trade.ts_order.ts_acked_date,
        trade.ts_order.ts_acked_amount, trade.ts_order.ts_acked_fee
    ).filter(trade.ts_order.ts_trade_type.in_([3, 4, 5, 6])).statement
    ts_order_df = pd.read_sql(sql, session.bind)
    session.commit()
    session.close()

    from pymongo import MongoClient
    conn = MongoClient('127.0.0.1', 27017)
    db = conn.user_analysis
    ts_order_collection = db.ts_order

    for record in ts_order_df.to_dict('records'):
        ts_placed_date = record['ts_placed_date']
        ts_placed_time = record['ts_placed_time']
        # skip orders that were never actually placed
        if ts_placed_date is None or ts_placed_time is None:
            continue
        # ts_placed_time is a timedelta -- anchor it at midnight of the date
        ts_placed_datetime = datetime(
            ts_placed_date.year, ts_placed_date.month, ts_placed_date.day
        ) + ts_placed_time
        record['ts_placed_datetime'] = ts_placed_datetime
        print(record)
        # BUG FIX: insert the record dict itself. The original called
        # insert(json.dumps(record)) -- json.dumps raises TypeError on the
        # datetime values, and Mongo expects a document (dict), not a str.
        # insert() is also deprecated in favor of insert_one().
        ts_order_collection.insert_one(record)
Пример #6
0
async def create_artist(current_user: Profile, artist: Artist):
    """Insert a new artist row for the given profile and return the DB response."""
    insert_sql = """
                INSERT INTO artist (
                    is_manager, 
                    max_bookings, 
                    minimum_price, 
                    profile_id
                ) VALUES (
                    :is_manager, 
                    :max_bookings, 
                    :minimum_price, 
                    :profile_id
                )
            """
    params = {
        "profile_id": current_user.profile_id,
        "is_manager": artist.is_manager,
        "max_bookings": artist.max_bookings,
        "minimum_price": artist.minimum_price,
    }
    async with database.connection():
        response = await database.execute(query=insert_sql, values=params)
    assert response is not None
    return response
Пример #7
0
def load_pools(pools, pool_type=None):
    """Load fund-pool metadata rows (ra_type != -1), optionally filtered.

    :param pools: optional iterable of pool ids to restrict to
    :param pool_type: optional fund type; scalar or non-string iterable
    :return: DataFrame of the matching ra_pool rows
    """
    engine = database.connection('asset')
    ra_pool = Table('ra_pool', MetaData(bind=engine), autoload=True)

    wanted = [
        ra_pool.c.id,
        ra_pool.c.ra_type,
        ra_pool.c.ra_algo,
        ra_pool.c.ra_date_type,
        ra_pool.c.ra_fund_type,
        ra_pool.c.ra_lookback,
        ra_pool.c.ra_index_id,
        ra_pool.c.ra_name,
    ]

    stmt = select(wanted).where(ra_pool.c.ra_type != -1)
    if pools is not None:
        stmt = stmt.where(ra_pool.c.id.in_(pools))
    if pool_type is not None:
        # accept either a scalar type or any non-string iterable of types
        if hasattr(pool_type, "__iter__") and not isinstance(pool_type, str):
            stmt = stmt.where(ra_pool.c.ra_fund_type.in_(pool_type))
        else:
            stmt = stmt.where(ra_pool.c.ra_fund_type == pool_type)

    return pd.read_sql(stmt, engine)
def load_ohlcav(id_, reindex=None, begin_date=None, end_date=None, mask=None):
    """Load OHLC + amount/volume data for one index.

    :param id_: ra_index_id to load
    :param reindex: optional target index; result is forward-filled onto it
    :param begin_date: optional inclusive lower date bound
    :param end_date: optional inclusive upper date bound
    :param mask: optional ra_mask filter; scalar or non-string iterable
    :return: DataFrame indexed by ra_date
    """
    engine = database.connection('base')
    nav_t = Table('ra_index_nav', MetaData(bind=engine), autoload=True)

    fields = [
        nav_t.c.ra_date,
        nav_t.c.ra_open,
        nav_t.c.ra_high,
        nav_t.c.ra_low,
        nav_t.c.ra_nav.label('ra_close'),
        nav_t.c.ra_amount,
        nav_t.c.ra_volume,
    ]

    stmt = select(fields).where(nav_t.c.ra_index_id == id_)
    if begin_date is not None:
        stmt = stmt.where(nav_t.c.ra_date >= begin_date)
    if end_date is not None:
        stmt = stmt.where(nav_t.c.ra_date <= end_date)
    if mask is not None:
        # scalar and iterable mask values are both accepted
        if hasattr(mask, "__iter__") and not isinstance(mask, str):
            stmt = stmt.where(nav_t.c.ra_mask.in_(mask))
        else:
            stmt = stmt.where(nav_t.c.ra_mask == mask)

    frame = pd.read_sql(stmt, engine, index_col=['ra_date'], parse_dates=['ra_date'])
    if reindex is not None:
        frame = frame.reindex(reindex, method='pad')

    return frame
Пример #9
0
async def get_user_bookings(current_user: Profile):
    """Fetch every booking where the current user is the client.

    Each row carries the artist's display name and the selected timeslot
    date (if one has been selected).
    """
    bookings_sql = """
                SELECT 
                    artist_id, 
                    booking.booking_id, 
                    client_id, 
                    design_description, 
                    design_approved, 
                    price, 
                    price_approved, 
                    CONCAT(profile.first_name, " ", profile.last_name) as artist_name, 
                    (
                        SELECT date_time 
                        FROM timeslot
                        WHERE timeslot.booking_id = booking.booking_id AND timeslot.selected = True
                        LIMIT 1
                    ) as selected_date
                FROM booking 
                JOIN profile 
                ON booking.artist_id = profile.profile_id
                WHERE client_id=:profile_id
            """
    async with database.connection():
        rows = await database.fetch_all(
            query=bookings_sql,
            values={"profile_id": current_user.profile_id})
    return rows
def load_re_price_yoy():
    """Load the MC.RE0001 real-estate price YoY series, shifted 18 days forward.

    The 18-day index shift approximates the indicator's publication lag.
    :return: DataFrame with a single column named 'repy'
    """
    engine = database.connection('wind')
    session = sessionmaker(bind=engine)()
    stmt = session.query(
        mc_real_estate.mc_re_date,
        mc_real_estate.mc_re_value,
    ).filter(mc_real_estate.globalid == 'MC.RE0001').statement

    frame = pd.read_sql(
        stmt,
        session.bind,
        index_col=['mc_re_date'],
        parse_dates=['mc_re_date'],
    )
    session.commit()
    session.close()

    frame = frame.sort_index()
    # shift each observation forward to when it would actually be available
    frame.index = [day + timedelta(18) for day in frame.index]
    frame.columns = ['repy']

    return frame
Пример #11
0
def load_gold_indicator():
    """Load the gold-related macro indicators as one wide DataFrame.

    Columns are renamed from raw globalids to readable feature names.
    """
    feature_names = {
        'MC.GD0013': 'LD_sg',
        'MC.GD0015': 'UScpi',
        'MC.GD0017': 'USndi',
        'MC.GD0018': 'USnrty',
        'MC.GD0027': 'comex_pos_fundlong',
        'MC.GD0028': 'comex_pos_fundshort',
    }
    engine = database.connection('wind')
    session = sessionmaker(bind=engine)()
    stmt = session.query(
        mc_gold_indicator.globalid,
        mc_gold_indicator.mc_gold_date,
        mc_gold_indicator.mc_gold_value,
    ).filter(mc_gold_indicator.globalid.in_(feature_names.keys())).statement
    frame = pd.read_sql(
        stmt,
        session.bind,
        index_col=['mc_gold_date', 'globalid'],
        parse_dates=['mc_gold_date'],
    )
    session.commit()
    session.close()
    # pivot globalid out of the row index into columns, then rename
    frame = frame.unstack()
    frame.columns = frame.columns.levels[1]
    return frame.rename(columns=feature_names)
def load_ngdp_yoy():
    """Load quarterly nominal GDP, compute 12-month YoY, shift 15 days forward.

    :return: monthly DataFrame with an 'ngdp_yoy' column; index dates are
             month-ends shifted 15 days to approximate the publication lag.
    """
    engine = database.connection('caihui')
    Session = sessionmaker(bind=engine)
    session = Session()
    sql = session.query(t_macro_qgdp.nyear, t_macro_qgdp.nmonth,
                        t_macro_qgdp.value).statement
    ngdp = pd.read_sql(sql, session.bind)

    session.commit()
    session.close()

    # index each observation at the last calendar day of its (year, month)
    dates = []
    for y, m in zip(ngdp.nyear.values, ngdp.nmonth.values):
        d = monthrange(y, m)[1]
        dates.append(datetime(y, m, d))

    ngdp.index = dates
    ngdp = ngdp.sort_index()
    ngdp = ngdp.resample('m').last().fillna(method='pad')
    ngdp['ngdp_yoy'] = ngdp.value.pct_change(12)
    ngdp = ngdp.dropna()

    # BUG FIX: the original appended `dates + timedelta(15)` (the whole
    # index object) once per iteration instead of shifting each individual
    # date; shift element-wise, matching load_m2_yoy / load_social_finance.
    ngdp.index = [day + timedelta(15) for day in ngdp.index]

    return ngdp
def load_social_finance():
    """Load the MC.SF0001 social-finance series, shifted 15 days forward.

    :return: DataFrame with a single column named 'sf'
    """
    engine = database.connection('wind')
    session = sessionmaker(bind=engine)()
    stmt = session.query(
        mc_social_finance.mc_sf_date,
        mc_social_finance.mc_sf_value,
    ).filter(mc_social_finance.globalid == 'MC.SF0001').statement

    frame = pd.read_sql(stmt,
                        session.bind,
                        index_col=['mc_sf_date'],
                        parse_dates=['mc_sf_date'])
    session.commit()
    session.close()

    frame = frame.sort_index()
    # shift each observation forward to approximate the publication lag
    frame.index = [day + timedelta(15) for day in frame.index]
    frame.columns = ['sf']

    return frame
Пример #14
0
def gold_view_update(ctx, startdate, enddate, viewid):
    """Compute the gold BL view via cal_gold_view() and persist it.

    Builds (globalid, bl_date)-indexed view rows for index 120000014 and
    diffs them against the existing asset/ra_bl_view rows via database.batch.
    """
    mv = cal_gold_view()
    today = datetime.now().strftime('%Y-%m-%d')
    df = {}
    df['globalid'] = np.repeat(viewid, len(mv))
    df['bl_date'] = mv.index
    df['bl_view'] = mv.view_01.values
    df['bl_index_id'] = np.repeat('120000014', len(mv))
    df['created_at'] = np.repeat(today, len(mv))
    df['updated_at'] = np.repeat(today, len(mv))
    df_new = pd.DataFrame(df).set_index(['globalid', 'bl_date'])

    db = database.connection('asset')
    metadata = MetaData(bind=db)
    t = Table('ra_bl_view', metadata, autoload=True)
    columns = [
        t.c.globalid,
        t.c.bl_date,
        t.c.bl_view,
        t.c.bl_index_id,
        t.c.created_at,
        t.c.updated_at,
    ]
    s = select(columns, (t.c.globalid == viewid))
    df_old = pd.read_sql(s,
                         db,
                         index_col=['globalid', 'bl_date'],
                         parse_dates=['bl_date'])
    df_new = df_new[df_old.columns]  # keep column order in sync with df_old
    database.batch(db, t, df_new, df_old, timestamp=False)
    print('########### id=BL.000009 #######保存到表格:asset/ra_bl_view  ####')
    print('########### gold view date:', mv.index[-1].strftime('%Y-%m-%d'),
          '#####value:', mv.view_01.values[-1])
def load_m2_yoy():
    """Load monthly M2 YoY growth, indexed at month-end plus 15 days.

    The 15-day shift approximates the publication lag of the series.
    """
    engine = database.connection('caihui')
    session = sessionmaker(bind=engine)()
    stmt = session.query(t_macro_msupply.nyear, t_macro_msupply.nmonth,
                         t_macro_msupply.growthrate_m2).statement
    frame = pd.read_sql(stmt, session.bind)

    session.commit()
    session.close()

    # index each observation at the last calendar day of its (year, month)
    frame.index = [
        datetime(y, m, monthrange(y, m)[1])
        for y, m in zip(frame.nyear.values, frame.nmonth.values)
    ]
    frame = frame.sort_index()

    # shift each observation forward to approximate the publication lag
    frame.index = [day + timedelta(15) for day in frame.index]

    return frame
def ra_pool_fund_check(targetDate, weeks_lookback=1000):
    """Check that no fund appears in more than one fund pool.

    Looks at the most recent ra_pool_fund snapshot within `weeks_lookback`
    weeks before `targetDate` and prints a pass/fail report.
    """
    engine = database.connection('asset')
    pool_fund_t = Table('ra_pool_fund', MetaData(bind=engine), autoload=True)

    stmt = select([
        pool_fund_t.c.ra_date,
        pool_fund_t.c.ra_pool,
        pool_fund_t.c.ra_fund_id,
    ])
    lookback_start = (targetDate +
                      timedelta(weeks=-1 * weeks_lookback)).strftime('%Y-%m-%d')
    stmt = stmt.where(pool_fund_t.c.ra_date >= lookback_start)
    stmt = stmt.where(pool_fund_t.c.ra_date <= targetDate)

    snapshot = pd.read_sql(stmt, engine, parse_dates=['ra_date']).set_index('ra_date')
    # keep only the latest snapshot date
    snapshot = snapshot.loc[max(snapshot.index)].reset_index()

    no_overlap = (snapshot.groupby('ra_fund_id')['ra_pool'].count() == 1).all()
    if no_overlap:
        print('基金池是否交叉:\033[1;35mPass inspection\033[0m 更新日期:{}'.format(
            max(snapshot['ra_date'])))
    else:
        print('基金池是否交叉:\033[1;43mFail inspection!!!\033[0m 更新日期:{}'.format(
            max(snapshot['ra_date'])))
    def _load_fund_pool(self, date):
        """Load the candidate open-end fund universe from tq_fd_basicinfo.

        Keeps funds of style 2/3/4 founded at least one year before `date`
        and still alive, of the usual open-end natures, excluding feeder
        funds and HK-connect style funds.
        :return: DataFrame indexed by fund_id with a fund_code column
        """
        engine = database.connection('caihui')
        basicinfo = Table('tq_fd_basicinfo', MetaData(bind=engine), autoload=True)

        fields = [
            basicinfo.c.SECODE.label('fund_id'),
            basicinfo.c.FSYMBOL.label('fund_code'),
        ]

        # funds must have existed for at least one year before `date`
        founded_before = (date + relativedelta(years=-1)).strftime('%Y%m%d')
        stmt = select(fields).where(and_(
            basicinfo.c.FDSTYLE.in_([2, 3, 4]),
            basicinfo.c.FOUNDDATE != '19000101',
            basicinfo.c.FOUNDDATE <= founded_before,
            basicinfo.c.ENDDATE == '19000101',
            basicinfo.c.FDNATURE.in_(['证券投资基金', 'LOF']),
            basicinfo.c.FDMETHOD == '开放式基金',
            basicinfo.c.TOTSHARE > 0.1,
            not_(basicinfo.c.FDNAME.contains('联接')),
            not_(basicinfo.c.FDNAME.contains('沪港深')),
            not_(basicinfo.c.FDNAME.contains('港股通')),
        ))

        return pd.read_sql(stmt, engine, index_col=['fund_id'])
Пример #18
0
    def stock_st():
        """Return ST (special-treatment) records merged with stock info.

        Lazily fills the class-level cache StockAsset.__all_st_stocks on
        first call; later calls return the cached DataFrame.
        NOTE(review): the double-underscore attribute relies on this method
        being defined inside class StockAsset (name mangling) — confirm in
        the full file.
        """
        if StockAsset.__all_st_stocks is None:
            # re-key the full stock universe by secode to match the ST table
            all_stocks = StockAsset.all_stock_info()
            all_stocks = all_stocks.reset_index()
            all_stocks = all_stocks.set_index(['sk_secode'])

            engine = database.connection('caihui')
            Session = sessionmaker(bind=engine)
            session = Session()
            # ST periods (selecteddate..outdate) for stocks in the universe
            sql = session.query(
                db.asset_stock.tq_sk_specialtrade.secode,
                db.asset_stock.tq_sk_specialtrade.selecteddate,
                db.asset_stock.tq_sk_specialtrade.outdate).filter(
                    db.asset_stock.tq_sk_specialtrade.selectedtype <= 3
                ).filter(
                    db.asset_stock.tq_sk_specialtrade.secode.in_(
                        set(all_stocks.index))).statement
            st_stocks = pd.read_sql(sql,
                                    session.bind,
                                    index_col=['secode'],
                                    parse_dates=['selecteddate', 'outdate'])
            session.commit()
            session.close()

            # inner join on secode: attach stock info to each ST record
            StockAsset.__all_st_stocks = pd.merge(st_stocks,
                                                  all_stocks,
                                                  left_index=True,
                                                  right_index=True)

        return StockAsset.__all_st_stocks
Пример #19
0
async def create_booking(request: CreateBookingRequest, current_user: Profile):
    """Insert a new booking (unapproved, price 0) and return the created row.

    The current user becomes the client. Returns None if the INSERT yields
    no result.
    NOTE(review): LAST_INSERT_ID() assumes both statements run on the same
    connection — confirm database.connection() guarantees that.
    """
    async with database.connection():
        response = await database.execute(query="""
                    INSERT INTO booking (
                        artist_id, 
                        client_id,
                        design_description, 
                        design_approved, 
                        price, 
                        price_approved
                    ) 
                    VALUES (
                        :artist_id, 
                        :client_id, 
                        :design_description, 
                        FALSE,
                        0,
                        FALSE
                    );
            """,
                                          values={
                                              "artist_id":
                                              request.artist_id,
                                              "client_id":
                                              current_user.profile_id,
                                              "design_description":
                                              request.design_description,
                                          })
        if response is None:
            return None
        # read back the row we just inserted
        booking = await database.fetch_one(
            query=
            "SELECT * FROM booking WHERE booking_id=(SELECT LAST_INSERT_ID())")
    return booking
Пример #20
0
    def stock_factor_filter(factor_df):
        """Mask factor values for stocks not flagged valid on each trade date.

        Lazily loads the valid-stock table into the class-level cache
        StockFactor.__valid_stock_filter on first use, then sets entries of
        `factor_df` to NaN wherever the valid flag is not 1.
        NOTE(review): the double-underscore attribute relies on this method
        being defined inside class StockFactor (name mangling) — confirm.
        NOTE(review): DataFrame.reindex_axis was removed in pandas 0.21+;
        this code assumes an older pandas.
        """
        if StockFactor.__valid_stock_filter is None:

            engine = database.connection('asset')
            Session = sessionmaker(bind=engine)
            session = Session()
            sql = session.query(valid_stock_factor.trade_date,
                                valid_stock_factor.stock_id,
                                valid_stock_factor.valid).statement
            valid_df = pd.read_sql(sql,
                                   session.bind,
                                   index_col=['trade_date', 'stock_id'],
                                   parse_dates=['trade_date'])
            # pivot stock_id out of the index into columns
            valid_df = valid_df.unstack()
            valid_df.columns = valid_df.columns.droplevel(0)
            StockFactor.__valid_stock_filter = valid_df
            session.commit()
            session.close()

        if len(factor_df) == 0:
            return factor_df
        else:
            # align the validity mask with the factor frame, then blank
            # out anything not explicitly valid (flag == 1)
            valid_df = StockFactor.__valid_stock_filter.copy()
            valid_df = valid_df.reindex(factor_df.index)
            valid_df = valid_df.reindex_axis(factor_df.columns, axis=1)
            factor_df[~(valid_df == 1)] = np.nan

        logger.info('vailid filter done')

        return factor_df
Пример #21
0
def jp_huobi_update(ctx):
    """Refresh the asset/jp_huobi table from portfolio_sta.jp_huobi.

    Joins the 'mf0001' and 'jp0001' weekly annualized-return / rank series
    on date, deletes all existing destination rows, and re-inserts the
    merged data row by row.
    """
    engine = database.connection('portfolio_sta')
    Session = sessionmaker(bind=engine)
    session = Session()
    sql_jijin = "SELECT jh_date, jh_week_annualized_return, concat(jh_paiming, '/', jh_all) AS jh_total_rank FROM jp_huobi WHERE jh_code = 'mf0001';"
    sql_qieman = "SELECT jh_date, jh_week_annualized_return, concat(jh_paiming, '/', jh_all) AS jh_total_rank FROM jp_huobi WHERE jh_code = 'jp0001';"

    df_jijin = pd.read_sql(sql=sql_jijin,
                           con=session.bind,
                           parse_dates=['jh_date'],
                           index_col=['jh_date'])
    df_qieman = pd.read_sql(sql=sql_qieman,
                            con=session.bind,
                            parse_dates=['jh_date'],
                            index_col=['jh_date'])
    session.commit()
    session.close()
    # inner join: keep only dates present in both series
    dfCore = pd.merge(left=df_jijin,
                      right=df_qieman,
                      on='jh_date',
                      how='inner')

    if len(dfCore) < 1:
        print("无数据需要插入")
        return
    engine_asset_allocation = database.connection('asset')
    Session = sessionmaker(bind=engine_asset_allocation)
    session = Session()

    # full refresh: wipe the destination table before re-inserting
    query = session.query(asset_allocate.jp_huobi)
    query.delete()
    session.commit()
    print("成功删除全部数据;")

    # columns 0/1 come from df_jijin (mf), 2/3 from df_qieman (qieman)
    for count, index in enumerate(dfCore.index):
        ins = asset_allocate.jp_huobi(
            jh_date=index,
            jh_week_annualized_return_mf=dfCore.loc[index, dfCore.columns[0]],
            jh_total_rank_mf=dfCore.loc[index, dfCore.columns[1]],
            jh_week_annualized_return_qieman=dfCore.loc[index,
                                                        dfCore.columns[2]],
            jh_total_rank_qieman=dfCore.loc[index, dfCore.columns[3]])
        session.add(ins)
        session.commit()
        print("成功插入 %d 条数据;" % (count + 1))

    return
Пример #22
0
def macro_view_update(ctx, startdate, enddate, viewid, idx):
    """Blend real-estate / interest-rate / EPS macro views into BL views.

    The three component views are summed; the sign of the sum becomes the
    blended view, persisted to asset/ra_bl_view for index ids 120000001
    and 120000002.
    """
    backtest_interval = pd.date_range(startdate, enddate)
    rev = re_view(backtest_interval)
    irv = ir_view(backtest_interval)
    epsv = eps_view(backtest_interval, idx)

    mv = pd.concat([rev, irv, epsv], 1)
    mv['mv'] = mv['rev'] + mv['irv'] + mv['epsv']

    today = datetime.now()
    mv_view_id = np.repeat(viewid, len(mv))
    mv_date = mv.index
    mv_inc = mv.mv.values

    created_at = np.repeat(today, len(mv))
    updated_at = np.repeat(today, len(mv))
    union_mv = {}
    union_mv['globalid'] = mv_view_id
    union_mv['bl_date'] = mv_date
    # the blended view is only the sign of the summed component views
    union_mv['bl_view'] = np.sign(mv_inc)
    union_mv['created_at'] = created_at
    union_mv['updated_at'] = updated_at
    union_mv_df = pd.DataFrame(
        union_mv,
        columns=['globalid', 'bl_date', 'bl_view', 'created_at', 'updated_at'])

    for index_id in ['120000001', '120000002']:

        # FIX: copy() so the shared frame is not mutated across iterations
        df_new = union_mv_df.copy()
        df_new['bl_index_id'] = index_id
        df_new = df_new.set_index(['globalid', 'bl_date', 'bl_index_id'])

        db = database.connection('asset')
        metadata = MetaData(bind=db)
        t = Table('ra_bl_view', metadata, autoload=True)
        columns = [
            t.c.globalid,
            t.c.bl_date,
            t.c.bl_view,
            t.c.bl_index_id,
            t.c.created_at,
            t.c.updated_at,
        ]
        s = select(columns).where(t.c.globalid == viewid).where(
            t.c.bl_index_id == index_id)
        df_old = pd.read_sql(s,
                             db,
                             index_col=['globalid', 'bl_date', 'bl_index_id'],
                             parse_dates=['bl_date'])
        database.batch(db, t, df_new, df_old, timestamp=False)
        # BUG FIX: `print df_new.tail()` is Python 2 statement syntax
        # (SyntaxError in py3); the rest of the file uses print().
        print(df_new.tail())
Пример #23
0
def save(gid, df):
    """Persist the indicator frame `df` for globalid `gid` into mc_gold_indicator."""
    engine = database.connection('wind')
    table = Table('mc_gold_indicator', MetaData(bind=engine), autoload=True)
    # select exactly the columns present in df (index levels + value columns)
    wanted = [literal_column(name) for name in (df.index.names + list(df.columns))]
    stmt = select(wanted).where(table.c.globalid == gid)
    df_old = pd.read_sql(stmt,
                         engine,
                         index_col=['globalid', 'mc_gold_date'],
                         parse_dates=['mc_gold_date'])
    database.batch(engine, table, df, df_old, timestamp=False)
async def get_user_from_id(profile_id: str):
    """Fetch a profile row by id and wrap it in a Profile.

    Raises:
        LookupError: if no profile with that id exists. (The original raised
        a bare string, which is a TypeError in Python 3.)
    """
    async with database.connection():
        result = await database.fetch_one(
            query="SELECT * FROM profile WHERE profile_id=:profile_id",
            # BUG FIX: was a set literal {'profile_id', profile_id}; the
            # driver expects a mapping of bind-parameter names to values.
            values={'profile_id': profile_id})
    if result is None:
        raise LookupError("Profile not found")
    return Profile(**dict(result))
Пример #25
0
 def test_createTables(self):
     """Check that the sqlite schema contains the 'account' and 'user' tables."""
     # NOTE(review): if a table is missing, fetchone() returns None and the
     # [0] subscript raises TypeError instead of a clean assertion failure.
     self.c=database.connection().cursor()
     self.c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='account'")
     name=self.c.fetchone()[0]
     self.assertEqual(name,"account")
     self.c.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='user'")
     name=self.c.fetchone()[0]
     self.assertEqual(name,"user")
Пример #26
0
async def booking_exists(booking_id: str):
    """Return the Booking with the given id, or raise HTTP 400 if absent."""
    async with database.connection():
        row = await database.fetch_one(
            query="SELECT * FROM booking WHERE booking_id=:booking_id",
            values={"booking_id": booking_id})
    if row is None:
        raise HTTPException(status_code=400, detail='Booking does not exist')
    return Booking(**dict(row))
Пример #27
0
def fund_pool_info(ctx, optpool):
    """Export fund/company info for the given pools to fund_pool.csv.

    :param optpool: comma-separated list of pool ids
    """
    db = database.connection('base')
    metadata = MetaData(bind=db)
    fund_info_t = Table('fund_infos', metadata, autoload=True)
    company_t = Table('company_infos', metadata, autoload=True)

    fund_info = [
        fund_info_t.c.fi_globalid,
        fund_info_t.c.fi_code,
        fund_info_t.c.fi_name,
        fund_info_t.c.fi_company_id,
    ]

    fund_info_df = pd.read_sql(select(fund_info), db)

    company_info = [
        company_t.c.ci_globalid,
        company_t.c.ci_name,
    ]

    company_info_df = pd.read_sql(select(company_info), db)

    # join funds to their management companies
    fund_company_df = pd.merge(fund_info_df,
                               company_info_df,
                               left_on=['fi_company_id'],
                               right_on=['ci_globalid'])
    fund_company_df = fund_company_df[[
        'fi_globalid', 'fi_code', 'fi_name', 'ci_name'
    ]]
    fund_company_df = fund_company_df.set_index(['fi_globalid'])

    dfs = []
    for _id in optpool.split(','):

        pool_id = _id.strip()
        pool_name = asset_ra_pool.find(pool_id)['ra_name']
        pool_df = asset_ra_pool_fund.load(pool_id)
        pool_df = pool_df['ra_fund_code']
        pool_df = pool_df.reset_index()
        pool_df = pool_df.set_index('ra_fund_id')

        # align fund/company info onto the pool's fund index
        pool_fund_company_df = pd.concat([pool_df, fund_company_df],
                                         axis=1,
                                         join_axes=[pool_df.index])
        pool_fund_company_df = pool_fund_company_df[[
            'ra_date', 'ra_fund_code', 'fi_name', 'ci_name'
        ]]
        pool_fund_company_df['pool_name'] = pool_name

        dfs.append(pool_fund_company_df)

    df = pd.concat(dfs)
    # BUG FIX: `print df.head()` is Python 2 statement syntax
    # (SyntaxError in py3); the rest of the file uses print().
    print(df.head())
    df.to_csv('fund_pool.csv', encoding='gbk')
    # BUG FIX: removed a stray unterminated triple-quote that made the
    # original block a syntax error.
Пример #28
0
def all_trade_dates():
    """Return all week-end trade dates since 2002-01-04 as 'YYYY-MM-DD' strings."""
    engine = database.connection('base')
    dates_t = Table('trade_dates', MetaData(bind=engine), autoload=True)
    stmt = (
        select([dates_t.c.td_date])
        .where(dates_t.c.td_date >= '2002-01-04')
        .where(dates_t.c.td_type.op('&')(0x02))
        .order_by(dates_t.c.td_date.asc())
    )
    rows = stmt.execute().fetchall()
    return [row[0].strftime('%Y-%m-%d') for row in rows]
async def get_artist(profile_id: int):
    """Fetch the artist row for a profile id; None if the profile is not an artist."""
    artist_sql = """
                SELECT * FROM artist WHERE profile_id=:profile_id
            """
    async with database.connection():
        row = await database.fetch_one(query=artist_sql,
                                       values={"profile_id": profile_id})
    return None if row is None else Artist(**dict(row))
Пример #30
0
def fund_update_factor_pool(pool, adjust_points, optlimit, optcalc):
    ''' re calc fund for single fund pool

    Recomputes the fund selection of one factor pool at each adjustment
    point, limiting turnover by keeping previously selected funds that are
    still in the candidate pool, and diffs the result into ra_pool_fund.

    :param pool: pool record (needs .ra_lookback, .id, ['id'])
    :param adjust_points: iterable of rebalancing dates
    :param optlimit: maximum number of funds kept per adjustment point
    :param optcalc: when falsy, nothing is computed or written
    '''
    lookback = pool.ra_lookback
    limit = optlimit

    if optcalc:
        #
        # Compute the latest fund selection at each adjustment point
        #
        # ffr = asset_fund_factor.load_fund_factor_return()
        ffr = asset_stock_factor.load_stock_factor_return()
        ffr = ffr.unstack().T
        ffr.index = ffr.index.levels[1]
        ffe = asset_fund_factor.load_fund_factor_exposure()
        # ffe.to_csv('data/factor/fund_factor_exposure.csv', index_label = ['fund_id', 'ff_id', 'trade_date'])
        # ffe = pd.read_csv('data/factor/fund_factor_exposure.csv', index_col = ['fund_id', 'ff_id', 'trade_date'], parse_dates = ['trade_date'])
        ffe = ffe.reset_index()
        data = []
        pre_codes = None

        with click.progressbar(length=len(adjust_points), label='calc pool %s' % (pool.id)) as bar:
            for day in adjust_points:
                bar.update(1)
                # codes, codes_pool = pool_by_factor(pool, day, lookback, limit, ffr, ffe)
                codes, codes_pool = pool_by_single_factor(pool, day, lookback, limit, ffr, ffe)
                if pre_codes is None:
                    pre_codes = codes
                else:
                    # keep previously held funds still in the candidate pool,
                    # then top up with new funds until `limit` is reached
                    pre_codes_origin = pre_codes.copy()
                    pre_codes = np.intersect1d(pre_codes, codes_pool)
                    new_codes = np.setdiff1d(codes, pre_codes)
                    fund_lack_num = limit - len(pre_codes)
                    codes = np.append(pre_codes, new_codes[:fund_lack_num])
                    pre_codes = codes
                    print('turnover rate:', 1 - len(np.intersect1d(codes, pre_codes_origin))/float(limit))

                print(day, len(codes), codes)
                if codes is None or len(codes) == 0:
                    continue
                ra_fund = base_ra_fund.load(codes = codes)
                ra_fund = ra_fund.set_index(['ra_code'])
                ra_pool = pool['id']
                for code in ra_fund.index:
                    ra_fund_id = ra_fund.loc[code, 'globalid']
                    data.append([ra_pool, day, ra_fund_id, code])
        fund_df = pd.DataFrame(data, columns = ['ra_pool', 'ra_date', 'ra_fund_id', 'ra_fund_code'])
        fund_df = fund_df.set_index(['ra_pool', 'ra_date', 'ra_fund_id'])

        # Diff the newly computed selection against the stored rows
        df_new = fund_df
        columns = [literal_column(c) for c in (df_new.index.names + list(df_new.columns))]
        s = select(columns)
        db = database.connection('asset')
        ra_pool_fund_t = Table('ra_pool_fund', MetaData(bind=db), autoload=True)
        s = s.where(ra_pool_fund_t.c.ra_pool.in_(df_new.index.get_level_values(0).tolist()))
        df_old = pd.read_sql(s, db, index_col = df_new.index.names)
        database.batch(db, ra_pool_fund_t, df_new, df_old)