Code Example #1
import random
import sys
sys.path.append('/home/pingan/PycharmProjects/test/DataUpdate/tools')
from mongodb import query_part

import pandas as pd

# stock_list is assumed to be defined earlier in the original script
up = stock_list[:100]
lower = stock_list[200:]
random.seed(50)
sup = random.sample(up, 20)
slow = random.sample(lower, 10)

r = sup + slow
print(r)

test_list = ['2888']

# for name in r:
for name in test_list:
    s = query_part(name=name,
                   feature='close',
                   adjust='all',
                   collection_name='HKEX_ADJ',
                   db_name='MarketDataUpdate')
    s['strdate'] = pd.to_datetime(s['date'], unit='s')
    s['strdate'] = s['strdate'].apply(lambda x: x.strftime('%Y-%m-%d'))
    # rebase to the first observation; .iloc avoids a KeyError if the index
    # does not start at 0
    s['rebase_mongodb'] = s['value'] / s['value'].iloc[0]

    yahoo = pd.read_csv(
        '/home/pingan/PycharmProjects/test/calculate_AF/yahoo/' +
        name.zfill(4) + '.HK.csv')
    # yahoo = yahoo.drop(yahoo.index[0])
    yahoo = yahoo[yahoo['Date'] >= '2018-07-09']
    yahoo['rebase_yahoo'] = yahoo['Adj Close'] / yahoo['Adj Close'].iloc[0]
    # print(type(s['strdate'].tolist()[0]))
    # print(type(yahoo['Date'].tolist()[0]))
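    # The commented type checks above suggest the two rebased series are then
    # aligned by date and compared; a minimal sketch of that step (the merge
    # keys and the comparison itself are assumptions, not from the original):
    merged = pd.merge(s[['strdate', 'rebase_mongodb']],
                      yahoo[['Date', 'rebase_yahoo']],
                      left_on='strdate',
                      right_on='Date',
                      how='inner')
    merged['diff'] = (merged['rebase_mongodb'] - merged['rebase_yahoo']).abs()
    print(name, 'max rebase difference:', merged['diff'].max())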

Code Example #2
import sys
sys.path.append('/home/pingan/PycharmProjects/test/DataUpdate/tools')
from mongodb import query_part

import pandas as pd

# getfilename is assumed to be a project helper that returns the ticker names
# of the .csv files under the given path
path = '/home/pingan/PycharmProjects/test/calculate_AF/adj_price_without_adjustment_factor'
without = getfilename(path, '.csv')
print(without)

test_list = ['1451']
equal_count = []
not_equal_ticker = []

#notequal_df = pd.DataFrame()

#for name in test_list:
for name in without:
    check_column = []
    adj = query_part(name=name,
                     feature={'$in': ['close', 'high', 'open', 'low']},
                     adjust='all',
                     collection_name='HKEX_ALL',
                     db_name='MarketData')
    noa = query_part(name=name,
                     feature={'$in': ['close', 'high', 'open', 'low']},
                     adjust='no',
                     collection_name='HKEX_ALL',
                     db_name='MarketData')
    if adj.empty or noa.empty:
        continue

    new = pd.merge(noa,
                   adj,
                   on=['date', 'feature', 'name', 'category'],
                   how='outer')
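    # equal_count and not_equal_ticker above imply an equality check on the
    # merged frame; a minimal sketch of what it might look like (value_x is
    # the unadjusted price, value_y the adjusted one after the merge; the
    # exact criterion is an assumption):
    mismatched = new[new['value_x'] != new['value_y']]
    equal_count.append(len(new) - len(mismatched))
    if not mismatched.empty:
        not_equal_ticker.append(name)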

Code Example #3
import pymongo
import sys
sys.path.append('/home/pingan/PycharmProjects/test/DataUpdate/tools')
from mongodb import query_all, query_part, tomongo

import pandas as pd
import os
import matplotlib.pyplot as plt
import pylab
import xlrd

old_factor_file = query_part(feature='price_af_star',
                             collection_name='HKEX_AF_Daily',
                             db_name='MarketDataUpdate')

adjustment_list = old_factor_file['name'].values.tolist()

# for name in ticker_list:
#    new_factor = query_part(name = name, feature = 'price_af', collection_name = 'HKEX_AF_Daily', db_name = 'MarketDataUpdate')
#    new_factor.to_csv('/home/pingan/PycharmProjects/test/calculate_AF/new_daily_factor/' + name + '.csv')
#    no_adjust = query_part(name = name, feature = {'$in':['close', 'open', 'high', 'low']}, collection_name = 'HKEX_ALL_NOADJ', db_name = 'MarketDataUpdate')
#    no_adjust.to_csv('/home/pingan/PycharmProjects/test/calculate_AF/no_adjust_price/' + name + '.csv')

# calculate the tickers that have an adjustment factor

print(adjustment_list)
test_list = ['1136']

# for name in test_list:
for name in adjustment_list:
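    # The loop body is cut off in the original listing; a minimal sketch based
    # on the commented-out per-ticker export above (the feature name and the
    # output path are assumptions carried over from that comment):
    new_factor = query_part(name=name,
                            feature='price_af',
                            collection_name='HKEX_AF_Daily',
                            db_name='MarketDataUpdate')
    new_factor.to_csv('/home/pingan/PycharmProjects/test/calculate_AF/new_daily_factor/' +
                      name + '.csv')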

Code Example #4
import pymongo

import os
import sys
sys.path.append('/home/pingan/PycharmProjects/test/DataUpdate/tools')
from mongodb import query_all, query_part

import pandas as pd

# `list` shadows the Python builtin, so use a descriptive name instead
ticker_list = [
    '8096', '4335', '4336', '6860', '667', '8017', '2168', '3868', '1832',
    '1775', '1743', '1025'
]

for ticker in ticker_list:
    data = query_part(name=ticker,
                      feature='close',
                      adjust='no',
                      collection_name='HKEX_ALL',
                      db_name='MarketData')
    data['date'] = pd.to_datetime(data['date'], unit='s')
    data.to_csv(os.path.join(
        '/home/pingan/PycharmProjects/test/hkex_noadj/abnormal_start_list',
        ticker + '.csv'),
                index=False)

#667 HK Equity  2006-10-05-----------2011-12-08
#1025  2003-11-20----------2016-01-06
#1743    2007-07-06------------2008-06-16
#1775     2008-04-02---------2008-11-10
#1832    2007-07-13------------2010-05-24
#2168    this ticker previously belonged to another company, but that company is gone and kasai owns the ticker now, so the data start from 2018-12-05
#3868    2007-10-02-----------2017-11-29
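
# The date ranges noted above can be re-checked from the CSVs exported by the
# loop; a minimal sketch (the directory is the one used in the loop above):
for ticker in ticker_list:
    df = pd.read_csv(os.path.join(
        '/home/pingan/PycharmProjects/test/hkex_noadj/abnormal_start_list',
        ticker + '.csv'))
    print(ticker, df['date'].min(), '---', df['date'].max())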