Example 1
def crawler_composite(table: str) -> Generator:
    def craw(date: str) -> pd.DataFrame:
        d = get_dict(date)
        if 'stat' in d and d['stat'] == '很抱歉,沒有符合條件的資料!':
            raise crawler.NoData('很抱歉,沒有符合條件的資料!')
        data = d['data3']
        fields = d['fields3']
        date = d['date'][0:4] + '-' + d['date'][4:6] + '-' + d['date'][6:]
        df = (pd.DataFrame(data, columns=fields)
              .replace(',', '', regex=True)
              .replace('--', np.nan))
        df.insert(0, '年月日', date)
        df['年月日'] = pd.to_datetime(df['年月日']).astype(str)
        floatColumns = ['成交金額(元)', '成交股數(股)', '成交筆數']
        df = ast.to_float(floatColumns, df)
        return df

    def save(df: pd.DataFrame) -> None:
        saver.lite(table, df)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    # Re-crawl the last stored date, plus every trading day missing from the DB.
    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    firstday = dt.datetime(2004, 2, 11)
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)

    # lastdate = saver.last_datetime(table)
    # nPeriods = crawler.input_dates(lastdate, dt.datetime.now())
    return crawler.looper(craw_save, nPeriods)
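All of these crawlers delegate the fetch-and-persist step to crawler.craw_save and the iteration to crawler.looper, neither of which is shown here. Below is a minimal sketch of the contract they appear to satisfy, inferred only from how they are called in these examples; the real crawler module may differ.

from typing import Callable, Generator, Iterable

class NoData(Exception):
    """Assumed: raised by craw() when the source reports an empty day."""

def craw_save(save: Callable, craw: Callable, date: str) -> None:
    # Fetch one date and persist the result; skip days that have no data.
    try:
        save(craw(date))
    except NoData as e:
        print(date, 'skipped:', e)

def looper(fn: Callable[[str], None], dates: Iterable[str]) -> Generator:
    # Apply fn lazily; the caller drives the pace by iterating the generator.
    for d in dates:
        yield fn(d)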
Example 2
def crawler_callableBull(coll, table, firstday) -> Generator:
    def gen_url_giventype(input_date: str) -> str:
        return gen_url('0999C', input_date)

    # gen_url_giventype is a local function, so the module-level get_dict
    # cannot see it; define get_dict locally instead.
    def get_dict(date: str) -> dict:
        return cytoolz.compose(jsonLoadsF, get_plain_text,
                               gen_url_giventype)(date)

    def craw(date: str) -> dict:
        return get_dict(date)

    def save(d: dict) -> None:
        print(coll.insert_one(d).inserted_id)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    # firstday = dt.datetime(2004, 2, 11)
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)
    dates = [
        t.replace('-', '') for t in nPeriods
        if coll.find_one({"date": t}) is None
    ]
    print('dates', dates)
    return crawler.looper(craw_save, dates)
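cytoolz.compose applies its functions right to left, so get_dict above builds the URL first, fetches the page, then parses the JSON (jsonLoadsF is presumably a json.loads wrapper). A standalone illustration, with hypothetical stand-ins replacing the exchange helpers:

import json
import cytoolz

def gen_url_giventype(input_date: str) -> str:  # hypothetical stand-in
    return 'https://example.com/api?type=0999C&date=' + input_date

def get_plain_text(url: str) -> str:  # hypothetical stand-in for the HTTP fetch
    return '{"date": "20200102", "data": []}'

get_dict = cytoolz.compose(json.loads, get_plain_text, gen_url_giventype)
print(get_dict('20200102'))  # {'date': '20200102', 'data': []}; rightmost ran first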
Example 3
def crawler_extendedCallableBear(table: str) -> Generator:
    gen_url_giventype = partial(gen_url, '0999X')

    # gen_url_giventype is a local function, so the module-level get_dict
    # cannot see it; define get_dict locally instead.
    def get_dict(date: str) -> dict:
        return cytoolz.compose(jsonLoadsF, get_plain_text,
                               gen_url_giventype)(date)

    def craw(date: str) -> pd.DataFrame:
        d = get_dict(date)
        if 'stat' in d and d['stat'] == '很抱歉,沒有符合條件的資料!':
            raise crawler.NoData('很抱歉,沒有符合條件的資料!')
        data = d['data1']
        fields = d['fields1']
        date = d['date'][0:4] + '-' + d['date'][4:6] + '-' + d['date'][6:]
        df = (pd.DataFrame(data, columns=fields)
              .replace(',', '', regex=True)
              .replace('--', np.nan))
        df.insert(0, '年月日', date)
        df['年月日'] = pd.to_datetime(df['年月日']).astype(str)
        df['漲跌(+/-)'] = (df['漲跌(+/-)']
            .replace('<p style= color:red>+</p>', 1)
            .replace('<p style= color:green>-</p>', -1)
            .replace('X', np.nan)
            .replace(' ', 0))
        df['牛熊證觸及限制價格'] = df['牛熊證觸及限制價格'].replace('', 0).replace('*', 1)
        df['本益比'] = df['本益比'].replace('', np.nan)
        intColumns = ['成交股數', '成交筆數', '最後揭示買量', '最後揭示賣量']
        floatColumns = [
            '成交金額', '開盤價', '最高價', '最低價', '收盤價', '漲跌(+/-)', '漲跌價差', '最後揭示買價',
            '最後揭示賣價', '本益比', '牛熊證觸及限制價格', '標的證券收盤價/指數'
        ]
        floatColumns = [col for col in floatColumns if col in list(df)]
        df[intColumns + floatColumns] = df[intColumns + floatColumns].replace('', 0)
        df = ast.to_int(intColumns, df)
        df = ast.to_float(floatColumns, df)
        return df

    def save(df: pd.DataFrame) -> None:
        saver.lite(table, df)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    firstday = dt.datetime(2014, 7, 31)
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)

    # lastdate = saver.last_datetime(table)
    # nPeriods = crawler.input_dates(lastdate, dt.datetime.now())
    return crawler.looper(craw_save, nPeriods)
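partial(gen_url, '0999X') pins the security-type code as the first positional argument, doing the same job as the explicit gen_url_giventype wrapper in Example 2. A self-contained sketch (the gen_url signature is an assumption inferred from its call sites):

from functools import partial

def gen_url(sec_type: str, input_date: str) -> str:
    # Hypothetical signature, inferred from gen_url('0999C', input_date) above.
    return 'https://example.com/api?type=' + sec_type + '&date=' + input_date

gen_url_giventype = partial(gen_url, '0999X')
assert gen_url_giventype('20200102') == gen_url('0999X', '20200102')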
Example 4
def crawler_upsAndDown(table: str) -> Generator:
    def craw(date: str) -> pd.DataFrame:
        d = get_dict(date)
        if 'stat' in d and d['stat'] == '很抱歉,沒有符合條件的資料!':
            raise crawler.NoData('很抱歉,沒有符合條件的資料!')
        data = d['data4']
        fields = d['fields4']
        date = d['date'][0:4] + '-' + d['date'][4:6] + '-' + d['date'][6:]
        # Rows 0-1 arrive as 'count(percent)' strings; split each into a
        # count row and a percentage row, then keep rows 2-4 as-is.
        rows = []
        for raw in data[0:2]:
            rows.append([cell.split('(')[0] for cell in raw])
            rows.append([cell.split('(')[1].replace(')', '') for cell in raw])
        rows.extend(data[2:5])
        df = (pd.DataFrame(rows, columns=fields)
              .replace(',', '', regex=True)
              .replace('--', np.nan))
        df.insert(0, '年月日', date)
        df['年月日'] = pd.to_datetime(df['年月日']).astype(str)
        intColumns = ['整體市場', '股票']
        df = ast.to_int(intColumns, df)
        return df

    def save(df: pd.DataFrame) -> None:
        saver.lite(table, df)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    firstday = dt.datetime(2011, 8, 1)
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)

    # lastdate = saver.last_datetime(table)
    # nPeriods = crawler.input_dates(lastdate, dt.datetime.now())
    return crawler.looper(craw_save, nPeriods)
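The first two rows of data4 arrive as 'count(percent)' strings, which the loop in craw splits into separate count and percentage rows. The same transform on made-up values:

row = ['12345(67.8)', '234(1.2)']
counts = [cell.split('(')[0] for cell in row]                     # ['12345', '234']
percents = [cell.split('(')[1].replace(')', '') for cell in row]  # ['67.8', '1.2']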
Example 5
def crawler_close(coll, table, firstday) -> Generator:
    def craw(date: str) -> dict:
        return get_dict(date)

    def save(d: dict) -> None:
        print(coll.insert_one(d).inserted_id)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    # firstday = dt.datetime(2004, 2, 11)
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)
    dates = [
        t.replace('-', '') for t in nPeriods
        if coll.find_one({"date": t}) is None
    ]
    print('dates to craw:', dates)
    return crawler.looper(craw_save, dates)
Example 6
def crawler_marketReturn(table: str) -> Generator:
    def craw(date: str) -> pd.DataFrame:
        d = get_dict(date)
        if 'stat' in d and d['stat'] == '很抱歉,沒有符合條件的資料!':
            raise crawler.NoData('很抱歉,沒有符合條件的資料!')
        data = d['data2']
        fields = d['fields2']
        date = d['date'][0:4] + '-' + d['date'][4:6] + '-' + d['date'][6:]
        df = (pd.DataFrame(data, columns=fields)
              .replace(',', '', regex=True)
              .replace('--', np.nan))
        df['漲跌(+/-)'] = (df['漲跌(+/-)']
            .replace("<p style ='color:red'>+</p>", 1)
            .replace("<p style ='color:green'>-</p>", -1)
            .replace('X', 0)
            .replace(' ', 0))
        df.insert(0, '年月日', date)
        df = df.rename(columns={'報酬指數': '指數'})
        df['年月日'] = pd.to_datetime(df['年月日']).astype(str)
        floatColumns = ['收盤指數', '漲跌(+/-)', '漲跌點數', '漲跌百分比(%)']
        df = ast.to_float(floatColumns, df)
        return df

    def save(df: pd.DataFrame) -> None:
        saver.lite(table, df)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    firstday = dt.datetime(2009, 1, 5)
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)

    # lastdate = saver.last_datetime(table)
    # nPeriods = crawler.input_dates(lastdate, dt.datetime.now())
    return crawler.looper(craw_save, nPeriods)
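The exchange encodes the daily up/down sign as a small HTML fragment, and the replace chain maps it to a numeric sign before the float conversion. On toy data:

import pandas as pd

s = pd.Series(["<p style ='color:red'>+</p>",
               "<p style ='color:green'>-</p>", 'X', ' '])
s = (s.replace("<p style ='color:red'>+</p>", 1)
      .replace("<p style ='color:green'>-</p>", -1)
      .replace('X', 0)
      .replace(' ', 0))
print(s.tolist())  # [1, -1, 0, 0]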
Example 7
def crawler_close(table: str) -> Generator:
    def craw(date: str) -> pd.DataFrame:
        d = get_dict(date)
        if 'stat' in d and d['stat'] == '很抱歉,沒有符合條件的資料!':
            raise crawler.NoData('很抱歉,沒有符合條件的資料!')
        data = d['data5']
        fields = d['fields5']
        date = d['date'][0:4] + '-' + d['date'][4:6] + '-' + d['date'][6:]
        df = (pd.DataFrame(data, columns=fields)
              .replace(',', '', regex=True)
              .replace('--', np.nan)
              .replace('', np.nan))
        df['漲跌(+/-)'] = (df['漲跌(+/-)']
            .replace('<p style= color:red>+</p>', 1)
            .replace('<p style= color:green>-</p>', -1)
            .replace('X', 0)
            .replace(' ', 0))
        df.insert(0, '年月日', date)
        df['年月日'] = pd.to_datetime(df['年月日']).astype(str)
        floatColumns = [
            '成交股數', '成交筆數', '成交金額', '開盤價', '最高價', '最低價', '收盤價', '漲跌(+/-)',
            '漲跌價差', '最後揭示買價', '最後揭示買量', '最後揭示賣價', '最後揭示賣量', '本益比'
        ]
        df = ast.to_float(floatColumns, df)
        return df

    def save(df: pd.DataFrame) -> None:
        saver.lite(table, df)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    firstday = dt.datetime(2004, 2, 11)
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)
    return crawler.looper(craw_save, nPeriods)
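Every crawler builds its work list the same way: the last stored date (presumably re-crawled in case that day was only partially saved) plus all trading days since firstday that are missing from the database. A sketch assuming adjust.days_trade and days_lite both return sets of datetimes, which is an inference from the - operator, not the actual implementations:

import datetime as dt

def dt_to_str(days):
    # Assumed behavior of crawler.dt_to_str: datetimes -> 'YYYY-MM-DD' strings.
    return [d.strftime('%Y-%m-%d') for d in days]

days_trade = {dt.datetime(2004, 2, 11), dt.datetime(2004, 2, 12),
              dt.datetime(2004, 2, 13)}                          # toy trading days
days_db = {dt.datetime(2004, 2, 11), dt.datetime(2004, 2, 12)}   # already stored
lastdate = dt_to_str([max(days_db)])
nPeriods = lastdate + dt_to_str(days_trade - days_db)
print(nPeriods)  # ['2004-02-12', '2004-02-13']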
Example 8
def craw_hugeDeal(coll) -> Generator:
    table = '鉅額交易日成交資訊'

    def craw(date: str) -> dict:
        return get_dict(date)

    def save(d: dict) -> None:
        print(coll.insert_one(d).inserted_id)

    def craw_save(date: str) -> None:
        crawler.craw_save(save, craw, date)

    firstday = dt.datetime(2005, 4, 4)
    lastdate = crawler.dt_to_str([saver.last_datetime(table)])
    days_db = days_lite(table)
    nPeriods = lastdate + crawler.dt_to_str(
        adjust.days_trade(firstday) - days_db)
    print('nPeriods', nPeriods)
    dates = [
        t.replace('-', '') for t in nPeriods
        if coll.find_one({"date": t}) is None
    ]
    print('dates', dates)
    return crawler.looper(craw_save, dates)
# '2005-08-22',
# '2005-08-23',
# '2005-08-24',
# '2005-08-26',
# '2005-08-30',
# '2005-09-05',
# '2005-09-07',
# '2005-09-09',
# '2005-09-12',
# '2005-09-14',
# '2005-09-15',
# '2005-09-19',
# '2005-09-20',
# '2005-09-22',
# '2005-09-23',
# '2005-09-27',
# '2005-09-29',
# '2005-09-30',
# '2005-10-05']
#
# exclude = [i.replace('-', '') for i in ex]
# nPeriods = [i for i in nPeriods if i not in exclude]

# Module-level driver: lastdate and craw_save must already be bound in this scope.
nPeriods = crawler.input_dates(lastdate, dt.datetime.now())
generatorG = crawler.looper(craw_save, nPeriods)
for _ in generatorG:
    pass  # exhaust the generator so every craw_save call actually executes
#crawler.loop(craw_save, nPeriods)

s.close()
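The Mongo-backed crawlers (Examples 2, 5, and 8) skip dates already stored by probing coll.find_one({"date": t}). If the collection grows large, a unique index on date keeps that probe fast and also rejects accidental duplicate inserts; a sketch with hypothetical database and collection names:

import pymongo

client = pymongo.MongoClient()          # assumes a reachable local mongod
coll = client['twse']['hugeDeal']       # hypothetical db/collection names
coll.create_index('date', unique=True)  # find_one({'date': ...}) becomes an index hit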