Code example #1
File: MainScrapper.py  Project: taltole/Stock-Mining
def main():
    """
    Given the URL of the stock market and the URLs for each stock imported from TopMarketScrapper.py,
    builds a printable DataFrame and the financial tables for all the stocks and writes them to the database.
    :return: None (results are printed and persisted to the database)
    """
    db = Database()
    user_options = stock_parser()
    print(
        f'{user_options[ARG_SCRAP].title()} Scrapping On {user_options[ARG_TICKER]}...'
    )
    if user_options[ARG_SCRAP] == 'concise':
        scrap_sectors = SectorScrapper.SectorScrapper(URL_SECTOR)
        top_sectors = scrap_sectors.summarizer()
        dict_sectors = db.dict_sectors(top_sectors)
        print('Sectors Summary', top_sectors, sep='\n')
        db.insert_sectors_table(top_sectors)

        scrap_industries = IndustryScrapper.IndustryScrapper(URL_INDUSTRY)
        top_industries = scrap_industries.summarizer()
        print('Industry Summary', top_industries, sep='\n')
        db.insert_industry_table(top_industries, dict_sectors)

        scrap_top = TopMarketScrapper.TopMarketScrapper(URL)
        top_stocks = scrap_top.summarizer()
        db.insert_main_table(top_stocks, dict_sectors)
        print('', 'Stock Summary', top_stocks, sep='\n')

    elif user_options[ARG_SCRAP] == 'expanded':
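        # Build and persist the sector, industry, and top-market summaries (unlike the concise mode, nothing is printed here).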
        scrap_sectors = SectorScrapper.SectorScrapper(URL_SECTOR)
        top_sectors = scrap_sectors.summarizer()
        dict_sectors = db.dict_sectors(top_sectors)
        db.insert_sectors_table(top_sectors)

        scrap_industries = IndustryScrapper.IndustryScrapper(URL_INDUSTRY)
        top_industries = scrap_industries.summarizer()
        db.insert_industry_table(top_industries, dict_sectors)

        scrap_top = TopMarketScrapper.TopMarketScrapper(URL)
        top_stocks = scrap_top.summarizer()
        db.insert_main_table(top_stocks, dict_sectors)
        ids_list = db.read_from_db('Main')

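        # Scrape detailed financials for the requested tickers and fill the per-stock tables, keyed by the ids read from the Main table.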
        top_stocks = StockScrapper.main(user_options[ARG_TICKER])
        print('Stock Summary', top_stocks, sep='\n')
        db.insert_valuation_table(top_stocks, ids_list)
        db.insert_metrics_table(top_stocks, ids_list)
        db.insert_balance_sheet_table(top_stocks, ids_list)
        db.insert_price_history_table(top_stocks, ids_list)
        db.insert_dividends_table(top_stocks, ids_list)
        db.insert_margins_table(top_stocks, ids_list)
        db.insert_income_table(top_stocks, ids_list)
        api_overview = API_Scrapper.api_overview(user_options[ARG_TICKER])
        db.insert_api_table(api_overview, ids_list)

    db.close_connect_db()
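
The example above indexes user_options with the ARG_SCRAP and ARG_TICKER constants, but stock_parser() itself is not part of this listing. Below is a minimal sketch of what such a parser could look like, assuming an argparse-based CLI and dict-style options; the flag names, choices, and the values of ARG_SCRAP and ARG_TICKER are assumptions inferred from how main() uses them, not the project's actual code.

# Hypothetical sketch of stock_parser(); names and defaults are assumptions.
import argparse

ARG_SCRAP = 'scrap'    # assumed key for the scraping mode
ARG_TICKER = 'ticker'  # assumed key for the requested tickers


def stock_parser():
    """Return a dict of user options keyed by ARG_SCRAP and ARG_TICKER."""
    parser = argparse.ArgumentParser(description='Stock-Mining scrapper options')
    parser.add_argument('--scrap', choices=['concise', 'expanded'], default='concise',
                        help='concise: summary tables only; expanded: per-stock financial tables as well')
    parser.add_argument('--ticker', nargs='*', default=[],
                        help='ticker symbols to scrape in expanded mode')
    args = parser.parse_args()
    return {ARG_SCRAP: args.scrap, ARG_TICKER: args.ticker}

Under this sketch, the expanded flow above would be driven by something like: python MainScrapper.py --scrap expanded --ticker AAPL MSFT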
Code example #2
File: MainScrapper2.py  Project: taltole/Stock-Mining
def main():
    """
    Given the URL of the stock market and the URLs for each stock imported from TopMarketScrapper.py,
    builds a printable DataFrame and the financial tables for all the stocks and writes them to the database.
    :return: None (results are printed and persisted to the database)
    """
    db = Database()
    user_options = stock_parser()
    print(user_options[0])
    print(user_options[1])
    if user_options[0] == 'concise':
        # # printing info to console and file
        # scrap_industries = IndustryScrapper.IndustryScrapper(URL_INDUSTRY)
        # top_industries = scrap_industries.summarizer()
        # print('Industry Summary', top_industries, sep='\n')
        # db.insert_industry_table(top_industries)
        # # printing info to console and file
        #
        scrap_sectors = SectorScrapper.SectorScrapper(URL_SECTOR)
        top_sectors = scrap_sectors.summarizer()
        print('Sectors Summary', top_sectors, sep='\n')
        db.insert_sectors_table(top_sectors)
        #
        # # Stock financial in depth info

        # getting urls for individual stock and sectors mining
        scrap_top = TopMarketScrapper.TopMarketScrapper(URL)
        stock, sectors = scrap_top.get_urls()
        # print('Links to Stocks and Sectors:', stock, sectors, sep='\n')
        # printing info to console and file
        top_stocks = scrap_top.summarizer()
        print('', 'Stock Summary', top_stocks, sep='\n')
        db.insert_main_table(top_stocks)
        print(db.read_from_db('Main'))

    elif user_options[0] == 'expanded':

        scrap_sectors = SectorScrapper.SectorScrapper(URL_SECTOR)
        top_sectors = scrap_sectors.summarizer()
        dict_sectors = db.dict_sectors(top_sectors)
        print(dict_sectors)
        db.insert_sectors_table(top_sectors)

        scrap_industries = IndustryScrapper.IndustryScrapper(URL_INDUSTRY)
        top_industries = scrap_industries.summarizer()
        db.insert_industry_table(top_industries, dict_sectors)

        scrap_top = TopMarketScrapper.TopMarketScrapper(URL)
        top_stocks = scrap_top.summarizer()
        db.insert_main_table(top_stocks, dict_sectors)
        ids_list = db.read_from_db('Main')

        top_stocks = StockScrapper.main(user_options[1])
        print('Stock Summary', top_stocks, sep='\n')
        db.insert_valuation_table(top_stocks, ids_list)
        db.insert_metrics_table(top_stocks, ids_list)
        db.insert_balance_sheet_table(top_stocks, ids_list)
        db.insert_price_history_table(top_stocks, ids_list)
        db.insert_dividends_table(top_stocks, ids_list)
        db.insert_margins_table(top_stocks, ids_list)
        db.insert_income_table(top_stocks, ids_list)
        api_overview = API_Scrapper.api_overview(user_options[1])
        db.insert_api_table(api_overview, ids_list)

    db.close_connect_db()
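
Both listings construct each scrapper with a URL and call summarizer() to get a printable table (the second one also calls get_urls() on TopMarketScrapper). A minimal sketch of that interface is shown below, assuming pandas and requests and a page whose summary lives in its first HTML table; the class name and the parsing are illustrative assumptions, not the project's implementation.

# Hypothetical sketch of the scrapper interface the examples rely on.
import pandas as pd
import requests


class SectorScrapperSketch:
    def __init__(self, url):
        self.url = url

    def summarizer(self):
        """Fetch the page and return its first HTML table as a DataFrame."""
        response = requests.get(self.url, timeout=10)
        response.raise_for_status()
        try:
            tables = pd.read_html(response.text)
        except ValueError:  # the page contains no <table> elements
            return pd.DataFrame()
        return tables[0]

Under this interface, SectorScrapperSketch(URL_SECTOR).summarizer() would yield the DataFrame that insert_sectors_table() persists.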