Example #1
0
    def test_stock_in_which_index_multiple(self):
        """Index membership parsing: SAP (many indices), then 3i (two)."""
        sap_soup = scrap.get_soup_code_from_file("data/bo_sap-aktie.html")
        expected_sap_indices = [
            "TecDAX",
            "DAX",
            "STOXX 50",
            "EURO STOXX 50",
            "S&P 400 MidCap",
            "EURO STOXX Technology",
            "Prime All Share",
            "LDAX",
            "LTecDAX",
            "HDAX",
            "DivDAX",
            "NYSE International 100",
            "CDAX",
            "EURO STOXX",
            "TecDAX Kursindex",
            "DAX Kursindex",
            "BX Swiss -  EMEA",
            "XDAX",
            "DAXglobal Sarasin Sustainability Germany Index EUR",
            "L&S DAX Indikation",
            "QIX Deutschland",
            "DAXglobal Sarasin Sustainability Germany",
            "Schatten-Index-TecDAX",
        ]
        self.assertEqual(expected_sap_indices,
                         parse.get_listed_indizes(sap_soup))

        threei_soup = scrap.get_soup_code_from_file(
            "data/bo_3i-Aktie_overview.html")
        self.assertEqual(["FTSE 100", "FTSE Allshare"],
                         parse.get_listed_indizes(threei_soup))
Example #2
0
    def test_get_market_cap(self):
        """Market cap parsing for three different fixture pages."""
        cases = [
            ("data/bo_tesla-aktie.html", 37530),
            ("data/bo_3i-Aktie_overview.html", 9600),
            ("data/bo_london_stock_exchange-Aktie_overview.html", 15890),
        ]
        for fixture, expected_cap in cases:
            fixture_soup = scrap.get_soup_code_from_file(fixture)
            self.assertEqual(expected_cap,
                             parse.get_market_cap(fixture_soup))
Example #3
0
def write_stock_last_quarterly_figures_date_to_db():
    """Parse each stock's cached dates page and upsert its latest figures date.

    Reads the stock URIs from the DB, loads the matching cached HTML file,
    extracts the most recent quarterly/yearly figures date and upserts it
    into the stock-dates table. Files with no soup or no date are skipped;
    parse/DB errors are logged per stock and do not abort the run.
    """
    stock_list = db.get_list(table=cst.TABLE_STOCKS, columns=cst.COLUMN_URI)
    file_list = [
        cst.PATH_STOCK_DATES + stock[:-6] + cst.HTML_EXTENSION
        for stock in stock_list
    ]
    for file in file_list:
        # Rebuild the stock URI from the file name (strip ".html", re-append suffix).
        stock_uri = file.split("/")[-1][:-5] + "-Aktie"
        stock_targets_soup = scrap.get_soup_code_from_file(file)

        if stock_targets_soup is None:
            logger.warning(
                "Write Stock Quaterlyfigures Data to DB: Soup is None for %s" %
                str(file))
            continue
        try:
            last_figures_date = parse.get_last_quarterly_figures_date(
                stock_targets_soup)
            if last_figures_date is None:
                continue
            # BUG FIX: stock_uri was previously not passed although
            # COLUMN_STOCK_URI is part of the primary key (cf. the other
            # write_* functions and the old write_single_stock_dates_data_to_db
            # call, which both supplied it).
            db.upsert_item(
                table=cst.TABLE_STOCK_DATES,
                primary_keys=[cst.COLUMN_STOCK_URI, cst.COLUMN_DATE],
                stock_uri=stock_uri,
                current_date=last_figures_date,
                quarterly="Quarterly/Yearly",
            )

        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; the traceback is logged via logger.exception.
            logger.exception(
                "Write Stock Quaterly Data to DB: Exception for stock: %s" %
                stock_uri)
Example #4
0
def write_stock_targets_data_to_db():
    """Parse each stock's cached price-target page and upsert analyst ratings.

    For every stock URI in the DB, loads the cached targets HTML, extracts
    the buy/hold/sell analyst counts and upserts them into the company-data
    table keyed by (stock URI, date). Missing soups are skipped with a
    warning; per-stock errors are logged and do not abort the run.
    """
    stock_list = db.get_list(table=cst.TABLE_STOCKS, columns=cst.COLUMN_URI)
    file_list = [
        cst.PATH_STOCK_TARGETS + stock[:-6] + cst.HTML_EXTENSION
        for stock in stock_list
    ]
    for file in file_list:
        # Rebuild the stock URI from the file name (strip ".html", re-append suffix).
        stock_uri = file.split("/")[-1][:-5] + "-Aktie"
        stock_targets_soup = scrap.get_soup_code_from_file(file)

        if stock_targets_soup is None:
            logger.warning(
                "Write Stock Targets Data to DB: Soup is None for %s" %
                str(file))
            continue

        try:
            buy, hold, sell = parse.get_analyst_ratings(stock_targets_soup)
            db.upsert_item(
                table=cst.TABLE_COMPANY_DATA,
                primary_keys=[cst.COLUMN_STOCK_URI, cst.COLUMN_DATE],
                current_date=date.get_current_date(),
                stock_uri=stock_uri,
                analyst_buy=buy,
                analyst_hold=hold,
                analyst_sell=sell,
            )

        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; the traceback is logged via logger.exception.
            logger.exception(
                "Write Stock Targets Data to DB: Exception for stock: %s" %
                stock_uri)
Example #5
0
def write_stock_estimates_data_to_db():
    """Parse each stock's cached estimates page and upsert EPS forecasts.

    For every stock URI in the DB, loads the cached estimates HTML, extracts
    the earnings-per-share estimate for the current and the following year,
    and upserts both into the company-data table keyed by (stock URI, date).
    Missing soups are skipped with a warning; per-stock errors are logged
    and do not abort the run.
    """
    stock_list = db.get_list(table=cst.TABLE_STOCKS, columns=cst.COLUMN_URI)
    file_list = [
        cst.PATH_STOCK_ESTIMATES + stock[:-6] + cst.HTML_EXTENSION
        for stock in stock_list
    ]
    for file in file_list:
        # Rebuild the stock URI from the file name (strip ".html", re-append suffix).
        stock_uri = file.split("/")[-1][:-5] + "-Aktie"
        stock_estimate_soup = scrap.get_soup_code_from_file(file)

        if stock_estimate_soup is None:
            logger.warning(
                "Write Stock Estimate Data to DB: Soup is None for %s" %
                str(file))
            continue

        try:
            eps_0, eps_p1 = parse.get_result_per_share_current_and_next_year(
                stock_estimate_soup)

            db.upsert_item(
                table=cst.TABLE_COMPANY_DATA,
                primary_keys=[cst.COLUMN_STOCK_URI, cst.COLUMN_DATE],
                current_date=date.get_current_date(),
                stock_uri=stock_uri,
                eps_0=eps_0,
                eps_p1=eps_p1)
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; the traceback is logged via logger.exception.
            logger.exception(
                "Write Stock Estimate Data to DB: Exception for stock: %s" %
                stock_uri)
Example #6
0
 def test_get_result_per_share_current_and_next_year(self):
     """EPS estimates for the current and the following year."""
     estimates_soup = scrap.get_soup_code_from_file("data/bo_schaetzungen.html")
     eps_current, eps_next = parse.get_result_per_share_current_and_next_year(
         estimates_soup)
     self.assertEqual(8.22, eps_current)
     self.assertEqual(9.54, eps_next)
Example #7
0
 def test_get_bygone_dates(self):
     """Past appointment rows parsed from the dates fixture."""
     expected_rows = [
         ["adidas AG", "2018-08-09", "Q2 2018 Earnings Release", "Past"],
         ["adidas AG", "2018-05-09", "Hauptversammlung", "Past"],
         ["adidas AG", "2017-11-09", "Q3 2017", "Past"],
     ]
     dates_soup = scrap.get_soup_code_from_file("data/bo_termine.html")
     self.assertEqual(expected_rows, parse.get_bygone_dates(dates_soup))
Example #8
0
 def test_get_analyst_ratings(self):
     """Buy/hold/sell analyst counts from the price-target fixture."""
     targets_soup = scrap.get_soup_code_from_file("data/bo_kursziele.html")
     buy, hold, sell = parse.get_analyst_ratings(targets_soup)
     for expected, actual in zip((15, 8, 0), (buy, hold, sell)):
         self.assertEqual(expected, actual)
Example #9
0
 def test_get_historic_prices(self):
     """First three rows of the big index history table."""
     history_soup = scrap.get_soup_code_from_file("data/bo_index_history_big.html")
     price_rows = parse.get_historic_prices_from_history(history_soup)
     expected_head = [
         ["01.10.2018", "9.127,05", "9.094,28", "9.155,65", "9.084,22"],
         ["02.10.2018", "9.087,32", "9.076,57", "9.090,46", "9.050,50"],
         ["03.10.2018", "9.175,21", "9.126,31", "9.194,99", "9.123,48"],
     ]
     self.assertEqual(expected_head, price_rows[:3])
Example #10
0
 def test_get_result_per_share_last_three_years(self):
     """EPS values for 2015-2017 from the balance-sheet fixture."""
     balance_soup = scrap.get_soup_code_from_file("data/bo_bilanz_guv.html")
     eps_triple = parse.get_result_per_share_last_three_years(
         balance_soup)
     for expected_eps, actual_eps in zip((3.3, 5.08, 6.69), eps_triple):
         self.assertEqual(expected_eps, actual_eps)
Example #11
0
def write_stock_balance_data_to_db():
    """Parse each stock's cached balance page and upsert its balance figures.

    For every stock URI in the DB, loads the cached balance/GuV HTML and
    extracts earnings after tax, operative result, sales revenue, total
    balance, equity capital and the EPS of the last three years, then
    upserts them into the company-data table keyed by (stock URI, date).
    Missing soups and pages without data are skipped with a warning;
    per-stock errors are logged and do not abort the run.
    """
    stock_list = db.get_list(table=cst.TABLE_STOCKS, columns=cst.COLUMN_URI)
    file_list = [
        cst.PATH_STOCK_BALANCE + stock[:-6] + cst.HTML_EXTENSION
        for stock in stock_list
    ]
    for file in file_list:
        # Rebuild the stock URI from the file name (strip ".html", re-append suffix).
        stock_uri = file.split("/")[-1][:-5] + "-Aktie"
        stock_balance_soup = scrap.get_soup_code_from_file(file)

        if stock_balance_soup is None:
            logger.warning(
                "Write Stock Balance Data to DB: Soup is None for %s" %
                str(file))
            continue

        if not parse.is_data_available(stock_balance_soup):
            logger.warning(
                "Write Stock Balance Data to DB: No Data Available for %s" %
                str(file))
            continue

        try:
            earnings_after_tax = parse.get_current_value_of_attribute(
                stock_balance_soup, cst.TEXT_RESULT_AFTER_TAX)
            operative_result = parse.get_current_value_of_attribute(
                stock_balance_soup, cst.TEXT_OPERATIVE_RESULT)
            sales_revenue = parse.get_current_value_of_attribute(
                stock_balance_soup, cst.TEXT_SALES_REVENUE)
            balance = parse.get_current_value_of_attribute(
                stock_balance_soup, cst.TEXT_BALANCE)
            equity_capital = parse.get_current_value_of_attribute(
                stock_balance_soup, cst.TEXT_EQUITY_CAPITAL)
            eps_m3, eps_m2, eps_m1 = parse.get_result_per_share_last_three_years(
                stock_balance_soup)

            db.upsert_item(
                table=cst.TABLE_COMPANY_DATA,
                primary_keys=[cst.COLUMN_STOCK_URI, cst.COLUMN_DATE],
                current_date=date.get_current_date(),
                stock_uri=stock_uri,
                equity_capital=equity_capital,
                earnings_after_tax=earnings_after_tax,
                operative_result=operative_result,
                sales_revenue=sales_revenue,
                balance=balance,
                eps_m3=eps_m3,
                eps_m2=eps_m2,
                eps_m1=eps_m1)

        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; the traceback is logged via logger.exception.
            logger.exception(
                "Write Stock Balance Data to DB: Exception for stock: %s" %
                stock_uri)
            continue
Example #12
0
    def test_stock_sectors(self):
        """Sector lists parsed from three different stock pages."""
        cases = [
            ("data/bo_sap-aktie.html", [
                "Informationstechnologie",
                "IT-Dienstleister",
                "Server-/ GroĂźrechner (Software)",
                "Software",
            ]),
            ("data/bo_ab_inbev-aktie.html", ["Getränke / Tabak"]),
            ("data/bo_3i-Aktie_overview.html", ["Finanzdienstleister"]),
        ]
        for fixture, expected_sectors in cases:
            fixture_soup = scrap.get_soup_code_from_file(fixture)
            self.assertEqual(expected_sectors,
                             parse.get_sectors(fixture_soup))
Example #13
0
 def test_get_historic_prices_from_overview(self):
     """The five price rows shown on the 3i overview page."""
     overview_soup = scrap.get_soup_code_from_file("data/bo_3i-Aktie_overview.html")
     expected_rows = [
         ["29.11.2018", "9,87"],
         ["28.11.2018", "9,69"],
         ["27.11.2018", "9,75"],
         ["26.11.2018", "9,62"],
         ["23.11.2018", "9,44"],
     ]
     self.assertEqual(
         expected_rows,
         parse.get_historic_stock_prices_from_overview(overview_soup))
Example #14
0
 def test_index_stocks_list_extract(self):
     """First five stocks extracted from the index list container."""
     index_soup = scrap.get_soup_code_from_file("data/bo_index_stocks.html")
     container = index_soup.find_all("div", {"id": "index-list-container"})
     extracted = parse.extract_index_stocks_to_list(container)
     expected_head = [
         ["adidas", "adidas-Aktie"],
         ["Allianz", "allianz-Aktie"],
         ["BASF", "basf-Aktie"],
         ["Bayer", "bayer-Aktie"],
         ["Beiersdorf", "beiersdorf-Aktie"],
     ]
     self.assertEqual(extracted[:5], expected_head)
Example #15
0
 def test_get_future_dates(self):
     """Upcoming appointment rows parsed from the dates fixture."""
     expected_rows = [
         ["adidas AG", "2018-11-07", "Quartalszahlen", "Future"],
         ["adidas AG", "2019-03-07", "Quartalszahlen", "Future"],
         ["adidas AG", "2019-05-02", "Quartalszahlen", "Future"],
         ["adidas AG", "2019-05-09", "Hauptversammlung", "Future"],
         ["adidas AG", "2019-08-08", "Quartalszahlen", "Future"],
         ["adidas AG", "2019-11-07", "Quartalszahlen", "Future"],
     ]
     dates_soup = scrap.get_soup_code_from_file("data/bo_termine.html")
     self.assertEqual(expected_rows, parse.get_future_dates(dates_soup))
Example #16
0
    def test_get_stock_table_of_index(self):
        """First three and last two stocks of the FTSE 100 fixture."""
        index_soup = scrap.get_soup_code_from_file("data/bo_FTSE_100.html")
        stocks = parse.get_stock_list_of_single_index(index_soup)

        expected_head = [
            ["3i", "3i-Aktie"],
            ["Admiral Group", "admiral_group-Aktie"],
            ["Anglo American", "anglo_american-Aktie"],
        ]
        self.assertEqual(expected_head, stocks[:3])

        expected_tail = [
            ["WPP 2012", "wpp_2012-Aktie"],
            ["WPP 2012", "wpp_2012-Aktie"],
        ]
        self.assertEqual(expected_tail, stocks[-2:])
Example #17
0
def write_index_contents_from_html_to_db():
    """Parse each active index's cached content page and store its stock list.

    Reads all active index URIs from the DB, loads the matching cached HTML
    file, extracts the contained stock list and writes it to the DB.

    Returns:
        True when all files have been processed.
    """
    index_list = db.get_list(
        table=cst.TABLE_INDIZES,
        columns=cst.COLUMN_URI,
        condition=[cst.COLUMN_ACTIVE, 1],
    )
    file_list = [
        cst.PATH_INDEX_CONTENT + index + cst.HTML_EXTENSION
        for index in index_list
    ]

    for file in file_list:
        index_content_soup = scrap.get_soup_code_from_file(file)
        # ROBUSTNESS FIX: skip unreadable/missing HTML files instead of
        # handing None to the parser (consistent with the other write_*
        # functions, which all guard against a None soup).
        if index_content_soup is None:
            logger.warning(
                "Write Index Contents to DB: Soup is None for %s" % str(file))
            continue
        stock_list = parse.get_stock_list_of_single_index(index_content_soup)
        index_uri = file.split("/")[-1][:-5]
        db.write_index_content_list_to_db(stock_list, index_uri)

    return True
Example #18
0
def write_stock_histories_from_html_to_db():
    """Parse every stock's cached history page and write its prices to the DB."""
    stock_uris = db.get_list(table=cst.TABLE_STOCKS, columns=cst.COLUMN_URI)
    history_files = [
        cst.PATH_STOCK_HISTORY + uri[:-6] + cst.HTML_EXTENSION
        for uri in stock_uris
    ]
    for history_file in history_files:
        history_soup = scrap.get_soup_code_from_file(history_file)
        if history_soup is None:
            logger.warning(
                "Write Stock History Data to DB: Stock History Soup is None for %s"
                % str(history_file))
            continue
        price_rows = parse.get_historic_prices_from_history(
            history_soup)

        # Rebuild the stock URI from the file name (strip ".html", re-append suffix).
        stock_uri = history_file.split("/")[-1][:-5] + "-Aktie"
        db.write_stock_history_to_db(price_rows, stock_uri)
Example #19
0
def write_stock_overview_data_to_db():
    """Parse each stock's cached overview page and persist the extracted data.

    Upserts market cap, index and sector memberships into the company-data
    table, updates the stock's market place, and stores the short price
    history shown on the overview page when one is present.
    """
    stock_uris = db.get_list(table=cst.TABLE_STOCKS, columns=cst.COLUMN_URI)
    overview_files = [
        cst.PATH_STOCK_OVERVIEW + uri + cst.HTML_EXTENSION
        for uri in stock_uris
    ]
    for overview_file in overview_files:
        # Stock URI is the file name without its ".html" extension.
        stock_uri = overview_file.split("/")[-1][:-5]
        overview_soup = scrap.get_soup_code_from_file(overview_file)
        if overview_soup is None:
            logger.warning(
                "Write Stock Overview Data to DB: Stock Overview Soup is None for %s"
                % str(overview_file))
            continue

        market_cap = parse.get_market_cap(overview_soup)
        if market_cap is None:
            logger.warning(
                "Write Stock Overview Data to DB: Market Cap is None for %s" %
                str(overview_file))
            # Sentinel so the row is still written when the cap is missing.
            market_cap = -1.00

        indices_repr = str(parse.get_listed_indizes(overview_soup))
        sectors_repr = str(parse.get_sectors(overview_soup))
        market_place = parse.get_market_place(overview_soup)

        db.upsert_item(table=cst.TABLE_COMPANY_DATA,
                       primary_keys=[cst.COLUMN_STOCK_URI, cst.COLUMN_DATE],
                       stock_uri=stock_uri,
                       market_cap=market_cap,
                       current_date=date.get_current_date(),
                       stock_indices=indices_repr,
                       stock_sectors=sectors_repr)

        db.update_item(table=cst.TABLE_STOCKS,
                       primary_keys=cst.COLUMN_URI,
                       uri=stock_uri,
                       market_place=market_place)

        overview_history = parse.get_historic_stock_prices_from_overview(
            overview_soup)
        if overview_history is not None:
            db.write_stock_overview_history_to_db(overview_history,
                                                  stock_uri)
Example #20
0
def write_index_histories_from_html_to_db():
    """Parse each active index's cached history page and write it to the DB.

    Reads all active index URIs from the DB, loads the matching cached HTML
    file, extracts the historic price rows and writes them to the DB.
    DB write failures are logged per index and do not abort the run.
    """
    index_list = db.get_list(
        table=cst.TABLE_INDIZES,
        columns=cst.COLUMN_URI,
        condition=[cst.COLUMN_ACTIVE, 1],
    )
    file_list = [
        cst.PATH_INDEX_HISTORY + index + cst.HTML_EXTENSION
        for index in index_list
    ]
    for file in file_list:
        index_history_soup = scrap.get_soup_code_from_file(file)
        index_history_list = parse.get_historic_prices_from_history(
            index_history_soup)

        index_uri = file.split("/")[-1][:-5]
        try:
            db.write_index_history_to_db(index_history_list, index_uri)
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; the traceback is logged via logger.exception.
            logger.exception(
                "Unhandled Exceptions at parse.write_index_histories_from_html_to_db"
            )
Example #21
0
 def test_get_stock_indices(self):
     """Full list of indices the Bechtle stock is listed in."""
     bechtle_soup = scrap.get_soup_code_from_file("data/bo_bechtle-Aktie.html")
     expected_indices = [
         "TecDAX",
         "MDAX",
         "LMDAX",
         "LTecDAX",
         "HDAX",
         "Technology All Share",
         "Prime All Share",
         "CDAX",
         "TecDAX Kursindex",
         "MDAX Kursindex",
         "BX Swiss -  EMEA",
         "DAXglobal Sarasin Sustainability Germany Index EUR",
         "QIX Deutschland",
         "DAXglobal Sarasin Sustainability Germany",
         "Schatten-Index-TecDAX",
         "Schatten-Index-SDAX",
     ]
     self.assertEqual(expected_indices,
                      parse.get_listed_indizes(bechtle_soup))
Example #22
0
 def test_get_data_available_info(self):
     """A page without history data is reported as unavailable."""
     no_data_soup = scrap.get_soup_code_from_file("data/no_history_data.html")
     availability = scrap.is_data_available(no_data_soup)
     self.assertEqual(availability, False)
Example #23
0
 def test_get_market_place(self):
     """Market place parsed from the AES overview page."""
     aes_soup = scrap.get_soup_code_from_file(
         "data/bo_aes-Aktie_overview.html")
     self.assertEqual("FSE", parse.get_market_place(aes_soup))
Example #24
0
 def test_get_historic_prices_from_overview_without_content(self):
     """An overview page without a price table yields None."""
     empty_overview_soup = scrap.get_soup_code_from_file(
         "data/bo_overview_abertis-Aktie.html")
     self.assertIsNone(
         parse.get_historic_stock_prices_from_overview(empty_overview_soup))
Example #25
0
 def test_get_closing_price_from_date_before(self):
     """Closing price of the trading day before the given date."""
     prices_soup = scrap.get_soup_code_from_file("data/bo_kurse.html")
     price_before = parse.get_closing_price_from_date_before(
         prices_soup, "09.08.2018")
     self.assertEqual(190.55, price_before)
Example #26
0
 def test_get_sales_revenue(self):
     """Current sales revenue read from the balance-sheet fixture."""
     balance_soup = scrap.get_soup_code_from_file("data/bo_bilanz_guv.html")
     revenue = parse.get_current_value_of_attribute(balance_soup, "Umsatzerlöse")
     self.assertEqual(21218, revenue)
Example #27
0
 def test_pagination_getting(self):
     """Highest pagination number found on the index stocks page."""
     pagination_soup = scrap.get_soup_code_from_file(
         "data/bo_index_stocks_pagination.html")
     self.assertEqual(3, scrap.get_max_page(pagination_soup))
Example #28
0
 def test_get_total_assets(self):
     """Balance-sheet total read from the balance-sheet fixture."""
     balance_soup = scrap.get_soup_code_from_file("data/bo_bilanz_guv.html")
     total_assets = parse.get_current_value_of_attribute(balance_soup, "Bilanzsumme")
     self.assertEqual(14522, total_assets)
Example #29
0
 def test_get_equity_capital_new(self):
     """Equity capital read from the balance-sheet fixture."""
     balance_soup = scrap.get_soup_code_from_file("data/bo_bilanz_guv.html")
     equity = parse.get_current_value_of_attribute(balance_soup, "Eigenkapital")
     self.assertEqual(6435, equity)
Example #30
0
 def test_get_last_quarterly_figures_date(self):
     """Latest quarterly-figures date before the given current date."""
     dates_soup = scrap.get_soup_code_from_file("data/bo_termine.html")
     figures_date = parse.get_last_quarterly_figures_date(
         soup=dates_soup, current_date=date.string_to_date("10.10.2018"))
     self.assertEqual(date.string_to_date("09.08.18"), figures_date)