Esempio n. 1
0
def crawl_insider(counter, trading_date):
    """Scrape insider (director and substantial-shareholder) trades for one counter.

    Args:
        counter: Stock counter name used as the key into the stock list.
        trading_date: Date passed through to scrape_insider for filtering.

    Returns:
        Tuple of (director_transactions, shareholder_transactions), each as
        produced by scrape_insider.
    """
    stocks = formStocklist(counter, S.KLSE_LIST)
    code = stocks[counter]
    debug = S.DBG_ALL or S.DBG_INSIDER
    results = []
    # Directors page first, then substantial shareholders — same scrape routine.
    for base_url in (I3_INSIDER_DIRECTOR_URL, I3_INSIDER_SHAREHOLDER_URL):
        url = base_url + code + ".jsp"
        if debug:
            print ("\tInsider: " + counter + " " + code + " " + url)
        results.append(scrape_insider(counter, code, connect_url(url), trading_date))
    return results[0], results[1]
Esempio n. 2
0
def crawl_entitlement(trading_date=None, formatted_output=False):
    """Scrape the latest dividend and other entitlement announcements.

    Args:
        trading_date: Date string ("%d-%b-%Y"); defaults to today's date,
            computed at call time.
        formatted_output: When True, pretty-print each non-empty result table.

    Returns:
        Tuple of (latest_dividends, latest_others) as returned by
        scrape_entitlement.
    """
    # Compute the default at call time: a getToday(...) default argument is
    # evaluated only once at import and would go stale in a long-running process.
    if trading_date is None:
        trading_date = getToday("%d-%b-%Y")

    url = I3_DIVIDEND_URL
    latest_dividends = scrape_entitlement(connect_url(url), url, trading_date,
                                          formatted_output)
    if formatted_output and len(latest_dividends) > 0:
        format_table_entitlement("Latest Dividends", latest_dividends)

    url = I3_ENTITLEMENT_OTHERS_URL
    latest_others = scrape_entitlement(connect_url(url), url, trading_date,
                                       formatted_output)
    if formatted_output and len(latest_others) > 0:
        format_table_entitlement("Latest Bonus, Share Split & Consolidation",
                                 latest_others)

    return latest_dividends, latest_others
Esempio n. 3
0
def get_yoy_links(jsp_link):
    """Collect year-on-year report PDF links from an i3investor page.

    Scans the page's content tables for anchor tags whose href contains
    "staticfile"; collection stops as soon as the run of matching links ends
    (the original nested-break cascade is expressed here as early returns).

    Args:
        jsp_link: Path appended to S.I3_KLSE_URL to form the page URL.

    Returns:
        Dict mapping full PDF URL -> link text, or None when the page could
        not be fetched.
    """
    soup_ar = connect_url(S.I3_KLSE_URL + jsp_link)
    if soup_ar is None or len(soup_ar) <= 0:
        print('getYoYLinks ERR: no result')
        return None
    container = soup_ar.find("div", {"id": "container"})
    content = container.find("div", {"id": "content"})
    pdf_links = {}
    found = False
    for table in content.findAll('table'):
        for row in table.findAll('tr'):
            for cell in row.findAll('td'):
                for anchor in cell.findAll('a'):
                    href = anchor.get('href')
                    if href is None:
                        continue
                    if "staticfile" in href:
                        found = True
                        label = anchor.getText().replace('&nbsp;', '')
                        pdf_links[S.I3_KLSE_URL + href] = label.strip()
                    elif found:
                        # First non-matching link after the matching run:
                        # nothing further on the page is of interest.
                        return pdf_links
                if found:
                    # Finished the cell that held the matching links; stop.
                    return pdf_links
    return pdf_links
Esempio n. 4
0
def crawl_qr(counter):
    """Fetch and scrape the quarterly-report page for *counter*.

    The counter is upper-cased before lookup, so the argument is effectively
    case-insensitive. Returns whatever scrape_qr produces for the page.
    """
    counter = counter.upper()
    stock_map = formStocklist(counter, S.KLSE_LIST)
    code = stock_map[counter]
    url = I3_QR_URL + code + ".jsp"
    if S.DBG_ALL or S.DBG_QR:
        print ("\tQR: {} {} {}".format(counter, code, url))
    return scrape_qr(counter, code, connect_url(url))
Esempio n. 5
0
def crawl_latest(trading_date=None, formatted_output=False):
    """Scrape the latest director, shareholder and company insider transactions.

    Args:
        trading_date: Date string ("%d-%b-%Y"); defaults to today's date,
            computed at call time.
        formatted_output: When True, pretty-print each non-empty result set.

    Returns:
        Tuple of (latest_dir, latest_shd, latest_company) as returned by
        scrape_latest.
    """
    # Compute the default at call time: a getToday(...) default argument is
    # evaluated only once at import and would go stale in a long-running process.
    if trading_date is None:
        trading_date = getToday("%d-%b-%Y")

    url = I3_INSIDER_DIRECTOR_URL
    latest_dir = scrape_latest(connect_url(url), url, trading_date, formatted_output)
    if formatted_output and len(latest_dir) > 0:
        # latest_dir is keyed; the formatter is fed the values as a list.
        new_list = [latest_dir[key] for key in latest_dir]
        format_table_insiders("Latest Directors Transactions", new_list)

    url = I3_INSIDER_SHAREHOLDER_URL
    latest_shd = scrape_latest(connect_url(url), url, trading_date, formatted_output)
    if formatted_output and len(latest_shd) > 0:
        # NOTE(review): unlike latest_dir above, latest_shd and latest_company
        # are passed without the values() extraction — confirm
        # format_table_insiders accepts both shapes.
        format_table_insiders("Latest Substantial Shareholders Transactions", latest_shd)

    url = I3_INSIDER_COMPANY_URL
    latest_company = scrape_latest(connect_url(url), url, trading_date, formatted_output)
    if formatted_output and len(latest_company) > 0:
        format_table_insiders("Latest Company Transactions", latest_company)
    return latest_dir, latest_shd, latest_company
Esempio n. 6
0
def crawl_listing(trading_date=None, formatted_output=True):
    """Scrape the additional-listing announcements page.

    Args:
        trading_date: Date string ("%d-%b-%Y"); defaults to today's date,
            computed at call time.
        formatted_output: When True and results exist, pretty-print them and
            return the values as a list; otherwise return the raw result.

    Returns:
        A list of listing rows when formatted, else the raw scrape_listing
        result (keyed collection).
    """
    # Compute the default at call time: a getToday(...) default argument is
    # evaluated only once at import and would go stale in a long-running process.
    if trading_date is None:
        trading_date = getToday("%d-%b-%Y")
    latest_listings = scrape_listing(connect_url(I3_LISTING_URL), trading_date,
                                     formatted_output)
    if formatted_output and len(latest_listings) > 0:
        new_list = [latest_listings[key] for key in latest_listings]
        format_table_listing("Additional Listing", new_list)
        return new_list

    return latest_listings
Esempio n. 7
0
def crawl_price_target(trading_date=None, formatted_output=True):
    """Scrape analyst price-target announcements.

    Args:
        trading_date: Date string ("%d-%b-%Y"); defaults to today's date,
            computed at call time.
        formatted_output: When True and results exist, pretty-print them and
            return the values as a list; otherwise return the raw result.

    Returns:
        A list of price-target rows when formatted, else the raw
        scrape_target result (keyed collection).
    """
    # Compute the default at call time: a getToday(...) default argument is
    # evaluated only once at import and would go stale in a long-running process.
    if trading_date is None:
        trading_date = getToday("%d-%b-%Y")
    price_targets = scrape_target(connect_url(I3_TARGET_URL), trading_date,
                                  formatted_output)
    if formatted_output and len(price_targets) > 0:
        new_list = [price_targets[key] for key in price_targets]
        format_table_target("Price Target", new_list)
        return new_list

    return price_targets
Esempio n. 8
0
def crawl_latest_qr(trading_date=None):
    """Scrape the latest quarterly-report announcements.

    trading_date defaults to today's date, computed at call time — a
    getToday(...) default argument would be evaluated only once at import
    and go stale in a long-running process.
    """
    if trading_date is None:
        trading_date = getToday("%d-%b-%Y")
    return scrape_latest_qr(connect_url(I3_LATEST_QR_URL), trading_date)
Esempio n. 9
0
def crawl_latest_ar(trading_date=None):
    """Scrape the latest annual-report announcements.

    trading_date defaults to today's date, computed at call time — a
    getToday(...) default argument would be evaluated only once at import
    and go stale in a long-running process.
    """
    if trading_date is None:
        trading_date = getToday("%d-%b-%Y")
    return scrape_latest_ar(connect_url(I3_LATEST_AR_URL), trading_date)