Example #1
0
def getlatestitems():
    """Scrape the latest-items table from the module-level ``url`` and return
    it as a JSON array of dicts with keys ``item``, ``ltp``, ``volume`` and
    ``changepercentage``.

    Relies on module-level names: url, settings, req, requests,
    BeautifulSoup and json.
    """
    # Two HTTP backends: faster_than_requests vs. plain requests
    # (verify=False presumably because the target site has TLS issues —
    # TODO confirm).
    if settings.use_fast_requests:
        html = req.get2str(url)
    else:
        r = requests.get(url, verify=False)
        html = r.content

    soup = BeautifulSoup(html, 'html.parser')

    all_items_table = soup.find("tbody")
    print(all_items_table)  # NOTE(review): debug print — consider removing
    all_items = []
    for item in all_items_table.find_all("tr"):
        inner_data = item.find_all('td')
        curr_item = {}
        curr_item['item'] = inner_data[1].get_text().strip()
        curr_item['ltp'] = inner_data[2].get_text().strip()
        curr_item['volume'] = inner_data[9].get_text().strip()

        # Change Percentage Calculation
        change_percentage = "--"
        try:
            ltp = float(inner_data[2].get_text().strip())
            open_price = float(inner_data[3].get_text().strip())

            change_percentage = ((ltp - open_price) / open_price) * 100.0
            change_percentage = str(format(change_percentage, '.2f') + "%")
        except (IndexError, ValueError, ZeroDivisionError):
            # Missing or non-numeric cells leave the "--" placeholder.
            pass

        curr_item['changepercentage'] = change_percentage
        all_items.append(curr_item)

    return json.dumps(all_items)
Example #2
0
def getInfo(url):
    """Download a novel page and return its metadata and chapter list as a dict."""
    # Fetch and parse the page
    html = BeautifulSoup(requests.get2str(url), 'lxml')

    # Header block: title (ASCII-folded) and cover image
    novel = html.find('div', class_='novel')
    title = unidecode(novel.img['alt'])
    img = novel.img['src']

    # Detail sections are positional: 0=type, 1=genres, 2=tags (unused),
    # 3=language, 4=author, 6=year
    details = html.select('div.novel-detail-body')
    novelType = details[0].a.string
    genres = [link.string for link in details[1].select('a')]
    language = details[3].li.string
    author = details[4].li.string
    year = details[6].li.string

    # Prepare json object
    info = {
        'title': title,
        'img': img,
        'type': novelType,
        'genres': genres,
        'language': language,
        'author': author,
        'year': year,
        'chapters': []
    }

    # One panel per volume; every chapter link carries its volume heading
    for vol in html.select('div.panel-default'):
        for chp in vol.select('ul.chapter-chs a'):
            info['chapters'].append({
                'name': chp.string,
                'url': chp['href'],
                'volume': vol.h4.string
            })

    # Return parsed info
    return info
Example #3
0
    def actListSelect(self, item):
        """Display the cover image of the selected search result."""
        if item is None:
            return

        # Find the image URL of the result whose title matches the item text
        matches = [b['img'] for b in self.results if b['title'] == item.text()]
        url = matches[0]

        cover = QPixmap()
        cover.loadFromData(get2str(url))

        # Scale to the current widget size before displaying
        width = self.ui.imgCover.width()
        height = self.ui.imgCover.height()
        self.ui.imgCover.setPixmap(cover.scaled(width, height))
Example #4
0
def getChapterText(url):
    """Download a chapter page, strip scripts/hidden blocks/formatting from
    its 'desc' container, and return the cleaned container as an HTML string.
    """
    # Make request and parse html
    body = requests.get2str(url)
    html = BeautifulSoup(body, 'lxml')

    # Get the text container
    text = html.find('div', class_='desc')

    # Remove unwanted tags. Plain loops instead of list comprehensions:
    # decompose() is called purely for its side effect.
    for script in text('script'):
        script.decompose()
    for hidden in text('div', class_='hidden'):
        hidden.decompose()
    for center in text('div', class_='col-lg-12 text-center'):
        center.decompose()

    # Remove formatting attributes from the container itself
    for attr in ['class', 'style', 'data-size']:
        del text[attr]

    return str(text)
Example #5
0
 def returnjs_fromhtml(self, u):
     """Fetch *u*, collect beautified inline-script lines, and return them
     together with a list of other extractions (comments, inline script
     lines, hidden inputs, links, image sources).

     Returns ([], []) if the HTTP fetch fails.
     """
     m = []
     try:
         r = get2str(u)
     except Exception as E:
         print(E, E.__class__)
         return [], []
     s = BeautifulSoup(r, 'html.parser')
     # Beautify each <script> body and split it into lines; the outer
     # filter(None, ...) drops empty results.
     # NOTE(review): st.string is None for external scripts (src=...), which
     # would make beautify() receive None — confirm beautify tolerates that.
     stext = filter(
         None,
         map(lambda st: beautify(st.string).split('\n'),
             filter(None, s.find_all("script"))))
     for st in stext:
         m.extend(st)
     # Second element bundles the sibling extractors, all run on the same soup.
     return m, [
         self.returncomment_fromcomment(s),
         self.returnexline_fromscript(s),
         self.returnhiddden_frominput(s),
         self.returnlink_fromhtml(s),
         self.returnsrc_fromimg(s)
     ]
# Demo of the faster_than_requests API surface: plain verb helpers vs. the
# *2str/*2dict/*2json helpers, which (per the comments below) return the
# response body only, converted to the named type.
print(requests.get("http://httpbin.org/get"))  # HTTP GET.

print(
    requests.post("http://httpbin.org/post",
                  """{"foo": "bar", "baz": true}"""))  # HTTP POST.

print(requests.put("http://httpbin.org/put",
                   """{"foo": "bar", "baz": true}"""))  # HTTP PUT.

print(requests.delete("http://httpbin.org/delete"))  # HTTP DELETE.

print(
    requests.patch("http://httpbin.org/patch",
                   """{"foo": "bar", "baz": true}"""))  # HTTP PATCH.

print(requests.get2str(
    "http://httpbin.org/get"))  # HTTP GET body only to string response.

print(requests.get2dict(
    "http://httpbin.org/get"))  # HTTP GET body only to dictionary response.

print(requests.get2json(
    "http://httpbin.org/get"))  # HTTP GET body only to JSON response.

print(
    requests.post2str("http://httpbin.org/post",
                      """{"foo": "bar", "baz": true}""")
)  # HTTP POST data only to string response.

print(
    requests.post2dict("http://httpbin.org/post",
                       """{"foo": "bar", "baz": true}""")
Example #7
0
 def test_get2str(self):
     """get2str returns a body that parses as a non-empty JSON object."""
     self.assertTrue(dict(json.loads(faster_than_requests.get2str("http://httpbin.org/get"))))
 def __extract_stats_page__(self):
     """Fetch this entry's /stats/ page and return it parsed with lxml."""
     stats_url = self.__extract_canonical__() + "/stats/"
     markup = faster_than_requests.get2str(stats_url)
     return parser.BeautifulSoup(markup, 'lxml')
 def __extract__(self):
     """Fetch this entry's ScribbleHub page (by uuid) and return it parsed with lxml."""
     page_url = "https://www.scribblehub.com/?p=" + str(self.uuid)
     markup = faster_than_requests.get2str(page_url)
     return parser.BeautifulSoup(markup, 'lxml')
def url_parse(projectID, fileID):
    """Resolve a project/file id pair to its download URL via the ForgeSVC API."""
    endpoint = ('https://addons-ecs.forgesvc.net/api/v2/addon/'
                + str(projectID) + '/file/' + str(fileID) + '/download-url')
    return requests.get2str(endpoint)
Example #11
0
 def request_script(self):
     """Sends a get request to scrape the moves on the table."""
     markup = requests.get2str(self.driver.current_url)
     soup = BeautifulSoup(markup, 'html.parser')
     return soup.find_all("script")[2].string
Example #12
0
import faster_than_requests as r
from bs4 import BeautifulSoup
import re
from itertools import islice


# Print letter statistics for the chosen mapping, most common first.
def letter_statistics(array):
    """Print up to 26 ``key count`` lines of *array* in descending count
    order, followed by the total number of distinct keys."""
    ranked = sorted(array.items(), key=lambda pair: pair[1], reverse=True)
    for key, count in ranked[:26]:
        print(key, count)
    print(len(array))


# Fetch and parse the species-stats page once at import time.
source = r.get2str("https://thesilphroad.com/species-stats")
soup = BeautifulSoup(source, "html.parser")

# Accumulators filled while scanning species names (presumably later in the
# script, beyond this excerpt — TODO confirm).
first_letter = {}
first_two_letters = {}
four_letters = {}
last_letter = {}
vowels = []
palindromes = []
stats = {}
names = []
lengths = [0] * 12

# NOTE(review): file is opened without a context manager; assumed to be
# closed later in the script — confirm, or wrap in `with`.
file = open("pokemon.txt", "w")
file.write(
    "NAME\t\t\t\tID\tMAX CP\tATT\tDEF\tHP\tDEF*HP\tSUM\tPROD\t\tHEIGHT\tWEIGHT\t\tTYPES\n"
)
Example #13
0
def getItemDetail(item_name):
    """Scrape the stock-detail page for *item_name* and return its market
    information, basic info, P/E ratios, history and share-percentage
    figures as a JSON string.

    Parsing is positional: rows/cells are read by index and label text is
    stripped off with str.replace, so any layout change on the source page
    breaks the indices below.

    Relies on module-level names: get_url, settings, req, requests,
    BeautifulSoup and json.
    """
    final_result = {}
    url = get_url(item_name)

    if settings.use_fast_requests:
        html = req.get2str(url)
    else:
        r = requests.get(url, verify=False)
        html = r.text

    # Drop non-ASCII characters before parsing to sidestep encoding issues.
    soup = BeautifulSoup(
        str(html).encode('ascii', 'ignore').decode('ascii'), "html.parser")

    ################## GRAB COMPANY NAME #####################
    comp_table = soup.find("th", {"style": "text-align:center !important;"})
    company_name = comp_table.get_text().replace("Company Name: ", '').strip()
    print(company_name)

    ####################   Grab Data from Market Information    #########################
    table_params = {'border': '1', 'cellspacing': '1', 'width': '100%'}
    market_info_tables = soup.find_all("table", table_params)
    ###################    First Table ####################################
    # m_i_table_params ={'width':"100%", 'border':'0', 'cellpadding':'0', 'cellspacing':'1', 'bgcolor':'#C0C0C0'}
    m_i_table = market_info_tables[0]
    values = []
    for td in m_i_table.find_all("tr"):
        values.append(td.text)
        # print td.text

    lastTrade = str(values[3]).replace("Last Trading Price", "").strip()
    print("Last Tarade - " + lastTrade)

    change_num = str(values[5]).strip()
    change_num = change_num[:-7].replace("Change*", "").strip()

    print("Change Num: " + change_num)
    change_percentage = str(values[5]).replace(change_num,
                                               "").replace("Change*",
                                                           "").strip()
    print("Change Percentage:" + change_percentage)
    open_price = str(values[8]).replace("Opening Price", "").strip()
    print("Open Price: " + open_price)

    adjust_open_price = str(values[9]).replace("Adjusted Opening Price",
                                               "").strip()
    print("Adjust Opening Price: " + adjust_open_price)
    yesterday_close_price = str(values[10]).replace(
        "Yesterday's Closing Price", "").strip()
    print("YCP:" + yesterday_close_price)
    closePrice = str(values[11]).replace("Closing Price", "").strip()
    print("Closing Price: " + closePrice)
    daysRange = str(values[12]).replace("Day's Range", "").strip()
    print("Days Range: " + daysRange)

    amount_traded_in_bdt = str(values[13]).replace("Day's Value (mn)",
                                                   "").strip()
    print("Amount traded in bdt (mn): " + amount_traded_in_bdt)
    weekRange = str(values[14]).replace("52 Weeks' Moving Range", "").strip()
    print("Week's Range: " + weekRange)
    volume = str(values[15]).replace("Day's Volume (Nos.)", "").strip()
    print("Volume : " + volume)
    totalTrade = str(values[16]).replace("Day's Trade (Nos.)", "").strip()
    print("Total Trade: " + totalTrade)
    market_capital = str(values[17]).replace("Market Capitalization (mn)",
                                             '').strip()
    print("Market Capital: " + market_capital)

    #########      WE ARE DONE WITH MARKET INFO TABLE #####################

    ################## BASIC INFORMATION ########################
    second_table = market_info_tables[1]

    basic_info_table = []
    try:
        for td in second_table.find_all("tr"):
            basic_info_table.append(td.text)
    except Exception:
        # Table may be absent; downstream indexing will then raise IndexError.
        pass
    # print str(values_second_table[2])

    authorized_capital = basic_info_table[3].replace("Authorized Capital (mn)",
                                                     "").strip()
    print("Authorized Capital: " + authorized_capital)
    paidupvalue = \
        basic_info_table[4].replace("Paid-up Capital (mn)", "").replace("\n", "").replace("\r", "").strip().split(" ")[
            0]
    print("Paid Up Value: " + paidupvalue)

    facevalue = basic_info_table[5].replace("Face/par Value", "").strip()
    print("Face Value: " + facevalue)
    marketLot = basic_info_table[9].replace("Market Lot", "").strip()
    print("Market Lot: " + marketLot)

    noofsecurities = basic_info_table[6].replace(
        "Total No. of Outstanding Securities", "").strip()
    print("Total no of sec. : " + noofsecurities)
    segment = basic_info_table[10].replace("Sector", "").strip()
    print("Segment: " + segment)

    ############### END OF BASIC INFORMATION ############

    ################ P/E Ratio ######################

    # Un-Audited
    peratio_table = market_info_tables[4]

    values = []

    for row in peratio_table.find_all("tr"):
        cells = row.find_all("td")
        for cell in cells:
            values.append(cell.get_text().strip())
    peratio_basic = "UA: " + values[15]
    print(peratio_basic)
    peratio_diluted = "UA: " + values[22]
    print(peratio_diluted)

    # Audited
    peratio_table = market_info_tables[5]

    values = []

    for row in peratio_table.find_all("tr"):
        cells = row.find_all("td")
        for cell in cells:
            values.append(cell.get_text().strip())
    peratio_basic_a = " A: " + values[15]
    print("PE A: " + peratio_basic_a)
    peratio_diluted_a = " A: " + values[22]

    print("PED A: " + peratio_diluted_a)
    peratio_basic = peratio_basic + peratio_basic_a
    peratio_diluted = peratio_diluted + peratio_diluted_a
    '''
    ############################ FINANCIAL PERFORMANCE ##########################
    table_params = {'border': "1", 'width': "100%", 'cellspacing': "0", 'cellpadding': "0"}
    
    fp_table = soup.find_all("table", table_params)[1]
    values = []
    for row in fp_table.find_all("tr"):
        cells = row.find_all("td")
        for cell in cells:
            values.append(cell.get_text().strip())
    
        # 2013 Year index - 131
    eps_2013 = values[132]
    netassetvalue_2013 = values[136]
    
    netprofit_continue_2013 = values[138]
    
    netprofit_extraordinary_2013 = values[139]
    # 2014 Year Index - 140
    
    eps_2014 = values[141]
    netassetvalue_2014 = values[145]
    netprofit_continue_2014 = values[147]
    netprofit_extraordinary_2014 = values[148]
    
    ######## DIVIDEND ##############
    table_params = {'border': "1", 'width': "100%", 'cellspacing': "0", 'cellpadding': "0"}
    
    dividend_table = soup.find_all("table", table_params)[2]
    values = []
    for row in dividend_table.find_all("tr"):
        cells = row.find_all("td")
        for cell in cells:
            values.append(cell.get_text().strip())
    
    dividend_2014 = values[80]
    dividend_2013 = values[75]
    dividend_2012 = values[70]
    dividend_2011 = values[65]
    dividend_2010 = values[60]
    dividend_2009 = values[55]
    '''
    #################### HISTORY AND OTHERS #########################

    history_table = market_info_tables[2]

    values = []
    for row in history_table.find_all("tr"):
        cells = row.find_all("td")
        for cell in cells:
            values.append(cell.get_text().strip())

    lastAgm = values[0].replace("Last AGM held on:",
                                "").replace("For the year ended:",
                                            "")[:-13].strip()
    print("Last AGM: " + lastAgm)

    yearEnd = values[9].strip()
    print("Year End: " + yearEnd)

    bonusIssue = values[5].strip()
    print("Bonus Issue: " + bonusIssue)
    rightIssue = values[7].strip()
    print("Right Issue: " + rightIssue)
    reserveandsurplus = values[11]
    print("Reserve and surplus: " + reserveandsurplus)

    ################## SHARE PERCENTAGE ####################

    sp_table = market_info_tables[8]

    values = []
    for row in sp_table.find_all("tr"):
        cells = row.find_all("td")
        for cell in cells:
            values.append(cell.get_text().strip())

    marketCatagory = values[5]
    print("Market Catagory: " + marketCatagory)
    sponsor = values[10].replace("Sponsor/Director:", "").strip()
    print("Sponsor: " + sponsor)
    govt = values[11].replace("Govt:", '').strip()
    print("Govt. : " + govt)
    institute = values[12].replace("Institute:", '').strip()
    print("Institute: " + institute)
    foreign = values[13].replace("Foreign:", '').strip()
    print("Foreign: " + foreign)

    public = values[14].replace("Public:", "").strip()
    print("Public: " + public)

    ####### GATHER ALL AND ADDEM TOGETHER
    final_result["closeprice"] = closePrice
    final_result["ycp"] = yesterday_close_price
    final_result["openprice"] = open_price
    final_result["adjustopenprice"] = adjust_open_price
    final_result["daysrange"] = daysRange
    final_result["volume"] = volume
    final_result["totaltrade"] = totalTrade
    final_result["marketcapital"] = market_capital
    final_result[
        "authorizedcapital"] = authorized_capital  # authorized_capital
    final_result["paidupvalue"] = paidupvalue  # paidupvalue
    final_result["facevalue"] = facevalue  # facevalue
    final_result["noofsecurities"] = noofsecurities  # noofsecurities
    final_result["weekrange"] = weekRange  # weekRange
    final_result["marketlot"] = marketLot  # marketLot
    final_result["segment"] = segment  # segment
    final_result["rightissue"] = rightIssue  # rightIssue
    final_result["yearend"] = yearEnd  # yearEnd
    final_result["reserveandsurplus"] = reserveandsurplus  # reserveandsurplus
    final_result["bonousissue"] = bonusIssue  # bonusIssue
    final_result["companyname"] = company_name
    final_result["ltp"] = lastTrade
    final_result["changeval"] = change_num
    final_result["changepercentage"] = change_percentage
    final_result["lastagm"] = lastAgm  # lastAgm
    final_result["p_e_ratio_basic"] = peratio_basic  # peratio_basic
    final_result["p_e_ratio_diluted"] = peratio_diluted  # peratio_diluted
    final_result["marketcatagory"] = marketCatagory  # marketCatagory
    # Financial-performance / dividend scraping is disabled (see the
    # commented-out section above); the fields are emitted as "0".
    final_result["fp2013_epscontinueoperation"] = "0"  # eps_2013
    final_result["fp2013_NAV"] = "0"  # netassetvalue_2013
    final_result[
        "fp2013_NPATcontinueoperation"] = "0"  # netprofit_continue_2013
    final_result[
        "fp2013_NPATextraordinaryincome"] = "0"  # netprofit_extraordinary_2013
    final_result["fp2014_epscontinueoperation"] = "0"  # eps_2014
    final_result["fp2014_NAV"] = "0"  # netassetvalue_2014
    final_result[
        "fp2014_NPATextraordinaryincome"] = "0"  # netprofit_extraordinary_2014
    final_result[
        "fp2014_NPATcontinueoperation"] = "0"  # netprofit_continue_2014
    final_result["fpcontinue_dividend_2009"] = "0"  # dividend_2009
    final_result["fpcontinue_dividend_2010"] = "0"  # dividend_2010
    final_result["fpcontinue_dividend_2011"] = "0"  # dividend_2011
    final_result["fpcontinue_dividend_2012"] = "0"  # dividend_2012
    final_result["fpcontinue_dividend_2013"] = "0"  # dividend_2013
    final_result["fpcontinue_dividend_2014"] = "0"  # dividend_2014
    final_result["sp_sponsor_director"] = sponsor  # sponsor
    final_result["sp_govt"] = govt  # govt
    final_result["sp_institute"] = institute  # institute
    final_result["sp_foreign"] = foreign  # foreign
    final_result["sp_public"] = public  # public
    final_result["amounttradedinbdt"] = amount_traded_in_bdt
    final_result["item"] = item_name

    json_converted = json.dumps(final_result)
    return json_converted
Example #14
0
 def returnjs_fromjs(self, u):
     """Download the script at *u* and return its beautified source as a
     list of lines; [] on any error (the error is printed)."""
     lines = []
     try:
         lines = beautify(get2str(u)).split('\n')
     except Exception as E:
         print(E, E.__class__)
     return lines
Example #15
0
import faster_than_requests as r
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
import time
import logging
import sys

# Fetch a pastebin payload up front (used later in the script — TODO confirm).
source = r.get2str("https://pastebin.com/raw/jr5qpQEm")

# Log in to Discord through a real browser session.
driver = webdriver.Firefox()
driver.get('https://discord.com/login')

# Wait (up to 10s each) for the login form elements to appear.
email = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.NAME, 'email')))
password = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.NAME, 'password')))
submit = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.XPATH, '//button[@type="submit"]')))

# Placeholders: the user must substitute real credentials before running.
email.send_keys('YOUR_DISCORD_EMAIL')
password.send_keys('YOUR_DISCORD_PASSWORD')
submit.click()
time.sleep(15)  # time to enter 2FA code
driver.get(
    'https://discord.com/channels/@me/ID_OF_YOUR_DISCORD_CHAT_WITH_POKEMON_BOT'
)
try:
    textbox = WebDriverWait(driver, 10).until(
Example #16
0
# Micro-benchmark: 9 sequential GETs with requests vs. faster_than_requests.
import time
import requests
import faster_than_requests as reqs

time1 = time.time()
for i in range(9):
    requests.get("https://httpbin.org/get").content
print(f'Requests = {time.time() - time1}')

time2 = time.time()
for i in range(9):
    reqs.get2str("https://httpbin.org/get")
print(f'Faster_than_requests = {time.time() - time2}')

# Sample run:
# $ python3 example2.py
# Requests = 7.90063419342041
# Faster_than_requests = 2.0017221927642822
Example #17
0
    res = requests.get('https://twitter.com/')
    print(".")
# Report for the previous benchmark section (timeStart is set earlier in the
# file, outside this excerpt).
timeEnd = time.perf_counter() - timeStart
averageTime = timeEnd / 10
timeEnd = round(timeEnd, 4)
print(f"Made 10 requests with an average of {averageTime} seconds each")

# Benchmark: 10 fetches via PycURL (perform_rs returns the body as a string).
print("PyCurl Requests")
timeStart = time.perf_counter()
for _ in range(10):
    c = pycurl.Curl()
    c.setopt(c.URL, 'https://twitter.com/')
    c.setopt(pycurl.SSL_VERIFYPEER, 0)  # skip TLS verification
    c.perform_rs()
    res = c
    print(".")
timeEnd = time.perf_counter() - timeStart
averageTime = timeEnd / 10
timeEnd = round(timeEnd, 4)
print(f"Made 10 requests with an average of {averageTime} seconds each")

# Benchmark: 10 fetches via faster_than_requests.
print("Faster than Requests")
timeStart = time.perf_counter()
for _ in range(10):
    res = faster_than_requests.get2str('https://twitter.com/')
    print(".")
timeEnd = time.perf_counter() - timeStart
averageTime = timeEnd / 10
timeEnd = round(timeEnd, 4)
print(f"Made 10 requests with an average of {averageTime} seconds each")
Example #18
0
def input_url(index):
    """Prompt for a URL, record it at *index* in the module-level list_url,
    and return the fetched page body."""
    url = input("Enter URL: ")
    list_url[index] = url
    return req.get2str(url)
Example #19
0
def getItemDetail(item_name):
    """Scrape the (newer-layout) stock-detail page for *item_name* and
    return its market information, basic info, history and share-percentage
    figures as a JSON string.

    Parsing is positional: tables and cells are read by index, so any layout
    change on the source page breaks the indices below. Fields this page
    does not expose are emitted as "--".

    Relies on module-level names: get_url, settings, req, requests,
    BeautifulSoup and json.
    """
    final_result = {}
    url = get_url(item_name)

    if settings.use_fast_requests:
        html = req.get2str(url)
    else:
        r = requests.get(url, verify=False)
        html = r.text

    # Drop non-ASCII characters before parsing to sidestep encoding issues.
    soup = BeautifulSoup(
        str(html).encode('ascii', 'ignore').decode('ascii'), "html.parser")

    ################## GRAB COMPANY NAME #####################
    comp_table = soup.find("div", {"class": "com_title"})
    company_name = comp_table.get_text().strip()
    print(company_name)

    ####################   Grab Data from Market Information    #########################
    table_params = {
        'border': '0',
        'cellspacing': '0',
        'width': '100%',
        'cellpadding': '0'
    }
    market_info_tables = soup.find_all("table", table_params)

    ###################    First Table ####################################
    m_i_table = market_info_tables[1]

    # Each row is "label | value"; keep only the value cell (index 1).
    values = []
    for tr in m_i_table.find_all("tr"):
        td = tr.find_all("td")
        values.append(str(td[1].text).strip())
        # print td.text

    lastTrade = values[0]
    print("Last Tarade - " + lastTrade)

    change_percentage = values[2]
    print("Change Percentage:" + change_percentage)
    open_price = values[3]
    print("Open Price: " + open_price)

    # Change value is not given directly; derive it from LTP - open.
    change_num = "0"
    try:
        change_num = str(format(float(lastTrade) - float(open_price), '.2f'))
    except (ValueError, TypeError):
        # Non-numeric cells leave the "0" default.
        pass

    print("Change Num: " + change_num)

    daysRange = values[4]
    print("Days Range: " + daysRange)

    ############### Second Table ##################
    m_i_table = market_info_tables[2]
    values = []
    for tr in m_i_table.find_all("tr"):
        td = tr.find_all("td")
        values.append(str(td[1].text).strip())
        # print td.text

    totalTrade = values[0]
    print("Total Trade: " + totalTrade)

    volume = values[1]
    print("Volume : " + volume)

    closePrice = values[2]
    print("Closing Price: " + closePrice)

    yesterday_close_price = values[3]
    print("YCP:" + yesterday_close_price)

    market_capital = values[4]
    print("Market Capital: " + market_capital)

    ############## BASIC INFO ############
    m_i_table = market_info_tables[4]
    values = []
    for tr in m_i_table.find_all("tr"):
        td = tr.find_all("td")
        try:
            values.append(str(td[1].text).strip())
        except Exception:
            # Rows without a value cell are skipped.
            pass

    marketLot = values[2]
    print("Market Lot: " + marketLot)

    facevalue = values[3]
    print("Face Value: " + facevalue)

    paidupvalue = values[4]
    print("Paid Up Value: " + paidupvalue)

    #### SECOND TABLE ######
    m_i_table = market_info_tables[5]
    values = []
    for tr in m_i_table.find_all("tr"):
        td = tr.find_all("td")
        try:
            values.append(str(td[1].text).strip())
        except Exception:
            pass

    marketCatagory = values[3]
    print("Market Catagory: " + marketCatagory)

    authorized_capital = values[4]
    print("Authorized Capital: " + authorized_capital)

    noofsecurities = values[5]
    print("Total no of sec. : " + noofsecurities)

    weekRange = values[6]
    print("Week's Range: " + weekRange)

    yearEnd = values[7]
    print("Year End: " + yearEnd)

    reserveandsurplus = values[8]
    print("Reserve and surplus: " + reserveandsurplus)

    ######## SHARE PERCENTAGE
    m_i_table = market_info_tables[7]
    values = []
    row = m_i_table.find_all("tr")[1]

    for td in row.find_all('td'):
        try:
            values.append(str(td.text).strip())
        except Exception:
            pass

    sponsor = values[1]
    print("Sponsor: " + sponsor)

    govt = values[2]
    print("Govt. : " + govt)

    institute = values[3]
    print("Institute: " + institute)

    foreign = values[4]
    print("Foreign: " + foreign)

    public = values[5]
    print("Public: " + public)

    #### LAST AGM ####
    # Locate the label cell by its exact text, then take its sibling value.
    lastAgmTexTd = soup.find("td", text="AGM Date ")
    lastAgmDateTd = lastAgmTexTd.find_next_sibling("td")

    values = []
    values.append(str(lastAgmDateTd.text).strip())

    lastAgm = values[0]
    print("Last AGM: " + lastAgm)

    #### Bonous Issue ####
    bonousIssueTextTd = soup.find("td", text="Bonus Issue ")
    bonousIssueTd = bonousIssueTextTd.find_next_sibling("td")
    values = []
    values.append(str(bonousIssueTd.text).strip())

    bonusIssue = values[0]
    print("Bonus Issue: " + bonusIssue)

    #########      WE ARE DONE WITH MARKET INFO TABLE #####################

    ################## BASIC INFORMATION ########################
    # NOTE(review): basic_info_table is never read below — kept for parity
    # with the other scraper variant in this file.
    second_table = market_info_tables[1]

    basic_info_table = []
    try:
        for td in second_table.find_all("tr"):
            basic_info_table.append(td.text)
    except Exception:
        pass
    # print str(values_second_table[2])

    ############### END OF BASIC INFORMATION ############

    #### Un Supported: fields this page layout does not expose.
    rightIssue = "--"
    peratio_basic = "--"
    peratio_diluted = "--"
    amount_traded_in_bdt = "--"
    adjust_open_price = "--"
    segment = "--"

    ####### GATHER ALL AND ADDEM TOGETHER
    final_result["closeprice"] = closePrice
    final_result["ycp"] = yesterday_close_price
    final_result["openprice"] = open_price
    final_result["adjustopenprice"] = adjust_open_price
    final_result["daysrange"] = daysRange
    final_result["volume"] = volume
    final_result["totaltrade"] = totalTrade
    final_result["marketcapital"] = market_capital
    final_result[
        "authorizedcapital"] = authorized_capital  # authorized_capital
    final_result["paidupvalue"] = paidupvalue  # paidupvalue
    final_result["facevalue"] = facevalue  # facevalue
    final_result["noofsecurities"] = noofsecurities  # noofsecurities
    final_result["weekrange"] = weekRange  # weekRange
    final_result["marketlot"] = marketLot  # marketLot
    final_result["segment"] = segment  # segment
    final_result["rightissue"] = rightIssue  # rightIssue
    final_result["yearend"] = yearEnd  # yearEnd
    final_result["reserveandsurplus"] = reserveandsurplus  # reserveandsurplus
    final_result["bonousissue"] = bonusIssue  # bonusIssue
    final_result["companyname"] = company_name
    final_result["ltp"] = lastTrade
    final_result["changeval"] = change_num
    final_result["changepercentage"] = change_percentage
    final_result["lastagm"] = lastAgm  # lastAgm
    final_result["p_e_ratio_basic"] = peratio_basic  # peratio_basic
    final_result["p_e_ratio_diluted"] = peratio_diluted  # peratio_diluted
    final_result["marketcatagory"] = marketCatagory  # marketCatagory
    # Financial-performance / dividend fields are not scraped in this
    # variant; emitted as "0" to keep the JSON schema stable.
    final_result["fp2013_epscontinueoperation"] = "0"  # eps_2013
    final_result["fp2013_NAV"] = "0"  # netassetvalue_2013
    final_result[
        "fp2013_NPATcontinueoperation"] = "0"  # netprofit_continue_2013
    final_result[
        "fp2013_NPATextraordinaryincome"] = "0"  # netprofit_extraordinary_2013
    final_result["fp2014_epscontinueoperation"] = "0"  # eps_2014
    final_result["fp2014_NAV"] = "0"  # netassetvalue_2014
    final_result[
        "fp2014_NPATextraordinaryincome"] = "0"  # netprofit_extraordinary_2014
    final_result[
        "fp2014_NPATcontinueoperation"] = "0"  # netprofit_continue_2014
    final_result["fpcontinue_dividend_2009"] = "0"  # dividend_2009
    final_result["fpcontinue_dividend_2010"] = "0"  # dividend_2010
    final_result["fpcontinue_dividend_2011"] = "0"  # dividend_2011
    final_result["fpcontinue_dividend_2012"] = "0"  # dividend_2012
    final_result["fpcontinue_dividend_2013"] = "0"  # dividend_2013
    final_result["fpcontinue_dividend_2014"] = "0"  # dividend_2014
    final_result["sp_sponsor_director"] = sponsor  # sponsor
    final_result["sp_govt"] = govt  # govt
    final_result["sp_institute"] = institute  # institute
    final_result["sp_foreign"] = foreign  # foreign
    final_result["sp_public"] = public  # public
    final_result["amounttradedinbdt"] = amount_traded_in_bdt
    final_result["item"] = item_name

    json_converted = json.dumps(final_result)
    return json_converted
Example #20
0
# Timing comparison: faster_than_requests vs. urllib against two local
# sureflap endpoints.
import faster_than_requests as requests
import time  # NOTE(review): unused in this excerpt
import datetime
import urllib.request

print("Faster")
start = datetime.datetime.now()
requests.get2str('http://localhost/sureflap-master/LockOutsite.php')
end = datetime.datetime.now()
time_taken = end - start
print('Time: ', time_taken)

start = datetime.datetime.now()
requests.get2str('http://localhost/sureflap-master/UnLock.php')
end = datetime.datetime.now()
time_taken = end - start
print('Time: ', time_taken)

print("UrlLib")
start = datetime.datetime.now()
webUrl = urllib.request.urlopen(
    'http://localhost/sureflap-master/LockOutsite.php')
end = datetime.datetime.now()
time_taken = end - start
print('Time: ', time_taken)

start = datetime.datetime.now()
webUrl = urllib.request.urlopen('http://localhost/sureflap-master/UnLock.php')
end = datetime.datetime.now()
time_taken = end - start
print('Time: ', time_taken)