def returnCurrent():
    stockSymbols = getSymbolsDatabase()
    totalPrice = []
    # Loops through all stocks, summing their current prices
    for stock in stockSymbols:
        price = getData(stock)
        converted = float(price[1])
        totalPrice.append(converted)
    newTotalPrice = sum(totalPrice)
    return round(newTotalPrice, 2)

def current():
    print("Checking current prices...\n")
    # Make sure to enter data in quotes
    stockSymbols = getSymbolsDatabase()
    totalPrice = []
    # Loops through all stocks, printing out their current prices
    for stock in stockSymbols:
        price = getData(stock)
        converted = float(price[1])
        totalPrice.append(converted)
        print("{} is valued at ${}\n".format(stock, price[1]))
    newTotalPrice = sum(totalPrice)
    print("Your portfolio is currently valued at ${}".format(round(newTotalPrice, 2)))

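# Hedged sketch, not part of the project: minimal stand-ins for the two
# helpers both functions above assume. getData(stock) is inferred to return
# an indexable (symbol, price-string) pair, since the callers read price[1].
# The names match the calls above; the bodies are hypothetical placeholders.
def getSymbolsDatabase():
    # illustrative static watchlist instead of a real database read
    return ["AAPL", "MSFT"]

def getData(stock):
    # illustrative fixed quote instead of a live price lookup
    return (stock, "123.45")
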
import os
import pickle

import scrape

def getPlayers(playerFile, nGames):
    players = []
    if os.path.exists(playerFile):
        # Load saved player data
        with open(playerFile, 'rb') as f:
            players = pickle.load(f)
        # Add missing players
        # players2, defense = scrape.getData(nGames, ['Joseph Young'])
        # for name in players2:
        #     players[name] = players2[name]
        # pickle.dump(players, open(playerFile, 'wb'), protocol=3)
    else:
        # Scrape and save player data
        players, defenses = scrape.getData(nGames, None)
        with open(playerFile, 'wb') as f:
            pickle.dump(players, f, protocol=3)
    return players

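# Hedged usage sketch: the cache file name and game count below are
# illustrative stand-ins, not values taken from this project.
if __name__ == '__main__':
    players = getPlayers('players.pkl', 10)
    print('Loaded {} players'.format(len(players)))
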
import getGames
import scrape

date = input('Enter the game(s) date you want to pull in YYYYMMDD format, after 20151027: ')
url = "http://www.nba.com/gameline/" + str(date) + "/"

homeTeams, awayTeams = getGames.getInitialData(url)
homeTeams, awayTeams = getGames.getTeams(homeTeams, awayTeams)
homeTeams = getGames.removeTeamDuplicates(homeTeams)
awayTeams = getGames.removeTeamDuplicates(awayTeams)
homeTeams = getGames.cleanTeams(homeTeams)
awayTeams = getGames.cleanTeams(awayTeams)

schedule = getGames.createSchedule(awayTeams, homeTeams)
scoreboardURLs = getGames.getScoreboard(date, schedule)
print(scoreboardURLs)

gamePlayerDataOdd, gamePlayerDataEven = scrape.getData(scoreboardURLs)
#print(gamePlayerDataOdd)

names = scrape.getLinks(scoreboardURLs)
names = scrape.getNameLinks(names)
names = getGames.removeTeamDuplicates(names)
names = scrape.getNames(names)
print(names)

print('You have reached the end and the "input()" function is keeping the window open')
input()

import streamlit as st
from scrape import getData

st.title("Github Scraper")

userName = st.text_input('Enter Github Username')
if userName != '':
    try:
        info, repo_info = getData(userName)
        for key, value in info.items():
            if key != 'image_url':
                st.subheader('{} : {}'.format(key, value))
            else:
                st.image(value)
        st.subheader("Recent Repositories")
        st.table(repo_info)
    except Exception:
        st.subheader("User doesn't exist")

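# Hedged sketch of the return shape the app above implies for
# scrape.getData(userName): an info dict whose 'image_url' entry is rendered
# as an image, plus a table-friendly repo listing. This shape is inferred
# from the rendering code, not confirmed by the scraper; all values are
# made-up placeholders.
sample_info = {
    'name': 'Octocat',
    'followers': '42',
    'image_url': 'https://avatars.githubusercontent.com/u/583231',
}
sample_repo_info = [
    {'Repositories': 'hello-world', 'Stars': '7'},
]
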
from data_handlers import appendData
from email_handler import send_email
from graph import plotGraph
from config import config
from scrape import getData

for item in config:
    data = getData(item)
    appendData(data)
    if data["price_num"] < data["target_price"]:
        send_email(data)
    else:
        print("Product is not below target price")

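# Hedged sketch of the record shape the loop above implies: each scraped
# item must expose at least these two keys. The values are illustrative,
# not taken from this project.
sample_data = {
    "price_num": 799.0,      # current price, parsed to a number
    "target_price": 750.0,   # alert threshold carried in from config
}
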
import time

import notify2

import scrape

# path to the notification window icon
ICON_PATH = "/home/sanket/Desktop/training/Corona notifier/icon.ico"

# initialise the d-bus connection
notify2.init("Corona Notifier")

# create the Notification object
n = notify2.Notification(None, icon=ICON_PATH)

# set urgency level
n.set_urgency(notify2.URGENCY_NORMAL)

# set timeout for a notification (milliseconds)
n.set_timeout(10000)

while True:
    confirmed, cured, deaths = scrape.getData()
    # update the notification body with the latest figures
    n.update("Corona Update",
             "Confirmed Cases: " + confirmed + "\n"
             + "Cured: " + cured + "\n"
             + "Deaths: " + deaths)
    # show notification on screen
    n.show()
    # wait an hour before refreshing
    time.sleep(3600)

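# Hedged note: scrape.getData() is assumed to return three display-ready
# strings, since they are concatenated straight into the notification body,
# e.g. confirmed, cured, deaths = ("1,234", "1,000", "56").
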
print("Hello, starting up the python engine in main.py") print("Base_url: ", base_url) pull_the_plug = False json_written_locally = False success_log = {} # Extract data try: print("1. Trying to get link data from wikipedia...") print("Sys Arg V ", sys.argv) getData(start_link=sys.argv[1], first_leaf_limit=500, second_leaf_limit=500, output_location='data/history.gexf') print("--1. [x] SUCCESS") success_log[ '--1. Trying to get link data from wikipedia...'] = 'SUCCESS' except Exception as e: pull_the_plug = True print("--1. Error in getting link data from wikipedia...", e) success_log['--1. Getting link data from wikipedia'] = 'FAIL' # Parse/Prepare Data try: print('2. Trying to read data...') G = nx.read_gexf('./data/history.gexf')
import scrape, lineups

def output(data):
    # print players
    print('[ %s, %s, %s, %s, %s, %s, %s, %s, %s ]' % data[0])
    print("Projected Score: %0.2f" % data[1])

players = scrape.getData()
best = lineups.run(players)
for data in best:
    output(data)
    print('')

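# Hedged sketch of the lineup shape output() expects: each entry in best is
# inferred to be a (players, score) pair, where players is a 9-tuple of
# names (matching the nine %s placeholders) and score is a float. The names
# below are placeholders, not real projections.
sample = (('P1', 'P2', 'P3', 'P4', 'P5', 'P6', 'P7', 'P8', 'P9'), 275.43)
output(sample)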