def main(options, args):
    """Print a pivot-point table (R2/R1/P/S1/S2) for the requested tickers.

    options/args come from optparse; at least one of them must be non-empty,
    otherwise usage is printed and the process exits via show_usage_and_die.
    Tickers that fail to resolve (HTTP error from the quote source) are
    skipped with a warning instead of aborting the whole run.
    """
    if not options and not args:
        show_usage_and_die(usage)
    # get_requested_tickers is relied on to de-duplicate the symbols
    # ("Remove dups, we're not chumps" in the original).
    all_tickers = get_requested_tickers(options, args)
    all_pivs = []
    for ticker in all_tickers:
        try:
            all_pivs.append(PivPoints(ticker, get_prev_day(ticker)))
        except urllib2.HTTPError as e:
            # Consistent with the sibling tools: include the HTTP error detail
            # in the warning instead of silently dropping it.
            sys.stderr.write("warning: ticker {0} not found: {1}\n\n".format(
                ticker, str(e)))
    sys.stderr.write("warning: prices may be delayed. Use them as reference only.\n\n")
    table = []
    for piv in all_pivs:
        msg = "Closest to {0} : {1}".format(*piv.get_closest_line())
        table.append([piv.ticker, piv.r2, piv.r1, piv.p,
                      piv.s1, piv.s2, piv.price, msg])
    headers = ["Company", "R2", "R1", "P", "S1", "S2", "Price", "Close to"]
    # print(x) with a single argument behaves identically under Python 2 and 3.
    print(tabulate.tabulate(table, headers=headers, tablefmt="grid"))
def main():
    """Compound-interest calculator driven by sys.argv.

    Expects exactly three positional arguments:
        argv[1] = principal (float)
        argv[2] = rate per period (float, e.g. 0.05 for 5%)
        argv[3] = number of periods (int)

    Prints the running balance after each period (rounded to cents each
    period, i.e. interest compounds on the rounded balance) and the final
    total.  Exits via show_usage_and_die on bad arity or unparsable numbers.
    """
    if len(sys.argv) != 4:
        show_usage_and_die(usage)
    try:
        principal, rate = map(float, sys.argv[1:3])
        periods = int(sys.argv[3])
    except ValueError as e:
        # float()/int() on bad input raise ValueError; anything broader
        # would hide real bugs (the original caught bare Exception).
        sys.stderr.write(str(e) + "\n")
        show_usage_and_die(usage)
    total = principal
    # Periods are reported 1-based: after the first compounding period the
    # label reads "Period 1" (the original off-by-one printed "Period 0").
    for period in range(1, periods + 1):
        total = round(total * (1 + rate), 2)
        print("\tPeriod {0} : {1}".format(period, total))
    print("Total = {0}".format(total))
def main(opts, args):
    """Print a moving-average table for the requested tickers.

    For each ticker, fetches MovAvgs data (price plus 5/10/20/50-day MAs)
    and reports which MAs the current price trades above.  Rows where the
    price is above its MAs are listed first, sorted so the tickers above
    the most MAs lead.  With opts.plot set, also prints shortened Yahoo
    plot URLs (slow: one URL-shortening round trip per ticker).
    """
    if not opts and not args:
        show_usage_and_die(usage)
    all_tickers = get_requested_tickers(opts, args)
    table = []
    for ticker in all_tickers:
        try:
            data = MovAvgs(ticker)
        except urllib2.HTTPError as e:
            # Fix: the original warning was missing its trailing newline.
            sys.stderr.write("warning: failed retrieving data for ticker {0}: {1}\n".format(
                ticker, str(e)))
            continue
        except Exception as e:
            # Best-effort: keep going on any other per-ticker failure.
            sys.stderr.write("{0} : unhandled error: {1}\n".format(ticker, str(e)))
            continue
        below_price = get_mavgs_below_price(data)
        if below_price:
            msg = "Price is above: {0}".format(below_price)
        else:
            msg = "Price is trading below MAs"
        table.append([ticker, data.price, data.ma_5, data.ma_10,
                      data.ma_20, data.ma_50, msg])
    headers = ["Company", "Price", "5-day", "10-day", "20-day", "50-day",
               "Price vs. MAs"]
    # Rearrange: "above" rows first; longer message == above more MAs, so
    # sorting by message length puts the strongest tickers on top.
    # (List comprehensions instead of filter(): same result, also Py3-safe.)
    above = [entry for entry in table if "above" in entry[6]]
    above.sort(key=lambda entry: len(entry[6]), reverse=True)
    table = above + [entry for entry in table if "below" in entry[6]]
    print(tabulate.tabulate(table, headers=headers, tablefmt="grid"))
    # Stop here unless plot links were requested (they are slow to fetch).
    if not opts.plot:
        return 0
    print("")
    print("Getting plot links...")
    for ticker in all_tickers:
        url = get_shorturl(get_mavgs_url(ticker))
        print("\t{ticker} : {url}".format(ticker=ticker, url=url))
def main(opts, args):
    """Report whether each requested ticker trades above its N-day high.

    N comes from opts.ndays (optparse).  For each ticker the current price
    is fetched via ystock and compared against the N-day high; rows marked
    "ABOVE" are listed before "below" rows.  Per-ticker failures are
    reported to stderr and skipped.

    NOTE(review): opts.ndays is an int (20) by default but a *string* when
    passed on the command line (optparse action="store") — presumably
    price_higher_than_days tolerates both; confirm against its definition.
    """
    if not opts and not args:
        common.show_usage_and_die(usage)
    all_tickers = common.get_requested_tickers(opts, args)
    table = []
    # The outer try looks like copy-paste of the inner handler; it is kept
    # in case iterating all_tickers itself can raise HTTPError (e.g. if it
    # is ever a lazy iterable) — TODO confirm and drop if it is a list.
    try:
        for ticker in all_tickers:
            try:
                price = float(ystock.get_price(ticker))
                is_above, high = price_higher_than_days(ticker, opts.ndays, price)
            except urllib2.HTTPError as e:
                sys.stderr.write("warning: failed retrieving data for {0}: {1}\n".format(
                    ticker, str(e)))
                continue
            except Exception as e:
                # Fix: the original message was missing its trailing newline.
                sys.stderr.write("{0}: error: {1}\n".format(ticker, str(e)))
                continue
            # Each row: ticker, current price, N-day high, verdict.
            table.append([ticker, price, high, "ABOVE" if is_above else "below"])
    except urllib2.HTTPError as e:
        # Fix: added the missing trailing newline here as well.
        sys.stderr.write("warning: failed retrieving data for {0}: {1}\n".format(
            ticker, str(e)))
    headers = ["Company", "Price",
               "{0}-day high".format(opts.ndays),
               "Price vs. {0}-day high".format(opts.ndays)]
    # Rearrange so all ABOVE rows come before the below rows.
    # (List comprehensions instead of filter(): same result, also Py3-safe.)
    aboves = [entry for entry in table if entry[3] == "ABOVE"]
    table = aboves + [entry for entry in table if entry[3] == "below"]
    print(tabulate.tabulate(table, headers=headers, tablefmt="grid"))
# Rearrange table: companies where price is above MAs appear first above = filter(lambda e: "above" in e[6], table) above = sorted(above, key=lambda a: len(a[6]), reverse=True) table = above + filter(lambda e: "below" in e[6], table) print tabulate.tabulate(table, headers=headers, tablefmt="grid") # Stop if not opts.plot: return 0 print print "Getting plot links..." for ticker in all_tickers: url = get_shorturl(get_mavgs_url(ticker)) print "\t{ticker} : {url}".format( ticker=ticker, url=url ) if __name__ == "__main__": if len(sys.argv) == 1: show_usage_and_die(usage) parser = get_default_optparser() parser.add_option("-p", "--plot", dest="plot", default=False, action="store_true", help="Get links to Yahoo plots. This will be slow.") sys.exit(main(*parser.parse_args()))
# Each entry in table: table_entry = [ticker, price, high, "ABOVE" if is_above else "below" ] table.append(table_entry) except urllib2.HTTPError as e: sys.stderr.write("warning: failed retrieving data for {0}: {1}".format( ticker, str(e))) headers = ["Company", "Price", "{0}-day high".format(opts.ndays), "Price vs. {0}-day high".format( opts.ndays)] # Lets rearrange the table so all ABOVEs appear before belows aboves = filter(lambda a: a[3] == "ABOVE", table) table = aboves + filter(lambda a: a[3] == "below", table) print tabulate.tabulate(table, headers=headers, tablefmt="grid") if __name__ == "__main__": if len(sys.argv) == 1: common.show_usage_and_die(usage) parser = common.get_default_optparser() parser.add_option("-n", "--ndays", dest="ndays", default=20, action="store", help="Number of days. Defaults to 20") sys.exit(main(*parser.parse_args()))