def test_new_item_soldout_removal():
    """A newly scraped item whose quantity reads SOLD OUT must not be added."""
    sold_out = collect.SurplusItem('Current', 'test1', '$10', 'SOLD OUT', 'http://', 'http://link1')
    added, _, _, _ = collect.run([], [sold_out], FakeScraper(sold_out))
    assert not added
def test_simple_addition():
    """A fresh, in-stock scraped item is reported in the added list."""
    item = collect.SurplusItem('Current', 'test1', '$10', '11', 'http://', 'http://link1')
    added, _, _, _ = collect.run([], [item], FakeScraper(item))
    assert len(added) == 1
    assert added[0] == item
def test_simple_removal():
    """An item stored in the DB but absent from the scrape is reported removed."""
    stored = collect.SurplusItem('Current', 'test1', '$10', '11', 'http://', 'http://link1')
    sold = collect.SurplusItem('Sold', 'test1', '$10', '11', 'http://', 'http://link1')
    _, removed, _, _ = collect.run([stored], [], FakeScraper(sold))
    assert len(removed) == 1
    assert removed[0] == sold
def test_simple_modification():
    """A quantity change on an existing item is reported as a (old, new) pair."""
    before = collect.SurplusItem('Current', 'test1', '$10', '10', 'http://', 'http://link1')
    after = collect.SurplusItem('Current', 'test1', '$10', '11', 'http://', 'http://link1')
    _, _, modified, _ = collect.run([before], [after], FakeScraper(after))
    assert len(modified) == 1
    assert modified[0] == (before, after)
def cli_main():
    """Dispatch the command-line entry point.

    Runs the collector when invoked as ``prog collect``; any other (or
    missing) subcommand is ignored.  The original indexed ``sys.argv[1]``
    unconditionally, which raised IndexError when the program was run
    with no arguments at all — guard the length first.
    """
    if len(sys.argv) > 1 and sys.argv[1] == 'collect':
        # Imported lazily so the collector's dependencies are only paid
        # for when that subcommand is actually requested.
        import collect
        collect.run()
import argparse
import os

import collect

if __name__ == '__main__':
    # Collect files listed in a .lst file from --source into destination.
    parser = argparse.ArgumentParser()
    parser.add_argument("--source", type=str, default="/", help="source directory")
    parser.add_argument("--var", type=str, action="append", default=[], help="variable")
    parser.add_argument("lstfile", type=str, help=".lst file")
    parser.add_argument("destination", type=str, help="destination directory")
    args = parser.parse_args()

    # The collector copies with preserved ownership/permissions, so it
    # must run as root — presumably; TODO confirm against collect.run().
    if os.getuid() != 0:
        raise Exception("You must be a root user.")

    destination = args.destination
    # A blank destination means the current directory.
    if destination.strip() == "":
        destination = "."
    # Refuse to target the filesystem root outright.
    if os.path.abspath(destination) in ["/", "//"]:
        raise Exception(
            "Setting system root directory as destination is totally insane.")

    # Fix: the original wrapped collect.parse_var in a redundant
    # `lambda x: ...` inside dict(map(...)); a dict comprehension over the
    # parsed (key, value) pairs is the idiomatic equivalent.
    variables = dict(collect.parse_var(v) for v in args.var)
    context = collect.Context(args.source, destination, variables)
    collect.run(args.lstfile, context)
""" Where the program is run. """ import collect from request import Scraper import time import pickle from notifications.Notification import PriceNotification, TradeNotification import parser if __name__ == "__main__": while True: # search the coins.txt and settings.txt files for updates collect.run() with open("coinList.pickle", "rb") as f: coins = pickle.load(f) # Request data for each coin scrape = Scraper(coins) for coin in scrape.coins: result = scrape.request_coin(coin.id) parser.parse(coin, result) # Alerts! # this is a list of all alerts for this coin alerts = [] buy_alert = coin.buy_notification() # TODO: Build sell notification in Coin class sell_alert = coin.sell_notification()
def run(self):
    """Run the collection step: ensure the output directory exists, then
    collect from this task's input path into its output path."""
    destination = Path(self.output().path)
    destination.parent.mkdir(parents=True, exist_ok=True)
    source = Path(self.input().path)
    collect.run(destination, source)
import argparse, os import collect if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("--source", type=str, default="/", help="source directory") parser.add_argument("--var", type=str, action="append", default=[], help="variable") parser.add_argument("lstfile", type=str, help=".lst file") parser.add_argument("destination", type=str, help="destination directory") args = parser.parse_args() if os.getuid() != 0: raise Exception("You must be a root user.") destination = args.destination if destination.strip() == "": destination = "." if os.path.abspath(destination) in ["/", "//"]: raise Exception("Setting system root directory as destination is totally insane.") context = collect.Context(args.source, destination, dict(map(lambda x: collect.parse_var(x), args.var))) collect.run(args.lstfile, context)