def main():
    """CLI entry point: render a single frame of a progress bar.

    Intended to be invoked repeatedly by an external driver, once per step:
    --stepno is the current step, --nsteps the total, --file the bar label.
    Extra positional arguments are accepted and ignored (argparse.REMAINDER).
    """
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument("--stepno", type=int, required=True)
    cli.add_argument("--nsteps", type=int, required=True)
    cli.add_argument("--file", type=str, required=True)
    cli.add_argument("remainder", nargs=argparse.REMAINDER)
    opts = cli.parse_args()
    # The bar is constructed already positioned at the current step and
    # drawn exactly once; no next()/finish() calls are needed here.
    progress = Bar(opts.file, color='cyan', max=opts.nsteps, index=opts.stepno)
    progress.update()
    # Return the cursor to column 0 so the next invocation overdraws this line.
    sys.stdout.write("\r")
class Getter:
    """Downloads a pretrained archive over HTTP while showing progress."""

    def get(self, url, to):
        """Fetch *url* into DIR/pretrained.tar.gz, labelling the bar with *to*.

        NOTE(review): the file is always written to DIR + '/pretrained.tar.gz';
        *to* is used only as the progress-bar label — confirm this is intended.
        """
        self.p = None

        def update(blocks, bs, size):
            # urlretrieve reporthook: blocks transferred so far, block size,
            # and total size (negative when the server does not report it).
            if not self.p:
                if size < 0:
                    # Fix: total size unknown — use an indeterminate Spinner
                    # instead of creating a Bar with a negative max
                    # (mirrors Fetcher.get_urllib elsewhere in this file).
                    self.p = Spinner(to)
                else:
                    self.p = Bar(to, max=size)
            else:
                if size < 0:
                    self.p.update()
                else:
                    self.p.goto(blocks * bs)

        urllib.request.urlretrieve(url, DIR + '/pretrained.tar.gz', update)
        self.p.finish()
def create_epg(config):
    """Build an XMLTV <tv> tree from the channels listed in *config*.

    For each <channel> element, imports the matching grabber module from
    ``sites``, grabs its schedule, optionally enriches shows with cached or
    freshly-grabbed details, and appends <programme> elements. Returns the
    root ``xml.etree`` Element. Detail lookups are cached in cached_epg.pkl.
    """
    now = pytz.utc.localize(datetime.datetime.utcnow())
    # Global defaults; individual channels may override each of these.
    timespan_global = int(config.find("timespan_index").text)
    timespan_full_global = int(config.find("timespan_full").text)
    caching_global = config.find("caching").text
    if caching_global in ("on", "yes", "true", "True"):
        caching_global = True
    else:
        caching_global = False
    try:
        timespan_force_global = int(config.find("timespan_force").text)
    except TypeError:
        # element missing -> .text is None -> int(None) raises TypeError
        timespan_force_global = -1
    # Load the detail cache from the previous run, if any.
    try:
        with open("cached_epg.pkl", "rb") as fp:
            cache = pickle.load(fp)
    except IOError:
        cache = {}
    cache_new = {}
    # root element of epg
    tv = ET.Element("tv")
    tv.set("generator-info-name", "simplEPG v0.1")
    tv.set("generator-info-url", "https://github.com/eminga/simplEPG")
    c_pos = 0  # insertion index: all <channel> elements precede <programme>s
    successful = set()  # xmltv ids already filled, to skip duplicates
    for channel in config.findall("channel"):
        try:
            site = importlib.import_module('sites.' + channel.get("site"))
        except ModuleNotFoundError:
            print("Error: could not find module sites." + channel.get("site"))
            continue
        channelid = channel.get("xmltv_id")
        print(channel.get("site") + ":" + channelid)
        if channelid in successful:
            print("channel already added, skipping...")
            continue
        # Per-channel overrides fall back to the globals on missing attributes
        # (channel.get(...) returns None -> int(None) raises TypeError).
        try:
            timespan = int(channel.get("timespan_index"))
        except TypeError:
            timespan = timespan_global
        try:
            timespan_full = int(channel.get("timespan_full"))
        except TypeError:
            timespan_full = timespan_full_global
        try:
            caching = channel.get("caching")
            if caching is not None:
                if caching in ("on", "yes", "true", "True"):
                    caching = True
                else:
                    caching = False
            else:
                caching = caching_global
        except TypeError:
            caching = caching_global
        try:
            timespan_force = int(channel.get("timespan_force"))
        except TypeError:
            timespan_force = timespan_force_global
        if timespan_force == -1:
            # -1 means "never force": push the threshold far into the past
            timespan_force = -10000
        try:
            shows = site.grab(channel.get("site_id"), timespan)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # A broken grabber must not abort the whole EPG run.
            shows = []
            print("An error occured:")
            print(sys.exc_info())
        if len(shows) > 0:
            successful.add(channelid)
            c = ET.Element("channel", id=channel.get("xmltv_id"))
            ET.SubElement(c, "display-name").text = channel.text
            tv.insert(c_pos, c)
            c_pos += 1
            # create progress bar if module is available
            try:
                from progress.bar import Bar
            except ImportError:
                # Minimal stand-in exposing the subset of the progress.bar.Bar
                # API used below (max/index/next/update/finish).
                class Bar:
                    def __init__(self, label, max):
                        self.max = max
                        self.index = 0

                    def next(self):
                        self.index += 1

                    def update(self):
                        pass

                    def finish(self):
                        if self.index > 0:
                            print("%s shows added." % self.index)
            bar = Bar("Processing", max=len(shows))
            # Normalize grabber output: raw XML elements become dicts with
            # parsed start/stop datetimes so that all shows sort uniformly.
            for i in range(len(shows)):
                if isinstance(shows[i], type(ET.Element(None))):
                    starttime = parse_xmltv_date(shows[i].get("start"))
                    stoptime = shows[i].get("stop")
                    # Fix: was `type(stoptime) != None`, which is always true
                    # and sent None into parse_xmltv_date for shows without
                    # a stop attribute.
                    if stoptime is not None:
                        stoptime = parse_xmltv_date(stoptime)
                        shows[i] = {"xml": shows[i], "start": starttime, "stop": stoptime}
                    else:
                        shows[i] = {"xml": shows[i], "start": starttime}
            shows.sort(key=lambda r: r["start"])
            for i in range(len(shows)):
                show = shows[i]
                if "stop" in show:
                    stoptime = show["stop"]
                elif i < len(shows) - 1:
                    # No stop time given: assume the next show's start
                    stoptime = shows[i + 1]["start"]
                else:
                    # Last show with unknown stop time: cannot emit it
                    break
                # don't store shows that are already finished
                if stoptime < now:
                    bar.max -= 1
                    bar.max = max(bar.max, 1)
                    continue
                starttime = show["start"]
                # don't store shows that start more than "timespan" hours in the future
                if (starttime - now).total_seconds() / 3600 > timespan:
                    break
                if "xml" in show:
                    # Grabber already delivered a finished <programme> element
                    show = show["xml"]
                    show.set("channel", channelid)
                    tv.append(show)
                else:
                    url = show.pop("details-url", None)
                    if url is not None and len(url) > 0:
                        # Only fetch details for shows within timespan_full hours
                        if timespan_full > -1 and (starttime - now).total_seconds() / 3600 <= timespan_full:
                            if caching and (starttime - now).total_seconds() / 3600 > timespan_force:
                                force = False
                                try:
                                    try:
                                        details = cache_new[url]
                                    except KeyError:
                                        details = cache[url]
                                    show.update(details)
                                    # carry the entry over into the new cache
                                    cache_new[url] = details
                                except KeyError:
                                    # not cached anywhere: must grab
                                    force = True
                            else:
                                force = True
                            if force:
                                try:
                                    details = site.grabdetails(url)
                                    show.update(details)
                                    if caching:
                                        # don't store times in cache
                                        details.pop("start", None)
                                        details.pop("stop", None)
                                        cache_new[url] = details
                                except (AttributeError, TypeError):
                                    # site has no grabdetails / returned None
                                    pass
                    programme = ET.SubElement(tv, "programme",
                                              start=starttime.strftime("%Y%m%d%H%M%S %z"),
                                              stop=stoptime.strftime("%Y%m%d%H%M%S %z"),
                                              channel=channelid)
                    process_show(programme, show)
                bar.next()
            if bar.index > 0:
                # Early `break`s may leave the bar short of max; snap it shut
                bar.max = bar.index
            else:
                print("0 shows found.")
            bar.update()
            bar.finish()
        else:
            print("0 shows found.")
    if len(cache_new) > 0:
        with open("cached_epg.pkl", "wb") as fp:
            # Fix: dropped useless `cache = pickle.dump(...)` (dump returns None)
            pickle.dump(cache_new, fp)
    return tv
def pretty(stream):
    '''
    Read from fifo pipe and output a formatted stream to stdout.
    '''
    # progress bar for the tasks of the current play (None until play_start)
    progress = None
    # 1-based count of task_start events seen within the current play
    started_tasks = 1
    # host -> error text, reported at playbook_complete
    error_messages = {}
    # tag of the previously handled event; controls spacing between banners
    last_event = None
    try:
        # NOTE(review): outer loop re-polls the fifo after readline returns ""
        # (EOF) — presumably a busy-wait until the writer reopens; confirm.
        while True:
            for line in iter(stream.readline, ""):
                # each fifo line is one JSON event: {'tag', 'title', 'text', ...}
                event = json.loads(line)
                if event['tag'] == 'playbook_start':
                    print banner(event['title'])
                    # event['text'] is itself JSON carrying the play count
                    print bcolors.WARNING + "Contains: " + str(json.loads(event['text'])['plays']) + " Play(s)." + bcolors.ENDC
                    last_event = event['tag']
                elif event['tag'] == 'play_start':
                    # close out the previous play's bar, if any
                    if progress:
                        progress.next()
                    started_tasks = 1
                    num_tasks = json.loads(event['text'])['tasks']
                    # blank line between consecutive plays, but not right
                    # after the playbook banner
                    if last_event != 'playbook_start':
                        print "\n" + banner(event['title'])
                    else:
                        print banner(event['title'])
                    print bcolors.WARNING + "Contains: " + str(json.loads(event['text'])['tasks']) + " Task(s)." + bcolors.ENDC + "\n"
                    print "TASK(s):"
                    progress = Bar("Processing...", max=num_tasks, suffix=SUFFIX)
                    progress.update()
                    last_event = event['tag']
                elif event['tag'] == 'task_start':
                    if progress:
                        # show the current task name on the bar
                        progress.message = event['title']
                        progress.update()
                        # advance only from the second task on: the bar
                        # position reflects *completed* tasks
                        if started_tasks > 1:
                            progress.next()
                    started_tasks = started_tasks + 1
                    last_event = event['tag']
                elif event['tag'] == 'playbook_complete':
                    if progress:
                        progress.next()
                        progress.finish()
                    print banner(event['title'])
                    print banner("RUN Statistics:")
                    print output_statistics(event['text'])
                    # errors collected from 'unreachable' events, if any
                    if len(error_messages) > 0:
                        print banner("RUN Errors:")
                        print output_errors(error_messages)
                    last_event = event['tag']
                elif event['tag'] == 'unreachable':
                    # defer unreachable-host errors until the final summary
                    error_messages[event['host']] = event['text']
                    last_event = event['tag']
    except KeyboardInterrupt:
        # Ctrl-C: flush what we have and release the fifo
        stream.flush()
        stream.close()
class Fetcher:
    """Download files over HTTP(S)/FTP with progress display and hashing."""

    def check_hash(self, filename, dhash='md5'):
        '''Compute file hash.

        Reads *filename* in 64 KiB chunks and returns the hex digest for
        the chosen algorithm ('md5' or 'sha1'); returns -1 for any other
        algorithm name.
        '''
        # BUF_SIZE is totally arbitrary, change for your app!
        BUF_SIZE = 65536  # lets read stuff in 64kb chunks!
        if dhash == 'md5':
            fhash = hashlib.md5()
        elif dhash == 'sha1':
            fhash = hashlib.sha1()
        else:
            return -1
        with open(filename, 'rb') as f:
            while True:
                data = f.read(BUF_SIZE)
                if not data:
                    break
                fhash.update(data)
        return fhash.hexdigest()

    def get(self, url, fname=False):
        '''Download file from url using requests library.

        Returns the local filename (derived from the URL when *fname* is
        falsy). Shows a Bar when the server reports Content-Length, a
        Spinner otherwise.
        '''
        # NOTE(review): verify=False disables TLS certificate validation —
        # only acceptable for trusted/internal endpoints; confirm.
        r = requests.get(url, stream=True, verify=False)
        # Fix: servers may omit Content-Length; .get() avoids a KeyError
        # (the falsy None then correctly selects the Spinner path below).
        size = r.headers.get('content-length')
        if not fname:
            fname = url.split('/')[-1]
        if size:
            p = Bar(fname, max=int(size))
        else:
            p = Spinner(fname)
        with open(fname, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024 * 50):
                if chunk:  # filter out keep-alive new chunks
                    p.next(len(chunk))
                    f.write(chunk)
        p.finish()
        return fname

    def get_urllib(self, url, to):
        '''Download file from url using urllib (works for ftp urls).'''
        self.p = None

        def update(blocks, bs, size):
            # urlretrieve reporthook; size < 0 means total size unknown
            if not self.p:
                if size < 0:
                    self.p = Spinner(to)
                else:
                    self.p = Bar(to, max=size)
            else:
                if size < 0:
                    self.p.update()
                else:
                    self.p.goto(blocks * bs)

        try:
            urlretrieve(url, to, update)
        except SSLCertVerificationError:
            # NOTE(review): this disables certificate verification globally
            # for the rest of the process, not just for this retry — confirm.
            ssl._create_default_https_context = ssl._create_unverified_context
            urlretrieve(url, to, update)
        self.p.finish()
def pretty(stream):
    '''
    Read from fifo pipe and output a formatted stream to stdout.
    '''
    # progress bar for the current play's tasks (created on play_start)
    progress = None
    # count of task_start events seen in the current play (1-based)
    started_tasks = 1
    # host -> error text collected from 'unreachable' events
    error_messages = {}
    # previous event tag; used to decide banner spacing
    last_event = None
    try:
        # NOTE(review): readline returning "" (EOF) ends the inner loop and
        # the outer while retries immediately — presumably polling the fifo
        # until a writer reappears; confirm.
        while True:
            for line in iter(stream.readline, ""):
                # one JSON event per fifo line: {'tag', 'title', 'text', ...}
                event = json.loads(line)
                if event['tag'] == 'playbook_start':
                    print banner(event['title'])
                    # 'text' is nested JSON holding the play count
                    print bcolors.WARNING + "Contains: " + str(
                        json.loads(event['text'])
                        ['plays']) + " Play(s)." + bcolors.ENDC
                    last_event = event['tag']
                elif event['tag'] == 'play_start':
                    # finish off the previous play's bar, if one exists
                    if progress:
                        progress.next()
                    started_tasks = 1
                    num_tasks = json.loads(event['text'])['tasks']
                    # extra newline between plays, except right after the
                    # playbook banner
                    if last_event != 'playbook_start':
                        print "\n" + banner(event['title'])
                    else:
                        print banner(event['title'])
                    print bcolors.WARNING + "Contains: " + str(
                        json.loads(event['text'])
                        ['tasks']) + " Task(s)." + bcolors.ENDC + "\n"
                    print "TASK(s):"
                    progress = Bar("Processing...", max=num_tasks, suffix=SUFFIX)
                    progress.update()
                    last_event = event['tag']
                elif event['tag'] == 'task_start':
                    if progress:
                        # display the task title on the bar
                        progress.message = event['title']
                        progress.update()
                        # only advance from the second task onward: the bar
                        # tracks completed tasks
                        if started_tasks > 1:
                            progress.next()
                    started_tasks = started_tasks + 1
                    last_event = event['tag']
                elif event['tag'] == 'playbook_complete':
                    if progress:
                        progress.next()
                        progress.finish()
                    print banner(event['title'])
                    print banner("RUN Statistics:")
                    print output_statistics(event['text'])
                    # dump any deferred unreachable-host errors
                    if len(error_messages) > 0:
                        print banner("RUN Errors:")
                        print output_errors(error_messages)
                    last_event = event['tag']
                elif event['tag'] == 'unreachable':
                    # remember the failure; reported at playbook_complete
                    error_messages[event['host']] = event['text']
                    last_event = event['tag']
    except KeyboardInterrupt:
        # Ctrl-C: flush pending output and close the fifo
        stream.flush()
        stream.close()