def get_subs():
    """Poll the YouTube Data API for each watched channel and download new uploads.

    Reads the module-level `settings` dict for the API key, channel list, and
    either `last_check` (incremental mode) or `video_count` (first-run mode).
    Each result item is handed to get_video(). On success, updates
    `settings["last_check"]` to the current UTC time and drops the one-shot
    `video_count` key. Exits the process on API errors.
    """
    for channel in settings["channels"]:
        params = [
            ("key", settings["api_key"]),
            ("channelId", channel),
            ("part", "snippet,id"),
            ("order", "date"),
        ]
        if "last_check" in settings:
            # Incremental run: only fetch uploads newer than the last check.
            # 50 is the API's maximum page size.
            params.append(("publishedAfter", settings["last_check"]))
            params.append(("maxResults", 50))
        elif "video_count" in settings:
            # First run: fetch however many uploads the user asked for.
            params.append(("maxResults", settings["video_count"]))
        else:
            log(
                "Not sure how this happened, but I don't know what videos to download.",
                "fail")
            exit(1)
        res = requests.get("https://www.googleapis.com/youtube/v3/search",
                           params=params)
        if not res.ok:
            log(res.text, "fail")
            exit(1)
        res = res.json()
        for video in res["items"]:
            get_video(video)
    # BUG FIX: the original stamped naive *local* time from datetime.now() with
    # a "Z" (UTC) suffix. On any machine not running in UTC that timestamp is
    # wrong by the UTC offset, causing uploads to be skipped or re-fetched.
    # Use genuine UTC in the RFC 3339 form the API expects.
    settings["last_check"] = datetime.datetime.now(
        datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    settings.pop("video_count", None)
def get_video(video_info):
    """Download the best-quality acceptable stream for one API search result.

    video_info: one item dict from a YouTube Data API v3 `search.list`
    response (uses `snippet.channelTitle` and `id.videoId`).
    Saves into <store_path>/<channel title>/ and then hands the downloaded
    file to get_audio() for audio extraction. Returns None.
    """
    # One sub-directory per channel, under the configured store path.
    save_path = os.path.join(settings["store_path"],
                             video_info["snippet"]["channelTitle"])
    mkdir(save_path)
    # NOTE(review): this uses the legacy pytube (<9) API — filter()/get()
    # directly on the YouTube object; modern pytube uses .streams. Confirm
    # the pinned pytube version before touching this.
    upload = YouTube("https://www.youtube.com/watch?v={0}".format(
        video_info["id"]["videoId"]))
    video = None
    res = ""
    i = 0
    # Walk downward from settings["max_quality"] until some stream matches.
    # First pass(es) pick a resolution; once `res` is set, a later pass
    # selects the concrete stream at that resolution.
    while not video:
        if not res:
            # video_resolutions presumably maps a quality index (str) to a
            # resolution label like "720p" — defined elsewhere in this file.
            resolution = video_resolutions[str(settings["max_quality"] - i)]
            if upload.filter(resolution=resolution):
                res = resolution
            else:
                i += 1
                if i > settings["max_quality"]:
                    log("No upload candidate matches quality criteria.", "warn")
                    return
        else:
            if len(upload.filter(resolution=res)) > 1:
                # Several candidates at this resolution: prefer mp4, since an
                # ambiguous get() would otherwise be unable to choose.
                # NOTE(review): if several candidates exist and NONE is mp4,
                # `video` is never assigned and this loop spins forever —
                # confirm whether that case can occur with real uploads.
                if upload.filter("mp4", resolution=res):
                    video = upload.get('mp4', res)
            else:
                # Exactly one candidate: take it regardless of container.
                video = upload.get(None, res)
    video.download(save_path)
    # Legacy pytube exposes filename and extension separately; rebuild the
    # on-disk name to pass to the audio extractor.
    get_audio(
        video_info,
        os.path.join(save_path, (video.filename + "." + video.extension)))
def mkdir(directory):
    """Ensure *directory* exists, creating it (and any parents) when missing.

    Returns True when the directory already exists or was created
    successfully, False when creation failed. In debug mode the OSError
    is re-raised instead of being swallowed.
    """
    # Fast path: nothing to do if it is already there.
    if os.path.isdir(directory):
        return True
    try:
        os.makedirs(directory)
    except OSError:
        log("Tried to make a directory, but couldn't.", "fail")
        if config["debug"]:
            raise
        return False
    log("Created new directory: {0}".format(directory), "success")
    return True
def sha256(filename):
    """Calculate the SHA256 hash of a file.

    filename: path of the file to hash.
    Returns the hex digest string, or False when the file could not be
    read (re-raising the OSError in debug mode).
    """
    sha2h = hashlib.sha256()
    try:
        with open(filename, "rb") as f:
            # Plain loop instead of the original side-effect-only list
            # comprehension; 64 KiB chunks instead of 256-byte reads (one
            # Python-level call per quarter-KB was needlessly slow).
            for chunk in iter(functools.partial(f.read, 65536), b""):
                sha2h.update(chunk)
    except OSError:
        # Narrowed from a bare `except:` so real bugs (KeyboardInterrupt,
        # typos) are not swallowed; I/O failure is the expected case here.
        log("Failed to calculate SHA256 for file: {0}".format(filename), "fail")
        if config["debug"]:
            raise
        return False
    return sha2h.hexdigest()
def update_feed(video_info, audio_path):
    """Placeholder for publishing *audio_path* as an episode in the podcast RSS feed.

    video_info: the YouTube API item for the episode (currently unused).
    audio_path: path to the extracted audio file (currently unused).
    For now this only logs that the feature is not implemented.
    """
    log("RSS feed not yet implemented. Sorry.", "fail")
#!/usr/bin/python3 from pytube import YouTube from HandyLib import log, config from HandyLib.file import mkdir, get_file_extension import os, datetime, requests, json, yaml if os.path.isfile("settings.yml"): settings = yaml.safe_load(open("settings.yml")) else: settings = {} # Check settings if not "last_check" in settings: log("Welcome to PodTube!", "header", force=True) log("Looks like this is the first time we've run.", force=True) log("In the future, I'll just check for uploads published since now. Just for this time, though...", force=True) settings["video_count"] = input( "How many uploads should we fetch from each channel? ") if not "api_key" in settings: settings["api_key"] = input("Please give me an API key to use: ") if not "channels" in settings: log("What channels do you want to watch for new uploads?", "end", force=True) # "end" log_type applies no styles settings["channels"] = [] while True: channel = input() if not channel: break
def extract_file(filepath, filename, flat=False, loop=True):
    """Extract common archive/compression formats and return the output names.

    Supports .zip, .gz and .tar. `flat` flattens any directory structure
    inside archives; `loop` runs one extra pass over the extracted files so
    layered formats (e.g. example.tar.gz) are fully unpacked.

    Returns the list of extracted file names, False on a failed extraction,
    or None when the extension is not a supported format. The source
    archive is deleted after a successful extraction.
    """
    import zipfile, gzip, tarfile, shutil
    out_files = []
    # Just in case we get passed something stupid...
    filename = str(filename)
    filepath = str(filepath)
    f_base, f_ext = os.path.splitext(filename)

    # ZIP archives
    if f_ext == ".zip":
        log("Expanding ZIP archive {0}.".format(filename))
        try:
            # Renamed from `zip` — don't shadow the builtin.
            with zipfile.ZipFile(os.path.join(filepath, filename)) as archive:
                # testzip() returns None or the name of the first bad file.
                if archive.testzip() is not None:
                    log(
                        "Malformed ZIP or contents corrupted! Unable to process.",
                        "fail")
                    return False
                if flat:
                    # Not using extractall() because we don't want a tree
                    # structure. (Removed the original's redundant nested
                    # `if flat:` whose else branch was unreachable.)
                    for member in archive.infolist():
                        member = unique_filename(filepath, member)
                        archive.extract(member, filepath)
                        out_files.append(str(member))
                else:
                    archive.extractall(filepath)
            # Delete the zip file now that we have its contents
            os.remove(os.path.join(filepath, filename))
        except Exception:
            # Narrowed from a bare `except:` (which also caught SystemExit
            # and KeyboardInterrupt).
            log(
                "Unable to expand ZIP archive {0}. You should check its headers or something."
                .format(filename), "fail")
            if config["debug"]:
                raise
            return False
    # GZIP compression
    elif f_ext == ".gz":
        log("Expanding GZIP compressed file {0}.".format(filename))
        try:
            out_fname = unique_filename(filepath, f_base)
            with gzip.open(os.path.join(filepath, filename), "rb") as f_in, open(
                    os.path.join(filepath, out_fname), "wb") as f_out:
                shutil.copyfileobj(f_in, f_out)
            out_files.append(out_fname)
            # Delete the gz file now that we have its contents
            os.remove(os.path.join(filepath, filename))
        except Exception:
            log(
                "Unable to expand GZIP file {0}. It's likely malformed.".format(
                    filename), "fail")
            if config["debug"]:
                raise
            return False
    # TAR archives
    elif f_ext == ".tar":
        log("Expanding TAR archive {0}.".format(filename))
        try:
            with tarfile.open(os.path.join(filepath, filename), "r") as tar:
                if flat:
                    # Not using extractall() because we don't want a tree
                    # structure; strip path info and extract regular files
                    # only. (Removed the redundant nested `if flat:`.)
                    for member in tar.getmembers():
                        if member.isreg():
                            member.name = unique_filename(
                                filepath, os.path.basename(member.name))
                            tar.extract(member, filepath)
                            out_files.append(member.name)
                else:
                    # BUG FIX: the original had no non-flat branch for TAR,
                    # so the archive was deleted without ever being
                    # extracted. Mirror the ZIP branch's behavior.
                    tar.extractall(filepath)
            # Delete the tar file now that we have its contents
            os.remove(os.path.join(filepath, filename))
        except Exception:
            log(
                "Unable to expand TAR archive {0}. Something is wrong with it."
                .format(filename), "fail")
            if config["debug"]:
                raise
            return False
    # The file is not compressed or archived, or not a supported format
    else:
        return
    if not loop:
        # BUG FIX: the docstring promised a list of output files but the
        # original never returned it.
        return out_files
    # Iterate back through, in case of layered archives or compressed
    # archives (e.g. example.tar.gz)
    for filename in out_files:
        # Set loop switch to False to avoid creating blackhole
        extract_file(filepath, filename, loop=False)
    return out_files