def main():
    """Command-line driver: fetch videos per language, fetch a single
    channel by --id, or just run cleanup with --cleanup."""
    arg_parser = argparse.ArgumentParser(description='Search video in channel')
    arg_parser.add_argument('--id', type=str)
    arg_parser.add_argument('--max', '-m', type=int, default=10)
    arg_parser.add_argument('--cleanup', action='store_true')
    opts = arg_parser.parse_args()

    config = config_factory.load()
    master = master_site(config, merge_small_groups=False)

    # Guard clauses replace the original nested if/else.
    if opts.cleanup:
        cleanup(master)
        return

    if opts.id is not None:
        fetch_single(config, master, opts.id)
        return

    skip_lang = ['fr']
    for lang in config.site_config.languages:
        if lang in skip_lang:
            print("Skip fetching %s" % lang)
            continue
        print("==== Fetching %s ====" % lang)
        master = fetch_all(config, master, lang, max_result=opts.max)
    cleanup(master)
def main(keyword, debug, search_tags=False):
    """Search every weekly group (newest week last in storage, so iterated
    in reverse) for *keyword*, delegating the matching to search()."""
    config = config_factory.load(False)
    master = master_site(config)
    videos = master.global_site.video_data
    print("\nLocal search: %s" % keyword)
    weekly_groups = master.global_site.groups_by_week
    for group in reversed(weekly_groups):
        # print("# %s" % group.title)
        search(group.ids, videos, keyword, debug, search_tags)
def test_regroup():
    """Exercise cleanup.regroup_by_channel on the 'ru' site and print the
    group counts before and after regrouping."""
    import cleanup

    config = config_factory.load(False)
    master = data_loader.master_site(config, merge_small_groups=False)
    ru_site = master.lang_sites['ru']
    print(len(ru_site.groups))
    print("---")
    regrouped = cleanup.regroup_by_channel(ru_site.groups, ru_site.video_data)
    regrouped = sorted(regrouped, key=lambda grp: grp.title.lower())
    print(len(regrouped))
def main_url():
    """Print YouTube playlist URLs for the two most recent weekly groups.

    Each group's video ids are split into chunks of 50 (the playlist-URL
    limit) and one URL is printed per chunk.
    """
    config = config_factory.load(False)
    master = master_site(config)
    site = master.global_site
    groups = site.groups_by_week[:2]
    for g in groups:
        # enumerate(..., start=1) replaces the original hand-rolled idx
        # counter; output is identical (1-based chunk numbering).
        for idx, ids in enumerate(util.chunks(g.ids, 50), start=1):
            print('%s - %d' % (g.title, idx))
            url = playlist_url_from_ids(ids)
            print(url)
    print()
    print("Steps:")
    print("https://webapps.stackexchange.com/questions/120451/how-to-create-a-playlist-form-a-list-of-links-not-from-bookmarks")
def main():
    """Dump assorted statistics for the global site: group stats/details,
    per-language stats, totals, top videos and monthly counts."""
    config = config_factory.load(False)
    master = master_site(config)
    site = master.global_site
    time_groups = site.groups_by_time

    dump_groups_stat(time_groups, site.video_data)
    dump_groups_details(time_groups[1:6], site.video_data)

    stat = {'langs': dump_lang_stat(master)}
    #dump_groups_ids(time_groups[1:6], site.video_data)

    print("All videos: %s" % len(site.video_data))
    dump_top_videos(site)
    dump_monthly_stat(site.video_data)
def main():
    """Merge newly parsed groups from input.txt into each language's
    data/<lang>/data.txt file, then run the cleanup pass.

    Fixes: input.txt was opened without `with`, leaking the file handle;
    the loop variable `id` shadowed the builtin.
    """
    util.prepare_cache()
    config = config_factory.load()
    master = master_site(config, merge_small_groups=False)

    # Close input.txt deterministically instead of leaking the handle.
    with open("input.txt") as infile:
        new_data = parse(infile.readlines())

    for (lang, site) in master.lang_sites.items():
        if new_data.get(lang, None):
            groups = site.groups + [new_data[lang]]
            lines = []
            for group in groups:
                lines.append("# %s" % group.title)
                for vid in group.ids:  # renamed from `id` (builtin shadow)
                    lines.append("%s" % vid)
                lines.append("")
            with open("data/%s/data.txt" % site.lang, "w") as f:
                f.write('\n'.join(lines))
                print("Updated %s" % f.name)
    cleanup.main()
def main():
    """List all topics with their index, then add the second time-group's
    videos to a hard-coded YouTube playlist."""
    config = config_factory.load(False)
    master = master_site(config)
    site = master.global_site
    groups = site.groups_by_time
    api = PlaylistApi()
    api.auth()
    # g = groups[6]:
    # enumerate replaces the original hand-maintained `i` counter.
    for i, g in enumerate(site.topics):
        print("%d Spark AR TV - %s" % (i, g.title))
    print()
    #g = site.topics[16]
    g = groups[1]
    print(g.title)
    playlist_id = "PLJ-lx8QFIxZauK6a7vtMs4Ha0vhhNazTG"
    add_group_to_playlist(api, g, playlist_id)
def main(args):
    """Entry point: copy assets, validate the data, or generate the site,
    depending on the flags carried by *args*."""
    prod = args.prod
    index_only = args.index_only

    util.prepare_cache()
    config = config_factory.load(prod)
    # index-only implies global-only; prod implies channel mode.
    config.global_only = args.global_only or index_only
    config.index_only = index_only
    config.channel = args.channel or prod

    if args.assets:
        util.copy_all_assets(config)
    elif args.validate:
        video_cache = data_loader.load_cache()
        data_loader.load_global_groups(config, video_cache)
    else:
        gen_site(config)
def main():
    """List all topics with their index, then add the second weekly group's
    videos to a hard-coded YouTube playlist."""
    config = config_factory.load(False)
    master = master_site(config)
    site = master.global_site
    groups = site.groups_by_week
    api = PlaylistApi()
    api.auth()
    # g = groups[6]:
    # enumerate replaces the original hand-maintained `i` counter.
    for i, g in enumerate(site.topics):
        print("%d Spark AR TV - %s" % (i, g.title))
    print()
    #g = site.topics[16]
    g = groups[1]
    print(g.title)
    playlist_id = "PLJ-lx8QFIxZYuTqACE5t5V_iY2rase3Bn"
    add_group_to_playlist(api, g, playlist_id)
def main(keyword, debug):
    """Search every video's title and tags for *keyword* and print the
    matching ids; print 'No match' when nothing hits."""
    config = config_factory.load(False)
    master = master_site(config)
    video_data = master.global_site.video_data
    print("\nLocal search: %s" % keyword)
    match_count = 0
    for vid_id, video in video_data.items():
        hit = False
        if keyword in video.title.lower():
            hit = True
            if debug:
                print("- title: %s" % video.title)
        for tag in video.tags:
            if keyword in tag.lower():
                hit = True
                if debug:
                    print("- tag: %s" % tag)
        if hit:
            print("%s // %s" % (video.id, video.title))
            match_count += 1
    if match_count == 0:
        print("No match")
def main():
    """Load the master site with small groups unmerged and clean it up."""
    cfg = config_factory.load()
    cleanup(master_site(cfg, merge_small_groups=False))
def main():
    """Print the video ids from the ten most recent daily groups, one per
    line."""
    cfg = config_factory.load()
    master = master_site(cfg, merge_small_groups=False)
    recent_days = master.global_site.groups_by_day[:10]
    # Explicit accumulation instead of the original nested comprehension.
    ids = []
    for group in recent_days:
        ids.extend(group.ids)
    print('\n'.join(ids))
# NOTE(review): this chunk begins mid-function — the enclosing `def` (a
# thumbnail entry point that defines `site` and `ids`) is outside this view,
# so the indentation of the fragment below is a best-guess reconstruction.
        print("thumbnail.py {week-yyyy-mm-dd}")
        return
    # Slug (e.g. a week date) comes from the first CLI argument.
    slug = sys.argv[1]
    generate_custom_week_thumbnails(site, ids, slug)
    #generate_channel_thumbnails(site)

def main_ping():
    # Ping every video in the global site's data — presumably a liveness
    # check against the remote service; confirm with ping_all's definition.
    master = master_site(config)
    site = master.global_site
    ping_all(site.video_data)

def main_day():
    # Generate a thumbnail for each of the two most recent daily groups.
    master = master_site(config)
    site = master.global_site
    groups = site.groups_by_day[0:2]
    for g in groups:
        generate_day_thumbnail(site, g)

if __name__ == "__main__":
    # `config` is module-global: main() and the alternate entry points
    # above all read it.
    config = config_factory.load()
    #main_ping()
    #main_topics()
    main()
    #main_day()
import config_factory
import site_config
from lib import data_loader
from lib import util


def test_extract_youtube_id():
    """Print the id extracted from several spellings of a youtube link."""
    samples = [
        "IDI6xi9z3Zk",
        "/watch?v=IDI6xi9z3Zk",
        "/watch?v=IDI6xi9z3Zk //comment",
        "/watch?v=IDI6xi9z3Zk&t=1",
        "/watch?v=IDI6xi9z3Zk&t=1 //comment",
    ]
    for sample in samples:
        print(util.extract_youtube_id(sample))


if __name__ == "__main__":
    test_extract_youtube_id()
    config = config_factory.load(False)
    master = data_loader.master_site(config)
def main():
    """Warm the cache, load the master site with small groups unmerged,
    and run cleanup on it."""
    util.prepare_cache()
    cfg = config_factory.load()
    cleanup(master_site(cfg, merge_small_groups=False))