def run(name):
    """Search movies on Dygod by ``name`` and copy a download link to clipboard.

    Flow: search -> pick a result page -> pick a movie link -> copy link.
    ``choice_list`` acts as a 1..2 level menu stack; 'b'/'B' backs up a level.
    """
    dy = Dygod()
    movie_pages = dy.search(name)
    choice_list = []
    _cis = [x['name'] for x in movie_pages]
    choice_list.append(_cis)
    while choice_list:
        c = helper.num_choice(choice_list[-1])
        # 'b' at the top level just re-prompts the same (only) menu
        if str(c) in 'bB' and len(choice_list) == 1:
            continue
        page_url = movie_pages[c].get('url')
        movies = dy.page_links(name, page_url)
        if not movies:
            log.warn('\nnothing got from ({})\n'.format(page_url))
            continue
        # second-level menu: the individual movie links of the chosen page
        choice_list.append([x['name'] for x in movies])
        c = helper.num_choice(choice_list[-1], depth=2)
        # pop immediately: the second level is never revisited via the stack
        choice_list.pop()
        if str(c) in 'bB':
            continue
        link = movies[c].get('url')
        log.debug(
            '\n{}\ndownload link url has been copied to [clipboard]!\n'.format(
                link))
        helper.copy_to_clipboard(link)
        return
def run(update, name, all_libs, count, gitstore, search_both_git_and_jobb):
    """CLI dispatcher for the Jobb/Md software index.

    :param update: refresh the cache then exit
    :param name: search the index by name then exit
    :param all_libs: interactively browse categories and their sub-resources
    :param count: print the number of cached softs then exit
    :param gitstore: use the Md (github) backend instead of Jobb
    :param search_both_git_and_jobb: when searching, also query the Md backend
    """
    jobb = Jobb()
    if gitstore:
        jobb = Md()
        # jobb.update_cache()
    if update:
        jobb.update_cache()
        # NOTE(review): os._exit(-1) skips cleanup/atexit; presumably
        # deliberate for this CLI — confirm vs sys.exit
        os._exit(-1)
    jobb.load_cache()
    if count:
        log.debug(jobb.soft_count)
        os._exit(-1)
    if name:
        v = jobb.search_by_name(name)
        log.debug('JOBBOLE {}'.format(v if v else 'Nothing found!!!'))
        if search_both_git_and_jobb:
            jobb = Md()
            v = jobb.search_by_name(name)
            log.debug('GITHUB {}'.format(v if v else 'Nothing found!!!'))
        os._exit(-1)
    if all_libs:
        # two-level interactive browse: category -> sub resource
        soft_jobb = jobb.soft
        c1 = helper.num_choice([s.get('name') for s in soft_jobb])
        cho = soft_jobb[c1]
        i = helper.num_choice([c.get('name') for c in cho['sub_res']])
        log.debug('[{}]({})'.format(cho['sub_res'][i]['name'],
                                    cho['sub_res'][i]['url']))
def run(catalog_index, login, lesson, down, show):
    """CLI dispatcher for the Da course catalog.

    :param catalog_index: required key into the module-level ``catalog`` dict
    :param login: perform login then quit
    :param lesson: fetch chapters and generate lesson urls
    :param down: download the selected content
    :param show: interactively list the 2017 catalog entries
    """
    if login:
        da = Da()
        da.login()
        abc.force_quit()
    # a valid catalog index is mandatory for everything below
    if not catalog_index or catalog_index not in catalog.keys():
        log.error('catalog index needed: [{}]'.format(','.join(
            catalog.keys())))
        abc.force_quit()
    da = Da(catalog_index=catalog_index)
    # reuse persisted session cookies instead of logging in again
    da.sess.cookies = da.load_cookies()
    da.load_chapters()
    if show:
        helper.num_choice([
            '{}({})'.format(x['name'], ','.join(x.keys())) for x in da.da2017
        ])
    if lesson:
        chaps = da.fetch_chapters()
        da.gen_lesson_url(chaps)
    if down:
        da.download()
def get_by_name(self, name=''):
    """Search the site for ``name``, let the user pick one result and
    open the matching netdisk download flow in a browser.

    Exits the process (``sys.exit``) when nothing can be fetched/found.
    """
    raw = self.bs4get(My.get('search').format(name), to=10)
    if not raw:
        sys.exit('cannot got {}'.format(name))
    art_docs = raw.find('div', id='content')
    candidates = self.get_articles(art_docs)
    if not candidates:
        sys.exit('cannot find {}'.format(name))
    i = helper.num_choice([
        '{} ({})'.format(c.get('txt'), c.get('netdisk')) for c in candidates])
    _soft_info = candidates[i]
    log.debug('Your Choice is: [{}]'.format(_soft_info['txt']))
    soft = self.get_file_url(_soft_info)
    if not soft:
        log.error('cannot get file: ({})'.format(_soft_info['txt']))
        return
    else:
        log.debug('Try: [{}]({})'.format(_soft_info['txt'], soft[1]))
    # soft[0] encodes the netdisk kind; 1 -> ctfile, 3 -> baidu pan
    # (other codes are not supported)
    if soft[0] == 1:
        selen.ctfile_by_chrome(soft[1])
    elif soft[0] == 3:
        selen.baidu_pan_by_chrome(soft[1], soft[2])
    else:
        log.warn('Not Support')
def run(name, simple):
    """Browse FriendlyARM boards and show hardware details for a pick.

    :param name: optional case-insensitive substring filter on board name
    :param simple: when truthy, skip printing the GPIO table
    """
    fri = Fri()
    pcs = fri.main_page()
    if name:
        pcs = [x for x in pcs if x['name'].lower().find(name.lower()) != -1]
    c = 'b'
    # keep prompting until the user's answer is no longer 'b'/'B'
    while c in 'b,B'.split(','):
        c = helper.num_choice([x['name'] for x in pcs])
        if c in range(len(pcs)):
            pc = pcs[c]
            arm_info = fri.pc_detail(pc['url'], simple)
            if not arm_info:
                # BUGFIX: the fallback must still unpack into
                # (hd_info, gpio_table); the previous 1-element list
                # crashed the tuple unpack below with ValueError.
                arm_info = (['未找到,请查看web: {}'.format(pc['url'])], '')
            hd_info, gpio_table = arm_info
            if not simple:
                print(gpio_table)
            helper.pause_choice('{}\n{}\n{}'.format('*' * 64,
                                                    '\n'.join(hd_info),
                                                    '*' * 64),
                                fg_color='cyan')
            # abc.color_print('_' * 32)
            # go back to the board menu after showing details
            c = 'b'
def do_user_handler(self, dat_in):
    """Render a user list, loop on user choice, and stash the selection.

    Returns the raw menu answer ('b'/'n'/'p'/'go…'/falsy) to the caller;
    a numeric pick stores ``self.selected_user`` and pushes a 'user' menu.
    """
    def gen_str(dat):
        # pad the screen name to a fixed display width, compensating for
        # double-width CJK characters via base.cn_len
        name_ = dat.get('screen_name')
        name_ = '{0:{w}}'.format(
            name_,
            w=16 - base.cn_len(name_),
        )
        st_info = '微博: %-10s 关注: %-10s 粉丝: %-10s' % (
            str(dat.get('statuses_count')),
            str(dat.get('follow_count')),
            str(dat.get('followers_count')),
        )
        txt = '{}[{}]\t{:{}}\t'.format(name_, gender(dat.get('gender', '')),
                                       st_info, 48 - base.cn_len(st_info))
        txt += '级别: %-4s\t 关注我: %s\t已关注: %s' % (
            dat.get('urank') if dat.get('urank') else '',
            yorn(dat.get('follow_me')),
            yorn(dat.get('following')),
            # yorn(dat.get('like_me')), \t喜欢我:{}\t已喜欢:{}
            # yorn(dat.get('like')),
        )
        txt += ''
        return txt

    if not dat_in:
        log.warn('1. 账号无相关信息!')
        return
    users_ = [gen_str(x) for x in dat_in]
    if not users_:
        log.warn('2. 账号无相关信息!')
        return
    while True:
        c = helper.num_choice(
            users_,
            default=1,
            # valid_keys='p,P,n,N,go',
            valid_keys='all',
        )
        # pass navigation keys straight back to the caller
        if not c:
            return c
        if str(c) in 'bB':
            return str(c)
        if str(c) in 'nNpP':
            return str(c)
        if str(c).startswith('go'):
            print(str(c))
            return c
        # numeric answers are 1-based here (default=1) -> convert to index
        c = int(c) - 1
        self.selected_user = dat_in[c]
        self.add_menu_to_choices('user', 1)
def wbui(self, orig_dat, cb=None, **kwargs):
    """Generic list UI: show each item's '_txt', run ``cb`` on the pick.

    Loops until the user answers 'b'/'B', which is returned to the caller.
    ``cb`` defaults to an indented pretty-printer.
    """
    def _default_echo(item=''):
        with textui.indent(indent=2, quote=' '):
            pprint(item)

    handler = cb if cb else _default_echo
    labels = [entry.get('_txt') for entry in orig_dat]
    while True:
        answer = helper.num_choice(labels, **kwargs)
        if str(answer) in 'bB':
            return answer
        handler(orig_dat[answer])
def search_by_name(self, name):
    """Case-insensitive substring search over the flattened soft cache.

    :param name: substring to match against each entry's ``name``
    :returns: the single-candidate list when exactly one matches,
              ``None`` when nothing matches, otherwise the dict the
              user picks from a menu.
    """
    if not self.soft_flatten:
        self.load_cache()
    name = name.lower()
    flat = self.soft_flatten
    cand_soft = [x for x in flat if x.get('name').lower().find(name) != -1]
    if len(cand_soft) == 1:
        return cand_soft
    if not cand_soft:
        return None
    # BUGFIX: present readable names instead of raw dicts, consistent
    # with every other num_choice menu in this module
    cho = helper.num_choice([x.get('name') for x in cand_soft])
    return cand_soft[cho]
def click_fn_update_photos(self, user_details, init):
    """Let the user pick one album and refresh its photos.

    'b'/'B' re-prompts the album menu; any numeric pick triggers
    ``update_photos`` once and exits.
    """
    albums = self.load_albums_photos(user_details)
    picked = 'b'
    while picked in ['b', 'B']:
        rows = [
            '{:<32}{}({})'.format(alb['album_id'], alb['caption'],
                                  alb['count']['photos']) for alb in albums
        ]
        picked = helper.num_choice(rows)
        if picked in ['b', 'B']:
            continue
        self.update_photos(albums[picked], init_photos=init)
        break
def get_by_name(self, name='', page_num=1):
    """Search ``name`` on page ``page_num``, with n/p paging via recursion.

    Numeric answers are 1-based (default=1); 'n'/'p' recurse into the
    next/previous page, 'b' or a falsy answer returns.
    """
    log.debug('get {} with page {}'.format(name, page_num))
    _ts = time.time()
    raw = self.bs4get(M.search.format(page_num, name))
    _te = time.time()
    if _te - _ts > 5:
        log.warning('Take {}s to fetch!'.format(_te - _ts))
    if not raw:
        sys.exit('cannot got {}'.format(name))
    art_docs = raw.find('div', id='content')
    has_next_page = raw.find_all('a', class_='nextpostslink')
    candidates, total = self.get_articles(art_docs)
    if not candidates:
        # NOTE(review): the 'valid/total' message is formatted with
        # (name, total) — the first placeholder looks like it should be a
        # valid-count, not the name; confirm intent
        log.warning('valid/total {}/{}, try without `-d`'.format(
            name, total))
        return
    softs = [
        '{} ({})'.format(c.get('txt'), c.get('netdisk')) for c in candidates
    ]
    while True:
        c = helper.num_choice(softs, default=1, valid_keys='n,N,p,P')
        if not c:
            return c
        if str(c) in 'bB':
            return
        if str(c) in 'nN':
            if has_next_page:
                page_num += 1
                return self.get_by_name(name, page_num)
            else:
                log.warning(
                    'page {} is last page, check previous page with <pP> instead'
                    .format(page_num))
                continue
        if str(c) in 'pP':
            # NOTE(review): no first-page guard — pressing 'p' on page 1
            # requests page 0; confirm the site tolerates that
            page_num -= 1
            return self.get_by_name(name, page_num)
        if c:
            # convert the 1-based menu answer to a list index
            c = int(c) - 1
            _soft_info = candidates[c]
            log.debug('Your Choice is: [{}]'.format(_soft_info['txt']))
            self.do_download(_soft_info)
def show_user_followed(self, user_info):
    """Page through the users that ``user_info`` follows and act on one.

    'n'/'p' move between cached pages, 'b' exits; picking a user opens a
    small action menu (details / follow / unfollow).
    """
    select_user = {}
    c = ''
    while True:
        # on 'b'/'B' (or initial empty c) the previously cached page is
        # reused instead of re-fetching
        if c in ['n', 'N'] or not self.cached_users_followed:
            self.cached_users_followed_index += 1
            self.cached_users_followed = self._show_user_followed(
                user_info, self.cached_users_followed_index)
        if c in ['p', 'P'] or not self.cached_users_followed:
            if self.cached_users_followed_index <= 1:
                log.warn('already the first page.')
            else:
                self.cached_users_followed_index -= 1
                self.cached_users_followed = self._show_user_followed(
                    user_info, self.cached_users_followed_index)
        c = self.choose_user(self.cached_users_followed)
        if c in ['b', 'B']:
            return 'b'
        if isinstance(c, dict):
            # a concrete user was confirmed
            select_user = c
        elif isinstance(c, str):
            # navigation key — go fetch the requested page
            continue
        actions = [{
            'action': 'show_user_details',
            'txt': '查看详情',
        }, {
            'action': 'follow',
            'txt': '关注',
        }, {
            'action': 'unfollow',
            'txt': '取消关注',
        }]
        while True:
            c = helper.num_choice([x.get('txt') for x in actions],
                                  valid_keys='b,B')
            if c in range(len(actions)):
                print('{} > {}'.format(actions[c].get('txt'),
                                       select_user.get('name', '')))
                # dispatch to the chosen method on self
                getattr(self, actions[c].get('action'))(select_user)
                continue
            return 'b'
def check_local(scan_mode, client, name):
    """judge if found from local, only press ``s`` will go next"""
    if scan_mode:
        return False
    client.scan_all_songs()
    local_hits = client.is_file_exist(name)
    if not local_hits:
        return None
    CP.G('Found from local, still want search and download???')
    CP.R('press s to skip local, and re-download, or other key to exit')
    answer = helper.num_choice(
        local_hits,
        default='q',
        valid_keys='s',
        extra_hints='s-skip',
    )
    # anything except 's' means: keep the local copy (treat as found)
    return answer != 's'
def choose_user(self, users_in, auto_select=False):
    """Pick a user from ``users_in`` and confirm via a y/n detail view.

    :param auto_select: when True, skip the menu and take the first user
    :returns: the confirmed user dict, 'b' on rejection, or the raw
              menu answer (e.g. 'n'/'p') when nothing was picked.
    """
    def __show(user):
        # one display row: uid + gender symbol, name, then optional
        # badge-style extras
        sex_ = ''
        if user.get('sex'):
            sex_ = ' ♂ ' if user.get('sex') == 'm' else ' ♀ '
        _display = '{:<16}'.format(user['uid'] + sex_)
        _display += '{}'.format(user['name'])
        # for x in ['approve', 'person_num']:
        if user.get('approve'):
            _display += '「{}」'.format(user.get('approve'))
        if user.get('person_num'):
            _display += '<{}>'.format(user.get('person_num'))
        if user.get('followed_from'):
            _display += '『{}』'.format(user.get('followed_from'))
        return _display

    if not auto_select:
        c = helper.num_choice(
            [__show(x) for x in users_in],
            valid_keys='n,N,p,P',
            separator=['*' * 64],
        )
    else:
        c = 0
    if c in range(len(users_in)):
        user_details = users_in[c]
        # dump all detail values sorted by key, show the avatar, then
        # ask for confirmation
        plain_info = ''
        keys = list(user_details.keys())
        for k in sorted(keys):
            plain_info += '{}\n'.format(user_details[k])
        self.cat_net_img(user_details.get('person_pic'))
        y = helper.yn_choice(plain_info, separator=['*' * 64])
        if y:
            return user_details
        else:
            return 'b'
    else:
        return c
def run(name):
    """Search Chrome extensions by ``name`` and wget the chosen one."""
    crx = Crx()
    pages = crx.search(name)
    menus = []
    menus.append(['{}'.format(page['name']) for page in pages])
    while menus:
        sel = helper.num_choice(menus[-1])
        # 'b' at the only menu level just re-prompts
        if str(sel) in 'bB' and len(menus) == 1:
            continue
        menus.pop()
        _url = 'http://chromecj.com/Handler/Download/{}'.format(
            pages[sel]['cid'])
        rs = os.popen('wget {} -O /tmp/{}_{}.zip'.format(
            _url, pages[sel]['name'], pages[sel]['cid']))
        print('download {}'.format(rs))
def click_fn_user_actions(self, user_details):
    """Menu of user-level actions, dispatched via getattr on self.

    Loops while the answer is a valid index or 'b'/'B'; choosing
    'load_albums_photos' exits the menu instead of dispatching.
    """
    if not user_details:
        return
    actions = [
        'show_user_info',
        'show_user_followed',
        'show_user_fans',
        'update_user_info',
        'update_albums',
        'load_albums_photos',
        'follow',
        'unfollow',
        'unfollow_list',
    ]
    allowed = list(range(len(actions))) + ['b', 'B']
    picked = 'b'
    while picked in allowed:
        picked = helper.num_choice(actions, default='2')
        if picked in range(len(actions)):
            if actions[picked] == 'load_albums_photos':
                break
            getattr(self, actions[picked])(user_details)
def feed_large_handler(self, act):
    """Show the selected feed's pictures full-size in the terminal.

    Picture urls are collected from the feed itself, its page_info, or —
    recursively — its retweeted status. A single picture is shown
    directly; multiple pictures get a numbered menu ('b' exits).
    """
    dat = self.selected_feed

    def show_retweeted(dat):
        # prefer inline pics; fall back to the page_info cover picture
        pics = [x['url'] for x in dat.get('pics', [])]
        if not pics and dat.get('page_info'):
            pics = dat['page_info'].get('page_pic', {}).get('url')
        # page_pic url is a bare string — normalize to a list
        if pics and not isinstance(pics, list):
            pics = [pics]
        if pics:
            return pics
        # nothing on this level: recurse into the retweeted status
        retweeted = dat.get('retweeted_status')
        if retweeted:
            return show_retweeted(retweeted)

    pics_all = show_retweeted(dat)
    if not pics_all:
        return
    if len(pics_all) == 1:
        self.cat_net_img(pics_all[0], large=True, height=base.get_height())
        return
    pics_sn = ['图片({})'.format(x + 1) for x in range(len(pics_all))]
    while True:
        c = helper.num_choice(
            pics_sn,
            default=1,
        )
        if str(c) in 'bB':
            return str(c)
        # menu answers are 1-based (default=1) -> convert to index
        c = int(c) - 1
        self.cat_net_img(pics_all[c], large=True, height=base.get_height())
def run(
        name,
        site,
        multiple,
        no_cache,
        log_level,
        scan_mode,
        timeout,
        override,
        auto_mode,
        failure_songs,
        clear_failure_songs,
        from_163_logs,
):
    """ a lovely script use sonimei search qq/netease songs """
    # if scan_mode, will be all songs local
    # else will be the name passed in
    scanned_songs = []
    timeout = timeout or cfg.get('snm.timeout')
    force_netease = False
    if clear_failure_songs:
        FailureHandle().dump_failure_songs([], action='clear')
        os._exit(0)
    if auto_mode:
        force_netease, scanned_songs = check_player(auto_mode)
    if failure_songs:
        scanned_songs = check_failure(failure_songs)
    if from_163_logs:
        force_netease = from_163_logs
    # choose the downloader backend: dedicated netease client or sonimei
    if force_netease:
        _client = NeteaseDL(not no_cache,
                            log_level=log_level * 10,
                            timeout=timeout,
                            override=override)
    else:
        _client = Sonimei(site, not no_cache,
                          log_level=log_level * 10,
                          timeout=timeout,
                          override=override)
    if scan_mode:
        _client.store.scan_all_songs()
        scanned_songs = _client.store.all_songs
    if name:
        # '#' separates multiple song names on the command line
        scanned_songs = [x for x in name.split('#') if x]
    if not scanned_songs:
        # NOTE(review): '' * 16 is a no-op — probably meant ' ' * 16 or
        # '*' * 16 as padding; confirm intended banner
        error_hint('{0}>>> use -h for details <<<{0}'.format('' * 16),
                   quit_out=None)
    for i, name in enumerate(scanned_songs):
        # per-song state: page cache of search results and paging cursor
        songs_store = {}
        page = 1
        is_searched_from_site = False
        CP.F((PRETTY.symbols['right'] + ' ') * 2,
             'processing/total: {}/{}'.format(i + 1, len(scanned_songs)))
        while True:
            # before the first remote search, offer to keep a local copy
            if not is_searched_from_site and check_local(
                    scan_mode, _client.store, name):
                CP.G(PRETTY.symbols['end'], 'quit')
                break
            status, song_pth = _client.store.is_file_id3_ok(name)
            if status:
                CP.G(PRETTY.symbols['music'],
                     '[{}] is found and updated'.format(song_pth))
                if not override:
                    error_hint('>>> you can do force download with -o <<<',
                               empty_line=False,
                               bg='black',
                               fg='yellow')
                break
            # search results are cached per page
            songs = songs_store.get(page)
            if not songs:
                zlog.info('from sonimei({}) try: {}/{}'.format(
                    helper.G.format(site), name, page))
                songs = _client.search_it(name, page=page)
                if not isinstance(songs, list):
                    songs = [songs]
                songs_store[page] = songs
                is_searched_from_site = True
            song_info = [x['author'] + '-' + x['title'] for x in songs]
            c = helper.num_choice(
                song_info,
                valid_keys='p,n,s',
                depth=page,
                extra_hints='n-next,p-pre,s-skip',
                clear_previous=True,
            )
            if isinstance(c, str):
                if c in 'qQ':
                    return
                if c in 'bp':
                    if page > 1:
                        page -= 1
                    continue
                if c == 'n':
                    page += 1
                    continue
                if c == 's':
                    # skip current song
                    break
            _client.save_song(songs[c])
            # multiple mode is only worked in none scanned mode
            if not multiple:
                break
def do_feed_handler(self, dat_in):
    """Render a feed list, loop on feed choice, and stash the selection.

    Returns the raw menu answer ('b'/'n'/'p'/'go…'/falsy) to the caller;
    a numeric pick stores ``self.selected_feed`` and pushes a 'feed' menu.
    """
    def gen_str(dat):
        # header line: like-mark, timestamp, counters, client source
        txt = [
            '{}{} (转发:{}, 评论:{}, 点赞:{}) 来自<{}>'.format(
                '👍 ' if dat.get('liked') else '',
                dat.get('created_at'),
                dat.get('reposts_count'),
                dat.get('comments_count'),
                dat.get('attitudes_count'),
                dat.get('source'),
            )
        ]
        if len(dat.get('pics', [])):
            txt.append('[{}图]'.format(len(dat.get('pics'))))
        if dat.get('page_info', {}) and dat['page_info'].get('type') == 'video':
            txt.append('[1个视频]')
        txt = ', '.join(txt)
        # strip html from the body text and keep a 40-char preview
        __txt = base.bs4txt(dat.get('text'))
        txt += '\n{} ...\n'.format(__txt[:40])
        if dat.get('retweeted_status'):
            # txt += '{} 转发自 {}\n'.format('-' * 32, '-' * 32)
            # recursively render the retweeted feed, highlighted yellow
            t = '{} 转发自 {}\n'.format('⇢ ' * 16, '⇠ ' * 16)
            t += gen_str(dat.get('retweeted_status'))
            # t = textui.colored.blue(t)
            t = textui.colored.yellow(t)
            txt += t
        return txt

    if not dat_in:
        log.warn('账号无微博!!!')
        return
    feeds = dat_in
    feeds = [gen_str(x) for x in feeds]
    if not feeds:
        log.warn('账号无微博!!!')
        return
    while True:
        c = helper.num_choice(
            feeds,
            default=1,
            valid_keys='all',
        )
        # pass navigation keys straight back to the caller
        if not c:
            return c
        if str(c) in 'bB':
            return str(c)
        if str(c) in 'nNpP':
            return str(c)
        if c and str(c).startswith('go'):
            print(str(c))
            return c
        # numeric answers are 1-based here (default=1) -> convert to index
        c = int(c) - 1
        self.selected_feed = dat_in[c]
        self.add_menu_to_choices('feed', 1)