def long_info(self):
    """Build the detailed, tree-style description of this tmux backup.

    Returns a list of unicode lines: a right-aligned creation-time header,
    one line for the backup itself, then nested lines for every session,
    window and pane, indented via tree_struc().
    """
    fmt_backup = u' Backup─┬─[%s] (%d sessions):'
    fmt_session = u'─Session─┬─[%s] (%d windows):'
    fmt_window = u'─Window─┬─(%d) [%s] (%d panes):'
    fmt_pane = u'─' + log.hl('Pane','green') +' (%d) %s'

    lines = []
    lines.append("%72s" % ('Backup was created on ' + self.create_time))
    lines.append(fmt_backup % (log.hl(self.tid,'bold'), len(self.sessions)))

    final_sess = self.sessions[-1]
    for sess in self.sessions:
        sess_is_final = sess.name == final_sess.name
        lines.append(tree_struc(
            fmt_session % (log.hl(sess.name,'cyan'), len(sess.windows)),
            [sess_is_final], lvl=1))
        final_win = sess.windows[-1]
        for win in sess.windows:
            win_is_final = win.win_id == final_win.win_id
            lines.append(tree_struc(
                fmt_window % (win.win_id, log.hl(win.name,'blue'), len(win.panes)),
                [sess_is_final, win_is_final], lvl=2))
            final_pane = win.panes[-1]
            for pane in win.panes:
                pane_is_final = final_pane.pane_id == pane.pane_id
                lines.append(tree_struc(
                    fmt_pane % (pane.pane_id, pane.path),
                    [sess_is_final, win_is_final, pane_is_final], lvl=3))
    return lines
def long_info(self):
    """Render this backup as a list of tree-drawn text lines.

    The first line is the creation timestamp; the rest describe the
    backup, its sessions, their windows and each window's panes.
    """
    out = [
        "%72s" % ('Backup was created on ' + self.create_time),
        u' Backup─┬─[%s] (%d sessions):' % (log.hl(self.tid,'bold'),
                                            len(self.sessions)),
    ]
    # highlight computed once, outside the loops (matches original)
    pane_tpl = u'─' + log.hl('Pane','bold') +' (%d) %s'
    last_session = self.sessions[-1]
    for session in self.sessions:
        s_last = session.name == last_session.name
        out.append(tree_struc(
            u'─Session─┬─[%s] (%d windows):'
            % (log.hl(session.name,'bold'), len(session.windows)),
            [s_last], lvl=1))
        last_window = session.windows[-1]
        for window in session.windows:
            w_last = window.win_id == last_window.win_id
            out.append(tree_struc(
                u'─Window─┬─(%d) [%s] (%d panes):'
                % (window.win_id, log.hl(window.name,'bold'), len(window.panes)),
                [s_last, w_last], lvl=2))
            last_pane = window.panes[-1]
            for pane in window.panes:
                p_last = last_pane.pane_id == pane.pane_id
                out.append(tree_struc(pane_tpl % (pane.pane_id, pane.path),
                                      [s_last, w_last, p_last], lvl=3))
    return out
def start_download(songs, skipped_hists): """ start multi-threading downloading songs. and generate a summary file songs: the list of songs need to be downloaded call the finish_hook function, pass skipped_hist """ global total total = len(songs) LOG.debug('init thread pool (%d) for downloading'% config.THREAD_POOL_SIZE) pool = ThreadPool(config.THREAD_POOL_SIZE) downloader = Downloader(songs, pool) LOG.debug('Start downloading' ) downloader.start() while done < total: time.sleep(1) print_progress() # handling lyrics downloading download_lyrics(songs) print log.hl(msg.fmt_insert_hist, 'warning') hist_handler.insert_hist(songs) print log.hl(msg.fmt_all_finished, 'warning') #call finish hook finish_summary(skipped_hists)
def print_progress():
    """Repaint the full-screen download progress view."""
    # fraction of the terminal width given to the per-file progress bars
    percent_bar_factor = 0.4
    width = util.get_terminal_size()[1] -5
    # NOTE(review): `2/10` is 0 under Python 2 integer division, so the
    # subtraction is a no-op — kept verbatim to preserve the layout.
    bar_count = (int(width*percent_bar_factor)-2/10) # number of percent bar
    write = sys.stdout.write
    line = log.hl(u' %s\n'% ('+'*width), 'cyan')
    write(u'\x1b[2J\x1b[H') # clear screen, cursor to home
    write(line)
    header = u' 保存目录:[%s] | 线程池:[%d] | 总进度:[%d/%d]\n'% (config.DOWNLOAD_DIR, config.THREAD_POOL_SIZE,done,total)
    write(log.hl(u' %s'% util.rjust(header, width),'warning'))
    write(line)
    for filename, percent in progress.items():
        bar = util.ljust('=' * int(percent * bar_count), bar_count)
        # the -10 leaves room for the "xx.x%" text and the brackets
        row = "%s [%s] %.1f%%\n" % \
            (util.rjust(filename,(width - bar_count -10)), bar, percent * 100)
        write(log.hl(row,'green'))
    if done2show:
        write(line)
        write(log.hl(util.rjust(u'最近完成(只显示%d个):\n'% config.SHOW_DONE_NUMBER, width),'warning'))
        write(line)
        # most recently finished jobs
        for finished in done2show:
            write(log.hl(u' √ %s\n'% finished,'cyan'))
    sys.stdout.flush()
def empty_hists(): """empty history table""" conn = __getConnection() dao = HistDao(conn) hists = dao.delete_all() print log.hl(msg.history_cleared, 'cyan') conn.commit() conn.close()
def empty_hists(): """empty history table""" conn = __getConnection() dao = HistDao(conn) hists = dao.delete_all() print log.hl(msg.history_cleared,'cyan') conn.commit() conn.close()
def finish_summary(skipped_hist): """ build the summary after finishing all dl skipped_hist: a History list, contains skipped songs, it is not empty only if incremental_dl is true """ border = "\n" + u">>" * 40 + u"\n" #build summary text: text = [] if skipped_hist: text.append(border + msg.fmt_summary_skip_title + border) text.append(msg.fmt_summary_skip_header) for hist in skipped_hist: text.append( "%s\t%s\t%s\t%s" % (msg.head_xm if hist.source == 1 else msg.head_163, hist.last_dl_time_str(), hist.song_name, hist.location)) if success_list: text.append(border + msg.fmt_summary_success_title + border) text.append(msg.fmt_summary_success_header) for song in success_list: text.append('%s\t%s' % (song.song_name, song.abs_path)) if failed_list: text.append(border + msg.fmt_summary_failed_title + border) text.append(msg.fmt_summary_failed_header) for song in failed_list: text.append('%s\t%s' % (song.song_name, song.abs_path)) #my save log if not os.path.exists('summary'): os.makedirs('summary') summary = path.join('summary', 'summary_' + str(datetime.datetime.today()) + ".txt") with codecs.open(summary, 'w', 'utf-8') as f: f.write("\n".join(text)) print log.hl(msg.summary_saved % summary, 'cyan') while False: sys.stdout.write(msg.summary_prompt) choice = raw_input().lower() if choice == 'q' or choice == '': break elif choice == 'v': pydoc.pager(u"\n".join(text)) break elif choice == 's': summary = path.join( config.DOWNLOAD_DIR, 'summary_' + str(datetime.datetime.today()) + ".txt") with codecs.open(summary, 'w', 'utf-8') as f: f.write("\n".join(text)) print log.hl(msg.summary_saved % summary, 'cyan') break else: sys.stdout.write(msg.summary_prompt_err)
def init_fav(self):
    """Walk the favourite-songs pages of user self.uid, parse every song
    link found, and append an initialized XiamiSong to self.songs.

    Pages are fetched until one yields no song links. Fix: the bare
    `except:` (which also swallowed KeyboardInterrupt/SystemExit) is
    narrowed to `except Exception:`.
    """
    page = 1
    user = ''
    total = 0
    cur = 1  # 1-based counter of links processed so far (progress text)
    LOG.debug(msg.head_xm + msg.fmt_init_fav % self.uid)
    while True:
        html = self.handler.read_link(url_fav % (self.uid, page)).text
        soup = BeautifulSoup(html, 'html.parser')
        if not user:
            user = soup.title.string
        if not total:
            # NOTE: total stays the scraped string — only used for display
            total = soup.find('span', class_='counts').string
        links = [
            link.get('href')
            for link in soup.find_all(href=re.compile(r'xiami.com/song/[^?]+'))
            if link
        ]
        if not links:
            break
        for link in links:
            LOG.debug(msg.head_xm + msg.fmt_parse_song_url % link)
            if self.verbose:
                sys.stdout.write(
                    log.hl('[%d/%s] parsing song ........ ' % (cur, total),
                           'green'))
                sys.stdout.flush()
            try:
                cur += 1
                song = XiamiSong(self.handler, url=link)
                if self.verbose:
                    sys.stdout.write(log.hl('DONE\n', 'green'))
            except Exception:
                # was a bare `except:` — keep best-effort behavior but let
                # KeyboardInterrupt/SystemExit propagate
                sys.stdout.write(log.hl('FAILED\n', 'error'))
                continue
            # rewrite filename: group songs under the owning user's dir
            song.group_dir = user
            song.post_set()
            self.songs.append(song)
        page += 1
    if len(self.songs):
        # create the target dir once, based on the last song's path
        util.create_dir(path.dirname(self.songs[-1].abs_path))
    LOG.debug(msg.head_xm + msg.fmt_init_fav_ok % self.uid)
def action_delete(tmux):
    """Delete the given backup after an explicit yes/no confirmation."""
    if not (tmux and isinstance(tmux, tmux_obj.Tmux)):
        return
    prompt = ("retmux> " + log.hl('Delete','red')
              + " backup "+tmux.tid+"? [yes|no] ")
    if raw_input(prompt).lower() == "yes":
        if util.delete_backup(tmux.tid) == 0:
            global tmux_dict
            tmux_dict = {}  # drop the cached index; it will be rebuilt
            LOG.info('Backup %s was deleted'%tmux.tid)
def download_lyrics(songs): """download / write lyric to file if it is needed""" url_lyric_163 = "http://music.163.com/api/song/lyric?id=%s&lv=1" percent_bar_factor = 0.4 width = util.get_terminal_size()[1] -5 bar_count = (int(width*percent_bar_factor)-2/10) # number of percent bar line = log.hl(u' %s'% ('+'*width), 'cyan') if songs[0].handler.dl_lyric: print log.hl(msg.fmt_dl_lyric_start, 'warning') print line for song in songs: if song.lyric_abs_path: print log.hl(u' %s '% song.lyric_filename,'cyan'), #the ending comma is for hide the newline if song.song_type == 1: #xiami if song.handler.need_proxy_pool: if song.lyric_link: download_by_url(song.lyric_link, song.lyric_abs_path, show_progress=True, proxy={'http':song.handler.proxies.get_proxy()}) else: if song.lyric_link: download_by_url(song.lyric_link, song.lyric_abs_path, show_progress=True) else: #163 lyric_link = url_lyric_163 % song.song_id song.lyric_text = song.handler.read_link(lyric_link).json()['lrc']['lyric'] import codecs with codecs.open(song.lyric_abs_path, 'w', 'utf-8') as f: f.write(song.lyric_text) print log.hl(u' √','cyan') print line
def start_download(songs): global total, progress total = len(songs) LOG.debug('init thread pool (%d) for downloading'% config.THREAD_POOL_SIZE) pool = ThreadPool(config.THREAD_POOL_SIZE) downloader = Downloader(songs, pool) LOG.debug('Start downloading' ) downloader.start() while done < total: time.sleep(1) print_progress() #handling lyrics downloading download_lyrics(songs) print log.hl(msg.fmt_all_finished, 'warning')
def action_delete(tmux):
    """Prompt the user and, on "yes", delete the given backup."""
    if tmux and isinstance(tmux, tmux_obj.Tmux):
        question = ("retmux> " + log.hl('Delete', 'red')
                    + " backup " + tmux.tid + "? [yes|no] ")
        if raw_input(question).lower() == "yes":
            if util.delete_backup(tmux.tid) == 0:
                global tmux_dict
                # invalidate the cached backup index
                tmux_dict = {}
                LOG.info('Backup %s was deleted' % tmux.tid)
def finish_summary(skipped_hist): """ build the summary after finishing all dl skipped_hist: a History list, contains skipped songs, it is not empty only if incremental_dl is true """ border= "\n"+u">>"*40 + u"\n" #build summary text: text = [] if skipped_hist: text.append( border+msg.fmt_summary_skip_title +border) text.append( msg.fmt_summary_skip_header) for hist in skipped_hist: text.append( "%s\t%s\t%s\t%s" % (msg.head_xm if hist.source ==1 else msg.head_163, hist.last_dl_time_str(), hist.song_name, hist.location)) if success_list: text.append( border+msg.fmt_summary_success_title +border) text.append( msg.fmt_summary_success_header) for song in success_list: text.append('%s\t%s'%(song.song_name, song.abs_path)) if failed_list: text.append( border+msg.fmt_summary_failed_title +border) text.append( msg.fmt_summary_failed_header) for song in failed_list: text.append('%s\t%s'%(song.song_name, song.abs_path)) while True: sys.stdout.write(msg.summary_prompt) choice = raw_input().lower() if choice == 'q' or choice == '': break elif choice == 'v': pydoc.pager(u"\n".join(text)) break elif choice == 's': summary = path.join(config.DOWNLOAD_DIR,'summary_'+str(datetime.datetime.today())+".txt") with codecs.open(summary, 'w', 'utf-8') as f: f.write("\n".join(text)) print log.hl(msg.summary_saved % summary ,'cyan') break else: sys.stdout.write(msg.summary_prompt_err)
def init_fav(self):
    """Parse the favourite pages of user self.uid and build XiamiSong
    objects for every song link found, appending them to self.songs.

    Stops when a page contains no more song links. Fix: narrowed the
    bare `except:` to `except Exception:` so Ctrl-C and SystemExit are
    no longer swallowed while parsing a song.
    """
    page = 1
    user = ''
    total = 0
    cur = 1  # current processing link (1-based, for the progress text)
    LOG.debug(msg.head_xm + msg.fmt_init_fav % self.uid)
    while True:
        html = self.handler.read_link(url_fav%(self.uid,page)).text
        soup = BeautifulSoup(html,'html.parser')
        if not user:
            user = soup.title.string
        if not total:
            # scraped as a string; only used in the progress display
            total = soup.find('span', class_='counts').string
        links = [link.get('href')
                 for link in soup.find_all(href=re.compile(r'xiami.com/song/[^?]+'))
                 if link]
        if not links:
            break
        for link in links:
            LOG.debug(msg.head_xm + msg.fmt_parse_song_url % link)
            if self.verbose:
                sys.stdout.write(log.hl('[%d/%s] parsing song ........ '%(cur, total), 'green'))
                sys.stdout.flush()
            try:
                cur += 1
                song = XiamiSong(self.handler, url=link)
                if self.verbose:
                    sys.stdout.write(log.hl('DONE\n', 'green'))
            except Exception:
                # best-effort: report and move on to the next link
                sys.stdout.write(log.hl('FAILED\n', 'error'))
                continue
            # rewrite filename, make it distinct per user
            song.group_dir = user
            song.post_set()
            self.songs.append(song)
        page += 1
    if len(self.songs):
        # create the target directory once
        util.create_dir(path.dirname(self.songs[-1].abs_path))
    LOG.debug(msg.head_xm + msg.fmt_init_fav_ok % self.uid)
def export_hists(): """ export all history data """ print log.hl(msg.history_exporting ,'cyan') conn = __getConnection() dao = HistDao(conn) hists = dao.get_all_histories() filename = path.join(config.DOWNLOAD_DIR,'zhuaxia_history_'+str(datetime.datetime.today())+".csv") with codecs.open(filename, 'w', 'utf-8') as f: f.write( "id;song_id;song_name;quality;source;location;api_url;download_time;download_times\n") for h in hists: f.write(h.to_csv()) f.write("\n") print log.hl(msg.history_exported % filename ,'cyan') conn.close()
def print_progress():
    """Repaint the full-screen progress view: header, global progress bar,
    one bar per in-flight download, and the recently-finished list."""
    #the factor of terminal width used for the progress bars
    percent_bar_factor = 0.4
    width = util.get_terminal_size()[1] - 5
    # number of chars in a percent bar
    # NOTE(review): `2 / 10` is 0 under Python 2 integer division, so the
    # subtraction is a no-op — confirm intent before changing.
    bar_count = (int(width * percent_bar_factor) - 2 / 10 )
    line = log.hl(u' %s\n' % ('+' * width), 'cyan')
    sep = log.hl(u' %s\n' % ('=' * width), 'cyan')
    sys.stdout.write(u'\x1b[2J\x1b[H') #clear screen, cursor to home
    sys.stdout.write(line)
    header = u' 保存目录:[%s] | 线程池:[%d]\n' % (config.DOWNLOAD_DIR, config.THREAD_POOL_SIZE)
    #header = util.ljust(header, width)
    sys.stdout.write(log.hl(u' %s' % header, 'warning'))
    sys.stdout.write(line)
    fmt_progress = '%s [%s] %.1f%%\n'
    all_p = [] #all per-file progress bars, filled by the loop below
    sum_percent = 0 # summed percent over the running jobs
    total_percent = 0
    for filename, percent in progress.items():
        sum_percent += percent
        bar = util.ljust('=' * int(percent * bar_count), bar_count)
        per100 = percent * 100
        # the -10 reserves room for the "xx.x%" text and the brackets
        single_p = fmt_progress % \
            (util.rjust(filename,(width - bar_count -10)), bar, per100)
        all_p.append(log.hl(single_p, 'green'))
    #overall progress: finished jobs plus partial progress of running ones
    total_percent = float(sum_percent + done) / total
    #global progress bar
    g_text = u'总进度[%d/%d]:' % (done, total)
    g_bar = util.ljust('#' * int(total_percent * bar_count), bar_count)
    g_progress = fmt_progress % \
        (util.rjust(g_text,(width - bar_count -10)), g_bar, 100*total_percent)
    #emit the global bar first, then the per-file bars
    sys.stdout.write(log.hl(u'%s' % g_progress, 'red'))
    sys.stdout.write(sep)
    sys.stdout.write(''.join(all_p))
    if len(done2show):
        sys.stdout.write(line)
        sys.stdout.write(
            log.hl(u' 最近%d个完成任务:\n' % config.SHOW_DONE_NUMBER, 'warning'))
        sys.stdout.write(line)
        #display the most recently finished jobs
        for d in done2show:
            sys.stdout.write(log.hl(u' √ %s\n' % d, 'cyan'))
    sys.stdout.flush()
def print_progress():
    """Repaint the full-screen progress view (i18n variant): header,
    global progress bar, per-download bars, and the finished list."""
    #the factor of terminal width used for the progress bars
    percent_bar_factor = 0.4
    width = util.get_terminal_size()[1] -5
    # number of chars in a percent bar
    # NOTE(review): `2/10` is 0 under Python 2 integer division, so the
    # subtraction is a no-op — confirm intent before changing.
    bar_count = (int(width*percent_bar_factor)-2/10)
    #line = log.hl(u' %s\n'% ('-'*90), 'cyan')
    line = log.hl(u' %s\n'% ('+'*width), 'cyan')
    sep = log.hl(u' %s\n'% ('='*width), 'cyan')
    sys.stdout.write(u'\x1b[2J\x1b[H') #clear screen, cursor to home
    sys.stdout.write(line)
    header = msg.fmt_dl_header % (config.DOWNLOAD_DIR, config.THREAD_POOL_SIZE)
    #header = util.ljust(header, width)
    sys.stdout.write(log.hl(u' %s'%header,'warning'))
    sys.stdout.write(line)
    fmt_progress = '%s [%s] %.1f%%\n'
    all_p = [] #all per-file progress bars, filled by the loop below
    sum_percent = 0 # summed percent over the running jobs
    total_percent = 0
    for filename, percent in progress.items():
        sum_percent += percent
        bar = util.ljust('=' * int(percent * bar_count), bar_count)
        per100 = percent * 100
        # the -10 reserves room for the "xx.x%" text and the brackets
        single_p = fmt_progress % \
            (util.rjust(filename,(width - bar_count -10)), bar, per100)
        all_p.append(log.hl(single_p,'green'))
    #overall progress: finished jobs plus partial progress of running ones
    total_percent = float(sum_percent+done)/total
    #global progress bar
    g_text = msg.fmt_dl_progress % (done, total)
    g_bar = util.ljust('#' * int(total_percent* bar_count), bar_count)
    g_progress = fmt_progress % \
        (util.rjust(g_text,(width - bar_count -10)), g_bar, 100*total_percent)
    #emit the global bar first, then the per-file bars
    sys.stdout.write(log.hl(u'%s'%g_progress, 'red'))
    sys.stdout.write(sep)
    sys.stdout.write(''.join(all_p))
    if len(done2show):
        sys.stdout.write(line)
        sys.stdout.write(log.hl(msg.fmt_dl_last_finished % config.SHOW_DONE_NUMBER,'warning'))
        sys.stdout.write(line)
        #display the most recently finished jobs
        for d in done2show:
            sys.stdout.write(log.hl(u' √ %s\n'% d,'cyan'))
    sys.stdout.write(line)
    sys.stdout.flush()
def show_and_action(name=None, action=None): """list backups info. if name was given, show detailed info for given backup item. otherwise show short_info for all backups as list if there is action, will do the action after the details was displayed. the action should be a function with single argument of tmux_obj.Tmux instance """ #using restore check function to validate the given name name = tmux_id_4_show(name) if not name: #interactively show details while 1: list_all_sessions() idx = raw_input( "retmux> Please give backup No. (press q to exit):") if not idx: log.print_err("Invalid index: (empty)") elif idx.lower() == 'q': break elif not tmux_dict.has_key(idx): log.print_err("Invalid index: " + idx) else: tmux = tmux_dict[idx] print util.get_line('>') print log.hl('Details of backup:', 'bold') + '%s' % tmux.tid print util.get_line('>') print '\n'.join(tmux.long_info()) print util.get_line('<') if action: action(tmux) else: #till here, the name should be validated, exists print log.hl('Details of backup:', 'bold') + '%s' % name print util.get_line('=') tmux = util.get_tmux_by_id(name) print '\n'.join(tmux.long_info()) if action: action(tmux)
def show_and_action(name=None, action=None): """list backups info. if name was given, show detailed info for given backup item. otherwise show short_info for all backups as list if there is action, will do the action after the details was displayed. the action should be a function with single argument of tmux_obj.Tmux instance """ #using restore check function to validate the given name name = tmux_id_4_show(name) if not name: #interactively show details while 1: list_all_sessions() idx = raw_input("retmux> Please give backup No. (press q to exit):") if not idx: log.print_err("Invalid index: (empty)") elif idx.lower() == 'q': break elif not tmux_dict.has_key(idx): log.print_err("Invalid index: " + idx) else: tmux = tmux_dict[idx] print util.get_line('>') print log.hl('Details of backup:','bold') +'%s'% tmux.tid print util.get_line('>') print '\n'.join(tmux.long_info()) print util.get_line('<') if action: action(tmux) else: #till here, the name should be validated, exists print log.hl('Details of backup:','bold') +'%s'% name print util.get_line('=') tmux = util.get_tmux_by_id(name) print '\n'.join(tmux.long_info()) if action: action(tmux)
def export_hists(): """ export all history data """ print log.hl(msg.history_exporting, 'cyan') conn = __getConnection() dao = HistDao(conn) hists = dao.get_all_histories() filename = path.join( config.DOWNLOAD_DIR, 'zhuaxia_history_' + str(datetime.datetime.today()) + ".csv") with codecs.open(filename, 'w', 'utf-8') as f: f.write( "id;song_id;song_name;quality;source;location;api_url;download_time;download_times\n" ) for h in hists: f.write(h.to_csv()) f.write("\n") print log.hl(msg.history_exported % filename, 'cyan') conn.close()
import config, util, logging, log, downloader
import xiami as xm
from threadpool import ThreadPool
from time import sleep
from os import path

# module-wide logger
LOG = log.get_logger("zxLogger")

# shared download state: queued songs and the done/total counters
dl_songs = []
total = 0
done = 0

# user-facing message templates (Chinese; runtime strings, left verbatim)
fmt_parsing = u'解析: "%s" ..... [%s] %s'
fmt_has_song_nm = u'包含%d首歌曲.'
fmt_single_song = u'[曲目] %s'
# horizontal separator printed around the task summary
border = log.hl(u'%s' % ('=' * 90), 'cyan')


def shall_I_begin(in_str, is_file=False, is_hq=False):
    """Entry point: parse `in_str` (a URL, or a file of URLs when
    is_file is True), queue the songs found, and start downloading.

    is_hq: request high-quality audio from xiami.
    """
    xiami_obj = xm.Xiami(config.XIAMI_LOGIN_EMAIL,\
        config.XIAMI_LOGIN_PASSWORD, \
        is_hq)
    if is_file:
        from_file(xiami_obj, in_str)
    else:
        from_url(xiami_obj, in_str)
    print border
    # report the number of queued download tasks
    LOG.info(u' 下载任务总数: %d' % len(dl_songs))
    sleep(3)
    downloader.start_download(dl_songs)
#after init config, loading message if config.LANG.upper() == 'CN': import i18n.msg_cn as msgTxt else: import i18n.msg_en as msgTxt LOG = log.get_logger("zxLogger") dl_songs = [] total = 0 done = 0 fmt_parsing = msgTxt.fmt_parsing fmt_has_song_nm = msgTxt.fmt_has_song_nm fmt_single_song = msgTxt.fmt_single_song border = log.hl(u'%s'% ('='*90), 'cyan') pat_xm = r'^https?://[^/.]*\.xiami\.com/' pat_163 = r'^https?://music\.163\.com/' #proxypool ppool = None #xiami object. declare it here because we want to init it only if it is required xiami_obj = None def __init_xiami_obj(is_hq,dl_lyric): #if ppool is required, it should have been initialized in shall_i_begin() global xiami_obj,ppool if not xiami_obj: xiami_obj = xm.Xiami(config.XIAMI_LOGIN_EMAIL,\
def print_progress():
    """Repaint the full-screen progress view, including per-download
    transfer rates, the finished list, and the failed list."""
    #the factor of terminal width used for the progress bars
    percent_bar_factor = 0.4
    width = util.get_terminal_size()[1] -5
    # number of chars in a percent bar
    # NOTE(review): `2/10` is 0 under Python 2 integer division, so the
    # subtraction is a no-op — confirm intent before changing.
    bar_count = (int(width*percent_bar_factor)-2/10)
    #line = log.hl(u' %s\n'% ('-'*90), 'cyan')
    line = log.hl(u' %s\n'% ('+'*width), 'cyan')
    sep = log.hl(u' %s\n'% ('='*width), 'cyan')
    sys.stdout.write(u'\x1b[2J\x1b[H') #clear screen, cursor to home
    sys.stdout.write(line)
    header = msg.fmt_dl_header % (config.DOWNLOAD_DIR, config.THREAD_POOL_SIZE)
    #header = util.ljust(header, width)
    sys.stdout.write(log.hl(u' %s'%header,'warning'))
    sys.stdout.write(line)
    fmt_progress = '%s [%s] %.1f%% (%dkib/s)\n'
    all_p = [] #all per-file progress bars, filled by the loop below
    sum_percent = 0 # summed percent over the running jobs
    sum_rate = 0 # summed transfer rate over the running jobs
    total_percent = 0
    for filename, prog_obj in progress.items():
        percent = prog_obj.percent()
        rate = prog_obj.rate()
        #accumulate for the global bar
        sum_percent += percent
        sum_rate += rate
        bar = util.ljust('=' * int(percent * bar_count), bar_count)
        per100 = percent * 100
        # the -22 reserves room for "xx.x%", brackets and "(xxkib/s)"
        single_p = fmt_progress % \
            (util.rjust(filename,(width - bar_count -22)), bar, per100,rate)
        all_p.append(log.hl(single_p,'green'))
    #overall progress: finished jobs plus partial progress of running ones
    total_percent = float(sum_percent+done)/total
    #global progress bar
    g_text = msg.fmt_dl_progress % (done, total)
    g_bar = util.ljust('#' * int(total_percent* bar_count), bar_count)
    g_progress = fmt_progress % \
        (util.rjust(g_text,(width - bar_count -22)), g_bar, 100*total_percent,sum_rate)
    #emit the global bar first, then the per-file bars
    sys.stdout.write(log.hl(u'%s'%g_progress, 'red'))
    sys.stdout.write(sep)
    sys.stdout.write(''.join(all_p))
    # recently finished jobs
    if len(done2show):
        sys.stdout.write(line)
        sys.stdout.write(log.hl(msg.fmt_dl_last_finished % config.SHOW_DONE_NUMBER,'warning'))
        sys.stdout.write(line)
        #display finished jobs
        for d in done2show:
            sys.stdout.write(log.hl(u' √ %s\n'% d,'cyan'))
    #failed downloads
    if len(failed_list):
        sys.stdout.write(line)
        sys.stdout.write(log.hl(msg.fmt_dl_failed_jobs,'error'))
        sys.stdout.write(line)
        #display failed jobs
        for failed_song in failed_list:
            sys.stdout.write(log.hl(u' ✘ %s\n' % failed_song.filename,'red'))
    sys.stdout.write(line)
    sys.stdout.flush()
def download_lyrics(songs):
    """Download / write the lyric file for each song that wants one.

    Xiami lyrics (song_type == 1) are direct file downloads from
    song.lyric_link, optionally through the proxy pool; 163 lyrics come
    from the JSON lyric API and are written to disk. Songs whose 163
    lyric JSON lacks the 'lrc'/'lyric' keys are reported and skipped.
    """
    url_lyric_163 = "http://music.163.com/api/song/lyric?id=%s&lv=1"
    # bar geometry (NOTE(review): bar_count is unused here, and `2/10`
    # is 0 under Python 2 integer division — confirm before cleaning up)
    percent_bar_factor = 0.4
    width = util.get_terminal_size()[1] -5
    bar_count = (int(width*percent_bar_factor)-2/10)
    line = log.hl(u' %s'% ('+'*width), 'cyan')
    if songs[0].handler.dl_lyric == True:
        print log.hl(msg.fmt_dl_lyric_start, 'warning')
        print line
        for song in songs:
            if song.lyric_abs_path:
                # trailing comma suppresses the newline so the result
                # marker (√ / ✘) lands on the same line
                print log.hl(u' %s '% song.lyric_filename,'cyan'),
                if song.song_type == 1:
                    #xiami: lyric is a direct file download
                    if song.handler.need_proxy_pool:
                        if song.lyric_link:
                            download_url(song.lyric_link,
                                         song.lyric_abs_path,
                                         show_progress=True,
                                         proxy={'http':song.handler.proxies.get_proxy()})
                    else:
                        if song.lyric_link:
                            download_url(song.lyric_link,
                                         song.lyric_abs_path,
                                         show_progress=True)
                    print log.hl(u' √','cyan')
                else:
                    #163: lyric text comes from the JSON API
                    lyric_link = url_lyric_163 % song.song_id
                    lyric_json = song.handler.read_link(lyric_link).json()
                    # guard: some songs have no lyric at all
                    if not lyric_json or not lyric_json.has_key('lrc') or not lyric_json['lrc'].has_key('lyric'):
                        print log.hl(u' ✘ Not Found','red')
                        continue
                    song.lyric_text = song.handler.read_link(lyric_link).json()['lrc']['lyric']
                    import codecs
                    with codecs.open(song.lyric_abs_path, 'w', 'utf-8') as f:
                        f.write(song.lyric_text)
                    print log.hl(u' √','cyan')
        print line
def print_progress(self):
    """Refresh job statuses and repaint the full-screen progress view:
    header, global progress bar, per-running-job bars, and the
    finished / failed job lists."""
    # recompute done/running/failed job bookkeeping first
    self.__update_status()

    def output(txt):
        # single choke point for terminal writes
        sys.stdout.write(txt)

    width = util.get_terminal_size()[1] - 5
    # number of chars in a percent bar
    # NOTE(review): `2 / 10` is 0 under Python 2 integer division, so the
    # subtraction is a no-op — confirm intent before changing.
    bar_count = (int(width * self.__class__.percent_bar_factor) - 2 / 10 )
    line = log.hl(u' %s\n' % ('+' * width), 'cyan')
    sep = log.hl(u' %s\n' % ('=' * width), 'cyan')
    output(u'\x1b[2J\x1b[H') #clear screen, cursor to home
    sys.stdout.write(line)
    header = fmt_dl_header % (self.title, self.pool_size)
    sys.stdout.write(log.hl(u' %s' % header, 'warning'))
    sys.stdout.write(line)
    all_p = [] #all per-job progress bars, filled by the loop below
    sum_percent = 0 # summed percent over the running jobs
    sum_rate = 0 # summed transfer rate (NOTE(review): never updated here)
    total_percent = 0
    for job in [j for j in self.jobs if j.status == JobStatus.Running]:
        prog_obj = job.progress
        percent = prog_obj.percent()
        #accumulate for the global bar
        sum_percent += percent
        bar = util.ljust('=' * int(percent * bar_count), bar_count)
        per100 = percent * 100
        # the -32 reserves room for the percent text and brackets
        single_p = fmt_progress % \
            (util.rjust(prog_obj.name,(width - bar_count)-32), bar, per100)
        all_p.append(log.hl(single_p, 'green'))
    #overall progress: finished jobs plus partial progress of running ones
    total_percent = float(sum_percent + self.done) / self.total
    #global progress bar
    g_text = fmt_dl_progress % (self.done, self.total)
    g_bar = util.ljust('#' * int(total_percent * bar_count), bar_count)
    g_progress = fmt_progress % \
        (util.rjust(g_text,(width - bar_count - 32)), g_bar, 100*total_percent)
    #emit the global bar first, then the per-job bars
    sys.stdout.write(log.hl(u'%s' % g_progress, 'red'))
    sys.stdout.write(sep)
    sys.stdout.write(''.join(all_p))
    # recently finished jobs
    if len(self.done2show):
        sys.stdout.write(line)
        sys.stdout.write(
            log.hl(fmt_dl_last_finished % SHOW_DONE_NUMBER, 'warning'))
        sys.stdout.write(line)
        #display finished jobs
        for job in self.done2show:
            sys.stdout.write(log.hl(u' √ %s\n' % job.name, 'cyan'))
    #failed downloads
    if len(self.failed_list):
        sys.stdout.write(line)
        sys.stdout.write(log.hl(fmt_dl_failed_jobs, 'error'))
        sys.stdout.write(line)
        #display failed jobs
        for failed_job in self.failed_list:
            sys.stdout.write(log.hl(u' ✘ %s\n' % failed_job.name, 'red'))
    sys.stdout.write(line)
    sys.stdout.flush()