def get_title_wavfile_standard(date_s, outputdir, avconv_exec, debugonly=False, npr_api_key=None):
    """
    Build the episode title and download the per-segment MP3 files for the
    Wait Wait... Don't Tell Me! episode that aired on ``date_s``.

    :param date_s: air date of the episode; must support ``strftime``.
    :param outputdir: directory into which the MP3 segment files are written.
    :param avconv_exec: path to the avconv executable. Currently unused here
        (segments are left as MP3), kept for interface compatibility.
    :param debugonly: if True, dump the raw NPR API XML tree to
        ``NPR.WaitWait.tree.<decdate>.xml`` in ``outputdir`` and return None.
    :param npr_api_key: optional NPR API key; looked up through
        ``npr_utils.get_api_key()`` when not given.
    :returns: ``(title, outfiles)`` — the composed episode title string and
        the list of downloaded MP3 paths — or None in ``debugonly`` mode.
    :raises ValueError: if no usable MP3 segment could be resolved.
    """
    if npr_api_key is None:
        npr_api_key = npr_utils.get_api_key()
    # download this episode's metadata into an lxml element tree
    nprURL = npr_utils.get_NPR_URL(date_s, _npr_waitwait_progid, npr_api_key)
    decdate = npr_utils.get_decdate(date_s)
    tree = lxml.etree.fromstring(requests.get(nprURL).content)
    if debugonly:
        openfile = os.path.join(outputdir, "NPR.WaitWait.tree.%s.xml" % decdate)
        # FIX: lxml.etree.tostring() returns bytes, so the dump file must be
        # opened in binary mode ("w" raises TypeError under Python 3).
        with open(openfile, "wb") as outfile:
            outfile.write(lxml.etree.tostring(tree))
        return None
    # collect (title, mp3 URL, ordering index) for every story element that
    # carries at least one <mp3> child
    title_mp3_urls = []
    for elem in filter(lambda elem: len(list(elem.iter("mp3"))) != 0,
                       tree.iter("story")):
        title = list(elem.iter("title"))[0].text.strip()
        m3uurl = max(
            filter(lambda elm: "type" in elm.keys() and elm.get("type") == "m3u",
                   elem.iter("mp3"))).text.strip()
        try:
            # FIX: Response.content is bytes; decode it so the str operations
            # below (split/replace and later "%s" formatting) work on Python 3.
            mp3url = requests.get(m3uurl).content.strip().decode("utf8")
            # the segment's broadcast order is encoded in the "_NN.mp3" suffix
            order = int(mp3url.split("_")[-1].replace(".mp3", ""))
            title_mp3_urls.append((title, mp3url, order))
        except Exception:
            # best-effort: silently skip segments whose m3u cannot be resolved
            pass
    # FIX: fail with a clear message instead of a cryptic unpack error when
    # nothing could be resolved (zip(*[]) would otherwise blow up below).
    if len(title_mp3_urls) == 0:
        raise ValueError(
            "Error, could not find any Wait Wait segments for date %s." %
            date_s.strftime("%B %d, %Y"))
    titles, mp3urls, orders = zip(*sorted(title_mp3_urls, key=lambda tup: tup[2]))
    title = date_s.strftime("%B %d, %Y")
    title = "%s: %s." % (
        title,
        "; ".join(["%d) %s" % (num + 1, titl)
                   for (num, titl) in enumerate(titles)]))
    outfiles = [
        os.path.join(outputdir, "waitwait.%s.%d.mp3" % (decdate, num + 1))
        for (num, mp3url) in enumerate(mp3urls)
    ]
    # download the segment files in parallel, one worker per segment.
    # FIX: use the pool as a context manager so worker processes are reliably
    # reaped (the original never closed the pool).
    with multiprocessing.Pool(processes=len(mp3urls)) as pool:
        pool.map(_download_file, zip(mp3urls, outfiles))
    return title, outfiles
def get_freshair(outputdir, date_s, order_totnum=None, file_data=None,
                 debug=False, do_mp4=False, exec_dict=None,
                 check_if_exist=False, mp3_exist=False, to_file_debug=True):
    """
    Download the NPR Fresh Air episode for the weekday ``date_s``,
    concatenate its segments with avconv into a single M4A file, and tag it
    with iTunes-style metadata.

    :param outputdir: directory into which all files are written.
    :param date_s: air date of the episode; must be a weekday.
    :param order_totnum: optional ``(order_in_year, total_in_year)`` tuple
        for the track-number tag; computed from ``date_s`` when None.
    :param file_data: optional PNG bytes for the cover-art tag; fetched via
        ``get_freshair_image()`` when None.
    :param debug: if True, optionally dump the NPR API XML and return the
        parsed tree instead of building the episode.
    :param do_mp4: if True, use the MP4 segment feed (transcoding each
        segment to AC3 first) instead of the MP3 feed.
    :param exec_dict: optional mapping of executable names to paths; must
        contain ``'avconv'``. Discovered via npr_utils when None.
    :param check_if_exist: if True, return immediately when the target M4A
        file already exists.
    :param mp3_exist: if True, assume the per-segment MP3 files are already
        on disk and skip downloading.
    :param to_file_debug: whether debug mode writes the XML tree to disk.
    :returns: path of the created M4A file; the parsed XML tree in debug
        mode; or None on early exit / failure.
    :raises ValueError: if ``outputdir`` is not a directory.
    :raises AssertionError: if ``date_s`` is not a weekday or no avconv
        executable set could be found.
    """
    # validate the target directory before doing any network work
    if not os.path.isdir(outputdir):
        raise ValueError("Error, %s is not a directory." % outputdir)
    # Fresh Air only airs on weekdays
    assert (npr_utils.is_weekday(date_s))
    if exec_dict is None:
        exec_dict = npr_utils.find_necessary_executables()
    assert (exec_dict is not None)
    avconv_exec = exec_dict['avconv']
    if order_totnum is None:
        order_totnum = npr_utils.get_order_number_weekday_in_year(date_s)
    order_in_year, tot_in_year = order_totnum
    if file_data is None:
        file_data = get_freshair_image()
    decdate = date_s.strftime('%d.%m.%Y')
    m4afile = os.path.join(outputdir, 'NPR.FreshAir.%s.m4a' % decdate)
    if check_if_exist and os.path.isfile(m4afile):
        return
    nprURL = npr_utils.get_NPR_URL(date_s, _npr_FreshAir_progid,
                                   npr_utils.get_api_key())
    year = date_s.year
    # download the episode description into an lxml element tree
    tree = lxml.etree.fromstring(requests.get(nprURL).content)
    if debug:
        if to_file_debug:
            # FIX: lxml.etree.tostring() returns bytes, so the dump file
            # must be opened in binary mode ('w' raises TypeError on py3).
            with open(
                    os.path.join(outputdir,
                                 'NPR.FreshAir.tree.%s.xml' % decdate),
                    'wb') as openfile:
                openfile.write(lxml.etree.tostring(tree))
        return tree
    # an <unavailable value="true"> tag means the episode cannot be built
    if len(
            list(
                filter(
                    lambda elem: 'value' in elem.keys() and elem.get(
                        'value') == 'true', tree.iter('unavailable')))) != 0:
        unavailable_elem = max(
            filter(
                lambda elem: 'value' in elem.keys() and elem.get('value') ==
                'true', tree.iter('unavailable')))
        if unavailable_elem.text is None:
            print(
                'Could not create Fresh Air episode for date %s for some reason'
                % npr_utils.get_datestring(date_s))
        else:
            print(
                'Could not create Fresh Air episode for date %s for this reason: %s'
                % (npr_utils.get_datestring(date_s),
                   unavailable_elem.text.strip()))
        return
    # now get the tuple of (title, media URL) per segment
    if not do_mp4:
        try:
            title_mp3_urls = _process_freshair_titlemp3_tuples_one(tree)
        except ValueError:
            title_mp3_urls = _process_freshair_titlemp3_tuples_two(tree)
        if len(title_mp3_urls) == 0:
            print('Error, could not find any Fresh Air episodes for date %s.'
                  % npr_utils.get_datestring(date_s))
            return
        titles, songurls = zip(*title_mp3_urls)
        outfiles = [
            os.path.join(outputdir, 'freshair.%s.%d.mp3' % (decdate, num + 1))
            for (num, mp3url) in enumerate(songurls)
        ]
        if mp3_exist:
            assert (all([os.path.isfile(outfile) for outfile in outfiles]))
    else:
        title_mp4_urls = _process_freshair_titlemp4_tuples(tree)
        if len(title_mp4_urls) == 0:
            print('Error, could not find any Fresh Air episodes for date %s.'
                  % npr_utils.get_datestring(date_s))
            return
        titles, songurls = zip(*title_mp4_urls)
        outfiles = [
            os.path.join(outputdir, 'freshair.%s.%d.mp4' % (decdate, num + 1))
            for (num, mp4url) in enumerate(songurls)
        ]
    title = date_s.strftime('%A, %B %d, %Y')
    title = '%s: %s.' % (title, '; '.join(
        ['%d) %s' % (num + 1, titl) for (num, titl) in enumerate(titles)]))
    if not mp3_exist:
        # download the segment files in parallel, one worker per segment.
        # FIX: create the pool only when a download actually happens, and use
        # it as a context manager so workers are reliably reaped (the
        # original leaked an unused pool in the mp3_exist case).
        with multiprocessing.Pool(processes=len(songurls)) as pool:
            outfiles = list(
                filter(None,
                       pool.map(_download_file, zip(songurls, outfiles))))
    if do_mp4:
        # transcode each mp4 segment to ac3 so avconv's 'concat:' protocol
        # can join them below
        newouts = []
        for outfile in outfiles:
            # FIX: raw string for the regex — '\.' in a plain literal is an
            # invalid escape sequence on modern Python
            newfile = re.sub(r'\.mp4$', '.ac3', outfile)
            split_cmd = [avconv_exec, '-y', '-i', outfile, newfile]
            proc = subprocess.Popen(split_cmd,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            proc.communicate()
            os.remove(outfile)
            newouts.append(newfile)
        outfiles = newouts
    # concatenate all segments and transcode to a single m4a file.
    # NOTE(review): escaping spaces with a backslash looks shell-oriented;
    # avconv receives an argv list here, so presumably this only matters for
    # the 'concat:' protocol's filename handling — confirm.
    fnames = [filename.replace(' ', '\\ ') for filename in outfiles]
    avconv_concat_cmd = 'concat:%s' % '|'.join(fnames)
    split_cmd = [
        avconv_exec, '-y', '-i', avconv_concat_cmd, '-ar', '44100', '-ac',
        '2', '-threads',
        '%d' % multiprocessing.cpu_count(), '-strict', 'experimental',
        '-acodec', 'aac', m4afile
    ]
    proc = subprocess.Popen(split_cmd,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    proc.communicate()
    # remove the now-joined segment files
    for filename in outfiles:
        os.remove(filename)
    # write the iTunes-style metadata tags onto the m4a file
    mp4tags = mutagen.mp4.MP4(m4afile)
    mp4tags.tags['\xa9nam'] = [
        title,
    ]
    mp4tags.tags['\xa9alb'] = [
        'Fresh Air From WHYY: %d' % year,
    ]
    mp4tags.tags['\xa9ART'] = [
        'Terry Gross',
    ]
    mp4tags.tags['\xa9day'] = [
        '%d' % year,
    ]
    mp4tags.tags['\xa9cmt'] = [
        "more info at : Fresh Air from WHYY and NPR Web site",
    ]
    mp4tags.tags['trkn'] = [
        (order_in_year, tot_in_year),
    ]
    mp4tags.tags['covr'] = [
        mutagen.mp4.MP4Cover(file_data, mutagen.mp4.MP4Cover.FORMAT_PNG),
    ]
    mp4tags.tags['\xa9gen'] = [
        'Podcast',
    ]
    mp4tags.tags['aART'] = [
        'Terry Gross',
    ]
    mp4tags.save()
    return m4afile
def get_freshair(outputdir, date_s, order_totnum = None, file_data = None,
                 debug = False, do_mp4 = False, exec_dict = None,
                 check_if_exist = False, mp3_exist = False,
                 to_file_debug = True ):
    """
    Download the NPR Fresh Air episode for the weekday ``date_s``,
    concatenate its segments with avconv into a single M4A file, and tag it.

    NOTE(review): this is the legacy Python-2 variant (bare ``print``
    statements; list-returning ``filter``) of the ``get_freshair`` defined
    earlier in this file — the two definitions collide; confirm which one
    the project intends to keep.

    :param outputdir: directory into which all files are written.
    :param date_s: air date of the episode; must be a weekday.
    :param order_totnum: optional (order_in_year, total_in_year) tuple for
        the track-number tag; computed from date_s when None.
    :param file_data: optional PNG bytes for the cover-art tag; fetched via
        get_freshair_image() when None.
    :param debug: if True, optionally dump the NPR API XML and return the
        parsed tree instead of building the episode.
    :param do_mp4: if True, use the MP4 segment feed (each segment is
        transcoded to AC3 before concatenation).
    :param exec_dict: optional executable-name -> path mapping; must contain
        'avconv'. Discovered via npr_utils when None.
    :param check_if_exist: if True, return immediately when the target M4A
        file already exists.
    :param mp3_exist: if True, assume the segment MP3 files already exist on
        disk and skip downloading.
    :param to_file_debug: whether debug mode writes the XML tree to disk.
    :returns: the M4A file path, the parsed tree in debug mode, or None.
    :raises ValueError: if outputdir is not a directory.
    """
    # check if outputdir is a directory
    if not os.path.isdir(outputdir):
        raise ValueError("Error, %s is not a directory." % outputdir)
    # check if actually a weekday
    assert( npr_utils.is_weekday(date_s) )
    if exec_dict is None:
        exec_dict = npr_utils.find_necessary_executables()
    assert( exec_dict is not None )
    avconv_exec = exec_dict['avconv']
    if order_totnum is None:
        order_totnum = npr_utils.get_order_number_weekday_in_year(date_s)
    order_in_year, tot_in_year = order_totnum
    if file_data is None:
        file_data = get_freshair_image()
    decdate = date_s.strftime('%d.%m.%Y')
    m4afile = os.path.join(outputdir, 'NPR.FreshAir.%s.m4a' % decdate )
    # early exit when the finished episode already exists
    if check_if_exist and os.path.isfile(m4afile):
        return
    nprURL = npr_utils.get_NPR_URL(date_s, _npr_FreshAir_progid,
                                   npr_utils.get_api_key() )
    year = date_s.year
    # download this data into an lxml elementtree
    tree = lxml.etree.fromstring( requests.get(nprURL).content )
    if debug:
        # print 'URL = %s' % nprURL
        if to_file_debug:
            with open(os.path.join(outputdir,
                                   'NPR.FreshAir.tree.%s.xml' % decdate),
                      'w') as openfile:
                openfile.write( lxml.etree.tostring( tree ) )
        return tree
    # check for unavailable tag: an <unavailable value="true"> element means
    # NPR has no playable episode for this date
    if len(filter(lambda elem: 'value' in elem.keys() and
                  elem.get('value') == 'true',
                  tree.iter('unavailable'))) != 0:
        unavailable_elem = max(filter(lambda elem: 'value' in elem.keys() and
                                      elem.get('value') == 'true',
                                      tree.iter('unavailable')))
        if unavailable_elem.text is None:
            print 'Could not create Fresh Air episode for date %s for some reason' % npr_utils.get_datestring( date_s )
        else:
            print 'Could not create Fresh Air episode for date %s for this reason: %s' % \
                ( npr_utils.get_datestring( date_s ), unavailable_elem.text.strip() )
        return
    # now get tuple of title to mp3 file
    if not do_mp4:
        # two parsing strategies for the mp3 feed; the second is a fallback
        try:
            title_mp3_urls = _process_freshair_titlemp3_tuples_one(tree)
        except ValueError:
            title_mp3_urls = _process_freshair_titlemp3_tuples_two(tree)
        if len(title_mp3_urls) == 0:
            print 'Error, could not find any Fresh Air episodes for date %s.' % \
                npr_utils.get_datestring( date_s )
            return
        titles, songurls = zip(*title_mp3_urls)
        outfiles = [ os.path.join(outputdir, 'freshair.%s.%d.mp3' %
                                  ( decdate, num + 1) ) for
                     (num, mp3url) in enumerate( songurls ) ]
        if mp3_exist:
            assert(all([ os.path.isfile( outfile ) for outfile in outfiles ]) )
    else:
        title_mp4_urls = _process_freshair_titlemp4_tuples( tree )
        if len(title_mp4_urls) == 0:
            print 'Error, could not find any Fresh Air episodes for date %s.' % \
                npr_utils.get_datestring( date_s )
            return
        titles, songurls = zip(*title_mp4_urls)
        outfiles = [ os.path.join(outputdir, 'freshair.%s.%d.mp4' %
                                  ( decdate, num + 1) ) for
                     (num, mp4url) in enumerate( songurls ) ]
    title = date_s.strftime('%A, %B %d, %Y')
    title = '%s: %s.' % ( title,
                          '; '.join([ '%d) %s' % ( num + 1, titl ) for
                                      (num, titl) in enumerate(titles) ]) )
    # download those files in parallel, one pool worker per segment
    time0 = time.time()
    pool = multiprocessing.Pool(processes = len(songurls) )
    if not mp3_exist:
        outfiles = filter(None, pool.map(_download_file,
                                         zip( songurls, outfiles ) ) )
    if do_mp4:
        # replace mp4 with ac3 so avconv's concat protocol can join segments
        newouts = []
        for outfile in outfiles:
            newfile = re.sub('\.mp4$', '.ac3', outfile )
            split_cmd = [ avconv_exec, '-y', '-i', outfile, newfile ]
            proc = subprocess.Popen( split_cmd, stdout = subprocess.PIPE,
                                     stderr = subprocess.PIPE )
            stdout_val, stderr_val = proc.communicate( )
            os.remove( outfile )
            newouts.append( newfile )
        outfiles = newouts
    # sox magic command
    #wgdate = date_s.strftime('%d-%b-%Y')
    #wavfile = os.path.join(outputdir, 'freshair%s.wav' % wgdate ).replace(' ', '\ ')
    #split_cmd = [ '(for', 'file', 'in', ] + fnames + [
    #    ';', sox_exec, '$file', '-t', 'cdr', '-', ';', 'done)' ] + [
    #    '|', sox_exec, 't-', 'cdr', '-', wavfile ]
    #split_cmd = [ sox_exec, ] + fnames + [ wavfile, ]
    #print split_cmd
    #return
    #proc = subprocess.Popen(split_cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    #stdout_val, stderr_val = proc.communicate()
    #for filename in outfiles:
    #    os.remove(filename)
    # now convert to m4a file
    # /usr/bin/avconv -y -i freshair$wgdate.wav -ar 44100 -ac 2 -aq 400 -acodec libfaac NPR.FreshAir."$decdate".m4a ;
    time0 = time.time()
    fnames = [ filename.replace(' ', '\ ') for filename in outfiles ]
    avconv_concat_cmd = 'concat:%s' % '|'.join(fnames)
    split_cmd = [ avconv_exec, '-y', '-i', avconv_concat_cmd, '-ar', '44100',
                  '-ac', '2', '-threads', '%d' % multiprocessing.cpu_count(),
                  '-strict', 'experimental', '-acodec', 'aac', m4afile ]
    proc = subprocess.Popen(split_cmd, stdout = subprocess.PIPE,
                            stderr = subprocess.PIPE)
    stdout_val, stderr_val = proc.communicate()
    # remove wav file (actually the per-segment media files just joined)
    for filename in outfiles:
        os.remove(filename)
    # now put in metadata: iTunes-style MP4 tags
    mp4tags = mutagen.mp4.MP4(m4afile)
    mp4tags.tags['\xa9nam'] = [ title, ]
    mp4tags.tags['\xa9alb'] = [ 'Fresh Air From WHYY: %d' % year, ]
    mp4tags.tags['\xa9ART'] = [ 'Terry Gross', ]
    mp4tags.tags['\xa9day'] = [ '%d' % year, ]
    mp4tags.tags['\xa9cmt'] = [
        "more info at : Fresh Air from WHYY and NPR Web site", ]
    mp4tags.tags['trkn'] = [ ( order_in_year, tot_in_year ), ]
    mp4tags.tags['covr'] = [ mutagen.mp4.MP4Cover(
        file_data, mutagen.mp4.MP4Cover.FORMAT_PNG ), ]
    mp4tags.tags['\xa9gen'] = [ 'Podcast', ]
    mp4tags.tags['aART'] = [ 'Terry Gross', ]
    mp4tags.save()
    return m4afile
def get_title_wavfile_standard(date_s, outputdir, avconv_exec,
                               debugonly=False, npr_api_key=None,
                               verify=True, justFix=False):
    """
    Build the episode title and download the per-segment MP3 files for the
    Wait Wait... Don't Tell Me! episode that aired on ``date_s``, scraping
    the NPR query API response with BeautifulSoup.

    NOTE(review): this is the legacy Python-2 variant (tuple-parameter
    lambdas, list-returning filter/map) of the ``get_title_wavfile_standard``
    defined earlier in this file — the two definitions collide; confirm
    which one the project intends to keep.

    :param date_s: air date of the episode; must support ``strftime``.
    :param outputdir: directory into which the MP3 segment files are written.
    :param avconv_exec: path to the avconv executable (unused in this body;
        kept for interface compatibility).
    :param debugonly: if True, dump the prettified API response HTML to
        outputdir and return None.
    :param npr_api_key: optional NPR API key; looked up via npr_utils when
        not given.
    :param verify: whether to verify SSL certificates on the NPR requests.
    :param justFix: if True, skip downloading and only recompute the title
        and output-file names.
    :returns: (title, outfiles) tuple, or None in debugonly mode.
    :raises ValueError: if the NPR query endpoint does not return HTTP 200.
    """
    if npr_api_key is None: npr_api_key = npr_utils.get_api_key()
    # download this data into an lxml elementtree
    decdate = npr_utils.get_decdate(date_s)
    response = requests.get('https://api.npr.org/query', verify=verify,
                            params={
                                'date': date_s.strftime('%Y-%m-%d'),
                                'output': 'NPRML',
                                'apiKey': npr_api_key,
                                'dataType': 'story',
                                'id': _npr_waitwait_progid
                            })
    if response.status_code != 200:
        raise ValueError(
            "Error, could not get wait wait episode on %s. Error code is %d."
            % (date_s.strftime('%B %d, %Y'), response.status_code))
    # the NPRML payload is parsed as markup rather than strict XML
    html = BeautifulSoup(response.content, 'lxml')
    if debugonly:
        openfile = os.path.join(outputdir, 'NPR.WaitWait.%s.html' % decdate)
        with open(openfile, 'w') as outfile:
            outfile.write('%s\n' % html.prettify())
        return None

    def _get_title(title_URL):
        # fetch the story page and derive a titlecased segment title from
        # its <title> element (text before the first ':'); None on failure
        r2 = requests.get(title_URL)
        if r2.status_code != 200:
            return None
        h2 = BeautifulSoup(r2.content, 'lxml')
        title = titlecase.titlecase(
            max(h2.find_all('title')).text.split(':')[0].strip())
        return title

    # now get tuple of title to mp3 file
    title_mp3_urls = []
    for elem in filter(lambda elem: len(elem.find_all('mp3')) == 1,
                       html.find_all('story')):
        # first http: line in the story text is the story's own URL
        all_texts = filter(
            lambda line: len(line.strip()) != 0 and line.strip().startswith(
                'http:'), elem.text.split('\n'))
        title_URL = all_texts[0].strip()
        title = _get_title(title_URL)
        if title is None:
            continue
        m3uurl = max(
            filter(lambda elm: 'type' in elm.attrs and elm['type'] == 'm3u',
                   elem.find_all('mp3'))).get_text().strip()
        try:
            # the m3u playlist body is the direct mp3 URL; its "_NN.mp3"
            # suffix encodes the broadcast order of the segment
            mp3url = requests.get(m3uurl).content.strip()
            order = int(mp3url.split('_')[-1].replace('.mp3', ''))
            title_mp3_urls.append((title, mp3url, order))
        except Exception:
            # best-effort: skip segments whose m3u cannot be resolved
            pass
    titles, mp3urls, orders = zip(
        *sorted(title_mp3_urls, key=lambda (title, mp3url, order): order))
    titles = list(titles)
    title = date_s.strftime('%B %d, %Y')
    # locate the programEpisode <parent> element whose text names the
    # "Not My Job" guest for this episode
    title_elem_nmj = max(
        filter(
            lambda elem: len(elem.find_all('title')) == 1 and 'type' in
            elem.attrs and elem.attrs['type'] == 'programEpisode',
            html.find_all('parent')))
    title_text = filter(lambda line: len(line.strip()) != 0,
                        title_elem_nmj.text.split('\n'))[0]
    guest = re.sub('.*Guest', '', title_text).strip()
    # rename the generic "Not My Job" segment title to include the guest
    title_guest_elems = filter(lambda (idx, titl): titl == 'Not My Job',
                               enumerate(titles))
    if len(title_guest_elems) != 0:
        idx_title_guest = max(title_guest_elems)[0]
        titles[idx_title_guest] = 'Not My Job: %s' % guest
    title = '%s: %s.' % (title, '; '.join(
        map(lambda (num, titl): '%d) %s' % (num + 1, titl),
            enumerate(titles))))
    outfiles = map(
        lambda (num, mp3url): os.path.join(
            outputdir, 'waitwait.%s.%d.mp3' % (decdate, num + 1)),
        enumerate(mp3urls))
    if not justFix:
        # download those files in parallel, one pool worker per segment;
        # the verify flag is fanned out to each worker
        time0 = time.time()
        pool = multiprocessing.Pool(processes=len(mp3urls))
        pool.map(_download_file,
                 zip(mp3urls, outfiles,
                     len(mp3urls) * [verify]))
        logging.debug('downloaded %d mp3 files in %0.3f seconds.' %
                      (len(mp3urls), time.time() - time0))
    # sox magic command
    # time0 = time.time()
    #wgdate = date_s.strftime('%d-%b-%Y')
    #wavfile = os.path.join(outputdir, 'waitwait%s.wav' % wgdate ).replace(' ', '\ ')
    #fnames = [ filename.replace(' ', '\ ') for filename in outfiles ]
    #split_cmd = [ '(for', 'file', 'in', ] + fnames + [
    #    ';', sox_exec, '$file', '-t', 'cdr', '-', ';', 'done)' ] + [
    #    '|', sox_exec, 't-', 'cdr', '-', wavfile ]
    # split_cmd = [ sox_exec, ] + fnames + [ wavfile, ]
    #sox_string_cmd = 'concat:%s' % '|'.join( fnames )
    #split_cmd = [ avconv_exec, '-y', '-i', sox_string_cmd, '-ar', '44100', '-ac', '2', '-threads',
    #    '%d' % multiprocessing.cpu_count(), wavfile ]
    #proc = subprocess.Popen(split_cmd, stdout = subprocess.PIPE,
    #    stderr = subprocess.PIPE)
    #stdout_val, stderr_val = proc.communicate()
    #for filename in outfiles:
    #    os.remove(filename)
    return title, outfiles