def save_albums(self, artist_id):
    """Fetch the album list for one artist from NetEase Music and persist it.

    Requests ``http://music.163.com/artist/album`` (limit 200), scrapes every
    album link (``<a class="tit f-thide s-fc0">``) from the artist's page and
    stores each album id together with *artist_id* via ``sql.insert_album``.

    :param artist_id: NetEase artist id — used as the request query param and
        stored as the foreign key for each inserted album.
    :raises requests.HTTPError: if the album page responds with an error status.
    """
    params = {'id': artist_id, 'limit': '200'}
    # Fetch the artist's album listing page.
    r = requests.get('http://music.163.com/artist/album',
                     headers=self.headers, params=params)
    # Fail loudly on HTTP errors instead of crashing later on a half-parsed page.
    r.raise_for_status()
    # Parse the page; content.decode() kept as-is (charset handling unchanged).
    soup = BeautifulSoup(r.content.decode(), 'html.parser')
    body = soup.body
    if body is None:
        # Empty or blocked response carries no <body>: nothing to insert.
        return
    # Every album link on the page carries this CSS class; the album id is the
    # href with the '/album?id=' prefix stripped.
    for album in body.find_all('a', attrs={'class': 'tit f-thide s-fc0'}):
        album_id = album['href'].replace('/album?id=', '')
        sql.insert_album(album_id, artist_id)


if __name__ == '__main__':
    artists = sql.get_all_artist()
    my_album = Album()
    for i in artists:
        try:
            my_album.save_albums(i['ARTIST_ID'])
            # print(i)
        except Exception as e:
            # Log and continue: one bad artist must not stop the whole crawl.
            print(str(i) + ': ' + str(e))
        # NOTE(review): reconstructed from a collapsed line — the sleep is
        # assumed to be per-artist rate limiting at loop level, not inside the
        # except branch; confirm against the original source.
        time.sleep(5)
# NOTE(review): this span is the tail of a class-level `headers` dict whose
# opening lines (class header, `headers = {`) are outside this chunk — left
# token-identical, only reformatted and commented.
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
}

def save_albums(self, artist_id):
    """Fetch the album list for one artist from NetEase Music and persist it.

    Requests ``http://music.163.com/artist/album`` (limit 200), scrapes every
    album link (``<a class="tit f-thide s-fc0">``) and stores each album id
    together with *artist_id* via ``sql.insert_album``.
    """
    params = {'id': artist_id, 'limit': '200'}
    # Fetch the artist's album listing page.
    r = requests.get('http://music.163.com/artist/album', headers=self.headers, params=params)
    # Parse the returned HTML.
    soup = BeautifulSoup(r.content.decode(), 'html.parser')
    body = soup.body
    # Collect all album links on the page (each carries this CSS class).
    albums = body.find_all('a', attrs={'class': 'tit f-thide s-fc0'})
    for album in albums:
        # Album id is the href with the '/album?id=' prefix stripped.
        albume_id = album['href'].replace('/album?id=', '')
        sql.insert_album(albume_id, artist_id)

if __name__ == '__main__':
    artists = sql.get_all_artist()
    my_album = Album()
    for i in artists:
        try:
            my_album.save_albums(i['ARTIST_ID'])
            # print(i)
        except Exception as e:
            # Log the error and keep crawling the remaining artists.
            print(str(i) + ': ' + str(e))
        # NOTE(review): indentation reconstructed from a collapsed line — the
        # sleep is assumed to be per-artist rate limiting at loop level;
        # confirm against the original source.
        time.sleep(5)