Example #1
0
class YoutubeAPI(AliceSkill):
    """
    Author: Psychokiller1888
    Description: Access and manage your youtube account
    """

    def __init__(self):
        super().__init__()
        # The API client is created in onStart once config is readable; here
        # we only declare the attribute. (The previous try/except ValueError
        # around this assignment was dead code: a plain attribute assignment
        # cannot raise ValueError.)
        self._youtube: Optional[YouTubeDataAPI] = None

    def onStart(self):
        """Build the YouTube client and abort skill start on an invalid key."""
        super().onStart()
        self._youtube = YouTubeDataAPI(self.getConfig('youtubeApiKey'))
        if not self._youtube.verify_key():
            raise SkillStartingFailed('Youtube api key not valid')

    @IntentHandler('GetYoutubeChannelStats')
    def getChannelStats(self, session: DialogSession, **_kwargs):
        """Answer the dialog session with channel stats, or an error talk."""
        # NOTE(review): queried with an empty channel id — presumably the
        # channel to look up still needs to be wired in; confirm intent.
        metadata = self._youtube.get_channel_metadata('')

        if not metadata:
            self.endDialog(sessionId=session.sessionId,
                           text=self.randomTalk(text='error'))
            return

        # NOTE(review): `sub` is not defined in this scope — TODO confirm
        # which replacement value was intended here.
        self.endDialog(sessionId=session.sessionId,
                       text=self.randomTalk(text='myText', replace=[sub]))
Example #2
0
 def run(self):
     """Fetch channel metadata for ``self.user`` and write one sheet row.

     Columns 2..5 hold: title, subscriber count, view count, description.
     Counts the channel hides (value '0') are rendered as 'HIDDEN' in grey.
     """
     # Please input your youtube-data-v3 api key!
     api_key = ''
     yt = YouTubeDataAPI(api_key)
     # Renamed from `dir`, which shadowed the builtin.
     meta = yt.get_channel_metadata(self.user)
     # Channel name, subscribers, views, description (newlines flattened)
     channel = [meta['title'], meta['subscription_count'], meta['view_count'],
                meta['description'].replace('\n', ' ')]
     row = self.index + 2
     for offset, value in enumerate(channel):
         col = offset + 2
         # Thousands separator for the numeric columns
         if value.isdigit():
             value = format(int(value), ',')
             channel[offset] = value
         cell = self.sheet.cell(row=row, column=col)
         cell.value = value
         if col == 3:
             # Subscriber count column is highlighted in red
             cell.font = Font(color='FF0000')
     # A raw '0' means the channel hides the real figure
     for offset, value in enumerate(channel):
         if value == '0':
             cell = self.sheet.cell(row=row, column=offset + 2)
             cell.value = 'HIDDEN'
             cell.font = Font(color='808080')
            # NOTE(review): orphan fragment — the enclosing cache-loading loop
            # (defining `line` and `meta_obj`) is not visible here; it maps a
            # channel id to its cached raw JSON line.
            channel_meta_map[meta_obj["id"]] = line.strip()

# For each channel, resolve its "uploads" playlist id, serving cached
# metadata when available and fetching (then caching) it otherwise.
upload_playlists = []
with open(channel_metadata_cache_file, "w") as cache_out:
    for channel_id in channel_ids:
        print("Getting Channel Metadata:", channel_id)

        cached = channel_meta_map.get(channel_id)
        if cached is not None:
            # Cache hit: the map stores the raw JSON line for this channel.
            meta_obj = json.loads(cached)
        else:
            # Cache miss: fetch the raw (unparsed) metadata from the API and
            # stamp it with the collection time.
            meta_obj = yt.get_channel_metadata(channel_id, parser=lambda x: x)
            if len(meta_obj) == 0:
                print("Error: %s returns empty set" % channel_id)
                continue
            meta_obj["minerva_collected"] = time.time()

        # Persist to the cache file and remember the uploads playlist id
        # for the collection pass that follows.
        cache_out.write("%s\n" % json.dumps(meta_obj))
        uploads = meta_obj["contentDetails"]["relatedPlaylists"]["uploads"]
        upload_playlists.append((channel_id, uploads))

# Channel data is stored in one directory per channel.
if not os.path.exists("channels"):
    os.mkdir("channels")
Example #4
0
#GET HANDLES TO QUERY
# SECURITY(review): database credentials are hard-coded in the connection
# string — move them into configuration / environment variables.
db_connection_str = 'mysql+pymysql://app:xxx^%gv$$@localhost/sotrics'
db_connection = create_engine(db_connection_str)

#SELECT DISTINCT INCASE
# DISTINCT guards against duplicate handles stored in yt_users.
df = pd.read_sql('SELECT distinct handle FROM yt_users', con=db_connection)

# Plain Python list of channel handles, iterated below.
li = df['handle'].tolist()
# Collect channel metadata and uploaded videos for every handle.
# NOTE(review): the `except` clause matching the `try` below lies outside
# the visible portion of the file — this block is incomplete from here.
for handle in li:
    try:
        #configure API credentials

        # Extract channel metadata — the API also accepts a list of handles
        # for batch extraction.
        channel_meta = yt.get_channel_metadata(str(handle))
        channel = pd.DataFrame(channel_meta, index=[0])

        # From that channel, take the playlist_id_uploads and extract all
        # of its videos.
        channelID = channel['playlist_id_uploads'].values[0]
        all_videos = yt.get_videos_from_playlist_id(
            channelID)  #UUvlJkDfgfG3J38pup6lvrPg
        videos_list = pd.DataFrame(all_videos)

        # NOTE(review): redefined on every loop iteration — hoist above the
        # loop once the full block is in view.
        def convert_time(row):
            # Format an epoch `publish_date` as 'YYYY-MM-DD HH:MM:SS'
            # (local time).
            new_time = time.strftime('%Y-%m-%d %H:%M:%S',
                                     time.localtime(row['publish_date']))
            return new_time

        # NOTE(review): reset_index() is not in-place and its result is
        # discarded — confirm whether reassignment was intended.
        videos_list.reset_index()
# Search for the trailer, sample its comments, and prepare a sentiment table.
yt = YouTubeDataAPI(api_key)

# NOTE(review): "Headgehog" looks like a typo for "Hedgehog" — left as-is
# because changing the query string changes the search results; confirm.
sonic_search = yt.search(q="Sonic The Headgehog", max_results=5, parser=None)
# (Removed a stray bare name `spy` here — it was a guaranteed NameError.)
df_sonic = pd.DataFrame(sonic_search)
df_sonic.head(5)  # notebook leftover: result is discarded

# The first search hit is assumed to be the trailer.
trailer = df_sonic.iloc[0]
trailer.video_id  # notebook leftover: result is discarded

# Pull a small sample of comments on the trailer.
comments = yt.get_video_comments(trailer.video_id, max_results=10)
df_comments = pd.DataFrame(comments)

# Accumulator for one sentiment row per comment.
df_graph_data = pd.DataFrame(columns=[
    'comment_id', 'commenter_channel_id', 'channel_country', 'text', 'date',
    'neg', 'neu', 'pos', 'compound'
])

channel_id = df_comments.iloc[0].commenter_channel_id
channel_data = yt.get_channel_metadata(channel_id)

# for index, row in df_comments.iterrows():
#     channel_id = df_comments.iloc[0].commenter_channel_id
#     channel_data = yt.get_channel_metadata(channel_id)

#     print

#     score = analyser.polarity_scores(row['text'])
#     graph_row = {'comment_id': row['comment_id'], 'commenter_channel_id': row['commenter_channel_id'], 'channel_country' : channel_data['country'], 'text' : row['text'], 'date': row['collection_date'], 'neg': score['neg'], 'neu': score['neu'], 'pos': score['pos'], 'compound': score['compound']}
#     df_graph_data = df_graph_data.append(graph_row, ignore_index=True)