    def retry_queue(self):

        self.logger.info('Retrying scrobble cache.')
        a = True
        lastfm = LastFm(self.config)

        for key in list(self.cache):  # iterate over a snapshot of keys so entries can be removed safely
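            # cache entry layout (inferred from the indices used below): [artist, track, retry_count, album]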
            # do submissions retry
            try:
                a = lastfm.scrobble(self.cache[key][0], self.cache[key][1],
                        self.cache[key][3])
                self.cache[key][2] += 1
            except Exception:
                # treat an unexpected error as a failed submission
                a = False

            # if it was a failed submission
            if not a:
                # remove this record from retry cache, if we're at the retry limit
                if self.cache[key][2] >= MAX_CACHE_AGE:
                    self.logger.info(u'MAX_CACHE_AGE for {key} : {artist} - {track}'.format(
                        key=key, artist=self.cache[key][0], track=self.cache[key][1]))
                    self.remove(key)
            else:
                # successful send to last.fm, remove from cache
                self.remove(key)
Example #3
def main_loop():
    last_fm = LastFm(keys.last_fm_username, keys.last_fm_password, keys.last_fm_api_key, keys.last_fm_shared_secret)
    vk = Vk(keys.vk_access_token)
    default_status = None
    old_track_status = ""
    while True:
        track = last_fm.get_last_fm_now_playing(keys.last_fm_username)
        if track:
            print('Now playing track:{}'.format(track.name))
            new_track_status = 'Слушает {} - {} через Яндекс Музыку'.format(track.artist, track.name)  # "Listening to {} - {} on Yandex Music"
            if default_status is None:
                default_status = vk.get_status()
                print('Default status is none, setting default: {}'.format(default_status))
                vk.set_status(new_track_status)
                print('Setting new status on vk: {}'.format(new_track_status))
                old_track_status = new_track_status
                print('Setting old status: {}'.format(old_track_status))
            else:
                print('Default status: {}'.format(default_status))
                if old_track_status != new_track_status:
                    print('Old track status differs from new, setting to: {}'.format(new_track_status))
                    old_track_status = new_track_status
                    vk.set_status(new_track_status)
                else:
                    print('Old track status and new are the same: {}'.format(new_track_status))
        else:
            print('No now playing track')
            if default_status:
                print('Default status exist, setting it to vk: {}'.format(default_status))
                vk.set_status(default_status)
                default_status = None
            else:
                print('Default status is None')
        time.sleep(10)
Example #4
def lastfm_reg():
    token = request.args.get('token')
    if token in ('', None):
        flash('Missing LastFM auth token')
        return redirect(url_for('user_profile'))

    lfm = LastFm(UserManager.get(session.get('userid'))[1], app.logger)
    status, error = lfm.link_account(token)
    flash(error if not status else 'Successfully linked LastFM account')

    return redirect(url_for('user_profile'))
Example #7
def df_purge_no_tag(merged_df: pd.DataFrame, lf: lastfm.LastFm):
    ''' Remove tracks which are not matched to any tag. '''

    tids_with_tag = lf.get_tids()
    tids_with_tag_df = pd.DataFrame(data={'track_id': tids_with_tag})

    return pd.merge(merged_df, tids_with_tag_df, on='track_id', how='inner')
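
A minimal usage sketch for the helper above; the DataFrame contents and the database path are placeholders, not part of the original project:
merged_df = pd.DataFrame({'track_id': ['TRAAAAW128F429D538'], 'npz_path': ['A/A/A/TRAAAAW128F429D538.npz']})
lf = lastfm.LastFm('/srv/lastfm_tags.db')  # placeholder path to the tags database
clean_df = df_purge_no_tag(merged_df, lf)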
Example #8
    def clean_database():
        # check if different .txt path has been provided
        if args.supp_txt_path:
            lastfm_utils.set_txt_path(args.supp_txt_path)

        # check if user provided a .csv folder or a .db file (if using .csv, load .csv into LastFm2Pandas; otherwise, load .db into LastFm)
        if os.path.isdir(args.input):
            try:
                tags = pd.read_csv(os.path.join(args.input, 'lastfm_tags.csv'))
            except FileNotFoundError:
                raise FileNotFoundError('Please make sure {} contains a file "lastfm_tags.csv".'.format(args.input))
            try:
                tids = pd.read_csv(os.path.join(args.input, 'lastfm_tids.csv'))
            except FileNotFoundError:
                raise FileNotFoundError('Please make sure {} contains a file "lastfm_tids.csv".'.format(args.input))
            try:
                tid_tag = pd.read_csv(os.path.join(args.input, 'lastfm_tid_tag.csv'))
            except FileNotFoundError:
                raise FileNotFoundError('Please make sure {} contains a file "lastfm_tid_tag.csv".'.format(args.input))
            lastfm = LastFm2Pandas.load_from(tags=tags, tids=tids, tid_tag=tid_tag)
        else:
            lastfm = LastFm(args.input)

        df = lastfm_utils.generate_final_df(lastfm)
        df.reset_index(drop=True, inplace=True) # sanity check
        df.index += 1

        assert all(df.columns == ['tag', 'merge_tags']) # sanity check

        # generate tables which will go into output database
        tags = df['tag'].str.lower()
        print('Matching all tags to the "clean" few ones...', end=' ', flush=True)
        tag_tag = create_tag_tag_table(lastfm, df)
        print('done')
        print('Matching all tids to tags...', end=' ', flush=True)
        tid_tag = create_tid_tag_table(lastfm, tag_tag, args.val)
        print('done')
        print('Purging tids...', end=' ', flush=True)
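        # keep the tids that received at least one tag, map their tid_nums back to tids,
        # and reindex over the full tid_num range so purged tracks show up as NaN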
        tids = tid_tag['tid'].drop_duplicates()
        tids.index = tids.values
        tids = tids.map(lastfm.tid_num_to_tid).reindex(pd.RangeIndex(1, len(lastfm.get_tid_nums())+1))
        print('done')

        return LastFm2Pandas.load_from(tags=tags, tids=tids, tid_tag=tid_tag) # wrap into LastFm2Pandas class
Example #9
def scrobble():
	status, res = get_entity(request, Track)
	if not status:
		return res

	t, submission = map(request.args.get, [ 'time', 'submission' ])

	if t:
		try:
			t = int(t) / 1000
		except ValueError:
			return request.error_formatter(0, 'Invalid time value')
	else:
		t = int(time.time())

	lfm = LastFm(request.user, app.logger)

	if submission in (None, '', True, 'true', 'True', 1, '1'):
		lfm.scrobble(res, t)
	else:
		lfm.now_playing(res)

	return request.formatter({})
Example #10
	def __parse(self):
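		# for each linked account row, "love" every Grooveshark favourite on Last.fm, then record the sync time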
		inst = _mysql.connect(host=self.db['host'],
							  db=self.db['database'],
							  user=self.db['user'],
							  passwd=self.db['password'])
		inst.query("SELECT * FROM `%s` LIMIT 0, 1000" % (self.TABLE))
		r = inst.store_result()
		for row in r.fetch_row(maxrows=0, how=1):
			gu = GrooveUser(row['grooveshark'], row['sync'])
			lu = LastFm(row['lastfm'], row['session'], self.api)
			for track in gu.tracks:
				lu.luv_track(track['artist'], track['title'])
			del gu, lu

			# update last sync time
			last_sync = time.strftime("%a, %d %b %Y %H:%M:%S +0000",
									  time.gmtime())
			update_query = ("UPDATE `%s` SET sync='%s' "
							"WHERE lastfm='%s' AND grooveshark='%s'" %
							(self.TABLE, last_sync,
							 row['lastfm'], row['grooveshark']))
			inst.query(update_query)
Example #12
def lastfm_unreg():
    lfm = LastFm(UserManager.get(session.get('userid'))[1], app.logger)
    lfm.unlink_account()
    flash('Unlinked LastFM account')
    return redirect(url_for('user_profile'))
Example #14
def save_example_to_tfrecord(df,
                             output_path,
                             audio_format,
                             root_dir,
                             tag_path,
                             sample_rate=16000,
                             num_mels=96,
                             multitag=False,
                             verbose=False):
    ''' Creates and saves a TFRecord file.

    Parameters
    ----------
    df: DataFrame
        A pandas DataFrame containing a "track_id" column and either an "mp3_path" or an "npz_path" column.

    output_path: str
        The path or filename to save TFRecord file as.
        If not a path, the current folder will be used with output_path as filename.

    audio_format: {'waveform', 'log-mel-spectrogram'}
        If 'log-mel-spectrogram', audio will be converted to that format; otherwise, it will default to raw waveform.

    root_dir: str
        The root directory to where the .npz files (or the .mp3 files) are stored.

    tag_path: str
        The path to the lastfm_clean.db database (or, when multitag is a list, the directory containing the databases).

    sample_rate: int
        The sample rate to use when serializing the audio.

    num_mels: int
        The number of mels in the mel-spectrogram.
    
    multitag: bool or list
        If a list of tags database filenames is provided, encode tags from all of them at once (feature names will be 'tags-0', 'tags-1', etc.); defaults to False.

    verbose: bool
        If True, print progress.
    '''

    with tf.io.TFRecordWriter(output_path) as writer:
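        # n_tags is the length of the encoded tag vector; when multiple databases are passed, they must all agree on it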
        if not multitag:
            fm = LastFm(tag_path)
            n_tags = len(fm.get_tag_nums())
        else:
            fm = [LastFm(os.path.join(tag_path, path)) for path in multitag]
            n_tags = [len(fm_.get_tag_nums()) for fm_ in fm]
            assert all(
                x == n_tags[0] for x in
                n_tags), 'all databases need to have the same number of tags'
            n_tags = n_tags[0]  # cast back to int

        # initialize
        exceptions = []

        df.reset_index(drop=True, inplace=True)

        if verbose:
            progbar = Progbar(
                len(df))  # create an instance of the progress bar

        for i, cols in df.iterrows():
            if verbose:
                progbar.add(1)  # update progress bar

            # unpack cols
            tid, path = cols

            # encode tags
            if not multitag:
                encoded_tags = get_encoded_tags(fm, tid, n_tags)
            else:
                encoded_tags = np.array(
                    [get_encoded_tags(fm_, tid, n_tags) for fm_ in fm]
                )  # convert to ndarray to ensure consistency with one-dimensional case

            # skip tracks which don't have any "clean" tags
            if encoded_tags.size == 0:
                if verbose:
                    print("{} has no tags. Skipping...".format(tid))
                continue

            path = os.path.join(root_dir, path)

            if set(df.columns) == {'track_id', 'npz_path'}:
                # get the unsampled array from the .npz file
                unsampled_audio = np.load(path)
            else:
                # get the unsampled array from the original .mp3 file
                try:
                    array, sr = librosa.core.load(path, sr=None)
                except:
                    exceptions.append({
                        'path': path,
                        'tid': tid,
                        'encoded_tags': encoded_tags
                    })
                    continue
                unsampled_audio = {'array': array, 'sr': sr}

            # resample audio array into 'sample_rate' and convert into 'audio_format'
            processed_array = process_array(unsampled_audio['array'],
                                            audio_format,
                                            sr_in=unsampled_audio['sr'],
                                            sr_out=sample_rate,
                                            num_mels=num_mels)

            # load the tf.Example
            example = get_example(processed_array, tid, encoded_tags)

            # save the tf.Example into a .tfrecord file
            writer.write(example.SerializeToString())

        # print exceptions
        if set(df.columns) == {'track_id', 'npz_path'}:
            return
        else:
            if exceptions:
                print('Could not process the following tracks:')
                for i, exception in enumerate(exceptions):
                    print(" {:3d}. {} {}".format(
                        i, exception["tid"] + exception["path"]))
            return
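
An illustrative call to the function above; the DataFrame, directories and database path are hypothetical:
tracks = pd.DataFrame({'track_id': ['TRAAAAW128F429D538'],
                       'mp3_path': ['A/A/A/TRAAAAW128F429D538.mp3']})
save_example_to_tfrecord(tracks, 'waveform.tfrecord', audio_format='waveform',
                         root_dir='/srv/mp3', tag_path='/srv/lastfm_clean.db', verbose=True)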
Example #15
def parse_config_json(config_path, lastfm):
    ''' Parse a JSON configuration file into a handy Namespace.

    Parameters
    -----------
    config_path: str
        The path to the .json file, or the directory where it is saved.

    lastfm: str, LastFm, LastFm2Pandas
        Instance of the tags database. If a string is passed, try to instantiate the tags database from the (string as a) path.
        
    Returns
    -------
    config: argparse.Namespace
    '''

    if not isinstance(lastfm, (LastFm, LastFm2Pandas)):
        lastfm = LastFm(os.path.expanduser(lastfm))

    # if config_path is a folder, assume the folder contains a config.json
    if os.path.isdir(os.path.expanduser(config_path)):
        config_path = os.path.join(os.path.abspath(os.path.expanduser(config_path)), 'config.json')
    else:
        config_path = os.path.expanduser(config_path)

    # load json
    with open(config_path, 'r') as f:
        config_dict = json.loads(f.read())

    # create config namespace
    config = argparse.Namespace(**config_dict['model'], **config_dict['model-training'], **config_dict['tfrecords'])
    config.path = os.path.abspath(config_path)

    # update config (optimizer will be instantiated with tf.get_optimizer using {"class_name": config.optimizer_name, "config": config.optimizer})
    config.optimizer_name = config.optimizer.pop('name')

    # read tags from popularity dataframe
    top = config_dict['tags']['top']
    if (top is not None) and (top != config.n_tags):
        top_tags = lastfm.popularity()['tag'][:top].tolist()
        tags = set(top_tags)
    else:
        tags = None

    # update tags according to 'with' (to be added) and 'without' (to be discarded)
    if tags is not None:
        if config_dict['tags']['with']:
            tags.update(config_dict['tags']['with'])
        
        if config_dict['tags']['without']:
            tags.difference_update(config_dict['tags']['without'])

        tags = list(tags)
    else:
        raise ValueError("parameter 'with' is inconsistent to parameter 'top'")

    # write final tags
    config.tags = np.sort(lastfm.tag_to_tag_num(tags)) if tags is not None else None # sorting is necessary to avoid unexpected behaviour

    # write final tags to merge together
    config.tags_to_merge = lastfm.tag_to_tag_num(config_dict['tags']['merge']) if config_dict['tags']['merge'] is not None else None

    # count number of classes
    config.n_output_neurons = len(tags) if tags is not None else config.n_tags
    
    return config
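
A short usage sketch; the paths below are placeholders, and the second argument may equally be a LastFm or LastFm2Pandas instance:
config = parse_config_json('~/config.json', '~/lastfm_clean.db')
print(config.n_output_neurons, config.optimizer_name)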
Example #16
def monitor_log(config):

    logger = logging.getLogger(__name__)
    st_mtime = False
    last_played = None

    try:
        f = open(config.get('plex-scrobble', 'mediaserver_log_location'))
    except IOError:
        logger.error('Unable to read log-file {0}. Shutting down.'.format(
            config.get('plex-scrobble', 'mediaserver_log_location')))
        return
    f.seek(0, 2)
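    # start tailing from the end of the log so that only newly written lines are processed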

    while True:

        time.sleep(.03)

        # reset our file handle in the event the log file was not written to
        # within the last 60 seconds. This is a very crude attempt to support
        # the log file i/o rotation detection cross-platform.
        if int(time.time()) - int(os.fstat(f.fileno()).st_mtime) >= 60:

            if int(os.fstat(f.fileno()).st_mtime) == st_mtime: continue

            logger.debug(
                'Possible log file rotation, resetting file handle (st_mtime={mtime})'
                .format(mtime=time.ctime(os.fstat(f.fileno()).st_mtime)))
            f.close()

            try:
                f = open(
                    config.get('plex-scrobble', 'mediaserver_log_location'))
            except IOError:
                logger.error(
                    'Unable to read log-file {0}. Shutting down.'.format(
                        config.get('plex-scrobble',
                                   'mediaserver_log_location')))
                return

            f.seek(0, 2)
            st_mtime = int(os.fstat(f.fileno()).st_mtime)

        line = f.readline()

        # read all new lines starting at the end. We attempt to match
        # based on a regex value. If we have a match, extract the media file
        # id and send it off to last.fm for scrobble.
        if line:
            played = parse_line(line)

            if not played: continue

            # when playing via a client, log lines are duplicated (seen via iOS)
            # this skips dupes. Note: will also miss songs that have been repeated
            if played == last_played:
                logger.warn(
                    'Dupe detection : {0}, not submitting'.format(last_played))
                continue

            metadata = fetch_metadata(played, config)

            if not metadata: continue

            # submit to last.fm
            lastfm = LastFm(config)
            a = lastfm.scrobble(metadata['artist'], metadata['track'],
                                metadata['album'])

            # scrobble was not successful, add to our retry queue
            if not a:
                cache = ScrobbleCache(config)
                cache.add(metadata['artist'], metadata['track'],
                          metadata['album'])
                cache.close()

            last_played = played
Example #17
def monitor_log(config):

    logger = logging.getLogger(__name__)
    st_mtime = False
    last_played = None

    try:
        f = open(config.get('plex-scrobble', 'mediaserver_log_location'))
    except IOError:
        logger.error('Unable to read log-file {0}. Shutting down.'.format(config.get(
          'plex-scrobble', 'mediaserver_log_location')))
        return
    f.seek(0, 2)

    account_name = config.get('plex-scrobble', 'account_name')

    while True:

        time.sleep(.03)

        # reset our file handle in the event the log file was not written to
        # within the last 60 seconds. This is a very crude attempt to support
        # the log file i/o rotation detection cross-platform.
        if int(time.time()) - int(os.fstat(f.fileno()).st_mtime) >= 60:

            if int(os.fstat(f.fileno()).st_mtime) == st_mtime: continue

            logger.debug('Possible log file rotation, resetting file handle (st_mtime={mtime})'.format(
                mtime=time.ctime(os.fstat(f.fileno()).st_mtime) ))
            f.close()

            try:
                f = open(config.get('plex-scrobble', 'mediaserver_log_location'))
            except IOError:
                logger.error('Unable to read log-file {0}. Shutting down.'.format(config.get(
                  'plex-scrobble', 'mediaserver_log_location')))
                return

            f.seek(0, 2)
            st_mtime = int(os.fstat(f.fileno()).st_mtime)

        line = f.readline()

        # read all new lines starting at the end. We attempt to match
        # based on a regex value. If we have a match, extract the media file
        # id and send it off to last.fm for scrobble.
        if line:
            played = parse_line(line, account_name)

            if not played: continue

            # when playing via a client, log lines are duplicated (seen via iOS)
            # this skips dupes. Note: will also miss songs that have been repeated
            if played == last_played:
                logger.warn('Dupe detection : {0}, not submitting'.format(last_played))
                continue

            metadata = fetch_metadata(played, config)

            if not metadata: continue

            # submit to last.fm
            lastfm = LastFm(config)
            a = lastfm.scrobble(metadata['artist'], metadata['track'],
                    metadata['album'])

            # scrobble was not successful, add to our retry queue
            if not a:
                cache = ScrobbleCache(config)
                cache.add(metadata['artist'], metadata['track'], metadata['album'])
                cache.close()

            last_played = played
Example #18
    def lastfm(self):
        if self._lastfm is None:
            self._lastfm = LastFm()

        return self._lastfm